hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cnaur...@apache.org
Subject svn commit: r1548386 - in /hadoop/common/branches/HDFS-4685/hadoop-common-project: hadoop-common/ hadoop-common/src/main/docs/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/fs/ hadoop-common/src/main/java/org/apache/hadoop/...
Date Fri, 06 Dec 2013 06:57:18 GMT
Author: cnauroth
Date: Fri Dec  6 06:57:15 2013
New Revision: 1548386

URL: http://svn.apache.org/r1548386
Log:
Merge trunk to HDFS-4685.

Added:
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java
      - copied unchanged from r1548385, hadoop/common/trunk/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java
Modified:
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1548386&r1=1548385&r2=1548386&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt Fri Dec  6 06:57:15 2013
@@ -390,6 +390,11 @@ Release 2.4.0 - UNRELEASED
 
     HADOOP-10126. LightWeightGSet log message is confusing. (Vinay via suresh)
 
+    HADOOP-10127. Add ipc.client.connect.retry.interval to control the frequency
+    of connection retries (Karthik Kambatla via Sandy Ryza)
+
+    HADOOP-10102. Update commons IO from 2.1 to 2.4 (Akira Ajisaka via stevel)
+
   OPTIMIZATIONS
 
     HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
@@ -455,6 +460,11 @@ Release 2.4.0 - UNRELEASED
     HADOOP-10135 writes to swift fs over partition size leave temp files and
     empty output file (David Dobbins via stevel)
 
+    HADOOP-10129. Distcp may succeed when it fails (daryn)
+
+    HADOOP-10058. TestMetricsSystemImpl#testInitFirstVerifyStopInvokedImmediately
+    fails on trunk (Chen He via jeagles)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -519,6 +529,12 @@ Release 2.3.0 - UNRELEASED
     HADOOP-10130. RawLocalFS::LocalFSFileInputStream.pread does not track
     FS::Statistics (Binglin Chang via Colin Patrick McCabe)
 
+    HDFS-5560. Trash configuration log statements prints incorrect units.
+    (Josh Elser via Andrew Wang)
+
+    HADOOP-10081. Client.setupIOStreams can leak socket resources on exception
+    or error (Tsuyoshi OZAWA via jlowe)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES
@@ -2326,6 +2342,20 @@ Release 2.0.0-alpha - 05-23-2012
     HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required
     by Jersey (Alejandro Abdelnur via atm)
 
+Release 0.23.11 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+    
+  IMPROVEMENTS
+    
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+    HADOOP-10129. Distcp may succeed when it fails (daryn)
+
 Release 0.23.10 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt:r1547755
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1547224-1548385

Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1547224-1548385

Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1547224-1548385

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java?rev=1548386&r1=1548385&r2=1548386&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java Fri Dec  6 06:57:15 2013
@@ -192,6 +192,11 @@ public class CommonConfigurationKeysPubl
   /** Default value for IPC_CLIENT_CONNECT_MAX_RETRIES_KEY */
   public static final int     IPC_CLIENT_CONNECT_MAX_RETRIES_DEFAULT = 10;
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+  public static final String  IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY =
+      "ipc.client.connect.retry.interval";
+  /** Default value for IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY */
+  public static final int     IPC_CLIENT_CONNECT_RETRY_INTERVAL_DEFAULT = 1000;
+  /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
   public static final String  IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY =
     "ipc.client.connect.max.retries.on.timeouts";
   /** Default value for IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY */

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1548386&r1=1548385&r2=1548386&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java Fri Dec  6 06:57:15 2013
@@ -90,8 +90,8 @@ public class TrashPolicyDefault extends 
         FS_TRASH_CHECKPOINT_INTERVAL_KEY, FS_TRASH_CHECKPOINT_INTERVAL_DEFAULT)
         * MSECS_PER_MINUTE);
     LOG.info("Namenode trash configuration: Deletion interval = " +
-             this.deletionInterval + " minutes, Emptier interval = " +
-             this.emptierInterval + " minutes.");
+             (this.deletionInterval / MSECS_PER_MINUTE) + " minutes, Emptier interval = " +
+             (this.emptierInterval / MSECS_PER_MINUTE) + " minutes.");
    }
 
   private Path makeTrashRelativePath(Path basePath, Path rmFilePath) {

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java?rev=1548386&r1=1548385&r2=1548386&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java Fri Dec  6 06:57:15 2013
@@ -31,15 +31,25 @@ public class HttpConfig {
   private static Policy policy;
   public enum Policy {
     HTTP_ONLY,
-    HTTPS_ONLY;
+    HTTPS_ONLY,
+    HTTP_AND_HTTPS;
 
     public static Policy fromString(String value) {
-      if (value.equalsIgnoreCase(CommonConfigurationKeysPublic
-              .HTTP_POLICY_HTTPS_ONLY)) {
+      if (HTTPS_ONLY.name().equalsIgnoreCase(value)) {
         return HTTPS_ONLY;
+      } else if (HTTP_AND_HTTPS.name().equalsIgnoreCase(value)) {
+        return HTTP_AND_HTTPS;
       }
       return HTTP_ONLY;
     }
+
+    public boolean isHttpEnabled() {
+      return this == HTTP_ONLY || this == HTTP_AND_HTTPS;
+    }
+
+    public boolean isHttpsEnabled() {
+      return this == HTTPS_ONLY || this == HTTP_AND_HTTPS;
+    }
   }
 
   static {

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1548386&r1=1548385&r2=1548386&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Fri Dec  6 06:57:15 2013
@@ -1158,6 +1158,7 @@ public class Client {
         // cleanup calls
         cleanupCalls();
       }
+      closeConnection();
       if (LOG.isDebugEnabled())
         LOG.debug(getName() + ": closed");
     }
@@ -1562,8 +1563,13 @@ public class Client {
         final int max = conf.getInt(
             CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY,
             CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_DEFAULT);
+        final int retryInterval = conf.getInt(
+            CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
+            CommonConfigurationKeysPublic
+                .IPC_CLIENT_CONNECT_RETRY_INTERVAL_DEFAULT);
+
         connectionRetryPolicy = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
-            max, 1, TimeUnit.SECONDS);
+            max, retryInterval, TimeUnit.MILLISECONDS);
       }
 
       boolean doPing =

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1548386&r1=1548385&r2=1548386&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Fri Dec  6 06:57:15 2013
@@ -619,6 +619,14 @@
 </property>
 
 <property>
+  <name>ipc.client.connect.retry.interval</name>
+  <value>1000</value>
+  <description>Indicates the number of milliseconds a client will wait for
+    before retrying to establish a server connection.
+  </description>
+</property>
+
+<property>
   <name>ipc.client.connect.timeout</name>
   <value>20000</value>
   <description>Indicates the number of milliseconds a client will wait for the 
@@ -1125,9 +1133,7 @@
   <name>hadoop.ssl.enabled</name>
   <value>false</value>
   <description>
-    Whether to use SSL for the HTTP endpoints. If set to true, the
-    NameNode, DataNode, ResourceManager, NodeManager, HistoryServer and
-    MapReduceAppMaster web UIs will be served over HTTPS instead HTTP.
+    Deprecated. Use dfs.http.policy and yarn.http.policy instead.
   </description>
 </property>
 

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm?rev=1548386&r1=1548385&r2=1548386&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm Fri Dec  6 06:57:15 2013
@@ -754,6 +754,10 @@ KVNO Timestamp         Principal
 | | | Enable HDFS block access tokens for secure operations. |
 *-------------------------+-------------------------+------------------------+
 | <<<dfs.https.enable>>> | <true> | |
+| | | This value is deprecated. Use dfs.http.policy |
+*-------------------------+-------------------------+------------------------+
+| <<<dfs.http.policy>>> | <HTTP_ONLY> or <HTTPS_ONLY> or <HTTP_AND_HTTPS> | |
+| | | HTTPS_ONLY turns off http access |
 *-------------------------+-------------------------+------------------------+
 | <<<dfs.namenode.https-address>>> | <nn_host_fqdn:50470> | |
 *-------------------------+-------------------------+------------------------+

Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1547224-1548385

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java?rev=1548386&r1=1548385&r2=1548386&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java Fri Dec  6 06:57:15 2013
@@ -85,6 +85,7 @@ public class TestMetricsSystemImpl {
   }
 
   @Test public void testInitFirstVerifyStopInvokedImmediately() throws Exception {
+    DefaultMetricsSystem.shutdown();
     new ConfigBuilder().add("*.period", 8)
         //.add("test.sink.plugin.urls", getPluginUrlsAsString())
         .add("test.sink.test.class", TestSink.class.getName())

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java?rev=1548386&r1=1548385&r2=1548386&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java Fri Dec  6 06:57:15 2013
@@ -24,6 +24,7 @@ import java.io.InputStreamReader;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.BiMap;
 import com.google.common.collect.HashBiMap;
 
@@ -44,13 +45,21 @@ public class IdUserGroup {
   // Do update every 15 minutes
   final static long TIMEOUT = 15 * 60 * 1000; // ms
 
-  // Maps for id to name map. Guarded by this object monitor lock */
+  // Maps for id to name map. Guarded by this object monitor lock
   private BiMap<Integer, String> uidNameMap = HashBiMap.create();
   private BiMap<Integer, String> gidNameMap = HashBiMap.create();
 
   private long lastUpdateTime = 0; // Last time maps were updated
 
-  public IdUserGroup() {
+  static public class DuplicateNameOrIdException extends IOException {
+    private static final long serialVersionUID = 1L;
+
+    public DuplicateNameOrIdException(String msg) {
+      super(msg);
+    }
+  }
+  
+  public IdUserGroup() throws IOException {
     updateMaps();
   }
 
@@ -58,18 +67,34 @@ public class IdUserGroup {
     return lastUpdateTime - System.currentTimeMillis() > TIMEOUT;
   }
 
+  // If can't update the maps, will keep using the old ones
   private void checkAndUpdateMaps() {
     if (isExpired()) {
       LOG.info("Update cache now");
-      updateMaps();
+      try {
+        updateMaps();
+      } catch (IOException e) {
+        LOG.error("Can't update the maps. Will use the old ones,"
+            + " which can potentially cause problem.", e);
+      }
     }
   }
 
+  private static final String DUPLICATE_NAME_ID_DEBUG_INFO = "NFS gateway can't start with duplicate name or id on the host system.\n"
+      + "This is because HDFS (non-kerberos cluster) uses name as the only way to identify a user or group.\n"
+      + "The host system with duplicated user/group name or id might work fine most of the time by itself.\n"
+      + "However when NFS gateway talks to HDFS, HDFS accepts only user and group name.\n"
+      + "Therefore, same name means the same user or same group. To find the duplicated names/ids, one can do:\n"
+      + "<getent passwd | cut -d: -f1,3> and <getent group | cut -d: -f1,3> on Linux systms,\n"
+      + "<dscl . -list /Users UniqueID> and <dscl . -list /Groups PrimaryGroupID> on MacOS.";
+  
   /**
    * Get the whole list of users and groups and save them in the maps.
+   * @throws IOException 
    */
-  private void updateMapInternal(BiMap<Integer, String> map, String name,
-      String command, String regex) throws IOException {
+  @VisibleForTesting
+  public static void updateMapInternal(BiMap<Integer, String> map, String mapName,
+      String command, String regex) throws IOException  {
     BufferedReader br = null;
     try {
       Process process = Runtime.getRuntime().exec(
@@ -79,15 +104,31 @@ public class IdUserGroup {
       while ((line = br.readLine()) != null) {
         String[] nameId = line.split(regex);
         if ((nameId == null) || (nameId.length != 2)) {
-          throw new IOException("Can't parse " + name + " list entry:" + line);
+          throw new IOException("Can't parse " + mapName + " list entry:" + line);
+        }
+        LOG.debug("add to " + mapName + "map:" + nameId[0] + " id:" + nameId[1]);
+        // HDFS can't differentiate duplicate names with simple authentication
+        Integer key = Integer.valueOf(nameId[1]);
+        String value = nameId[0];
+        if (map.containsKey(key)) {
+          LOG.error(String.format(
+              "Got duplicate id:(%d, %s), existing entry: (%d, %s).\n%s", key,
+              value, key, map.get(key), DUPLICATE_NAME_ID_DEBUG_INFO));
+          throw new DuplicateNameOrIdException("Got duplicate id.");
+        }
+        if (map.containsValue(nameId[0])) {
+          LOG.error(String.format(
+              "Got duplicate name:(%d, %s), existing entry: (%d, %s) \n%s",
+              key, value, map.inverse().get(value), value,
+              DUPLICATE_NAME_ID_DEBUG_INFO));
+          throw new DuplicateNameOrIdException("Got duplicate name");
         }
-        LOG.debug("add " + name + ":" + nameId[0] + " id:" + nameId[1]);
         map.put(Integer.valueOf(nameId[1]), nameId[0]);
       }
-      LOG.info("Updated " + name + " map size:" + map.size());
+      LOG.info("Updated " + mapName + " map size:" + map.size());
       
     } catch (IOException e) {
-      LOG.error("Can't update map " + name);
+      LOG.error("Can't update " + mapName + " map");
       throw e;
     } finally {
       if (br != null) {
@@ -101,24 +142,26 @@ public class IdUserGroup {
     }
   }
 
-  synchronized public void updateMaps() {
+  synchronized public void updateMaps() throws IOException {
     BiMap<Integer, String> uMap = HashBiMap.create();
     BiMap<Integer, String> gMap = HashBiMap.create();
 
-    try {
-      if (OS.startsWith("Linux")) {
-        updateMapInternal(uMap, "user", LINUX_GET_ALL_USERS_CMD, ":");
-        updateMapInternal(gMap, "group", LINUX_GET_ALL_GROUPS_CMD, ":");
-      } else if (OS.startsWith("Mac")) {
-        updateMapInternal(uMap, "user", MAC_GET_ALL_USERS_CMD, "\\s+");
-        updateMapInternal(gMap, "group", MAC_GET_ALL_GROUPS_CMD, "\\s+");
-      } else {
-        throw new IOException("Platform is not supported:" + OS);
-      }
-    } catch (IOException e) {
-      LOG.error("Can't update maps:" + e);
+    if (!OS.startsWith("Linux") && !OS.startsWith("Mac")) {
+      LOG.error("Platform is not supported:" + OS
+          + ". Can't update user map and group map and"
+          + " 'nobody' will be used for any user and group.");
       return;
     }
+
+    if (OS.startsWith("Linux")) {
+      updateMapInternal(uMap, "user", LINUX_GET_ALL_USERS_CMD, ":");
+      updateMapInternal(gMap, "group", LINUX_GET_ALL_GROUPS_CMD, ":");
+    } else {
+      // Mac
+      updateMapInternal(uMap, "user", MAC_GET_ALL_USERS_CMD, "\\s+");
+      updateMapInternal(gMap, "group", MAC_GET_ALL_GROUPS_CMD, "\\s+");
+    }
+
     uidNameMap = uMap;
     gidNameMap = gMap;
     lastUpdateTime = System.currentTimeMillis();



Mime
View raw message