hadoop-common-commits mailing list archives

From: a..@apache.org
Subject: svn commit: r1196458 [1/3] - in /hadoop/common/branches/HDFS-1623: common/ hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/ hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authent...
Date: Wed, 02 Nov 2011 05:34:59 GMT
Author: atm
Date: Wed Nov  2 05:34:31 2011
New Revision: 1196458

URL: http://svn.apache.org/viewvc?rev=1196458&view=rev
Log:
Merge trunk into HA branch

Added:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/IncludePublicAnnotationsStandardDoclet.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/IncludePublicAnnotationsStandardDoclet.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/resources/
      - copied from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/site/resources/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/resources/css/
      - copied from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/site/resources/css/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/resources/css/site.css
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/site/resources/css/site.css
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/package-info.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/package-info.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/package-info.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/package-info.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/
      - copied from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/HadoopRpcProtos.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/HadoopRpcProtos.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/package-info.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/package-info.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/package-info.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/package-info.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/package-info.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/package-info.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/package-info.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/package-info.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/package-info.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/package-info.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-validate-setup.sh
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/hadoop-validate-setup.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/proto/
      - copied from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/proto/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/proto/hadoop_rpc.proto
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/proto/hadoop_rpc.proto
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/site/
      - copied from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/site/apt/
      - copied from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/
      - copied from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestProtos.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestProtos.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtos.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtos.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/proto/
      - copied from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/proto/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/proto/test.proto
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/proto/test.proto
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
      - copied unchanged from r1196451, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
Removed:
    hadoop/common/branches/HDFS-1623/common/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsConfig.java
Modified:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/site.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/hadoop-metrics2.properties
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/CachedDNSToSwitchMapping.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/Node.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NodeBase.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/native/configure.ac
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-hdfs.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-policy.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/mapred-site.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServlet.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/CoreTestDriver.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java Wed Nov  2 05:34:31 2011
@@ -50,6 +50,7 @@ import org.apache.hadoop.classification.
 class RootDocProcessor {
   
   static String stability = StabilityOptions.UNSTABLE_OPTION;
+  static boolean treatUnannotatedClassesAsPrivate = false;
   
   public static RootDoc process(RootDoc root) {
     return (RootDoc) process(root, RootDoc.class);
@@ -201,6 +202,17 @@ class RootDocProcessor {
 	    }
 	  }
 	}
+        for (AnnotationDesc annotation : annotations) {
+          String qualifiedTypeName =
+            annotation.annotationType().qualifiedTypeName();
+          if (qualifiedTypeName.equals(
+              InterfaceAudience.Public.class.getCanonicalName())) {
+            return false;
+          }
+        }
+      }
+      if (treatUnannotatedClassesAsPrivate) {
+        return doc.isClass() || doc.isInterface() || doc.isAnnotationType();
       }
       return false;
     }
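
For context on the hunk above: the new treatUnannotatedClassesAsPrivate flag lets the doclet hide every class, interface, or annotation type that carries no InterfaceAudience annotation, while elements explicitly marked @InterfaceAudience.Public are always kept. A minimal sketch of that decision, assuming the com.sun.javadoc doclet API; the class and method names here are illustrative, not the exact shape of RootDocProcessor:

    import com.sun.javadoc.AnnotationDesc;
    import com.sun.javadoc.Doc;
    import com.sun.javadoc.ProgramElementDoc;
    import org.apache.hadoop.classification.InterfaceAudience;

    class ExcludeSketch {
      static boolean treatUnannotatedClassesAsPrivate = true;

      // Returns true when an element must be hidden from the generated javadoc.
      static boolean exclude(Doc doc) {
        if (doc instanceof ProgramElementDoc) {
          AnnotationDesc[] annotations = ((ProgramElementDoc) doc).annotations();
          for (AnnotationDesc annotation : annotations) {
            String name = annotation.annotationType().qualifiedTypeName();
            if (name.equals(InterfaceAudience.Public.class.getCanonicalName())) {
              return false; // explicitly @Public: always keep
            }
          }
        }
        if (treatUnannotatedClassesAsPrivate) {
          // nothing marked it @Public: hide types in "public API only" mode
          return doc.isClass() || doc.isInterface() || doc.isAnnotationType();
        }
        return false;
      }
    }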

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java Wed Nov  2 05:34:31 2011
@@ -151,15 +151,13 @@ public class KerberosAuthenticationHandl
         throw new ServletException("Keytab does not exist: " + keytab);
       }
 
-      String nameRules = config.getProperty(NAME_RULES, "DEFAULT");
-      KerberosName.setRules(nameRules);
-
       Set<Principal> principals = new HashSet<Principal>();
       principals.add(new KerberosPrincipal(principal));
       Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
 
       KerberosConfiguration kerberosConfiguration = new KerberosConfiguration(keytab, principal);
 
+      LOG.info("Login using keytab "+keytab+", for principal "+principal);
       loginContext = new LoginContext("", subject, null, kerberosConfiguration);
       loginContext.login();
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/site.xml?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/site.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/site.xml Wed Nov  2 05:34:31 2011
@@ -13,16 +13,10 @@
 -->
 <project name="Hadoop Auth">
 
-  <version position="right"/>
-
-  <bannerLeft>
-    <name>&nbsp;</name>
-  </bannerLeft>
-
   <skin>
     <groupId>org.apache.maven.skins</groupId>
     <artifactId>maven-stylus-skin</artifactId>
-    <version>1.1</version>
+    <version>1.2</version>
   </skin>
 
   <body>

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt Wed Nov  2 05:34:31 2011
@@ -15,46 +15,87 @@ Trunk (unreleased changes)
 
     HADOOP-7635. RetryInvocationHandler should release underlying resources on
                  close (atm)
-    
-    HADOOP-7668. Add a NetUtils method that can tell if an InetAddress 
-                 belongs to local host. (suresh)
 
     HADOOP-7687 Make getProtocolSignature public  (sanjay)
 
     HADOOP-7693. Enhance AvroRpcEngine to support the new #addProtocol
                  interface introduced in HADOOP-7524.  (cutting)
 
-	  HADOOP-7716. RPC protocol registration on SS does not log the protocol name
-	               (only the class which may be different) (sanjay)
+    HADOOP-7716. RPC protocol registration on SS does not log the protocol name
+                 (only the class which may be different) (sanjay)
 
     HADOOP-7717. Move handling of concurrent client fail-overs to
                  RetryInvocationHandler (atm)
 
+    HADOOP-6490. Use StringUtils over String#replace in Path#normalizePath.
+                 (Uma Maheswara Rao G via harsh)
+
+    HADOOP-7736. Remove duplicate Path#normalizePath call. (harsh)
+
+    HADOOP-7664. Remove warnings when overriding final parameter configuration
+    if the override value is the same as the final parameter value.
+    (Ravi Prakash via suresh)
+
+    HADOOP-7737. normalize hadoop-mapreduce & hadoop-dist dist/tar build with 
+    common/hdfs. (tucu)
+
+    HADOOP-7743. Add Maven profile to create a full source tarball. (tucu)
+
+    HADOOP-7729. Send back valid HTTP response if user hits IPC port with
+    HTTP GET. (todd)
+
+    HADOOP-7758. Make GlobFilter class public. (tucu)
+
+    HADOOP-7728. Enable task memory management to be configurable in hadoop
+    config setup script. (ramya)
+
+    HADOOP-7424. Log an error if the topology script doesn't handle multiple args.
+    (Uma Maheswara Rao G via eli)
+
+    HADOOP-7792. Add verifyToken method to AbstractDelegationTokenSecretManager.
+    (jitendra)
+
   BUGS
 
     HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required
-                 by Jersey (Alejandro Abdelnur via atm)
+    by Jersey (Alejandro Abdelnur via atm)
 
     HADOOP-7610. Fix for hadoop debian package (Eric Yang via gkesavan)
 
-    HADOOP-7641. Add Apache License to template config files (Eric Yang via atm)
+    HADOOP-7641. Add Apache License to template config files.
+    (Eric Yang via atm)
 
     HADOOP-7621. alfredo config should be in a file not readable by users
-                 (Alejandro Abdelnur via atm)
+    (Alejandro Abdelnur via atm)
     
     HADOOP-7669  Fix newly introduced release audit warning. 
-                 (Uma Maheswara Rao G via stevel)
+    (Uma Maheswara Rao G via stevel)
     
-    HADOOP-6220. HttpServer wraps InterruptedExceptions by IOExceptions if interrupted 
-                 in startup (stevel)
+    HADOOP-6220. HttpServer wraps InterruptedExceptions by IOExceptions 
+    if interrupted in startup (stevel)
 
-    HADOOP-7703. Improved excpetion handling of shutting down web server.
+    HADOOP-7703. Improved exception handling of shutting down web server.
     (Devaraj K via Eric Yang)
 
     HADOOP-7704. Reduce number of object created by JMXJsonServlet.
     (Devaraj K via Eric Yang)
 
-Release 0.23.0 - Unreleased
+    HADOOP-7695. RPC.stopProxy can throw unintended exception while logging
+    error (atm)
+
+    HADOOP-7769. TestJMXJsonServlet is failing. (tomwhite)
+
+    HADOOP-7770. ViewFS getFileChecksum throws FileNotFoundException for files in 
+    /tmp and /user. (Ravi Prakash via jitendra)
+
+  OPTIMIZATIONS
+
+    HADOOP-7761. Improve the performance of raw comparisons. (todd)
+
+    HADOOP-7773. Add support for protocol buffer based RPC engine.
+    (suresh)
+
+Release 0.23.0 - 2011-11-01 
 
   INCOMPATIBLE CHANGES
 
@@ -122,6 +163,9 @@ Release 0.23.0 - Unreleased
 
   IMPROVEMENTS
 
+    HADOOP-7655. Provide a small validation script that smoke tests the installed
+    cluster. (Arpit Gupta via mattf)
+
     HADOOP-7042. Updates to test-patch.sh to include failed test names and
     improve other messaging. (nigel)
 
@@ -435,6 +479,47 @@ Release 0.23.0 - Unreleased
     HADOOP-7720. Added parameter for HBase user to setup config script.
     (Arpit Gupta via Eric Yang)
 
+    HADOOP-7624. Set things up for a top level hadoop-tools module. (tucu)
+
+    HADOOP-7627. Improve MetricsAsserts to give more understandable output
+    on failure. (todd)
+
+    HADOOP-7642. create hadoop-dist module where TAR stitching would happen.
+    (Thomas White via tucu)
+
+    HADOOP-7709. Running a set of methods in a Single Test Class. 
+    (Jonathan Eagles via mahadev)
+
+    HADOOP-7705. Add a log4j back end that can push out JSON data,
+    one per line. (stevel)
+
+    HADOOP-7749. Add a NetUtils createSocketAddr call which provides more
+    help in exception messages. (todd)
+
+    HADOOP-7762. Common side of MR-2736. (eli)
+
+    HADOOP-7668. Add a NetUtils method that can tell if an InetAddress 
+    belongs to local host. (suresh)
+
+    HADOOP-7509. Improve exception message thrown when Authentication is 
+    required. (Ravi Prakash via suresh)
+
+    HADOOP-7745. Fix wrong variable name in exception message introduced
+    in HADOOP-7509. (Ravi Prakash via suresh)
+
+    MAPREDUCE-2764. Fix renewal of dfs delegation tokens. (Owen via jitendra)
+
+    HADOOP-7360. Preserve relative paths that do not contain globs in FsShell.
+    (Daryn Sharp and Kihwal Lee via szetszwo)
+
+    HADOOP-7771. FsShell -copyToLocal, -get, etc. commands throw NPE if the
+    destination directory does not exist.  (John George and Daryn Sharp
+    via szetszwo)
+
+    HADOOP-7782. Aggregate project javadocs. (tomwhite)
+
+    HADOOP-7789. Improvements to site navigation. (acmurthy) 
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
@@ -443,8 +528,20 @@ Release 0.23.0 - Unreleased
     HADOOP-7445. Implement bulk checksum verification using efficient native
     code. (todd)
 
+    HADOOP-7753. Support fadvise and sync_file_range in NativeIO. Add
+    ReadaheadPool infrastructure for use in HDFS and MR. (todd)
+
+    HADOOP-7446. Implement CRC32C native code using SSE4.2 instructions.
+    (Kihwal Lee and todd via todd)
+
+    HADOOP-7763. Add top-level navigation to APT docs. (tomwhite)
+
+    HADOOP-7785. Add equals, hashcode, toString to DataChecksum (todd)
+
   BUG FIXES
 
+    HADOOP-7740. Fixed security audit logger configuration. (Arpit Gupta via Eric Yang)
+
     HADOOP-7630. hadoop-metrics2.properties should have a property *.period 
     set to a default value for metrics. (Eric Yang via mattf)
 
@@ -681,6 +778,26 @@ Release 0.23.0 - Unreleased
     HADOOP-7708. Fixed hadoop-setup-conf.sh to handle config files
     consistently.  (Eric Yang)
 
+    HADOOP-7724. Fixed hadoop-setup-conf.sh to put proxy user in
+    core-site.xml.  (Arpit Gupta via Eric Yang)
+
+    HADOOP-7755. Detect MapReduce PreCommit Trunk builds silently failing
+    when running test-patch.sh. (Jonathan Eagles via tomwhite)
+
+    HADOOP-7744. Ensure failed tests exit with proper error code. (Jonathan
+    Eagles via acmurthy) 
+
+    HADOOP-7764. Allow HttpServer to set both ACL list and path spec filters. 
+    (Jonathan Eagles via acmurthy)
+
+    HADOOP-7766. The auth to local mappings are not being respected, with webhdfs 
+    and security enabled. (jitendra)
+
+    HADOOP-7721. Add log before login in KerberosAuthenticationHandler. 
+    (jitendra)
+
+    HADOOP-7778. FindBugs warning in Token.getKind(). (tomwhite)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -933,6 +1050,10 @@ Release 0.22.0 - Unreleased
     HADOOP-7325. The hadoop command should not accept class names starting with
     a hyphen. (Brock Noland via todd)
 
+    HADOOP-7772. javadoc the topology classes (stevel)
+
+    HADOOP-7786. Remove HDFS-specific config keys defined in FsConfig. (eli)
+
   OPTIMIZATIONS
 
     HADOOP-6884. Add LOG.isDebugEnabled() guard for each LOG.debug(..).

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Nov  2 05:34:31 2011
@@ -1,5 +1,5 @@
 /hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1179483
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1196451
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Wed Nov  2 05:34:31 2011
@@ -270,4 +270,8 @@
       <!-- backward compatibility -->
       <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS"/>
     </Match>
+    <Match>
+      <!-- protobuf generated code -->
+      <Class name="org.apache.hadoop.ipc.protobuf.HadoopRpcProtos"/>
+    </Match>
  </FindBugsFilter>

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml Wed Nov  2 05:34:31 2011
@@ -338,6 +338,7 @@
                 TODO: from a previous run is present
                 -->
                 <delete dir="${test.build.data}"/>
+                <mkdir dir="${test.build.data}"/>
                 <mkdir dir="${hadoop.log.dir}"/>
 
                 <copy toDir="${project.build.directory}/test-classes">
@@ -346,6 +347,18 @@
               </target>
             </configuration>
           </execution>
+          <execution>
+            <phase>pre-site</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <tasks>
+                <copy file="src/main/resources/core-default.xml" todir="src/site/resources"/>
+                <copy file="src/main/xsl/configuration.xsl" todir="src/site/resources"/>
+              </tasks>
+            </configuration>
+          </execution>
         </executions>
       </plugin>
       <plugin>

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop Wed Nov  2 05:34:31 2011
@@ -63,22 +63,6 @@ case $COMMAND in
     fi
     ;;
 
-  #mapred commands  
-  mradmin|jobtracker|tasktracker|pipes|job|queue)
-    echo "DEPRECATED: Use of this script to execute mapred command is deprecated."
-    echo "Instead use the mapred command for it."
-    echo ""
-    #try to locate mapred and if present, delegate to it.
-    if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
-      exec "${HADOOP_MAPRED_HOME}"/bin/mapred $*
-    elif [ -f "${HADOOP_PREFIX}"/bin/mapred ]; then
-      exec "${HADOOP_PREFIX}"/bin/mapred $* 
-    else
-      echo "MAPRED not found."
-      exit
-    fi
-    ;;
-
   classpath)
     if $cygwin; then
       CLASSPATH=`cygpath -p -w "$CLASSPATH"`
@@ -119,6 +103,9 @@ case $COMMAND in
     fi
     shift
     
+    #make sure security appender is turned off
+    HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
+
     if $cygwin; then
       CLASSPATH=`cygpath -p -w "$CLASSPATH"`
     fi

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh Wed Nov  2 05:34:31 2011
@@ -217,7 +217,6 @@ HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.f
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_PREFIX"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,console}"
 if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
   HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
 fi  
@@ -248,24 +247,8 @@ if $cygwin; then
   HADOOP_HDFS_HOME=`cygpath -w "$HADOOP_HDFS_HOME"`
 fi
 
-# set mapred home if mapred is present
-if [ "$HADOOP_MAPRED_HOME" = "" ]; then
-  if [ -d "${HADOOP_PREFIX}/share/hadoop/mapreduce" ]; then
-    HADOOP_MAPRED_HOME=$HADOOP_PREFIX
-  fi
-fi
-
-if [ -d "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/webapps
-fi
-
-if [ -d "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib'/*'
-fi
-
 # cygwin path translation
 if $cygwin; then
-  HADOOP_MAPRED_HOME=`cygpath -w "$HADOOP_MAPRED_HOME"`
   TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
 fi
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh Wed Nov  2 05:34:31 2011
@@ -29,8 +29,3 @@ bin=`cd "$bin"; pwd`
 if [ -f "${HADOOP_HDFS_HOME}"/bin/start-dfs.sh ]; then
   "${HADOOP_HDFS_HOME}"/bin/start-dfs.sh --config $HADOOP_CONF_DIR
 fi
-
-# start mapred daemons if mapred is present
-if [ -f "${HADOOP_MAPRED_HOME}"/bin/start-mapred.sh ]; then
-  "${HADOOP_MAPRED_HOME}"/bin/start-mapred.sh --config $HADOOP_CONF_DIR
-fi

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh Wed Nov  2 05:34:31 2011
@@ -29,9 +29,3 @@ bin=`cd "$bin"; pwd`
 if [ -f "${HADOOP_HDFS_HOME}"/bin/stop-dfs.sh ]; then
   "${HADOOP_HDFS_HOME}"/bin/stop-dfs.sh --config $HADOOP_CONF_DIR
 fi
-
-# stop mapred daemons if mapred is present
-if [ -f "${HADOOP_MAPRED_HOME}"/bin/stop-mapred.sh ]; then
-  "${HADOOP_MAPRED_HOME}"/bin/stop-mapred.sh --config $HADOOP_CONF_DIR
-fi
-

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/hadoop-metrics2.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/hadoop-metrics2.properties?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/hadoop-metrics2.properties (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/hadoop-metrics2.properties Wed Nov  2 05:34:31 2011
@@ -51,7 +51,6 @@
 #*.sink.ganglia.tagsForPrefix.dfs=
 #*.sink.ganglia.tagsForPrefix.rpc=
 #*.sink.ganglia.tagsForPrefix.mapred=
-#*.sink.ganglia.tagsForPrefix.fairscheduler=
 
 #namenode.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
 

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Nov  2 05:34:31 2011
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1179483
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1196451
 /hadoop/core/branches/branch-0.19/src/docs:713112

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml Wed Nov  2 05:34:31 2011
@@ -627,7 +627,7 @@
 		          </tr>
                   <tr>
                     <td>conf/hdfs-site.xml</td>
-                    <td>dfs.block.size</td>
+                    <td>dfs.blocksize</td>
                     <td>134217728</td>
                     <td>HDFS blocksize of 128MB for large file-systems.</td>
                   </tr>

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Nov  2 05:34:31 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1179483
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1196451
 /hadoop/core/branches/branch-0.19/core/src/java:713112
 /hadoop/core/trunk/src/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Wed Nov  2 05:34:31 2011
@@ -1580,7 +1580,7 @@ public class Configuration implements It
       if (!finalParameters.contains(attr)) {
         properties.setProperty(attr, value);
         updatingResource.put(attr, name.toString());
-      } else {
+      } else if (!value.equals(properties.getProperty(attr))) {
         LOG.warn(name+":an attempt to override final parameter: "+attr
             +";  Ignoring.");
       }
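
The net effect of the Configuration change above (HADOOP-7664): re-declaring a final parameter with the value it already has is now silently accepted, and the warning fires only for a genuinely conflicting override. A small illustration, with hypothetical resource files:

    import org.apache.hadoop.conf.Configuration;

    public class FinalOverrideDemo {
      public static void main(String[] args) {
        // Hypothetical setup: core-site.xml on the classpath declares
        // io.file.buffer.size = 4096 with <final>true</final>.
        Configuration conf = new Configuration();
        conf.addResource("my-site.xml");   // hypothetical extra resource
        // If my-site.xml sets io.file.buffer.size to 4096, nothing is logged.
        // If it sets 8192, the override is still ignored and the usual
        // "attempt to override final parameter" warning is still emitted.
        System.out.println(conf.get("io.file.buffer.size"));
      }
    }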

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java Wed Nov  2 05:34:31 2011
@@ -93,5 +93,18 @@ public class CommonConfigurationKeys ext
   /** Default value for IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY */
   public static final int IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT =
       256 * 1024;
+  
+  /**
+   * Service Authorization
+   */
+  public static final String 
+  HADOOP_SECURITY_SERVICE_AUTHORIZATION_REFRESH_POLICY = 
+      "security.refresh.policy.protocol.acl";
+  public static final String 
+  HADOOP_SECURITY_SERVICE_AUTHORIZATION_GET_USER_MAPPINGS =
+      "security.get.user.mappings.protocol.acl";
+  public static final String 
+  HADOOP_SECURITY_SERVICE_AUTHORIZATION_REFRESH_USER_MAPPINGS =
+      "security.refresh.user.mappings.protocol.acl";
 }
 

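The three new keys name protocol-level ACLs that take effect when hadoop.security.authorization is enabled; operators set them in hadoop-policy.xml (the template under src/main/packages is updated in this same commit). A sketch of reading one of them; the direct Configuration lookup is only illustrative, since real enforcement goes through the service-level authorization machinery:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;

    public class PolicyPeek {
      public static void main(String[] args) {
        Configuration policy = new Configuration(false);
        policy.addResource("hadoop-policy.xml");
        // "*" is the conventional default meaning "all users allowed".
        String acl = policy.get(
            CommonConfigurationKeys
                .HADOOP_SECURITY_SERVICE_AUTHORIZATION_REFRESH_POLICY,
            "*");
        System.out.println("refresh-policy ACL: " + acl);
      }
    }
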
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java Wed Nov  2 05:34:31 2011
@@ -44,6 +44,8 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.permission.FsPermission;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_DEFAULT;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RpcClientException;
 import org.apache.hadoop.ipc.RpcServerException;
@@ -443,7 +445,9 @@ public final class FileContext {
    */
   public static FileContext getFileContext(final Configuration aConf)
       throws UnsupportedFileSystemException {
-    return getFileContext(URI.create(FsConfig.getDefaultFsURI(aConf)), aConf);
+    return getFileContext(
+      URI.create(aConf.get(FS_DEFAULT_NAME_KEY, FS_DEFAULT_NAME_DEFAULT)), 
+      aConf);
   }
 
   /**
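
FsConfig is removed in this merge (see the Removed list above), so FileContext now reads the default file system straight from the standard fs.defaultFS key. A quick usage sketch; the namenode address is hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.UnsupportedFileSystemException;

    public class FcDemo {
      public static void main(String[] args)
          throws UnsupportedFileSystemException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://namenode:8020"); // FS_DEFAULT_NAME_KEY
        FileContext fc = FileContext.getFileContext(conf);
        System.out.println(fc.getDefaultFileSystem().getUri());
      }
    }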

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java Wed Nov  2 05:34:31 2011
@@ -21,8 +21,15 @@ package org.apache.hadoop.fs;
 import java.util.regex.PatternSyntaxException;
 import java.io.IOException;
 
- // A class that could decide if a string matches the glob or not
-class GlobFilter implements PathFilter {
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * A filter for POSIX glob pattern with brace expansions.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class GlobFilter implements PathFilter {
   private final static PathFilter DEFAULT_FILTER = new PathFilter() {
       public boolean accept(Path file) {
         return true;
@@ -32,11 +39,24 @@ class GlobFilter implements PathFilter {
   private PathFilter userFilter = DEFAULT_FILTER;
   private GlobPattern pattern;
 
-  GlobFilter(String filePattern) throws IOException {
+  /**
+   * Creates a glob filter with the specified file pattern.
+   *
+   * @param filePattern the file pattern.
+   * @throws IOException thrown if the file pattern is incorrect.
+   */
+  public GlobFilter(String filePattern) throws IOException {
     init(filePattern, DEFAULT_FILTER);
   }
 
-  GlobFilter(String filePattern, PathFilter filter) throws IOException {
+  /**
+   * Creates a glob filter with the specified file pattern and a user filter.
+   *
+   * @param filePattern the file pattern.
+   * @param filter user filter in addition to the glob pattern.
+   * @throws IOException thrown if the file pattern is incorrect.
+   */
+  public GlobFilter(String filePattern, PathFilter filter) throws IOException {
     init(filePattern, filter);
   }
 

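GlobFilter becoming public (HADOOP-7758) means client code can pass it straight to the FileSystem listing APIs. A usage sketch; the path and pattern are hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.GlobFilter;
    import org.apache.hadoop.fs.Path;

    public class GlobDemo {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // keep only entries matching the glob, e.g. app-2011-11-02.log
        FileStatus[] logs =
            fs.listStatus(new Path("/var/log"), new GlobFilter("app-*.log"));
        for (FileStatus st : logs) {
          System.out.println(st.getPath());
        }
      }
    }
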
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java Wed Nov  2 05:34:31 2011
@@ -18,10 +18,12 @@
 
 package org.apache.hadoop.fs;
 
-import java.net.*;
-import java.io.*;
-import org.apache.avro.reflect.Stringable;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
 
+import org.apache.avro.reflect.Stringable;
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -75,7 +77,7 @@ public class Path implements Comparable 
       }
     URI resolved = parentUri.resolve(child.uri);
     initialize(resolved.getScheme(), resolved.getAuthority(),
-               normalizePath(resolved.getPath()), resolved.getFragment());
+               resolved.getPath(), resolved.getFragment());
   }
 
   private void checkPathArg( String path ) {
@@ -157,8 +159,8 @@ public class Path implements Comparable 
 
   private String normalizePath(String path) {
     // remove double slashes & backslashes
-    path = path.replace("//", "/");
-    path = path.replace("\\", "/");
+    path = StringUtils.replace(path, "//", "/");
+    path = StringUtils.replace(path, "\\", "/");
     
     // trim trailing slash from non-root path (ignoring windows drive)
     int minLength = hasWindowsDrive(path, true) ? 4 : 1;
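
Why the swap matters (HADOOP-6490): in the JDKs of this era, String#replace(CharSequence, CharSequence) compiles a literal regex Pattern on every call, while commons-lang StringUtils.replace is a plain scan-and-append loop, which adds up on a hot spot like path normalization. The two produce identical results:

    import org.apache.commons.lang.StringUtils;

    public class ReplaceDemo {
      public static void main(String[] args) {
        String p = "hdfs://nn//user//alice\\data/";
        // String#replace goes through Pattern.compile(..., Pattern.LITERAL)
        String viaJdk = p.replace("\\", "/").replace("//", "/");
        // StringUtils.replace is a simple scan, no regex machinery involved
        String viaLang = StringUtils.replace(
            StringUtils.replace(p, "\\", "/"), "//", "/");
        System.out.println(viaJdk.equals(viaLang)); // true
      }
    }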

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java Wed Nov  2 05:34:31 2011
@@ -55,6 +55,7 @@ abstract public class Command extends Co
   protected int exitCode = 0;
   protected int numErrors = 0;
   protected boolean recursive = false;
+  private int depth = 0;
   protected ArrayList<Exception> exceptions = new ArrayList<Exception>();
 
   private static final Log LOG = LogFactory.getLog(Command.class);
@@ -86,6 +87,10 @@ abstract public class Command extends Co
     return recursive;
   }
 
+  protected int getDepth() {
+    return depth;
+  }
+  
   /** 
    * Execute the command on the input path
    * 
@@ -269,6 +274,7 @@ abstract public class Command extends Co
   protected void processPathArgument(PathData item) throws IOException {
     // null indicates that the call is not via recursion, ie. there is
     // no parent directory that was expanded
+    depth = 0;
     processPaths(null, item);
   }
   
@@ -326,7 +332,12 @@ abstract public class Command extends Co
    *  @throws IOException if anything goes wrong...
    */
   protected void recursePath(PathData item) throws IOException {
-    processPaths(item, item.getDirectoryContents());
+    try {
+      depth++;
+      processPaths(item, item.getDirectoryContents());
+    } finally {
+      depth--;
+    }
   }
 
   /**
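
The depth counter gives subclasses a cheap way to ask whether the path being processed was named directly on the command line (depth 0) or was reached by recursion; CommandWithDestination below uses exactly this in getTargetPath(). A hypothetical subclass hook, for illustration only:

    // Hypothetical subclass making use of the new getDepth() accessor.
    @Override
    protected void processPaths(PathData parent, PathData ... items)
        throws IOException {
      if (getDepth() == 0) {
        // items are arguments the user typed, not expanded children
      }
      super.processPaths(parent, items);
    }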

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java Wed Nov  2 05:34:31 2011
@@ -20,13 +20,18 @@ package org.apache.hadoop.fs.shell;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
 import java.util.LinkedList;
 
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.shell.PathExceptions.PathExistsException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
+import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotDirectoryException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
+import org.apache.hadoop.fs.shell.PathExceptions.PathOperationException;
+import org.apache.hadoop.io.IOUtils;
 
 /**
  * Provides: argument processing to ensure the destination is valid
@@ -106,51 +111,136 @@ abstract class CommandWithDestination ex
   }
 
   @Override
-  protected void processPaths(PathData parent, PathData ... items)
+  protected void processPathArgument(PathData src)
   throws IOException {
+    if (src.stat.isDirectory() && src.fs.equals(dst.fs)) {
+      PathData target = getTargetPath(src);
+      String srcPath = src.fs.makeQualified(src.path).toString();
+      String dstPath = dst.fs.makeQualified(target.path).toString();
+      if (dstPath.equals(srcPath)) {
+        PathIOException e = new PathIOException(src.toString(),
+            "are identical");
+        e.setTargetPath(dstPath.toString());
+        throw e;
+      }
+      if (dstPath.startsWith(srcPath+Path.SEPARATOR)) {
+        PathIOException e = new PathIOException(src.toString(),
+            "is a subdirectory of itself");
+        e.setTargetPath(target.toString());
+        throw e;
+      }
+    }
+    super.processPathArgument(src);
+  }
+
+  @Override
+  protected void processPath(PathData src) throws IOException {
+    processPath(src, getTargetPath(src));
+  }
+  
+  /**
+   * Called with a source and target destination pair
+   * @param src for the operation
+   * @param target for the operation
+   * @throws IOException if anything goes wrong
+   */
+  protected void processPath(PathData src, PathData dst) throws IOException {
+    if (src.stat.isSymlink()) {
+      // TODO: remove when FileContext is supported, this needs to either
+      // copy the symlink or deref the symlink
+      throw new PathOperationException(src.toString());        
+    } else if (src.stat.isFile()) {
+      copyFileToTarget(src, dst);
+    } else if (src.stat.isDirectory() && !isRecursive()) {
+      throw new PathIsDirectoryException(src.toString());
+    }
+  }
+
+  @Override
+  protected void recursePath(PathData src) throws IOException {
     PathData savedDst = dst;
     try {
       // modify dst as we descend to append the basename of the
       // current directory being processed
-      if (parent != null) dst = dst.getPathDataForChild(parent);
-      super.processPaths(parent, items);
+      dst = getTargetPath(src);
+      if (dst.exists) {
+        if (!dst.stat.isDirectory()) {
+          throw new PathIsNotDirectoryException(dst.toString());
+        }
+      } else {
+        if (!dst.fs.mkdirs(dst.path)) {
+          // too bad we have no clue what failed
+          PathIOException e = new PathIOException(dst.toString());
+          e.setOperation("mkdir");
+          throw e;
+        }    
+        dst.refreshStatus(); // need to update stat to know it exists now
+      }      
+      super.recursePath(src);
     } finally {
       dst = savedDst;
     }
   }
   
-  @Override
-  protected void processPath(PathData src) throws IOException {
+  protected PathData getTargetPath(PathData src) throws IOException {
     PathData target;
-    // if the destination is a directory, make target a child path,
-    // else use the destination as-is
-    if (dst.exists && dst.stat.isDirectory()) {
+    // on the first loop, the dst may be a directory or a file, so only create
+    // a child path if dst is a dir; after recursion, it's always a dir
+    if ((getDepth() > 0) || (dst.exists && dst.stat.isDirectory())) {
       target = dst.getPathDataForChild(src);
     } else {
       target = dst;
     }
-    if (target.exists && !overwrite) {
+    return target;
+  }
+  
+  /**
+   * Copies the source file to the target.
+   * @param src item to copy
+   * @param target where to copy the item
+   * @throws IOException if copy fails
+   */ 
+  protected void copyFileToTarget(PathData src, PathData target) throws IOException {
+    copyStreamToTarget(src.fs.open(src.path), target);
+  }
+  
+  /**
+   * Copies the stream contents to a temporary file.  If the copy is
+   * successful, the temporary file will be renamed to the real path,
+   * else the temporary file will be deleted.
+   * @param in the input stream for the copy
+   * @param target where to store the contents of the stream
+   * @throws IOException if copy fails
+   */ 
+  protected void copyStreamToTarget(InputStream in, PathData target)
+  throws IOException {
+    if (target.exists && (target.stat.isDirectory() || !overwrite)) {
       throw new PathExistsException(target.toString());
     }
-
-    try { 
-      // invoke processPath with both a source and resolved target
-      processPath(src, target);
-    } catch (PathIOException e) {
-      // add the target unless it already has one
-      if (e.getTargetPath() == null) {
+    PathData tempFile = null;
+    try {
+      tempFile = target.createTempFile("._COPYING_");
+      FSDataOutputStream out = target.fs.create(tempFile.path, true);
+      IOUtils.copyBytes(in, out, getConf(), true);
+      // the rename method with an option to delete the target is deprecated
+      if (target.exists && !target.fs.delete(target.path, false)) {
+        // too bad we don't know why it failed
+        PathIOException e = new PathIOException(target.toString());
+        e.setOperation("delete");
+        throw e;
+      }
+      if (!tempFile.fs.rename(tempFile.path, target.path)) {
+        // too bad we don't know why it failed
+        PathIOException e = new PathIOException(tempFile.toString());
+        e.setOperation("rename");
         e.setTargetPath(target.toString());
+        throw e;
+      }
+      tempFile = null;
+    } finally {
+      if (tempFile != null) {
+        tempFile.fs.delete(tempFile.path, false);
       }
-      throw e;
     }
   }
-
-  /**
-   * Called with a source and target destination pair
-   * @param src for the operation
-   * @param target for the operation
-   * @throws IOException if anything goes wrong
-   */
-  protected abstract void processPath(PathData src, PathData target)
-  throws IOException;
 }
\ No newline at end of file

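For context: the new copyStreamToTarget() above writes to a "<target>._COPYING_" temp file and renames it into place only after the copy succeeds, deleting the partial file otherwise. A minimal standalone sketch of the same pattern against the local file system (class name and paths are illustrative, not part of the patch):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;

    public class TempFileCopySketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        Path target = new Path("/tmp/demo.txt");      // illustrative target
        Path tmp = new Path(target + "._COPYING_");   // same suffix the patch uses
        InputStream in = new ByteArrayInputStream("hello\n".getBytes("UTF-8"));
        boolean committed = false;
        try {
          FSDataOutputStream out = fs.create(tmp, true);
          IOUtils.copyBytes(in, out, conf, true);     // closes both streams
          if (fs.exists(target) && !fs.delete(target, false)) {
            throw new IOException("delete failed: " + target);
          }
          if (!fs.rename(tmp, target)) {
            throw new IOException("rename failed: " + tmp);
          }
          committed = true;
        } finally {
          if (!committed) {
            fs.delete(tmp, false);                    // drop the partial copy
          }
        }
      }
    }
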
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java Wed Nov  2 05:34:31 2011
@@ -26,15 +26,7 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.ChecksumFileSystem;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.LocalFileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.shell.PathExceptions.PathExistsException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathOperationException;
-import org.apache.hadoop.io.IOUtils;
 
 /** Various commands for copying files */
 @InterfaceAudience.Private
@@ -97,18 +89,10 @@ class CopyCommands {  
       CommandFormat cf = new CommandFormat(2, Integer.MAX_VALUE, "f");
       cf.parse(args);
       setOverwrite(cf.getOpt("f"));
+      // should have a -r option
+      setRecursive(true);
       getRemoteDestination(args);
     }
-
-    @Override
-    protected void processPath(PathData src, PathData target)
-    throws IOException {
-      if (!FileUtil.copy(src.fs, src.path, target.fs, target.path, false, overwrite, getConf())) {
-        // we have no idea what the error is...  FileUtils masks it and in
-        // some cases won't even report an error
-        throw new PathIOException(src.toString());
-      }
-    }
   }
   
   /** 
@@ -128,15 +112,12 @@ class CopyCommands {  
      * It must be at least three characters long, required by
      * {@link java.io.File#createTempFile(String, String, File)}.
      */
-    private static final String COPYTOLOCAL_PREFIX = "_copyToLocal_";
     private boolean copyCrc;
     private boolean verifyChecksum;
-    private LocalFileSystem localFs;
 
     @Override
     protected void processOptions(LinkedList<String> args)
     throws IOException {
-      localFs = FileSystem.getLocal(getConf());
       CommandFormat cf = new CommandFormat(
           1, Integer.MAX_VALUE, "crc", "ignoreCrc");
       cf.parse(args);
@@ -148,7 +129,7 @@ class CopyCommands {  
     }
 
     @Override
-    protected void processPath(PathData src, PathData target)
+    protected void copyFileToTarget(PathData src, PathData target)
     throws IOException {
       src.fs.setVerifyChecksum(verifyChecksum);
 
@@ -157,52 +138,12 @@ class CopyCommands {  
         copyCrc = false;
       }      
 
-      File targetFile = localFs.pathToFile(target.path);
-      if (src.stat.isFile()) {
-        // copy the file and maybe its crc
-        copyFileToLocal(src, target.path);
-        if (copyCrc) {
-          copyCrcToLocal(src, target.path);
-        }
-      } else if (src.stat.isDirectory()) {
-        // create the remote directory structure locally
-        if (!targetFile.mkdirs()) {
-          throw new PathIOException(target.toString());
-        }
-      } else {
-        throw new PathOperationException(src.toString());
-      }
-    }
-
-    private void copyFileToLocal(PathData src, Path target)
-    throws IOException {
-      File targetFile = localFs.pathToFile(target);
-      File tmpFile = FileUtil.createLocalTempFile(
-          targetFile, COPYTOLOCAL_PREFIX, true);
-      // too bad we can't tell exactly why it failed...
-      if (!FileUtil.copy(src.fs, src.path, tmpFile, false, getConf())) {
-        PathIOException e = new PathIOException(src.toString());
-        e.setOperation("copy");
-        e.setTargetPath(tmpFile.toString());
-        throw e;
-      }
-
-      // too bad we can't tell exactly why it failed...
-      if (!tmpFile.renameTo(targetFile)) {
-        PathIOException e = new PathIOException(tmpFile.toString());
-        e.setOperation("rename");
-        e.setTargetPath(targetFile.toString());
-        throw e;
+      super.copyFileToTarget(src, target);
+      if (copyCrc) {
+        // should we delete real file if crc copy fails?
+        super.copyFileToTarget(src.getChecksumFile(), target.getChecksumFile());
       }
     }
-
-    private void copyCrcToLocal(PathData src, Path target)
-    throws IOException {
-      ChecksumFileSystem srcFs = (ChecksumFileSystem)src.fs;
-      Path srcPath = srcFs.getChecksumFile(src.path);
-      src = new PathData(srcFs.getRawFileSystem(), srcPath);
-      copyFileToLocal(src, localFs.getChecksumFile(target));    
-    }
   }
 
   /**
@@ -221,6 +162,8 @@ class CopyCommands {  
       cf.parse(args);
       setOverwrite(cf.getOpt("f"));
       getRemoteDestination(args);
+      // should have a -r option
+      setRecursive(true);
     }
 
     // commands operating on local paths have no need for glob expansion
@@ -236,30 +179,11 @@ class CopyCommands {  
     throws IOException {
       // NOTE: this logic should be better, mimics previous implementation
       if (args.size() == 1 && args.get(0).toString().equals("-")) {
-        if (dst.exists && !overwrite) {
-          throw new PathExistsException(dst.toString());
-        }
-        copyFromStdin();
+        copyStreamToTarget(System.in, getTargetPath(args.get(0)));
         return;
       }
       super.processArguments(args);
     }
-
-    @Override
-    protected void processPath(PathData src, PathData target)
-    throws IOException {
-      target.fs.copyFromLocalFile(false, overwrite, src.path, target.path);
-    }
-
-    /** Copies from stdin to the destination file. */
-    protected void copyFromStdin() throws IOException {
-      FSDataOutputStream out = dst.fs.create(dst.path); 
-      try {
-        IOUtils.copyBytes(System.in, out, getConf(), false);
-      } finally {
-        out.close();
-      }
-    }
   }
 
   public static class CopyFromLocal extends Put {

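After this refactoring, copy variants no longer reimplement processPath(src, target); they override the narrower copyFileToTarget() hook, as Get does above for crc files, and inherit argument checks, recursion, and the temp-file/rename copy. A hypothetical subclass sketch (the class and its behavior are invented for illustration):

    package org.apache.hadoop.fs.shell; // CommandWithDestination is package-private

    import java.io.IOException;

    // Hypothetical command: logs each copy before delegating to the shared
    // stream/temp-file logic in CommandWithDestination.
    class VerboseCopy extends CommandWithDestination {
      @Override
      protected void copyFileToTarget(PathData src, PathData target)
          throws IOException {
        System.err.println("copying " + src + " -> " + target);
        super.copyFileToTarget(src, target);
      }
    }
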
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java Wed Nov  2 05:34:31 2011
@@ -81,6 +81,6 @@ public class Count extends FsCommand {
   @Override
   protected void processPath(PathData src) throws IOException {
     ContentSummary summary = src.fs.getContentSummary(src.path);
-    out.println(summary.toString(showQuotas) + src.path);
+    out.println(summary.toString(showQuotas) + src);
   }
 }

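This change, and the Ls change below, rely on PathData's reworked toString(), which now reproduces the path string as the user gave it rather than printing the fully qualified Path. A small sketch of the difference (class name and path are illustrative; PathData is audience-private, so this is for illustration only):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.shell.PathData;

    public class ToStringSketch {
      public static void main(String[] args) throws Exception {
        // A relative path, exactly as typed on the command line.
        PathData item = new PathData("dir", new Configuration());
        System.out.println(item);       // "dir" -- toString() keeps the typed form
        System.out.println(item.path);  // fully qualified, e.g. file:/<cwd>/dir
      }
    }
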
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java Wed Nov  2 05:34:31 2011
@@ -113,7 +113,7 @@ class Ls extends FsCommand {
         stat.getGroup(),
         formatSize(stat.getLen()),
         dateFormat.format(new Date(stat.getModificationTime())),
-        item.path.toUri().getPath()
+        item
     );
     out.println(line);
   }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java Wed Nov  2 05:34:31 2011
@@ -21,27 +21,34 @@ package org.apache.hadoop.fs.shell;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ChecksumFileSystem;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
+import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotDirectoryException;
+import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
 
 /**
  * Encapsulates a Path (path), its FileStatus (stat), and its FileSystem (fs).
  * The stat field will be null if the path does not exist.
  */
 @InterfaceAudience.Private
-@InterfaceStability.Evolving
+@InterfaceStability.Unstable
 
 public class PathData {
-  protected String string = null;
+  protected final URI uri;
+  public final FileSystem fs;
   public final Path path;
   public FileStatus stat;
-  public final FileSystem fs;
   public boolean exists;
 
   /**
@@ -53,10 +60,7 @@ public class PathData {
    * @throws IOException if anything goes wrong...
    */
   public PathData(String pathString, Configuration conf) throws IOException {
-    this.string = pathString;
-    this.path = new Path(pathString);
-    this.fs = path.getFileSystem(conf);
-    setStat(getStat(fs, path));
+    this(FileSystem.get(URI.create(pathString), conf), pathString);
   }
   
   /**
@@ -68,106 +72,143 @@ public class PathData {
    * @throws IOException if anything goes wrong...
    */
   public PathData(File localPath, Configuration conf) throws IOException {
-    this.string = localPath.toString();
-    this.path = new Path(this.string);
-    this.fs = FileSystem.getLocal(conf);
-    setStat(getStat(fs, path));
+    this(FileSystem.getLocal(conf), localPath.toString());
   }
 
   /**
-   * Creates an object to wrap the given parameters as fields. 
-   * @param fs the FileSystem
-   * @param path a Path
-   * @param stat the FileStatus (may be null if the path doesn't exist)
-   */
-  public PathData(FileSystem fs, Path path, FileStatus stat) {
-    this.string = path.toString();
-    this.path = path;
-    this.fs = fs;
-    setStat(stat);
-  }
-
-  /**
-   * Convenience ctor that looks up the file status for a path.  If the path
+   * Looks up the file status for a path.  If the path
    * doesn't exist, then the status will be null
    * @param fs the FileSystem for the path
-   * @param path the pathname to lookup 
+   * @param pathString a string for a path 
    * @throws IOException if anything goes wrong
    */
-  public PathData(FileSystem fs, Path path) throws IOException {
-    this(fs, path, getStat(fs, path));
+  private PathData(FileSystem fs, String pathString) throws IOException {
+    this(fs, pathString, lookupStat(fs, pathString, true));
   }
 
   /**
    * Creates an object to wrap the given parameters as fields.  The string
    * used to create the path will be recorded since the Path object does not
-   * return exactly the same string used to initialize it.  If the FileStatus
-   * is not null, then its Path will be used to initialized the path, else
-   * the string of the path will be used.
+   * return exactly the same string used to initialize it.
    * @param fs the FileSystem
    * @param pathString a String of the path
    * @param stat the FileStatus (may be null if the path doesn't exist)
    */
-  public PathData(FileSystem fs, String pathString, FileStatus stat) {
-    this.string = pathString;
-    this.path = (stat != null) ? stat.getPath() : new Path(pathString);
+  private PathData(FileSystem fs, String pathString, FileStatus stat)
+  throws IOException {
     this.fs = fs;
+    this.uri = stringToUri(pathString);
+    this.path = fs.makeQualified(new Path(uri));
     setStat(stat);
   }
 
   // need a static method for the ctor above
-  private static FileStatus getStat(FileSystem fs, Path path)
-  throws IOException {  
+  /**
+   * Get the FileStatus info
+   * @param ignoreFNF if true, stat will be null if the path doesn't exist
+   * @return FileStatus for the given path
+   * @throws IOException if anything goes wrong
+   */
+  private static
+  FileStatus lookupStat(FileSystem fs, String pathString, boolean ignoreFNF)
+  throws IOException {
     FileStatus status = null;
     try {
-      status = fs.getFileStatus(path);
-    } catch (FileNotFoundException e) {} // ignore FNF
+      status = fs.getFileStatus(new Path(pathString));
+    } catch (FileNotFoundException e) {
+      if (!ignoreFNF) throw new PathNotFoundException(pathString);
+    }
+    // TODO: should consider wrapping other exceptions into Path*Exceptions
     return status;
   }
   
-  private void setStat(FileStatus theStat) {
-    stat = theStat;
+  private void setStat(FileStatus stat) {
+    this.stat = stat;
     exists = (stat != null);
   }
 
   /**
-   * Convenience ctor that extracts the path from the given file status
-   * @param fs the FileSystem for the FileStatus
-   * @param stat the FileStatus 
-   */
-  public PathData(FileSystem fs, FileStatus stat) {
-    this(fs, stat.getPath(), stat);
-  }
-  
-  /**
   * Updates the path's file status
    * @return the updated FileStatus
    * @throws IOException if anything goes wrong...
    */
   public FileStatus refreshStatus() throws IOException {
-    setStat(fs.getFileStatus(path));
-    return stat;
+    FileStatus status = null;
+    try {
+      status = lookupStat(fs, toString(), false);
+    } finally {
+      // always set the status.  the caller must get the correct result
+      // if it catches the exception and later interrogates the status
+      setStat(status);
+    }
+    return status;
+  }
+
+  protected enum FileTypeRequirement {
+    SHOULD_NOT_BE_DIRECTORY, SHOULD_BE_DIRECTORY
+  };
+
+  /**
+   * Ensure that the file exists and is, or is not, a directory as required
+   * @param typeRequirement Set it to the desired requirement.
+   * @throws PathIOException if the file doesn't exist or its type does not
+   * match what was specified in typeRequirement.
+   */
+  private void checkIfExists(FileTypeRequirement typeRequirement) 
+  throws PathIOException {
+    if (!exists) {
+      throw new PathNotFoundException(toString());      
+    }
+
+    if ((typeRequirement == FileTypeRequirement.SHOULD_BE_DIRECTORY)
+       && !stat.isDirectory()) {
+      throw new PathIsNotDirectoryException(toString());
+    } else if ((typeRequirement == FileTypeRequirement.SHOULD_NOT_BE_DIRECTORY)
+              && stat.isDirectory()) {
+      throw new PathIsDirectoryException(toString());
+    }
   }
   
   /**
+   * Return the corresponding crc data for a file.  Avoids exposing the fs
+   * contortions to the caller.  
+   * @return PathData of the crc file
+   * @throws IOException if anything goes wrong
+   */
+  public PathData getChecksumFile() throws IOException {
+    checkIfExists(FileTypeRequirement.SHOULD_NOT_BE_DIRECTORY);
+    ChecksumFileSystem srcFs = (ChecksumFileSystem)fs;
+    Path srcPath = srcFs.getChecksumFile(path);
+    return new PathData(srcFs.getRawFileSystem(), srcPath.toString());
+  }
+
+  /**
+   * Returns a temporary file for this PathData with the given extension.
+   * The file will be deleted on exit.
+   * @param extension for the temporary file
+   * @return PathData
+   * @throws IOException shouldn't happen
+   */
+  public PathData createTempFile(String extension) throws IOException {
+    PathData tmpFile = new PathData(fs, uri+extension);
+    fs.deleteOnExit(tmpFile.path);
+    return tmpFile;
+  }
+
+  /**
    * Returns a list of PathData objects of the items contained in the given
    * directory.
    * @return list of PathData objects for its children
    * @throws IOException if anything else goes wrong...
    */
   public PathData[] getDirectoryContents() throws IOException {
-    if (!stat.isDirectory()) {
-      throw new PathIsNotDirectoryException(string);
-    }
-
+    checkIfExists(FileTypeRequirement.SHOULD_BE_DIRECTORY);
     FileStatus[] stats = fs.listStatus(path);
     PathData[] items = new PathData[stats.length];
     for (int i=0; i < stats.length; i++) {
       // preserve relative paths
-      String basename = stats[i].getPath().getName();
-      String parent = string;
-      if (!parent.endsWith(Path.SEPARATOR)) parent += Path.SEPARATOR;
-      items[i] = new PathData(fs, parent + basename, stats[i]);
+      String child = getStringForChildPath(stats[i].getPath());
+      items[i] = new PathData(fs, child, stats[i]);
     }
     return items;
   }
@@ -179,13 +220,31 @@ public class PathData {
    * @throws IOException if this object does not exist or is not a directory
    */
   public PathData getPathDataForChild(PathData child) throws IOException {
-    if (!stat.isDirectory()) {
-      throw new PathIsNotDirectoryException(string);
-    }
-    return new PathData(fs, new Path(path, child.path.getName()));
+    checkIfExists(FileTypeRequirement.SHOULD_BE_DIRECTORY);
+    return new PathData(fs, getStringForChildPath(child.path));
   }
 
   /**
+   * Given a child of this directory, use the directory's path and the child's
+   * basename to construct the string to the child.  This preserves relative
+   * paths, since going through Path would fully qualify them.
+   * @param child a path contained within this directory
+   * @return String of the path relative to this directory
+   */
+  private String getStringForChildPath(Path childPath) {
+    String basename = childPath.getName();
+    if (Path.CUR_DIR.equals(toString())) {
+      return basename;
+    }
+    // check getPath() so scheme slashes aren't considered part of the path
+    String separator = uri.getPath().endsWith(Path.SEPARATOR)
+        ? "" : Path.SEPARATOR;
+    return uri + separator + basename;
+  }
+  
+  protected enum PathType { HAS_SCHEME, SCHEMELESS_ABSOLUTE, RELATIVE };
+  
+  /**
    * Expand the given path as a glob pattern.  Non-existent paths do not
    * throw an exception because creation commands like touch and mkdir need
    * to create them.  The "stat" field will be null if the path does not
@@ -207,35 +266,184 @@ public class PathData {
     if (stats == null) {
       // not a glob & file not found, so add the path with a null stat
       items = new PathData[]{ new PathData(fs, pattern, null) };
-    } else if (
-        // this is very ugly, but needed to avoid breaking hdfs tests...
-        // if a path has no authority, then the FileStatus from globStatus
-        // will add the "-fs" authority into the path, so we need to sub
-        // it back out to satisfy the tests
-        stats.length == 1
-        &&
-        stats[0].getPath().equals(fs.makeQualified(globPath)))
-    {
-      // if the fq path is identical to the pattern passed, use the pattern
-      // to initialize the string value
-      items = new PathData[]{ new PathData(fs, pattern, stats[0]) };
     } else {
+      // figure out what type of glob path was given, will convert globbed
+      // paths to match the type to preserve relativity
+      PathType globType;
+      URI globUri = globPath.toUri();
+      if (globUri.getScheme() != null) {
+        globType = PathType.HAS_SCHEME;
+      } else if (new File(globUri.getPath()).isAbsolute()) {
+        globType = PathType.SCHEMELESS_ABSOLUTE;
+      } else {
+        globType = PathType.RELATIVE;
+      }
+
       // convert stats to PathData
       items = new PathData[stats.length];
       int i=0;
       for (FileStatus stat : stats) {
-        items[i++] = new PathData(fs, stat);
+        URI matchUri = stat.getPath().toUri();
+        String globMatch = null;
+        switch (globType) {
+          case HAS_SCHEME: // use as-is, but remove authority if necessary
+            if (globUri.getAuthority() == null) {
+              matchUri = removeAuthority(matchUri);
+            }
+            globMatch = matchUri.toString();
+            break;
+          case SCHEMELESS_ABSOLUTE: // take just the uri's path
+            globMatch = matchUri.getPath();
+            break;
+          case RELATIVE: // make it relative to the current working dir
+            URI cwdUri = fs.getWorkingDirectory().toUri();
+            globMatch = relativize(cwdUri, matchUri, stat.isDirectory());
+            break;
+        }
+        items[i++] = new PathData(fs, globMatch, stat);
       }
     }
     return items;
   }
 
+  private static URI removeAuthority(URI uri) {
+    try {
+      uri = new URI(
+          uri.getScheme(), "",
+          uri.getPath(), uri.getQuery(), uri.getFragment()
+      );
+    } catch (URISyntaxException e) {
+      throw new IllegalArgumentException(e.getLocalizedMessage());
+    }
+    return uri;
+  }
+  
+  private static String relativize(URI cwdUri, URI srcUri, boolean isDir) {
+    String uriPath = srcUri.getPath();
+    String cwdPath = cwdUri.getPath();
+    if (cwdPath.equals(uriPath)) {
+      return Path.CUR_DIR;
+    }
+
+    // find common ancestor
+    int lastSep = findLongestDirPrefix(cwdPath, uriPath, isDir);
+    
+    StringBuilder relPath = new StringBuilder();    
+    // take the remaining path fragment after the ancestor
+    if (lastSep < uriPath.length()) {
+      relPath.append(uriPath.substring(lastSep+1));
+    }
+
+    // if cwd has a path fragment after the ancestor, convert them to ".."
+    if (lastSep < cwdPath.length()) {
+      while (lastSep != -1) {
+        if (relPath.length() != 0) relPath.insert(0, Path.SEPARATOR);
+        relPath.insert(0, "..");
+        lastSep = cwdPath.indexOf(Path.SEPARATOR, lastSep+1);
+      }
+    }
+    return relPath.toString();
+  }
+
+  private static int findLongestDirPrefix(String cwd, String path, boolean isDir) {
+    // add the path separator to dirs to simplify finding the longest match
+    if (!cwd.endsWith(Path.SEPARATOR)) {
+      cwd += Path.SEPARATOR;
+    }
+    if (isDir && !path.endsWith(Path.SEPARATOR)) {
+      path += Path.SEPARATOR;
+    }
+
+    // find longest directory prefix 
+    int len = Math.min(cwd.length(), path.length());
+    int lastSep = -1;
+    for (int i=0; i < len; i++) {
+      if (cwd.charAt(i) != path.charAt(i)) break;
+      if (cwd.charAt(i) == Path.SEPARATOR_CHAR) lastSep = i;
+    }
+    return lastSep;
+  }
+  
   /**
    * Returns the printable version of the path that is either the path
    * as given on the commandline, or the full path
    * @return String of the path
    */
   public String toString() {
-    return (string != null) ? string : path.toString();
+    String scheme = uri.getScheme();
+    // No interpretation of symbols. Just decode % escaped chars.
+    String decodedRemainder = uri.getSchemeSpecificPart();
+
+    if (scheme == null) {
+      return decodedRemainder;
+    } else {
+      StringBuilder buffer = new StringBuilder();
+      buffer.append(scheme);
+      buffer.append(":");
+      buffer.append(decodedRemainder);
+      return buffer.toString();
+    }
   }
+  
+  /**
+   * Get the path to a local file
+   * @return File representing the local path
+   * @throws IllegalArgumentException if this.fs is not the LocalFileSystem
+   */
+  public File toFile() {
+    if (!(fs instanceof LocalFileSystem)) {
+      throw new IllegalArgumentException("Not a local path: " + path);
+    }
+    return ((LocalFileSystem)fs).pathToFile(path);
+  }
+
+  /** Construct a URI from a String with unescaped special characters
+   *  that have non-standard semantics, e.g. /, ?, #. Custom parsing
+   *  is needed to prevent misbehavior.
+   *  @param pathString The input path in string form
+   *  @return URI
+   */
+  private static URI stringToUri(String pathString) {
+    // We can't use 'new URI(String)' directly. Since it doesn't do quoting
+    // internally, the internal parser may fail or break the string at the
+    // wrong places. Use of multi-argument ctors will quote those chars for us,
+    // but we need to do our own parsing and assembly.
+    
+    // parse uri components
+    String scheme = null;
+    String authority = null;
+
+    int start = 0;
+
+    // parse uri scheme, if any
+    int colon = pathString.indexOf(':');
+    int slash = pathString.indexOf('/');
+    if (colon > 0 && (slash == colon + 1)) {
+      // has a non-zero-length scheme
+      scheme = pathString.substring(0, colon);
+      start = colon + 1;
+    }
+
+    // parse uri authority, if any
+    if (pathString.startsWith("//", start) &&
+        (pathString.length()-start > 2)) {
+      start += 2;
+      int nextSlash = pathString.indexOf('/', start);
+      int authEnd = nextSlash > 0 ? nextSlash : pathString.length();
+      authority = pathString.substring(start, authEnd);
+      start = authEnd;
+    }
+
+    // uri path is the rest of the string. ? or # are not interpreted,
+    // but any occurrence of them will be quoted by the URI ctor.
+    String path = pathString.substring(start, pathString.length());
+
+    // Construct the URI
+    try {
+      return new URI(scheme, authority, path, null, null);
+    } catch (URISyntaxException e) {
+      throw new IllegalArgumentException(e);
+    }
+  }
+
 }

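The stringToUri() parsing above exists because the single-argument URI constructor does no quoting: a # or ? in a file name is parsed as URI syntax and the string breaks at the wrong place, while the multi-argument constructors quote such characters. A runnable sketch of the difference (file name is illustrative):

    import java.net.URI;
    import java.net.URISyntaxException;

    public class UriQuotingSketch {
      public static void main(String[] args) throws URISyntaxException {
        // Single-argument ctor: '#' is taken as a fragment delimiter.
        URI naive = new URI("/data/report#1.csv");
        System.out.println(naive.getPath());     // /data/report  (wrong)
        // Multi-argument ctor: '#' is quoted as part of the path.
        URI quoted = new URI(null, null, "/data/report#1.csv", null, null);
        System.out.println(quoted.getPath());    // /data/report#1.csv
        System.out.println(quoted);              // /data/report%231.csv
      }
    }
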
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Wed Nov  2 05:34:31 2011
@@ -318,7 +318,7 @@ public class ViewFileSystem extends File
       IOException {
     InodeTree.ResolveResult<FileSystem> res = 
       fsState.resolve(getUriPath(f), true);
-    return res.targetFileSystem.getFileChecksum(f);
+    return res.targetFileSystem.getFileChecksum(res.remainingPath);
   }
 
   @Override

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java Wed Nov  2 05:34:31 2011
@@ -316,7 +316,7 @@ public class ViewFs extends AbstractFile
       UnresolvedLinkException, IOException {
     InodeTree.ResolveResult<AbstractFileSystem> res = 
       fsState.resolve(getUriPath(f), true);
-    return res.targetFileSystem.getFileChecksum(f);
+    return res.targetFileSystem.getFileChecksum(res.remainingPath);
   }
 
   @Override

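The two one-line viewfs changes above fix getFileChecksum() through mount points: after resolution, the target file system must be handed the remaining path under the mount, not the original viewfs path it cannot resolve. A hedged usage sketch (the mount table, namenode, and file names are all invented, and this needs a live HDFS to run):

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileChecksum;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ViewFsChecksumSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Hypothetical mount: viewfs://cluster/user -> hdfs://nn/user
        conf.set("fs.viewfs.mounttable.cluster.link./user", "hdfs://nn/user");
        FileSystem fs = FileSystem.get(URI.create("viewfs://cluster/"), conf);
        // With this fix the checksum request reaches hdfs://nn with the path
        // remainder under the mount point instead of the viewfs path.
        FileChecksum sum = fs.getFileChecksum(new Path("/user/alice/data.txt"));
        System.out.println(sum);
      }
    }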

