hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r550942 - in /lucene/hadoop/trunk: ./ src/ant/ src/ant/org/ src/ant/org/apache/ src/ant/org/apache/hadoop/ src/ant/org/apache/hadoop/ant/ src/ant/org/apache/hadoop/ant/condition/ src/contrib/streaming/src/test/org/apache/hadoop/streaming/ s...
Date Tue, 26 Jun 2007 20:50:02 GMT
Author: cutting
Date: Tue Jun 26 13:50:01 2007
New Revision: 550942

URL: http://svn.apache.org/viewvc?view=rev&rev=550942
Log:
HADOOP-1508.  Add an Ant task for FsShell commands.  Contributed by Chris Douglas.

Added:
    lucene/hadoop/trunk/src/ant/
    lucene/hadoop/trunk/src/ant/org/
    lucene/hadoop/trunk/src/ant/org/apache/
    lucene/hadoop/trunk/src/ant/org/apache/hadoop/
    lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/
    lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/DfsTask.java
    lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/antlib.xml
    lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/
    lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java
    lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsExists.java
    lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java
    lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/build.xml
    lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=550942&r1=550941&r2=550942
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Tue Jun 26 13:50:01 2007
@@ -257,6 +257,10 @@
  78. HADOOP-1514.  Make reducers report progress while waiting for map
      outputs, so they're not killed.  (Vivek Ratan via cutting)
 
+ 79. HADOOP-1508.  Add an Ant task for FsShell operations.  Also add
+     new FsShell commands "touchz", "test" and "stat".
+     (Chris Douglas via cutting)
+
 
 Release 0.13.0 - 2007-06-08
 

Modified: lucene/hadoop/trunk/build.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/build.xml?view=diff&rev=550942&r1=550941&r2=550942
==============================================================================
--- lucene/hadoop/trunk/build.xml (original)
+++ lucene/hadoop/trunk/build.xml Tue Jun 26 13:50:01 2007
@@ -17,6 +17,7 @@
   <property name="src.dir" value="${basedir}/src/java"/>
   <property name="native.src.dir" value="${basedir}/src/native"/>
   <property name="examples.dir" value="${basedir}/src/examples"/>
+  <property name="anttasks.dir" value="${basedir}/src/ant"/>
   <property name="lib.dir" value="${basedir}/lib"/>
   <property name="conf.dir" value="${basedir}/conf"/>
   <property name="docs.dir" value="${basedir}/docs"/>
@@ -33,6 +34,7 @@
   <property name="build.src" value="${build.dir}/src"/>
   <property name="build.webapps" value="${build.dir}/webapps"/>
   <property name="build.examples" value="${build.dir}/examples"/>
+  <property name="build.anttasks" value="${build.dir}/ant"/>
   <property name="build.libhdfs" value="${build.dir}/libhdfs"/>
   <property name="build.platform" 
             value="${os.name}-${os.arch}-${sun.arch.data.model}"/>
@@ -122,6 +124,7 @@
     <mkdir dir="${build.webapps}/dfs/WEB-INF"/>
     <mkdir dir="${build.webapps}/datanode/WEB-INF"/>
     <mkdir dir="${build.examples}"/>
+    <mkdir dir="${build.anttasks}"/>
     <mkdir dir="${build.dir}/c++"/>
  
     <mkdir dir="${test.build.dir}"/>
@@ -891,5 +894,29 @@
 
   <target name="compile-c++-examples" 
           depends="compile-c++-examples-pipes"/>
+
+  <target name="compile-ant-tasks" depends="compile-core">
+    <javac
+        encoding="${build.encoding}"
+        srcdir="${anttasks.dir}"
+        includes="org/apache/hadoop/ant/**/*.java"
+        destdir="${build.anttasks}"
+        debug="${javac.debug}"
+        optimize="${javac.optimize}"
+        target="${javac.version}"
+        source="${javac.version}"
+        deprecation="${javac.deprecation}">
+        <compilerarg line="${javac.args}"/>
+        <classpath refid="classpath"/>
+    </javac>
+  </target>
+
+  <target name="ant-tasks" depends="jar, compile-ant-tasks">
+    <copy file="${anttasks.dir}/org/apache/hadoop/ant/antlib.xml"
+          todir="${build.anttasks}/org/apache/hadoop/ant"/>
+    <jar destfile="${build.dir}/ant-${final.name}.jar">
+      <fileset dir="${build.anttasks}"/>
+    </jar>
+  </target>
 
 </project>

Added: lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/DfsTask.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/DfsTask.java?view=auto&rev=550942
==============================================================================
--- lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/DfsTask.java (added)
+++ lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/DfsTask.java Tue Jun 26 13:50:01 2007
@@ -0,0 +1,208 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ant;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.WeakHashMap;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FsShell;
+
+import org.apache.tools.ant.AntClassLoader;
+import org.apache.tools.ant.BuildException;
+import org.apache.tools.ant.Task;
+import org.apache.tools.ant.Project;
+import org.apache.tools.ant.types.Path;
+
+/**
+ * {@link org.apache.hadoop.fs.FsShell FsShell} wrapper for ant Task.
+ */
+public class DfsTask extends Task {
+
+  /**
+   * Default sink for {@link java.lang.System.out System.out}
+   * and {@link java.lang.System.err System.err}.
+   */
+  private static final OutputStream nullOut = new OutputStream() {
+      public void write(int b)    { /* ignore */ }
+      public String toString()    { return ""; }
+  };
+  private static final Map<Project, AntClassLoader> clCache =
+    new WeakHashMap<Project, AntClassLoader>();
+
+  protected AntClassLoader confloader;
+  protected OutputStream out = nullOut;
+  protected OutputStream err = nullOut;
+
+  // set by ant
+  protected String cmd;
+  protected final LinkedList<String> argv = new LinkedList<String>();
+  protected String outprop;
+  protected String errprop;
+  protected boolean failonerror = true;
+
+  // saved ant context
+  private PrintStream antOut;
+  private PrintStream antErr;
+
+  /**
+   * Sets the command to run in {@link org.apache.hadoop.fs.FsShell FsShell}.
+   * @param cmd A valid command to FsShell, sans &quot;-&quot;.
+   */
+  public void setCmd(String cmd) {
+    this.cmd = "-" + cmd.trim();
+  }
+
+  /**
+   * Sets the argument list from a String of comma-separated values.
+   * @param args A String of comma-separated arguments to FsShell.
+   */
+  public void setArgs(String args) {
+    for (String s : args.trim().split("\\s*,\\s*"))
+      argv.add(s);
+  }
+
+  /**
+   * Sets the property into which System.out will be written.
+   * @param outprop The name of the property into which System.out is written.
+   * If the property is defined before this task is executed, it will not be updated.
+   */
+  public void setOut(String outprop) {
+    this.outprop = outprop;
+    out = new ByteArrayOutputStream();
+    if (outprop.equals(errprop))
+      err = out;
+  }
+
+  /**
+   * Sets the property into which System.err will be written. If this property
+   * has the same name as the property for System.out, the two will be interlaced.
+   * @param errprop The name of the property into which System.err is written.
+   * If the property is defined before this task is executed, it will not be updated.
+   */
+  public void setErr(String errprop) {
+    this.errprop = errprop;
+    err = (errprop.equals(outprop)) ? err = out : new ByteArrayOutputStream();
+  }
+
+  /**
+   * Sets the path for the parent-last ClassLoader, intended to be used for
+   * {@link org.apache.hadoop.conf.Configuration Configuration}.
+   * @param confpath The path to search for resources, classes, etc. before
+   * parent ClassLoaders.
+   */
+  public void setConf(String confpath) {
+    confloader = new AntClassLoader(getClass().getClassLoader(), false);
+    confloader.setProject(getProject());
+    if (null != confpath)
+      confloader.addPathElement(confpath);
+  }
+
+  /**
+   * Sets a property controlling whether or not a
+   * {@link org.apache.tools.ant.BuildException BuildException} will be thrown
+   * if the command returns a value less than zero or throws an exception.
+   * @param failonerror If true, throw a BuildException on error.
+   */
+  public void setFailonerror(boolean failonerror) {
+    this.failonerror = failonerror;
+  }
+
+  /**
+   * Save the current values of System.out, System.err and configure output
+   * streams for FsShell.
+   */
+  protected void pushContext() {
+    antOut = System.out;
+    antErr = System.err;
+    System.setOut(new PrintStream(out));
+    System.setErr(out == err ? System.out : new PrintStream(err));
+  }
+
+  /**
+   * Create the appropriate output properties with their respective output,
+   * restore System.out, System.err and release any resources from created
+   * ClassLoaders to aid garbage collection.
+   */
+  protected void popContext() {
+    // write output to property, if applicable
+    if (outprop != null && !System.out.checkError())
+      getProject().setNewProperty(outprop, out.toString());
+    if (out != err && errprop != null && !System.err.checkError())
+      getProject().setNewProperty(errprop, err.toString());
+
+    System.setErr(antErr);
+    System.setOut(antOut);
+    // permit conf ClassLoader to be garbage collected when last ref to
+    // Project disappears
+    confloader.cleanup();
+    confloader.setParent(null);
+  }
+
+  // in case DfsTask is overridden
+  protected int postCmd(int exit_code) {
+    if ("-test".equals(cmd) && exit_code == 0)
+      outprop = null;
+    return exit_code;
+  }
+
+  /**
+   * Invoke {@link org.apache.hadoop.fs.FsShell#doMain FsShell.doMain} after a
+   * few cursory checks of the configuration.
+   */
+  public void execute() throws BuildException {
+    if (null == cmd)
+      throw new BuildException("Missing command (cmd) argument");
+    argv.add(0, cmd);
+
+    if (null == confloader) {
+      if (null == clCache.get(getProject())) {
+        setConf(getProject().getProperty("hadoop.conf.dir"));
+        clCache.put(getProject(), confloader);
+      } else {
+        confloader = clCache.get(getProject());
+      }
+    }
+
+    int exit_code = 0;
+    try {
+      pushContext();
+
+      Configuration conf = new Configuration();
+      conf.setClassLoader(confloader);
+      exit_code = new FsShell().doMain(conf, argv.toArray(new String[argv.size()]));
+      exit_code = postCmd(exit_code);
+
+      if (0 > exit_code) {
+        StringBuilder msg = new StringBuilder();
+        for (String s : argv)
+          msg.append(s + " ");
+        msg.append("failed: " + exit_code);
+        throw new Exception(msg.toString());
+      }
+    } catch (Exception e) {
+      if (failonerror)
+          throw new BuildException(e);
+    } finally {
+      popContext();
+    }
+  }
+}

Added: lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/antlib.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/antlib.xml?view=auto&rev=550942
==============================================================================
--- lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/antlib.xml (added)
+++ lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/antlib.xml Tue Jun 26 13:50:01 2007
@@ -0,0 +1,12 @@
+<?xml version="1.0"?>
+
+<antlib>
+  <taskdef name="hdfs"
+           classname="org.apache.hadoop.ant.DfsTask" />
+  <taskdef name="exists"
+           classname="org.apache.hadoop.ant.condition.DfsExists" />
+  <taskdef name="isdir"
+           classname="org.apache.hadoop.ant.condition.DfsIsDir" />
+  <taskdef name="sizezero"
+           classname="org.apache.hadoop.ant.condition.DfsZeroLen" />
+</antlib>

Added: lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java?view=auto&rev=550942
==============================================================================
--- lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java (added)
+++ lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsBaseConditional.java Tue Jun 26 13:50:01 2007
@@ -0,0 +1,50 @@
+package org.apache.hadoop.ant.condition;
+
+import org.apache.tools.ant.taskdefs.condition.Condition;
+
+/**
+ * This wrapper around {@link org.apache.hadoop.ant.DfsTask} implements the
+ * Ant &gt;1.5
+ * {@link org.apache.tools.ant.taskdefs.condition.Condition Condition}
+ * interface for HDFS tests. So one can test conditions like this:
+ * {@code
+ *   <condition property="precond">
+ *     <and>
+ *       <hadoop:exists file="fileA" />
+ *       <hadoop:exists file="fileB" />
+ *       <hadoop:sizezero file="fileB" />
+ *     </and>
+ *   </condition>
+ * }
+ * This will define the property precond if fileA exists and fileB has zero
+ * length.
+ */
+public abstract class DfsBaseConditional extends org.apache.hadoop.ant.DfsTask
+                       implements Condition {
+
+  protected boolean result;
+  String file;
+
+  private void initArgs() {
+    setCmd("test");
+    setArgs("-"  +  getFlag() + "," + file);
+  }
+
+  public void setFile(String file) {
+    this.file = file;
+  }
+
+  protected abstract char getFlag();
+
+  protected int postCmd(int exit_code) {
+    exit_code = super.postCmd(exit_code);
+    result = exit_code == 1;
+    return exit_code;
+  }
+
+  public boolean eval() {
+    initArgs();
+    execute();
+    return result;
+  }
+}

Added: lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsExists.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsExists.java?view=auto&rev=550942
==============================================================================
--- lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsExists.java (added)
+++ lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsExists.java Tue Jun 26 13:50:01 2007
@@ -0,0 +1,6 @@
+package org.apache.hadoop.ant.condition;
+
+public class DfsExists extends DfsBaseConditional {
+  protected final char flag = 'e';
+  protected char getFlag() { return flag; }
+}

Added: lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java?view=auto&rev=550942
==============================================================================
--- lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java (added)
+++ lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsIsDir.java Tue Jun 26 13:50:01 2007
@@ -0,0 +1,6 @@
+package org.apache.hadoop.ant.condition;
+
+public class DfsIsDir extends DfsBaseConditional {
+  protected final char flag = 'd';
+  protected char getFlag() { return flag; }
+}

Added: lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java?view=auto&rev=550942
==============================================================================
--- lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java (added)
+++ lucene/hadoop/trunk/src/ant/org/apache/hadoop/ant/condition/DfsZeroLen.java Tue Jun 26 13:50:01 2007
@@ -0,0 +1,6 @@
+package org.apache.hadoop.ant.condition;
+
+public class DfsZeroLen extends DfsBaseConditional {
+  protected final char flag = 'z';
+  protected char getFlag() { return flag; }
+}

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java?view=diff&rev=550942&r1=550941&r2=550942
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java Tue Jun 26 13:50:01 2007
@@ -105,10 +105,7 @@
   void lsr() {
     try {
       System.out.println("lsr /");
-      FsShell shell = new FsShell();
-      shell.setConf(conf_);
-      shell.init();
-      shell.ls("/", true);
+      new FsShell().doMain(conf_, new String[]{ "-lsr", "/" });
     } catch (Exception e) {
       e.printStackTrace();
     }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java?view=diff&rev=550942&r1=550941&r2=550942
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java Tue Jun 26 13:50:01 2007
@@ -32,13 +32,18 @@
   private Trash trash;
   public static final SimpleDateFormat dateForm = 
     new SimpleDateFormat("yyyy-MM-dd HH:mm");
+  protected static final SimpleDateFormat modifFmt =
+    new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+  {
+    modifFmt.setTimeZone(TimeZone.getTimeZone("UTC"));
+  }
 
   /**
    */
   public FsShell() {
   }
 
-  public void init() throws IOException {
+  protected void init() throws IOException {
     conf.setQuietMode(true);
     this.fs = FileSystem.get(conf);
     this.trash = new Trash(conf);
@@ -333,7 +338,7 @@
    * @throws IOException  
    * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
    */
-  public void setReplication(short newRep, String srcf, boolean recursive)
+  void setReplication(short newRep, String srcf, boolean recursive)
     throws IOException {
     Path[] srcs = fs.globPaths(new Path(srcf));
     for(int i=0; i<srcs.length; i++) {
@@ -389,7 +394,7 @@
    * @throws IOException  
    * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
    */
-  public void ls(String srcf, boolean recursive) throws IOException {
+  void ls(String srcf, boolean recursive) throws IOException {
     Path[] srcs = fs.globPaths(new Path(srcf));
     boolean printHeader = (srcs.length == 1) ? true: false;
     for(int i=0; i<srcs.length; i++) {
@@ -429,7 +434,7 @@
    * @throws IOException  
    * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
    */
-  public void du(String src) throws IOException {
+  void du(String src) throws IOException {
     Path items[] = fs.listPaths(fs.globPaths(new Path(src)));
     if (items == null) {
       throw new IOException("Could not get listing for " + src);
@@ -449,7 +454,7 @@
    * @throws IOException  
    * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
    */
-  public void dus(String src) throws IOException {
+  void dus(String src) throws IOException {
     Path paths[] = fs.globPaths(new Path(src));
     if (paths==null || paths.length==0) {
       throw new IOException("dus: No match: " + src);
@@ -471,13 +476,107 @@
   /**
    * Create the given dir
    */
-  public void mkdir(String src) throws IOException {
+  void mkdir(String src) throws IOException {
     Path f = new Path(src);
     if (!fs.mkdirs(f)) {
       throw new IOException("Mkdirs failed to create " + src);
     }
   }
-    
+
+  /**
+   * (Re)create zero-length file at the specified path.
+   * This will be replaced by a more UNIX-like touch when files may be
+   * modified.
+   */
+  void touchz(String src) throws IOException {
+    Path f = new Path(src);
+    FileStatus st;
+    if (fs.exists(f)) {
+      st = fs.getFileStatus(f);
+      if (st.isDir()) {
+        // TODO: handle this
+        throw new IOException(src + " is a directory");
+      } else if (st.getLen() != 0)
+        throw new IOException(src + " must be a zero-length file");
+    }
+    FSDataOutputStream out = fs.create(f);
+    out.close();
+  }
+
+  /**
+   * Check file types.
+   */
+  int test(String argv[], int i) throws IOException {
+    if (!argv[i].startsWith("-") || argv[i].length() > 2)
+      throw new IOException("Not a flag: " + argv[i]);
+    char flag = argv[i].toCharArray()[1];
+    Path f = new Path(argv[++i]);
+    switch(flag) {
+      case 'e':
+        return fs.exists(f) ? 1 : 0;
+      case 'z':
+        return fs.getFileStatus(f).getLen() == 0 ? 1 : 0;
+      case 'd':
+        return fs.getFileStatus(f).isDir() ? 1 : 0;
+      default:
+        throw new IOException("Unknown flag: " + flag);
+    }
+  }
+
+  /**
+   * Print statistics about path in specified format.
+   * Format sequences:
+   *   %b: Size of file in blocks
+   *   %n: Filename
+   *   %o: Block size
+   *   %r: replication
+   *   %y: UTC date as &quot;yyyy-MM-dd HH:mm:ss&quot;
+   *   %Y: Milliseconds since January 1, 1970 UTC
+   */
+  void stat(char[] fmt, String src) throws IOException {
+    Path glob[] = fs.globPaths(new Path(src));
+    if (null == glob)
+      throw new IOException("cannot stat `" + src + "': No such file or directory");
+    for (Path f : glob) {
+      FileStatus st = fs.getFileStatus(f);
+      StringBuilder buf = new StringBuilder();
+      for (int i = 0; i < fmt.length; ++i) {
+        if (fmt[i] != '%') {
+          buf.append(fmt[i]);
+        } else {
+          if (i + 1 == fmt.length) break;
+          switch(fmt[++i]) {
+            case 'b':
+              buf.append(st.getLen());
+              break;
+            case 'F':
+              buf.append(st.isDir() ? "directory" : "regular file");
+              break;
+            case 'n':
+              buf.append(f.getName());
+              break;
+            case 'o':
+              buf.append(st.getBlockSize());
+              break;
+            case 'r':
+              buf.append(st.getReplication());
+              break;
+            case 'y':
+              buf.append(modifFmt.format(new Date(st.getModificationTime())));
+              break;
+            case 'Y':
+              buf.append(st.getModificationTime());
+              break;
+            default:
+              buf.append(fmt[i]);
+              break;
+          }
+        }
+      }
+      System.out.println(buf.toString());
+    }
+  }
+
   /**
    * Move files that match the file pattern <i>srcf</i>
    * to a destination file.
@@ -488,7 +587,7 @@
    * @throws IOException  
    * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
    */
-  public void rename(String srcf, String dstf) throws IOException {
+  void rename(String srcf, String dstf) throws IOException {
     Path [] srcs = fs.globPaths(new Path(srcf));
     Path dst = new Path(dstf);
     if (srcs.length > 1 && !fs.isDirectory(dst)) {
@@ -574,7 +673,7 @@
    * @throws IOException  
    * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
    */
-  public void copy(String srcf, String dstf, Configuration conf) throws IOException {
+  void copy(String srcf, String dstf, Configuration conf) throws IOException {
     Path [] srcs = fs.globPaths(new Path(srcf));
     Path dst = new Path(dstf);
     if (srcs.length > 1 && !fs.isDirectory(dst)) {
@@ -653,7 +752,7 @@
    * @throws IOException  
    * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
    */
-  public void delete(String srcf, final boolean recursive) throws IOException {
+  void delete(String srcf, final boolean recursive) throws IOException {
     //rm behavior in Linux
     //  [~/1207]$ ls ?.txt
     //  x.txt  z.txt
@@ -735,7 +834,8 @@
       "[-getmerge <src> <localdst> [addnl]] [-cat <src>]\n\t" +
      "[-copyToLocal <src><localdst>] [-moveToLocal <src> <localdst>]\n\t" +
       "[-mkdir <path>] [-report] [-setrep [-R] <rep> <path/file>]\n" +
-      "[-help [cmd]]\n"; 
+      "[-touchz <path>] [-test -[ezd] <path>] [-stat [format] <path>]\n" +
+      "[-help [cmd]]\n";
 
    String conf ="-conf <configuration file>:  Specify an application configuration file.";
  
@@ -822,7 +922,17 @@
    String setrep = "-setrep [-R] <rep> <path/file>:  Set the replication level of a file. \n" +
       "\t\tThe -R flag requests a recursive change of replication level \n" + 
       "\t\tfor an entire tree.\n"; 
-        
+
+    String touchz = "-touchz <path>: Write a timestamp in yyyy-MM-dd HH:mm:ss format\n" +
+      "\t\tin a file at <path>. An error is returned if the file exists with non-zero length\n";
+
+    String test = "-test -[ezd] <path>: If file { exists, has zero length, is a directory\n" +
+      "\t\tthen return 1, else return 0.\n";
+
+    String stat = "-stat [format] <path>: Print statistics about the file/directory at <path>\n" +
+      "\t\tin the specified format. Format accepts filesize in blocks (%b), filename (%n),\n" +
+      "\t\tblock size (%o), replication (%r), modification date (%y, %Y)\n";
+
    String help = "-help [cmd]: \tDisplays help for given command or all commands if none\n" +
       "\t\tis specified.\n";
 
@@ -870,6 +980,12 @@
       System.out.println(get);
     } else if ("setrep".equals(cmd)) {
       System.out.println(setrep);
+    } else if ("touchz".equals(cmd)) {
+      System.out.println(touchz);
+    } else if ("test".equals(cmd)) {
+      System.out.println(test);
+    } else if ("stat".equals(cmd)) {
+      System.out.println(stat);
     } else if ("help".equals(cmd)) {
       System.out.println(help);
     } else {
@@ -931,6 +1047,8 @@
           ls(argv[i], false);
         } else if ("-lsr".equals(cmd)) {
           ls(argv[i], true);
+        } else if ("-touchz".equals(cmd)) {
+          touchz(argv[i]);
         }
       } catch (RemoteException e) {
         //
@@ -963,7 +1081,7 @@
    * Displays format of commands.
    * 
    */
-  public void printUsage(String cmd) {
+  void printUsage(String cmd) {
     if ("-fs".equals(cmd)) {
       System.err.println("Usage: java FsShell" + 
                          " [-fs <local | file system URI>]");
@@ -974,9 +1092,9 @@
       System.err.println("Usage: java FsShell" + 
                          " [-D <[property=value>]");
     } else if ("-ls".equals(cmd) || "-lsr".equals(cmd) ||
-               "-du".equals(cmd) || "-dus".equals(cmd) || 
-               "-rm".equals(cmd) || "-rmr".equals(cmd) || 
-               "-mkdir".equals(cmd)) {
+               "-du".equals(cmd) || "-dus".equals(cmd) ||
+               "-rm".equals(cmd) || "-rmr".equals(cmd) ||
+               "-touchz".equals(cmd) || "-mkdir".equals(cmd)) {
       System.err.println("Usage: java FsShell" + 
                          " [" + cmd + " <path>]");
     } else if ("-mv".equals(cmd) || "-cp".equals(cmd)) {
@@ -999,6 +1117,12 @@
     } else if ("-setrep".equals(cmd)) {
       System.err.println("Usage: java FsShell" + 
                          " [-setrep [-R] <rep> <path/file>]");
+    } else if ("-test".equals(cmd)) {
+      System.err.println("Usage: java FsShell" +
+                         " [-test -[ezd] <path>]");
+    } else if ("-stat".equals(cmd)) {
+      System.err.println("Usage: java FsShell" +
+                         " [-stat [format] <path>]");
     } else {
       System.err.println("Usage: java FsShell");
       System.err.println("           [-fs <local | file system URI>]");
@@ -1023,6 +1147,9 @@
       System.err.println("           [-moveToLocal [-crc] <src> <localdst>]");
       System.err.println("           [-mkdir <path>]");
       System.err.println("           [-setrep [-R] <rep> <path/file>]");
+      System.err.println("           [-touchz <path>]");
+      System.err.println("           [-test -[ezd] <path>]");
+      System.err.println("           [-stat [format] <path>]");
       System.err.println("           [-help [cmd]]");
     }
   }
@@ -1044,7 +1171,7 @@
     //
     // verify that we have enough command line parameters
     //
-    if ("-put".equals(cmd) || 
+    if ("-put".equals(cmd) || "-test".equals(cmd) ||
         "-copyFromLocal".equals(cmd) || "-moveFromLocal".equals(cmd)) {
       if (argv.length != 3) {
         printUsage(cmd);
@@ -1062,7 +1189,8 @@
         return exitCode;
       }
     } else if ("-rm".equals(cmd) || "-rmr".equals(cmd) ||
-               "-cat".equals(cmd) || "-mkdir".equals(cmd)) {
+               "-cat".equals(cmd) || "-mkdir".equals(cmd) ||
+               "-touchz".equals(cmd) || "-stat".equals(cmd)) {
       if (argv.length < 2) {
         printUsage(cmd);
         return exitCode;
@@ -1136,6 +1264,16 @@
         }         
       } else if ("-mkdir".equals(cmd)) {
         exitCode = doall(cmd, argv, conf, i);
+      } else if ("-touchz".equals(cmd)) {
+        exitCode = doall(cmd, argv, conf, i);
+      } else if ("-test".equals(cmd)) {
+        exitCode = test(argv, i);
+      } else if ("-stat".equals(cmd)) {
+        if (i + 1 < argv.length) {
+          stat(argv[i++].toCharArray(), argv[i++]);
+        } else {
+          stat("%y".toCharArray(), argv[i]);
+        }
       } else if ("-help".equals(cmd)) {
         if (i < argv.length) {
           printHelp(argv[i]);

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java?view=diff&rev=550942&r1=550941&r2=550942
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java Tue Jun 26 13:50:01 2007
@@ -261,6 +261,30 @@
         }
         assertTrue(val == 0);
       }
+
+      // Verify touch/test
+      {
+        String[] args = new String[2];
+        args[0] = "-touchz";
+        args[1] = "/test/mkdirs/noFileHere";
+        int val = -1;
+        try {
+          val = shell.run(args);
+        } catch (Exception e) {
+          System.err.println("Exception raised from DFSShell.run " +
+                             e.getLocalizedMessage());
+        }
+        assertTrue(val == 0);
+
+        args[0] = "-test";
+        args[1] = "-e " + args[1];
+        try {
+        } catch (Exception e) {
+          System.err.println("Exception raised from DFSShell.run " +
+                             e.getLocalizedMessage());
+        }
+        assertTrue(val == 0);
+      }
         
     } finally {
       try {



Mime
View raw message