hbase-commits mailing list archives

From: j...@apache.org
Subject: svn commit: r659245 - in /hadoop/hbase/trunk: ./ lib/ src/java/org/apache/hadoop/hbase/filter/ src/java/org/apache/hadoop/hbase/mapred/ src/java/org/apache/hadoop/hbase/master/ src/java/org/apache/hadoop/hbase/regionserver/ src/test/org/apache/hadoop/h...
Date: Thu, 22 May 2008 20:32:27 GMT
Author: jimk
Date: Thu May 22 13:32:25 2008
New Revision: 659245

URL: http://svn.apache.org/viewvc?rev=659245&view=rev
Log:
HBASE-589 Remove references to deprecated methods in Hadoop once hadoop-0.17.0 is released
HBASE-579 Add hadoop 0.17.0
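
The bulk of the change swaps Hadoop APIs deprecated or removed in 0.17.0 for their
replacements: FileSystem.delete(Path) gains an explicit recursive flag,
JobConf.setInputPath/setOutputPath move to the static helpers on
FileInputFormat/FileOutputFormat, custom output formats extend FileOutputFormat
instead of the removed OutputFormatBase, FileUtil.fullyDelete(fs, dir) collapses to
fs.delete(dir, true), and fs.getName() becomes fs.getUri().toString(). A minimal
sketch of the pattern against hadoop-0.17.0 (the class name and paths below are
illustrative, not code from this commit):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileInputFormat;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobConf;

    public class Hadoop017Migration {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path dir = new Path("/tmp/example");

        // Was: fs.delete(dir) -- deprecated; the recursive flag is now explicit.
        // true deletes a non-empty directory, false only a file or empty dir.
        fs.delete(dir, true);

        JobConf job = new JobConf(conf);
        // Was: job.setInputPath(in) / job.setOutputPath(out).
        FileInputFormat.setInputPaths(job, new Path("/tmp/in"));
        FileOutputFormat.setOutputPath(job, new Path("/tmp/out"));

        // Was: conf.set("fs.default.name", fs.getName()).
        conf.set("fs.default.name", fs.getUri().toString());
      }
    }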

Added:
    hadoop/hbase/trunk/lib/hadoop-0.17.0-core.jar   (with props)
    hadoop/hbase/trunk/lib/hadoop-0.17.0-test.jar   (with props)
Removed:
    hadoop/hbase/trunk/lib/hadoop-0.17.0-dev-2008.04.04-13.34.00-core.jar
    hadoop/hbase/trunk/lib/hadoop-0.17.0-dev-2008.04.04-13.34.00-test.jar
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java
Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/build.xml
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/BuildTableIndex.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMigrate.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Thu May 22 13:32:25 2008
@@ -17,6 +17,8 @@
    HBASE-629   Split reports incorrect elapsed time
    HBASE-623   Migration script for hbase-82
    HBASE-630   Default hbase.rootdir is garbage
+   HBASE-589   Remove references to deprecated methods in Hadoop once
+               hadoop-0.17.0 is released
 
   IMPROVEMENTS
    HBASE-559   MR example job to count table rows
@@ -48,6 +50,7 @@
    HBASE-583   RangeRowFilter/ColumnValueFilter to allow choice of rows based on
                a (lexicographic) comparison to column's values
                (Clint Morgan via Stack)
+   HBASE-579   Add hadoop 0.17.x
 
 Release 0.1.2 - 05/13/2008
    

Modified: hadoop/hbase/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/build.xml?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/build.xml (original)
+++ hadoop/hbase/trunk/build.xml Thu May 22 13:32:25 2008
@@ -355,7 +355,8 @@
        srcdir="${src.test}" 
        includes="**/*.java" 
        destdir="${build.test}" 
-       debug="${javac.debug}"> 
+       debug="${javac.debug}"
+       deprecation="${javac.deprecation}"> 
     <classpath refid="test.classpath"/> 
     </javac>
     <jar jarfile="${build.dir}/${final.name}-test.jar" >

Added: hadoop/hbase/trunk/lib/hadoop-0.17.0-core.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/hadoop-0.17.0-core.jar?rev=659245&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/hadoop-0.17.0-core.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/hadoop-0.17.0-test.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/hadoop-0.17.0-test.jar?rev=659245&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/hadoop-0.17.0-test.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java Thu May 22 13:32:25 2008
@@ -82,13 +82,13 @@
   }
 
   /** {@inheritDoc} */
-  public boolean filterRowKey(final byte[] rowKey) {
+  public boolean filterRowKey(@SuppressWarnings("unused") final byte[] rowKey) {
     return false;
   }
 
   /** {@inheritDoc} */
-  public boolean filterColumn(final byte[] rowKey, final byte[] colKey,
-      final byte[] data) {
+  public boolean filterColumn(@SuppressWarnings("unused") final byte[] rowKey,
+      final byte[] colKey, final byte[] data) {
     if (!Arrays.equals(colKey, columnName)) {
       return false;
     }
@@ -150,12 +150,14 @@
     // Nothing.
   }
 
-  public void rowProcessed(final boolean filtered, final byte[] key) {
+  /** {@inheritDoc} */
+  public void rowProcessed(@SuppressWarnings("unused") final boolean filtered,
+      @SuppressWarnings("unused") final byte[] key) {
     // Nothing
   }
 
   /** {@inheritDoc} */
-  public void validate(final byte[][] columns) {
+  public void validate(@SuppressWarnings("unused") final byte[][] columns) {
     // Nothing
   }
 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java Thu May 22 13:32:25 2008
@@ -47,8 +47,10 @@
   private Pattern rowKeyPattern = null;
   private String rowKeyRegExp = null;
 
+  @Deprecated
   private Map<byte [], byte[]> equalsMap =
     new TreeMap<byte [], byte[]>(Bytes.BYTES_COMPARATOR);
+  @Deprecated
   private Set<byte []> nullColumns =
     new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
 
@@ -199,6 +201,7 @@
     return false;
   }
 
+  @Deprecated
   private boolean filtersByColumnValue() {
     return equalsMap != null && equalsMap.size() > 0;
   }
@@ -277,6 +280,7 @@
     }
   }
 
+  @Deprecated
   private Set<byte []> getFilterColumns() {
     Set<byte []> cols = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
     cols.addAll(equalsMap.keySet());

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/BuildTableIndex.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/BuildTableIndex.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/BuildTableIndex.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/BuildTableIndex.java Thu May 22 13:32:25 2008
@@ -27,6 +27,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 
@@ -61,10 +62,15 @@
     System.exit(-1);
   }
 
+  /** default constructor */
   public BuildTableIndex() {
     super();
   }
 
+  /**
+   * @param args
+   * @throws IOException
+   */
   public void run(String[] args) throws IOException {
     if (args.length < 6) {
       printUsage("Too few arguments");
@@ -115,11 +121,22 @@
       conf.set("hbase.index.conf", content);
     }
 
-    JobConf jobConf = createJob(conf, numMapTasks, numReduceTasks, indexDir,
-      tableName, columnNames.toString());
-    JobClient.runJob(jobConf);
+    if (columnNames != null) {
+      JobConf jobConf = createJob(conf, numMapTasks, numReduceTasks, indexDir,
+          tableName, columnNames.toString());
+      JobClient.runJob(jobConf);
+    }
   }
 
+  /**
+   * @param conf
+   * @param numMapTasks
+   * @param numReduceTasks
+   * @param indexDir
+   * @param tableName
+   * @param columnNames
+   * @return JobConf
+   */
   public JobConf createJob(Configuration conf, int numMapTasks,
       int numReduceTasks, String indexDir, String tableName,
       String columnNames) {
@@ -135,7 +152,7 @@
 
     // use IndexTableReduce to build a Lucene index
     jobConf.setReducerClass(IndexTableReduce.class);
-    jobConf.setOutputPath(new Path(indexDir));
+    FileOutputFormat.setOutputPath(jobConf, new Path(indexDir));
     jobConf.setOutputFormat(IndexOutputFormat.class);
 
     return jobConf;
@@ -177,6 +194,10 @@
     return new String(bytes, 0, bytesRead, HConstants.UTF8_ENCODING);
   }
 
+  /**
+   * @param args
+   * @throws IOException
+   */
   public static void main(String[] args) throws IOException {
     BuildTableIndex build = new BuildTableIndex();
     build.run(args);

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java Thu May 22 13:32:25 2008
@@ -28,7 +28,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.OutputFormatBase;
+import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.Progressable;
@@ -42,22 +42,24 @@
  * the index, and copy the index to the destination.
  */
 public class IndexOutputFormat extends
-    OutputFormatBase<ImmutableBytesWritable, LuceneDocumentWrapper> {
+    FileOutputFormat<ImmutableBytesWritable, LuceneDocumentWrapper> {
   static final Log LOG = LogFactory.getLog(IndexOutputFormat.class);
 
+  /** {@inheritDoc} */
   @Override
-  public RecordWriter<ImmutableBytesWritable, LuceneDocumentWrapper> getRecordWriter(
-    final FileSystem fs, JobConf job, String name, final Progressable progress)
+  public RecordWriter<ImmutableBytesWritable, LuceneDocumentWrapper>
+  getRecordWriter(final FileSystem fs, JobConf job, String name,
+      final Progressable progress)
   throws IOException {
 
-    final Path perm = new Path(job.getOutputPath(), name);
+    final Path perm = new Path(FileOutputFormat.getOutputPath(job), name);
     final Path temp = job.getLocalPath("index/_"
         + Integer.toString(new Random().nextInt()));
 
     LOG.info("To index into " + perm);
 
     // delete old, if any
-    fs.delete(perm);
+    fs.delete(perm, true);
 
     final IndexConfiguration indexConf = new IndexConfiguration();
     String content = job.get("hbase.index.conf");
@@ -68,7 +70,7 @@
     String analyzerName = indexConf.getAnalyzerName();
     Analyzer analyzer;
     try {
-      Class analyzerClass = Class.forName(analyzerName);
+      Class<?> analyzerClass = Class.forName(analyzerName);
       analyzer = (Analyzer) analyzerClass.newInstance();
     } catch (Exception e) {
       throw new IOException("Error in creating an analyzer object "
@@ -87,7 +89,7 @@
     String similarityName = indexConf.getSimilarityName();
     if (similarityName != null) {
       try {
-        Class similarityClass = Class.forName(similarityName);
+        Class<?> similarityClass = Class.forName(similarityName);
         Similarity similarity = (Similarity) similarityClass.newInstance();
         writer.setSimilarity(similarity);
       } catch (Exception e) {
@@ -98,7 +100,7 @@
     writer.setUseCompoundFile(indexConf.isUseCompoundFile());
 
     return new RecordWriter<ImmutableBytesWritable, LuceneDocumentWrapper>() {
-      private boolean closed;
+      boolean closed;
       private long docCount = 0;
 
       public void write(@SuppressWarnings("unused") ImmutableBytesWritable key,
@@ -114,6 +116,7 @@
       public void close(final Reporter reporter) throws IOException {
         // spawn a thread to give progress heartbeats
         Thread prog = new Thread() {
+          @Override
           public void run() {
             while (!closed) {
               try {

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java Thu May 22 13:32:25 2008
@@ -27,7 +27,7 @@
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobConfigurable;
 
@@ -48,7 +48,7 @@
 
   /** {@inheritDoc} */
   public void configure(JobConf job) {
-    Path[] tableNames = job.getInputPaths();
+    Path[] tableNames = FileInputFormat.getInputPaths(job);
     String colArg = job.get(COLUMN_LIST);
     String[] colNames = colArg.split(" ");
     byte [][] m_cols = new byte[colNames.length][];
@@ -66,7 +66,7 @@
   /** {@inheritDoc} */
   public void validateInput(JobConf job) throws IOException {
     // expecting exactly one path
-    Path [] tableNames = job.getInputPaths();
+    Path [] tableNames = FileInputFormat.getInputPaths(job);
     if (tableNames == null || tableNames.length > 1) {
       throw new IOException("expecting one table name");
     }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java Thu May 22 13:32:25 2008
@@ -21,11 +21,11 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MapReduceBase;
 import org.apache.hadoop.mapred.Mapper;
@@ -60,7 +60,7 @@
     job.setMapOutputValueClass(outputValueClass);
     job.setMapOutputKeyClass(outputKeyClass);
     job.setMapperClass(mapper);
-    job.setInputPath(new Path(table));
+    FileInputFormat.addInputPaths(job, table);
     job.set(TableInputFormat.COLUMN_LIST, columns);
   }
 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java Thu May 22 13:32:25 2008
@@ -31,7 +31,7 @@
 import org.apache.hadoop.mapred.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.OutputFormatBase;
+import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.Progressable;
@@ -39,7 +39,8 @@
 /**
  * Convert Map/Reduce output and write it to an HBase table
  */
-public class TableOutputFormat extends OutputFormatBase<ImmutableBytesWritable, BatchUpdate> {
+public class TableOutputFormat extends
+FileOutputFormat<ImmutableBytesWritable, BatchUpdate> {
 
   /** JobConf parameter that specifies the output table */
   public static final String OUTPUT_TABLE = "hbase.mapred.outputtable";
@@ -68,7 +69,8 @@
     }
 
     /** {@inheritDoc} */
-    public void write(ImmutableBytesWritable key, BatchUpdate value) throws IOException {
+    public void write(@SuppressWarnings("unused") ImmutableBytesWritable key,
+        BatchUpdate value) throws IOException {
       m_table.commit(value);
     }
   }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java Thu May 22 13:32:25 2008
@@ -47,9 +47,9 @@
       // Delete the directories used by the column
       int encodedName = i.getEncodedName();
       this.master.fs.delete(
-        HStoreFile.getMapDir(tabledir, encodedName, columnName));
+        HStoreFile.getMapDir(tabledir, encodedName, columnName), true);
       this.master.fs.delete(
-        HStoreFile.getInfoDir(tabledir, encodedName, columnName));
+        HStoreFile.getInfoDir(tabledir, encodedName, columnName), true);
     }
   }
 }
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java Thu May 22 13:32:25 2008
@@ -21,7 +21,6 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
@@ -66,7 +65,6 @@
     }
     
     // delete the table's folder from fs.
-    FileUtil.fullyDelete(master.fs,
-        new Path(master.rootdir, tableName.toString()));
+    master.fs.delete(new Path(master.rootdir, tableName.toString()), true);
   }
 }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java Thu May 22 13:32:25 2008
@@ -35,7 +35,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
@@ -629,7 +628,7 @@
     }
 
     try {
-      FileUtil.fullyDelete(fs, srcDir);
+      fs.delete(srcDir, true);
     } catch (IOException e) {
       e = RemoteExceptionHandler.checkIOException(e);
       IOException io = new IOException("Cannot delete: " + srcDir);

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java Thu May 22 13:32:25 2008
@@ -41,7 +41,6 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.DroppedSnapshotException;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -458,7 +457,7 @@
       if (LOG.isDebugEnabled()) {
         LOG.debug("Deleting old log file: " + oldLogFile);
       }
-      fs.delete(oldLogFile);
+      fs.delete(oldLogFile, false);
     }
     
     // Add one to the current maximum sequence id so new edits are beyond.
@@ -472,11 +471,11 @@
     // Get rid of any splits or merges that were lost in-progress
     Path splits = new Path(regiondir, SPLITDIR);
     if (fs.exists(splits)) {
-      fs.delete(splits);
+      fs.delete(splits, true);
     }
     Path merges = new Path(regiondir, MERGEDIR);
     if (fs.exists(merges)) {
-      fs.delete(merges);
+      fs.delete(merges, true);
     }
 
     // By default, we flush the cache when 64M.
@@ -770,7 +769,7 @@
       regionB.close();
 
       // Cleanup
-      boolean deleted = fs.delete(splits);    // Get rid of splits directory
+      boolean deleted = fs.delete(splits, true); // Get rid of splits directory
       if (LOG.isDebugEnabled()) {
         LOG.debug("Cleaned up " + FSUtils.getPath(splits) + " " + deleted);
       }
@@ -802,7 +801,7 @@
    */
   private void doRegionCompactionCleanup() throws IOException {
     if (this.fs.exists(this.regionCompactionDir)) {
-      FileUtil.fullyDelete(this.fs, this.regionCompactionDir);
+      this.fs.delete(this.regionCompactionDir, true);
     }
   }
 
@@ -1864,6 +1863,9 @@
       }
     }
 
+    /**
+     * @return an iterator for the scanner
+     */
     public Iterator<Entry<HStoreKey, SortedMap<Text, byte[]>>> iterator() {
       throw new UnsupportedOperationException("Unimplemented serverside. " +
         "next(HStoreKey, StortedMap(...) is more efficient");
@@ -2018,7 +2020,7 @@
     if (LOG.isDebugEnabled()) {
       LOG.debug("DELETING region " + regiondir.toString());
     }
-    FileUtil.fullyDelete(fs, regiondir);
+    fs.delete(regiondir, true);
   }
 
   /**

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java Thu May 22 13:32:25 2008
@@ -405,7 +405,7 @@
 
       Path mapfile = curfile.getMapFilePath();
       if (!fs.exists(mapfile)) {
-        fs.delete(curfile.getInfoFilePath());
+        fs.delete(curfile.getInfoFilePath(), false);
         LOG.warn("Mapfile " + mapfile.toString() + " does not exist. " +
           "Cleaned up info file.  Continuing...");
         continue;
@@ -431,7 +431,7 @@
       Path p = datfiles[i].getPath();
       // If does not have sympathetic info file, delete.
       if (!mapfiles.contains(fs.makeQualified(p))) {
-        fs.delete(p);
+        fs.delete(p, false);
       }
     }
     return results;

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java Thu May 22 13:32:25 2008
@@ -356,8 +356,8 @@
    * @throws IOException 
    */
   public void delete() throws IOException {
-    fs.delete(getMapFilePath());
-    fs.delete(getInfoFilePath());
+    fs.delete(getMapFilePath(), true);
+    fs.delete(getInfoFilePath(), true);
   }
   
   /**
@@ -497,10 +497,6 @@
     return r.equals(Range.top);
   }
 
-  private static String createHStoreFilename(final long fid) {
-    return createHStoreFilename(fid, HRegionInfo.NO_HASH);
-  }
-
   private static String createHStoreFilename(final long fid,
       final int encodedRegionName) {
     return Long.toString(fid) + 

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java Thu May 22 13:32:25 2008
@@ -110,7 +110,7 @@
         // mangle the conf so that the fs parameter points to the minidfs we
         // just started up
         FileSystem fs = dfsCluster.getFileSystem();
-        conf.set("fs.default.name", fs.getName());      
+        conf.set("fs.default.name", fs.getUri().toString());      
         Path parentdir = fs.getHomeDirectory();
         conf.set(HConstants.HBASE_DIR, parentdir.toString());
         fs.mkdirs(parentdir);

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Thu May 22 13:32:25 2008
@@ -119,7 +119,7 @@
       if (localfs) {
         this.testDir = getUnitTestdir(getName());
         if (fs.exists(testDir)) {
-          fs.delete(testDir);
+          fs.delete(testDir, true);
         }
       } else {
         this.testDir =
@@ -137,7 +137,7 @@
     try {
       if (localfs) {
         if (this.fs.exists(testDir)) {
-          this.fs.delete(testDir);
+          this.fs.delete(testDir, true);
         }
       }
     } catch (Exception e) {

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java Thu May 22 13:32:25 2008
@@ -56,7 +56,7 @@
     FileSystem fs = FileSystem.get(conf);
     Path mf = fs.makeQualified(new Path("performanceevaluation.mapfile"));
     if (fs.exists(mf)) {
-      fs.delete(mf);
+      fs.delete(mf, true);
     }
 
     runBenchmark(new SequentialWriteBenchmark(conf, fs, mf, ROW_COUNT),

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java Thu May 22 13:32:25 2008
@@ -41,6 +41,8 @@
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MapReduceBase;
@@ -230,7 +232,7 @@
     Path inputDir = writeInputFile(this.conf);
     this.conf.set(EvaluationMapTask.CMD_KEY, cmd);
     JobConf job = new JobConf(this.conf, this.getClass());
-    job.setInputPath(inputDir);
+    FileInputFormat.setInputPaths(job, inputDir);
     job.setInputFormat(TextInputFormat.class);
     job.setJobName("HBase Performance Evaluation");
     job.setMapperClass(EvaluationMapTask.class);
@@ -239,7 +241,7 @@
     job.setNumMapTasks(this.N * 10); // Ten maps per client.
     job.setNumReduceTasks(1);
     job.setOutputFormat(TextOutputFormat.class);
-    job.setOutputPath(new Path(inputDir, "outputs"));
+    FileOutputFormat.setOutputPath(job, new Path(inputDir, "outputs"));
     JobClient.runJob(job);
   }
   
@@ -569,7 +571,7 @@
       // mangle the conf so that the fs parameter points to the minidfs we
       // just started up
       FileSystem fs = dfsCluster.getFileSystem();
-      conf.set("fs.default.name", fs.getName());      
+      conf.set("fs.default.name", fs.getUri().toString());      
       Path parentdir = fs.getHomeDirectory();
       conf.set(HConstants.HBASE_DIR, parentdir.toString());
       fs.mkdirs(parentdir);

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java Thu May 22 13:32:25 2008
@@ -43,6 +43,7 @@
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MultiRegionTable;
+import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
@@ -124,7 +125,7 @@
 
       // use IndexTableReduce to build a Lucene index
       jobConf.setReducerClass(IndexTableReduce.class);
-      jobConf.setOutputPath(new Path(INDEX_DIR));
+      FileOutputFormat.setOutputPath(jobConf, new Path(INDEX_DIR));
       jobConf.setOutputFormat(IndexOutputFormat.class);
 
       JobClient.runJob(jobConf);

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java Thu May 22 13:32:25 2008
@@ -47,7 +47,7 @@
     super.setUp();
     this.dir = new Path("/hbase", getName());
     if (fs.exists(dir)) {
-      fs.delete(dir);
+      fs.delete(dir, true);
     }
   }
 
@@ -55,7 +55,7 @@
   @Override
   public void tearDown() throws Exception {
     if (this.fs.exists(this.dir)) {
-      this.fs.delete(this.dir);
+      this.fs.delete(this.dir, true);
     }
     shutdownDfs(cluster);
     super.tearDown();

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java Thu May 22 13:32:25 2008
@@ -538,9 +538,9 @@
       region = new HRegionIncommon(r);
       LOG.info("Merge regions elapsed time: "
           + ((System.currentTimeMillis() - startTime) / 1000.0));
-      fs.delete(oldRegion1);
-      fs.delete(oldRegion2);
-      fs.delete(oldRegionPath);
+      fs.delete(oldRegion1, true);
+      fs.delete(oldRegion2, true);
+      fs.delete(oldRegionPath, true);
     }
     LOG.info("splitAndMerge completed.");
   }

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java Thu May 22 13:32:25 2008
@@ -319,7 +319,7 @@
       if (bottom != null) {
         bottom.close();
       }
-      fs.delete(p);
+      fs.delete(p, true);
     }
   }
   
@@ -385,10 +385,10 @@
         if (bottom != null) {
           bottom.close();
         }
-        fs.delete(p);
+        fs.delete(p, true);
       }
     } finally {
-      this.fs.delete(p);
+      this.fs.delete(p, true);
     }
   }
 }
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMigrate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMigrate.java?rev=659245&r1=659244&r2=659245&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMigrate.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMigrate.java Thu May 22 13:32:25 2008
@@ -101,7 +101,7 @@
       listPaths(dfs, root, root.toString().length() + 1);
       
       // Remove version file and try again
-      dfs.delete(new Path(root, HConstants.VERSION_FILE_NAME));
+      dfs.delete(new Path(root, HConstants.VERSION_FILE_NAME), false);
       u = new Migrate(conf);
       u.run(new String[] {"upgrade"});
       listPaths(dfs, root, root.toString().length() + 1);


