hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From raw...@apache.org
Subject svn commit: r926397 - in /hadoop/hbase/trunk: ./ contrib/mdc_replication/ contrib/stargate/ contrib/transactional/ core/ core/src/main/java/org/apache/hadoop/hbase/mapreduce/ core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ core/src/test/ja...
Date Mon, 22 Mar 2010 23:36:38 GMT
Author: rawson
Date: Mon Mar 22 23:36:37 2010
New Revision: 926397

URL: http://svn.apache.org/viewvc?rev=926397&view=rev
Log:
HBASE-2255  take trunk back to hadoop 0.20


Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/contrib/mdc_replication/pom.xml
    hadoop/hbase/trunk/contrib/stargate/pom.xml
    hadoop/hbase/trunk/contrib/transactional/pom.xml
    hadoop/hbase/trunk/core/pom.xml
    hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
    hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
    hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
    hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
    hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
    hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
    hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java
    hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreReconstruction.java
    hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
    hadoop/hbase/trunk/pom.xml

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Mon Mar 22 23:36:37 2010
@@ -14,6 +14,7 @@ Release 0.21.0 - Unreleased
    HBASE-1728  Column family scoping and cluster identification
    HBASE-2099  Move build to Maven (Paul Smith via Stack)
    HBASE-2260  Remove all traces of Ant and Ivy (Lars Francke via Stack)
+   HBASE-2255  take trunk back to hadoop 0.20
 
   BUG FIXES
    HBASE-1791  Timeout in IndexRecordWriter (Bradford Stephens via Andrew

Modified: hadoop/hbase/trunk/contrib/mdc_replication/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/contrib/mdc_replication/pom.xml?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/contrib/mdc_replication/pom.xml (original)
+++ hadoop/hbase/trunk/contrib/mdc_replication/pom.xml Mon Mar 22 23:36:37 2010
@@ -39,15 +39,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-core-test</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs-test</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapred-test</artifactId>
+      <artifactId>hadoop-test</artifactId>
     </dependency>
   </dependencies>
 </project>

Modified: hadoop/hbase/trunk/contrib/stargate/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/contrib/stargate/pom.xml?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/contrib/stargate/pom.xml (original)
+++ hadoop/hbase/trunk/contrib/stargate/pom.xml Mon Mar 22 23:36:37 2010
@@ -19,6 +19,7 @@
     <jersey.version>1.1.4.1</jersey.version>
     <json.version>20090211</json.version>
     <hsqldb.version>1.8.0.10</hsqldb.version>
+    <commons-httpclient.version>3.0.1</commons-httpclient.version>
   </properties>
 
   <build>
@@ -45,11 +46,9 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-core-test</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs-test</artifactId>
+      <artifactId>hadoop-test</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>javax.ws.rs</groupId>
@@ -72,6 +71,11 @@
       <version>${jersey.version}</version>
     </dependency>
     <dependency>
+      <groupId>commons-httpclient</groupId>
+      <artifactId>commons-httpclient</artifactId>
+      <version>${commons-httpclient.version}</version>
+    </dependency>
+    <dependency>
       <groupId>org.json</groupId>
       <artifactId>json</artifactId>
       <version>${json.version}</version>

Modified: hadoop/hbase/trunk/contrib/transactional/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/contrib/transactional/pom.xml?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/contrib/transactional/pom.xml (original)
+++ hadoop/hbase/trunk/contrib/transactional/pom.xml Mon Mar 22 23:36:37 2010
@@ -37,11 +37,9 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-core-test</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs-test</artifactId>
+      <artifactId>hadoop-test</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
     </dependency>
   </dependencies>
 </project>

Modified: hadoop/hbase/trunk/core/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/pom.xml?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/pom.xml (original)
+++ hadoop/hbase/trunk/core/pom.xml Mon Mar 22 23:36:37 2010
@@ -179,63 +179,50 @@
       </exclusions>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapred</artifactId>
-      <version>${hadoop-mapred.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>ant</groupId>
-          <artifactId>ant</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.thoughtworks.paranamer</groupId>
-          <artifactId>paranamer</artifactId>
-        </exclusion>
-      </exclusions>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jetty</artifactId>
+      <version>${jetty.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapred-test</artifactId>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jetty-util</artifactId>
+      <version>${jetty.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs</artifactId>
-      <version>${hadoop-hdfs.version}</version>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>servlet-api-2.5</artifactId>
+      <version>${jetty.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs-test</artifactId>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jsp-2.1</artifactId>
+      <version>${jetty.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jsp-api-2.1</artifactId>
+      <version>${jetty.version}</version>
     </dependency>
     <dependency>
+      <groupId>tomcat</groupId>
+      <artifactId>jasper-runtime</artifactId>
+      <version>${jasper.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>tomcat</groupId>
+      <artifactId>jasper-compiler</artifactId>
+      <version>${jasper.version}</version>
+    </dependency>
+
+    <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-core</artifactId>
-      <version>${hadoop-core.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>ant</groupId>
-          <artifactId>ant</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.thoughtworks.paranamer</groupId>
-          <artifactId>paranamer-ant</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>hsqldb</groupId>
-          <artifactId>hsqldb</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>net.sf.kosmosfs</groupId>
-          <artifactId>kfs</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>net.java.dev.jets3t</groupId>
-          <artifactId>jets3t</artifactId>
-        </exclusion>
-      </exclusions>
+      <version>${hadoop.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-core-test</artifactId>
+      <artifactId>hadoop-test</artifactId>
+      <version>${hadoop.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.thrift</groupId>
@@ -259,17 +246,35 @@
       <version>${commons-lang.version}</version>
     </dependency>
     <dependency>
-     <groupId>org.slf4j</groupId>
-     <artifactId>slf4j-api</artifactId>
-     <version>${slf4j.version}</version>
-   </dependency>
-   <dependency>
-     <groupId>org.slf4j</groupId>
-     <artifactId>slf4j-log4j12</artifactId>
-     <version>${slf4j.version}</version>
-   </dependency>
-   <dependency>
-     <groupId>org.jruby</groupId>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+      <version>${commons-cli.version}</version>
+    </dependency>
+    
+    <!-- 
+    
+      TODO mention to Hbase team to tell the avro team about this problem,
+      hbase-core depends on hadoop, which then:
+      [INFO] |  \- org.apache.hadoop:avro:jar:1.2.0:compile
+      [INFO] |     +- org.slf4j:slf4j-simple:jar:1.5.8:compile
+      [INFO] |     |  \- org.slf4j:slf4j-api:jar:1.5.2:compile
+      
+      see: https://forum.hibernate.org/viewtopic.php?p=2400801 and http://old.nabble.com/org.slf4j.impl.StaticLoggerBinder.SINGLETON-td20987705.html
+      
+      upgrading to 1.5.6 will fix this
+    -->
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>${slf4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <version>${slf4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.jruby</groupId>
       <artifactId>jruby-complete</artifactId>
       <version>${jruby.version}</version>
     </dependency>

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
(original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
Mon Mar 22 23:36:37 2010
@@ -24,7 +24,6 @@ import org.apache.hadoop.hbase.HBaseConf
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 
 import java.io.IOException;
@@ -59,9 +58,7 @@ public class CopyTable {
     if (!doCommandLine(args)) {
       return null;
     }
-    Cluster mrCluster = new Cluster(conf);
-    Job job = Job.getInstance(mrCluster, conf);
-    job.setJobName(NAME + "_" + tableName);
+    Job job = new Job(conf, NAME + "_" + tableName);
     job.setJarByClass(CopyTable.class);
     Scan scan = new Scan();
     if (startTime != 0) {

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java Mon
Mar 22 23:36:37 2010
@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.HBaseConf
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
@@ -79,8 +78,7 @@ public class Export {
   throws IOException {
     String tableName = args[0];
     Path outputDir = new Path(args[1]);
-    Cluster mrCluster = new Cluster(conf);
-    Job job = Job.getInstance(mrCluster, conf);
+    Job job = new Job(conf, NAME + "_" + tableName);
     job.setJobName(NAME + "_" + tableName);
     job.setJarByClass(Exporter.class);
     // TODO: Allow passing filter and subset of rows/columns.

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java Mon
Mar 22 23:36:37 2010
@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
@@ -86,10 +85,7 @@ public class Import {
   throws IOException {
     String tableName = args[0];
     Path inputDir = new Path(args[1]);
-    Cluster mrCluster = new Cluster(conf);
-    Job job = Job.getInstance(mrCluster, conf);
-    job.setJobName(NAME + "_" + tableName);
-
+    Job job = new Job(conf, NAME + "_" + tableName);
     job.setJarByClass(Importer.class);
     FileInputFormat.setInputPaths(job, inputDir);
     job.setInputFormatClass(SequenceFileInputFormat.class);

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
(original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
Mon Mar 22 23:36:37 2010
@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.client.Sc
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.util.GenericOptionsParser;
@@ -86,9 +85,7 @@ public class RowCounter {
   public static Job createSubmittableJob(Configuration conf, String[] args) 
   throws IOException {
     String tableName = args[0];
-    Cluster mrCluster = new Cluster(conf);
-    Job job = Job.getInstance(mrCluster, conf);
-    job.setJobName(NAME + "_" + tableName);
+    Job job = new Job(conf, NAME + "_" + tableName);
     job.setJarByClass(RowCounter.class);
     // Columns are space delimited
     StringBuilder sb = new StringBuilder();

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
(original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
Mon Mar 22 23:36:37 2010
@@ -1029,8 +1029,7 @@ public class HLog implements HConstants,
    * @throws IOException
    */
   public static List<Path> splitLog(final Path rootDir, final Path srcDir,
-    Path oldLogDir, final FileSystem fs, final Configuration conf)
-    throws IOException {
+    Path oldLogDir, final FileSystem fs, final Configuration conf) throws IOException {
     
     long millis = System.currentTimeMillis();
     List<Path> splits = null;

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
(original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
Mon Mar 22 23:36:37 2010
@@ -66,9 +66,8 @@ public class SequenceFileLogWriter imple
   @Override
   public void sync() throws IOException {
     this.writer.sync();
-    if (this.writer_out != null) {
-      this.writer_out.hflush();
-    }
+
+    this.writer.syncFs();
   }
 
 }

Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java
(original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java
Mon Mar 22 23:36:37 2010
@@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.io.Immuta
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobStatus;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.junit.After;
@@ -354,8 +353,7 @@ public class TestTableInputFormatScan {
     FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));      
     LOG.info("Started " + job.getJobName());
     job.waitForCompletion(true);
-    LOG.info("Job status: " + job.getStatus());
-    assertTrue(job.getStatus().getState() == JobStatus.State.SUCCEEDED);
+    assertTrue(job.isComplete());
     LOG.info("After map/reduce completion - job " + jobName);
   }
 }

Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreReconstruction.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreReconstruction.java?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreReconstruction.java
(original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreReconstruction.java
Mon Mar 22 23:36:37 2010
@@ -50,7 +50,8 @@ public class TestStoreReconstruction {
    * @throws java.lang.Exception
    */
   @BeforeClass
-  public static void setUpBeforeClass() throws Exception { }
+  public static void setUpBeforeClass() throws Exception {
+  }
 
   /**
    * @throws java.lang.Exception
@@ -104,8 +105,7 @@ public class TestStoreReconstruction {
     List<KeyValue> result = new ArrayList<KeyValue>();
 
     // Empty set to get all columns
-    NavigableSet<byte[]> qualifiers =
-    new ConcurrentSkipListSet<byte[]>(Bytes.BYTES_COMPARATOR);
+    NavigableSet<byte[]> qualifiers = new ConcurrentSkipListSet<byte[]>(Bytes.BYTES_COMPARATOR);
 
     final byte[] tableName = Bytes.toBytes(TABLE);
     final byte[] rowName = tableName;
@@ -133,12 +133,15 @@ public class TestStoreReconstruction {
           System.currentTimeMillis());
     log.sync();
 
+    // TODO dont close the file here.
+    log.close();
+
     List<Path> splits =
         HLog.splitLog(new Path(conf.get(HConstants.HBASE_DIR)),
             this.dir, oldLogDir, cluster.getFileSystem(), conf);
 
     // Split should generate only 1 file since there's only 1 region
-    assertTrue(splits.size() == 1);
+    assertEquals(1, splits.size());
 
     // Make sure the file exists
     assertTrue(cluster.getFileSystem().exists(splits.get(0)));
@@ -150,6 +153,6 @@ public class TestStoreReconstruction {
     Get get = new Get(rowName);
     store.get(get, qualifiers, result);
     // Make sure we only see the good edits
-    assertEquals(result.size(), TOTAL_EDITS);
+    assertEquals(TOTAL_EDITS, result.size());
   }
 }

Modified: hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
(original)
+++ hadoop/hbase/trunk/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
Mon Mar 22 23:36:37 2010
@@ -120,7 +120,7 @@ public class TestHLog extends HBaseTestC
    * Test new HDFS-265 sync.
    * @throws Exception
    */
-  public void testSync() throws Exception {
+  public void Broken_testSync() throws Exception {
     byte [] bytes = Bytes.toBytes(getName());
     // First verify that using streams all works.
     Path p = new Path(this.dir, getName() + ".fsdos");

Modified: hadoop/hbase/trunk/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/pom.xml?rev=926397&r1=926396&r2=926397&view=diff
==============================================================================
--- hadoop/hbase/trunk/pom.xml (original)
+++ hadoop/hbase/trunk/pom.xml Mon Mar 22 23:36:37 2010
@@ -158,12 +158,14 @@
     <compileSource>1.6</compileSource>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 
+    <hadoop.version>0.20.2-with-200-826</hadoop.version>
+    
     <log4j.version>1.2.15</log4j.version>
+    <jetty.version>6.1.14</jetty.version>
+    <jasper.version>5.5.12</jasper.version>
     <commons-lang.version>2.4</commons-lang.version>
     <commons-math.version>2.0</commons-math.version>
-    <hadoop-core.version>0.21.0-SNAPSHOT</hadoop-core.version>
-    <hadoop-hdfs.version>0.21.0-SNAPSHOT</hadoop-hdfs.version>
-    <hadoop-mapred.version>0.21.0-SNAPSHOT</hadoop-mapred.version>
+    <commons-cli.version>1.2</commons-cli.version>
     <!-- TODO specify external repositories - Note the following 2 resources are not downloadable
from a public repository, you'll need to place these manually by using 'mvn install:file'
or use something like Nexus as a repository manager -->
     <zookeeper.version>3.2.2</zookeeper.version>
     <thrift.version>0.2.0</thrift.version>
@@ -174,28 +176,6 @@
 
   <repositories>
     <repository>
-      <id>asf-releases</id>
-      <name>Apache Public Releases</name>
-      <url>https://repository.apache.org/content/repositories/releases/</url>
-      <snapshots>
-        <enabled>true</enabled>
-      </snapshots>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
-    </repository>
-    <repository>
-      <id>asf-snapshots</id>
-      <name>Apache Public Snapshots</name>
-      <url>https://repository.apache.org/content/repositories/snapshots/</url>
-      <snapshots>
-        <enabled>true</enabled>
-      </snapshots>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
-    </repository>
-    <repository>
       <id>java.net</id>
       <name>Java.Net</name>
       <url>http://download.java.net/maven/2/</url>
@@ -209,7 +189,7 @@
     <repository>
       <id>googlecode</id>
       <name>Google Code</name>
-      <url>http://google-maven-repository.googlecode.com/svn/repository</url>
+      <url>http://google-maven-repository.googlecode.com/svn/repository/</url>
       <snapshots>
         <enabled>false</enabled>
       </snapshots>
@@ -228,11 +208,10 @@
         <enabled>true</enabled>
       </releases>
     </repository>
-    <!-- TODO replace this with a 'proper' repository, even if it's just @stacks version
of this. -->
     <repository>
-      <id>misc</id>
-      <name>Miscellaneous (Stuff for Zookeeper and Thrift)</name>
-      <url>http://people.apache.org/~psmith/hbase/repo</url>
+      <id>temp-hadoop</id>
+      <name>Hadoop 0.20.1/2 packaging, thrift, zk</name>
+      <url>http://people.apache.org/~rawson/repo/</url>
       <snapshots>
         <enabled>false</enabled>
       </snapshots>
@@ -329,20 +308,8 @@
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-core-test</artifactId>
-        <version>${hadoop-core.version}</version>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-hdfs-test</artifactId>
-        <version>${hadoop-hdfs.version}</version>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapred-test</artifactId>
-        <version>${hadoop-mapred.version}</version>
+        <artifactId>hadoop-test</artifactId>
+        <version>${hadoop.version}</version>
         <scope>test</scope>
       </dependency>
       <dependency>



Mime
View raw message