hbase-commits mailing list archives

From: apurt...@apache.org
Subject: svn commit: r789197 - in /hadoop/hbase/trunk_on_hadoop-0.18.3: ./ contrib/ src/contrib/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/io/hfile/ src/java/org/apache/hadoop/hbase/ipc/ src/java...
Date: Mon, 29 Jun 2009 03:03:59 GMT
Author: apurtell
Date: Mon Jun 29 03:03:59 2009
New Revision: 789197

URL: http://svn.apache.org/viewvc?rev=789197&view=rev
Log:
move contrib to its correct subdir (src/contrib) and wire contrib compile, test, package, and clean targets into build.xml; also remove the in-core indexed and transactional region interfaces and open extension hooks in HLog, HRegion, and HRegionServer for the relocated contribs

Added:
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/
      - copied from r789195, hadoop/hbase/trunk_on_hadoop-0.18.3/contrib/
Removed:
    hadoop/hbase/trunk_on_hadoop-0.18.3/contrib/
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/IndexedRegionInterface.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/TransactionalRegionInterface.java
Modified:
    hadoop/hbase/trunk_on_hadoop-0.18.3/build.xml
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/KeyValue.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/zoo.cfg

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/build.xml?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/build.xml (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/build.xml Mon Jun 29 03:03:59 2009
@@ -39,6 +39,8 @@
   <property name="docs.dir" value="${basedir}/docs"/>
   <property name="docs.src" value="${basedir}/src/docs"/>
 
+  <property name="contrib.dir" value="${basedir}/src/contrib"/>
+
   <property name="test.output" value="no"/>
   <property name="test.timeout" value="600000"/>
 
@@ -96,7 +98,7 @@
   <fileset id="lib.jars" dir="${basedir}" includes="lib/*.jar"/>
   <path id="classpath">
     <fileset refid="lib.jars"/>
-    <fileset dir="${lib.dir}/jetty-ext/">
+    <fileset dir="${lib.dir}/jsp-2.1/">
       <include name="*jar" />
     </fileset>
     <pathelement location="${build.classes}"/>
@@ -135,12 +137,17 @@
     <chmod perm="ugo+x" type="file">
       <fileset dir="${build.bin}" />
     </chmod>
+    <!--Copy contrib. templates if any-->
+    <copy todir="${contrib.dir}" verbose="true">
+      <fileset dir="${contrib.dir}" includes="**/*.template"/>
+      <mapper type="glob" from="*.template" to="*"/>
+    </copy>
     <exec executable="sh">
        <arg line="src/saveVersion.sh ${version}"/>
     </exec>
   </target>
 
-  <target name="compile" depends="clover,init,jspc">
+  <target name="compile-core" depends="clover,init,jspc" description="Compile hbase core">
    <!--Compile whats under src and generated java classes made from jsp-->
    <javac
     encoding="${build.encoding}"
@@ -154,17 +161,28 @@
      <classpath refid="classpath"/>
    </javac>
   </target>
+
+  <target name="compile-contrib" depends="compile-core" description="Compile contribs">
+     <subant target="compile">
+        <property name="version" value="${version}"/>
+        <fileset file="${contrib.dir}/build.xml"/>
+     </subant>  	
+  </target>
+
+  <target name="compile" depends="compile-core, compile-contrib"
+     description="Compile core, contrib">
+  </target>
 	
-  <target name="jar" depends="compile" description="Build jar">
+  <target name="jar" depends="compile"
+       description="Build jar">
     <!--Copy over any properties under src-->
     <copy todir="${build.classes}">
       <fileset dir="${src.dir}">
         <include name="**/*.properties" />
       </fileset>
     </copy>
-    <jar jarfile="${jarfile}"
-        basedir="${build.classes}" >
-      <fileset file="${basedir}/conf/hbase-default.xml"/>
+    <jar jarfile="${jarfile}" basedir="${build.classes}" >
+      <zipfileset dir="conf" prefix="conf" includes="zoo.cfg,hbase-default.xml" />
       <zipfileset dir="${build.webapps}" prefix="webapps"/>
    		<manifest>
             <attribute name="Main-Class" value="org/apache/hadoop/hbase/mapred/Driver" />
@@ -177,11 +195,14 @@
      -->
   <target name="jspc" depends="init" unless="jspc.not.required">
     <path id="jspc.classpath">
+      <fileset dir="${basedir}/lib/jsp-2.1/">
+        <include name="*jar" />
+      </fileset>
       <fileset dir="${basedir}/lib/">
         <include name="servlet-api*jar" />
         <include name="commons-logging*jar" />
+        <include name="jasper-*jar" />
         <include name="jetty-*jar" />
-        <include name="jetty-ext/*jar" />
       </fileset>
     </path>
     <taskdef classname="org.apache.jasper.JspC" name="jspcompiler" >
@@ -205,7 +226,8 @@
     <echo message="Setting jspc.notRequired property. jsp pages generated once per ant
session only" />
   </target>
 
-  <target name="clover" depends="clover.setup, clover.info" description="Instrument the
Unit tests using Clover.  To use, specify -Dclover.home=&lt;base of clover installation&gt;
-Drun.clover=true on the command line."/>
+  <target name="clover" depends="clover.setup, clover.info"
+    description="Instrument the Unit tests using Clover.  To use, specify -Dclover.home=&lt;base
of clover installation&gt; -Drun.clover=true on the command line."/>
 
   <target name="clover.setup" if="clover.enabled">
     <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
@@ -248,8 +270,9 @@
   <!-- ================================================================== -->
   <!-- Package                                                            -->
   <!-- ================================================================== -->
-  <target name="package" depends="jar,javadoc,compile-test" 
-      description="Build distribution"> 
+  <target name="package" depends="jar,javadoc" 
+      description="Build distribution; must define -Djava5.home and -Dforrest.home so can
generate doc"> 
+    <echo message="Be sure to run 'docs' target before this one else package will be missing
site documentation" />
     <mkdir dir="${dist.dir}"/>
     <copy todir="${dist.dir}" includeEmptyDirs="false" flatten="true">
       <fileset dir="${build.dir}">
@@ -257,6 +280,14 @@
         <include name="${final.name}-test.jar" />
       </fileset>
     </copy>
+    <mkdir dir="${dist.dir}/contrib"/>
+    <subant target="package">
+      <!--Pass down the version in case its needed again and the target
+      distribution directory so contribs know where to install to.-->
+      <property name="version" value="${version}"/>
+      <property name="dist.dir" value="${dist.dir}"/>
+      <fileset file="${contrib.dir}/build.xml"/>
+    </subant>  	
     <mkdir dir="${dist.dir}/webapps"/>
     <copy todir="${dist.dir}/webapps">
       <fileset dir="${build.webapps}" />
@@ -278,7 +309,6 @@
     </chmod>
     <mkdir dir="${dist.dir}/docs" />
     <copy todir="${dist.dir}/docs">
-      <fileset dir="${docs.dir}" />
       <fileset dir="${build.docs}"/>
     </copy>
     <copy todir="${dist.dir}">
@@ -291,6 +321,13 @@
     <copy todir="${dist.dir}/src" includeEmptyDirs="true">
       <fileset dir="src" excludes="**/*.template **/docs/build/**/*"/>
     </copy>
+    <copy todir="${dist.dir}/" file="build.xml"/>
+    <chmod perm="ugo+x" type="file" parallel="false">
+        <fileset dir="${dist.dir}/bin"/>
+        <fileset dir="${dist.dir}/src/contrib/">
+          <include name="*/bin/*" />
+        </fileset>
+    </chmod>
   </target>
 
   <!-- ================================================================== -->
@@ -325,12 +362,14 @@
       <param.listofitems>
         <tarfileset dir="${build.dir}" mode="664">
           <exclude name="${final.name}/bin/*" />
+          <exclude name="${final.name}/contrib/*/bin/*" />
           <exclude name="${final.name}/src/**" />
           <exclude name="${final.name}/docs/**" />
           <include name="${final.name}/**" />
         </tarfileset>
         <tarfileset dir="${build.dir}" mode="755">
           <include name="${final.name}/bin/*" />
+          <exclude name="${final.name}/contrib/*/bin/*" />
         </tarfileset>
       </param.listofitems>
     </macro_tar>
@@ -339,15 +378,20 @@
   <!-- ================================================================== -->
   <!-- Doc                                                                -->
   <!-- ================================================================== -->
-  <target name="docs" depends="forrest.check" description="Generate forrest-based documentation.
To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the
command line." if="forrest.home">
+  <target name="docs" depends="forrest.check"
+      description="Generate forrest-based documentation. To use, specify -Dforrest.home=&lt;base
of Apache Forrest installation&gt; on the command line." if="forrest.home">
     <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest" failonerror="true"
>
       <env key="JAVA_HOME" value="${java5.home}"/>
     </exec>
-    <copy todir="${docs.dir}">
+    <copy todir="${build.docs}">
       <fileset dir="${docs.src}/build/site/" />
     </copy>
-    <style basedir="${conf.dir}" destdir="${docs.dir}"
+    <style basedir="${conf.dir}" destdir="${build.docs}"
            includes="hadoop-default.xml" style="conf/configuration.xsl"/>
+    <!--Copy to $HBASE_HOME/docs ... need to check it in to persist it.-->
+    <copy todir="${docs.dir}">
+      <fileset dir="${build.docs}" />
+    </copy>
   </target>
 
   <target name="forrest.check" unless="forrest.home" depends="java5.check">
@@ -403,7 +447,7 @@
       redoing init and jscpc at this stage of the game; i.e. the prereqs
       for compile.  TODO: Investigate why.  For now, test will fail
       if not preceeded by manual 'jar' or 'compile' invokation -->
-  <target name="compile-test" depends="compile" description="Build test jar">
+  <target name="compile-core-test" depends="compile" description="Build test jar">
     <javac encoding="${build.encoding}" 
        srcdir="${src.test}" 
        includes="**/*.java" 
@@ -425,8 +469,8 @@
     </jar>
   </target>
 
-  <target name="test" depends="compile-test"
-  	description="Build test jar and run tests">
+  <target name="test-core" depends="compile-core-test"
+   	  description="Build test jar and run tests">
     <delete dir="${test.log.dir}"/>
     <mkdir dir="${test.log.dir}"/>
     <junit
@@ -457,10 +501,28 @@
     <fail if="tests.failed">Tests failed!</fail>
   </target>
 
+  <target name="test-contrib" depends="compile, compile-core-test"
+      description="Run contrib unit tests">
+    <subant target="test">
+       <property name="version" value="${version}"/>
+       <fileset file="${contrib.dir}/build.xml"/>
+    </subant> 
+  </target>
+
+  <target name="test" depends="test-core, test-contrib"
+    description="Run core, contrib unit tests">
+  </target>
+
   <!-- ================================================================== -->
   <!-- Clean.  Delete the build files, and their directories              -->
   <!-- ================================================================== -->
-  <target name="clean" description="Clean all old builds">
+  <target name="clean-contrib" description="Clean contrib">
+     <subant target="clean">        
+        <fileset file="src/contrib/build.xml"/>
+     </subant>  	
+  </target>
+  <target name="clean" depends="clean-contrib" description="Clean all old builds">
     <delete dir="${build.dir}"/>
+    <delete dir="${docs.src}/build"/>
   </target>
 </project>

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HTableDescriptor.java?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HTableDescriptor.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HTableDescriptor.java Mon Jun 29 03:03:59 2009
@@ -1,5 +1,5 @@
 /**
- * Copyright 2007 The Apache Software Foundation
+ * Copyright 2009 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -31,7 +31,6 @@
 import java.util.TreeMap;
 
 import org.apache.hadoop.fs.Path;
-//import org.apache.hadoop.hbase.client.tableindexed.IndexSpecification;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.rest.exception.HBaseRestException;
@@ -52,6 +51,7 @@
   // Changes prior to version 3 were not recorded here.
   // Version 3 adds metadata as a map where keys and values are byte[].
   // Version 4 adds indexes
+  // FIXME version 5 should remove indexes
   public static final byte TABLE_DESCRIPTOR_VERSION = 4;
 
   private byte [] name = HConstants.EMPTY_BYTE_ARRAY;
@@ -104,13 +104,7 @@
   // Key is hash of the family name.
   public final Map<byte [], HColumnDescriptor> families =
     new TreeMap<byte [], HColumnDescriptor>(KeyValue.FAMILY_COMPARATOR);
-//  private final Map<byte [], HColumnDescriptor> families =
-//    new TreeMap<byte [], HColumnDescriptor>(KeyValue.FAMILY_COMPARATOR);
-  
-  // Key is indexId
-//  private final Map<String, IndexSpecification> indexes =
-//    new HashMap<String, IndexSpecification>();
-  
+   
   /**
    * Private constructor used internally creating table descriptors for 
    * catalog tables: e.g. .META. and -ROOT-.
@@ -129,23 +123,6 @@
    * Private constructor used internally creating table descriptors for 
    * catalog tables: e.g. .META. and -ROOT-.
    */
-//  protected HTableDescriptor(final byte [] name, HColumnDescriptor[] families,
-//      Collection<IndexSpecification> indexes,
-//       Map<ImmutableBytesWritable,ImmutableBytesWritable> values) {
-//    this.name = name.clone();
-//    this.nameAsString = Bytes.toString(this.name);
-//    setMetaFlags(name);
-//    for(HColumnDescriptor descriptor : families) {
-//      this.families.put(descriptor.getName(), descriptor);
-//    }
-//    for(IndexSpecification index : indexes) {
-//      this.indexes.put(index.getIndexId(), index);
-//    }
-//    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry:
-//        values.entrySet()) {
-//      this.values.put(entry.getKey(), entry.getValue());
-//    }
-//  }
   protected HTableDescriptor(final byte [] name, HColumnDescriptor[] families,
       Map<ImmutableBytesWritable,ImmutableBytesWritable> values) {
     this.name = name.clone();
@@ -216,7 +193,6 @@
         desc.values.entrySet()) {
       this.values.put(e.getKey(), e.getValue());
     }
-//    this.indexes.putAll(desc.indexes);
   }
 
   /*
@@ -454,18 +430,6 @@
     setValue(MEMSTORE_FLUSHSIZE_KEY,
       Bytes.toBytes(Integer.toString(memstoreFlushSize)));
   }
-    
-//  public Collection<IndexSpecification> getIndexes() {
-//    return indexes.values();
-//  }
-//  
-//  public IndexSpecification getIndex(String indexId) {
-//    return indexes.get(indexId);
-//  }
-//  
-//  public void addIndex(IndexSpecification index) {
-//    indexes.put(index.getIndexId(), index);
-//  }
 
   /**
    * Adds a column family.
@@ -524,13 +488,6 @@
     s.append(FAMILIES);
     s.append(" => ");
     s.append(families.values());
-//    if (!indexes.isEmpty()) {
-//      // Don't emit if empty.  Has to do w/ transactional hbase.
-//      s.append(", ");
-//      s.append("INDEXES");
-//      s.append(" => ");
-//      s.append(indexes.values());
-//    }
     s.append('}');
     return s.toString();
   }
@@ -595,16 +552,9 @@
       c.readFields(in);
       families.put(c.getName(), c);
     }
-//    indexes.clear();
     if (version < 4) {
       return;
     }
-//    int numIndexes = in.readInt();
-//    for (int i = 0; i < numIndexes; i++) {
-//      IndexSpecification index = new IndexSpecification();
-//      index.readFields(in);
-//      addIndex(index);
-//    }
   }
 
   public void write(DataOutput out) throws IOException {
@@ -624,10 +574,6 @@
       HColumnDescriptor family = it.next();
       family.write(out);
     }
-//    out.writeInt(indexes.size());
-//    for(IndexSpecification index : indexes.values()) {
-//      index.write(out);
-//    }
   }
 
   // Comparable
@@ -733,4 +679,4 @@
   public void restSerialize(IRestSerializer serializer) throws HBaseRestException {
     serializer.serializeTableDescriptor(this);
   }
-}
\ No newline at end of file
+}
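
The readFields/write changes above drop the index-handling block but keep HTableDescriptor's version-gated wire format (a reader of a pre-version-4 stream simply stops early). A self-contained sketch of that idiom, with field names and values invented for illustration rather than taken from HTableDescriptor's real format:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

// Invented example of the version-gated read pattern: fields added in a later
// version are only read when the serialized version says they are present.
public class VersionedDescriptorSketch {
  static final byte CURRENT_VERSION = 4;

  String name = "";
  int flushSize = 0;   // pretend this field first appeared in version 4

  void write(DataOutput out) throws IOException {
    out.writeByte(CURRENT_VERSION);
    out.writeUTF(name);
    out.writeInt(flushSize);
  }

  void readFields(DataInput in) throws IOException {
    int version = in.readByte();
    name = in.readUTF();
    if (version < 4) {
      return;   // a version-3 writer never wrote flushSize; keep the default
    }
    flushSize = in.readInt();
  }

  public static void main(String[] args) throws IOException {
    VersionedDescriptorSketch d = new VersionedDescriptorSketch();
    d.name = "t1";
    d.flushSize = 64;
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    d.write(new DataOutputStream(bytes));
    VersionedDescriptorSketch copy = new VersionedDescriptorSketch();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
    System.out.println(copy.name + " " + copy.flushSize);   // prints: t1 64
  }
}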

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/KeyValue.java?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/KeyValue.java Mon Jun 29 03:03:59 2009
@@ -194,18 +194,10 @@
      * @return Type associated with passed code.
      */
     public static Type codeToType(final byte b) {
-      // This is messy repeating each type here below but no way around it; we
-      // can't use the enum ordinal.
-      if (b == Put.getCode()) {
-        return Put;
-      } else if (b == Delete.getCode()) {
-        return Delete;
-      } else if (b == DeleteColumn.getCode()) {
-        return DeleteColumn;
-      } else if (b == DeleteFamily.getCode()) {
-        return DeleteFamily;
-      } else if (b == Maximum.getCode()) {
-        return Maximum;
+      for (Type t : Type.values()) {
+        if (t.getCode() == b) {
+          return t;
+        }
       }
       throw new RuntimeException("Unknown code " + b);
     }
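
The codeToType change above replaces a hand-maintained if/else chain with a scan over the enum's own values, so adding a Type no longer requires touching the lookup. A minimal standalone sketch of that pattern (the byte codes below are illustrative placeholders, not necessarily KeyValue's actual values):

// Self-contained illustration of looking a Type up by its serialized byte code.
public class CodeToTypeSketch {
  enum Type {
    Put((byte) 4), Delete((byte) 8), DeleteColumn((byte) 12),
    DeleteFamily((byte) 14), Maximum((byte) -1);

    private final byte code;
    Type(byte code) { this.code = code; }
    byte getCode() { return code; }

    // Scan values() instead of repeating one branch per constant; per the
    // original KeyValue comment, the enum ordinal cannot stand in for the code.
    static Type codeToType(final byte b) {
      for (Type t : values()) {
        if (t.getCode() == b) {
          return t;
        }
      }
      throw new RuntimeException("Unknown code " + b);
    }
  }

  public static void main(String[] args) {
    System.out.println(Type.codeToType((byte) 8));   // prints: Delete
  }
}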

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java Mon Jun 29 03:03:59 2009
@@ -1805,7 +1805,7 @@
       // respect.
     }
 
-    protected void initialize() throws IOException {
+    public void initialize() throws IOException {
       nextScanner(this.scannerCaching);
     }
 

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java Mon Jun 29 03:03:59 2009
@@ -37,9 +37,6 @@
    * Create an unmodifyable copy of an HTableDescriptor
    * @param desc
    */
-//  UnmodifyableHTableDescriptor(final HTableDescriptor desc) {
-//    super(desc.getName(), getUnmodifyableFamilies(desc), desc.getIndexes(), desc.getValues());
-//  }
   UnmodifyableHTableDescriptor(final HTableDescriptor desc) {
     super(desc.getName(), getUnmodifyableFamilies(desc), desc.getValues());
   }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java Mon Jun 29 03:03:59 2009
@@ -39,7 +39,7 @@
  * A block cache implementation that is memory-aware using {@link HeapSize},
  * memory-bound using an LRU eviction algorithm, and concurrent: backed by a
  * {@link ConcurrentHashMap} and with a non-blocking eviction thread giving
- * constant-time {@link cacheBlock} and {@link getBlock} operations.<p>
+ * constant-time {@link #cacheBlock} and {@link #getBlock} operations.<p>
  * 
  * Contains three levels of block priority to allow for
  * scan-resistance and in-memory families.  A block is added with an inMemory

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLog.java?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLog.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLog.java Mon Jun 29 03:03:59 2009
@@ -290,12 +290,7 @@
         Path oldFile = cleanupCurrentWriter(this.filenum);
         this.filenum = System.currentTimeMillis();
         Path newPath = computeFilename(this.filenum);
-        this.writer = SequenceFile.createWriter(this.fs, this.conf, newPath,
-          HLogKey.class, KeyValue.class,
-          fs.getConf().getInt("io.file.buffer.size", 4096),
-          fs.getDefaultReplication(), this.blocksize,
-          SequenceFile.CompressionType.NONE, new DefaultCodec(), null,
-          new Metadata());
+        this.writer = createWriter(newPath);
         LOG.info((oldFile != null?
             "Roll " + FSUtils.getPath(oldFile) + ", entries=" +
             this.numEntries.get() +
@@ -326,6 +321,20 @@
     }
     return regionToFlush;
   }
+
+  protected SequenceFile.Writer createWriter(Path path) throws IOException {
+    return createWriter(path, HLogKey.class, KeyValue.class);
+  }
+  
+  protected SequenceFile.Writer createWriter(Path path,
+      Class<? extends HLogKey> keyClass, Class<? extends KeyValue> valueClass)
+      throws IOException {
+    return SequenceFile.createWriter(this.fs, this.conf, path, keyClass,
+        valueClass, fs.getConf().getInt("io.file.buffer.size", 4096), fs
+            .getDefaultReplication(), this.blocksize,
+        SequenceFile.CompressionType.NONE, new DefaultCodec(), null,
+        new Metadata());
+  }
   
   /*
    * Clean up old commit logs.
@@ -463,45 +472,56 @@
     }
   }
 
-
-  /** Append an entry without a row to the log.
+   /** Append an entry to the log.
    * 
    * @param regionInfo
    * @param logEdit
-   * @param now
+   * @param now Time of this edit write.
    * @throws IOException
    */
-  public void append(HRegionInfo regionInfo, KeyValue logEdit, final long now)
+  public void append(HRegionInfo regionInfo, KeyValue logEdit,
+    final long now)
   throws IOException {
-    this.append(regionInfo, new byte[0], logEdit, now);
+    byte [] regionName = regionInfo.getRegionName();
+    byte [] tableName = regionInfo.getTableDesc().getName();
+    this.append(regionInfo, makeKey(regionName, tableName, -1, now), logEdit);
   }
 
+  /**
+   * @param now
+   * @param regionName
+   * @param tableName
+   * @return New log key.
+   */
+  protected HLogKey makeKey(byte[] regionName, byte[] tableName, long seqnum, long now) {
+    return new HLogKey(regionName, tableName, seqnum, now);
+  }
+  
+  
+  
   /** Append an entry to the log.
    * 
    * @param regionInfo
-   * @param row
    * @param logEdit
-   * @param now Time of this edit write.
+   * @param logKey
    * @throws IOException
    */
-  public void append(HRegionInfo regionInfo, byte [] row, KeyValue logEdit,
-    final long now)
+  public void append(HRegionInfo regionInfo, HLogKey logKey, KeyValue logEdit)
   throws IOException {
     if (this.closed) {
       throw new IOException("Cannot append; log is closed");
     }
     byte [] regionName = regionInfo.getRegionName();
-    byte [] tableName = regionInfo.getTableDesc().getName();
     synchronized (updateLock) {
       long seqNum = obtainSeqNum();
+      logKey.setLogSeqNum(seqNum);
       // The 'lastSeqWritten' map holds the sequence number of the oldest
       // write for each region. When the cache is flushed, the entry for the
       // region being flushed is removed if the sequence number of the flush
       // is greater than or equal to the value in lastSeqWritten.
       this.lastSeqWritten.putIfAbsent(regionName, Long.valueOf(seqNum));
-      HLogKey logKey = new HLogKey(regionName, tableName, seqNum, now);
       boolean sync = regionInfo.isMetaRegion() || regionInfo.isRootRegion();
-      doWrite(logKey, logEdit, sync, now);
+      doWrite(logKey, logEdit, sync, logKey.getWriteTime());
       this.numEntries.incrementAndGet();
       updateLock.notifyAll();
     }
@@ -518,7 +538,7 @@
    *
    * Later, if we sort by these keys, we obtain all the relevant edits for a
    * given key-range of the HRegion (TODO). Any edits that do not have a
-   * matching {@link HConstants#COMPLETE_CACHEFLUSH} message can be discarded.
+   * matching COMPLETE_CACHEFLUSH message can be discarded.
    *
    * <p>
    * Logs cannot be restarted once closed, or once the HLog process dies. Each
@@ -536,7 +556,7 @@
    * @param now
    * @throws IOException
    */
-  void append(byte [] regionName, byte [] tableName, List<KeyValue> edits,
+  public void append(byte [] regionName, byte [] tableName, List<KeyValue> edits,
     boolean sync, final long now)
   throws IOException {
     if (this.closed) {
@@ -551,8 +571,7 @@
       this.lastSeqWritten.putIfAbsent(regionName, Long.valueOf(seqNum[0]));
       int counter = 0;
       for (KeyValue kv: edits) {
-        HLogKey logKey =
-          new HLogKey(regionName, tableName, seqNum[counter++], now);
+        HLogKey logKey = makeKey(regionName, tableName, seqNum[counter++], now);
         doWrite(logKey, kv, sync, now);
         this.numEntries.incrementAndGet();
       }
@@ -686,8 +705,8 @@
         return;
       }
       synchronized (updateLock) {
-        this.writer.append(new HLogKey(regionName, tableName, logSeqId,
-          System.currentTimeMillis()), completeCacheFlushLogEdit());
+        this.writer.append(makeKey(regionName, tableName, logSeqId, System.currentTimeMillis()),
+            completeCacheFlushLogEdit());
         this.numEntries.incrementAndGet();
         Long seq = this.lastSeqWritten.get(regionName);
         if (seq != null && logSeqId >= seq.longValue()) {
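
The HLog changes above pull writer and key construction out into protected factory methods (createWriter, makeKey) and let callers pass a pre-built HLogKey to append, so a contrib log implementation can substitute its own key type without copying the roll/append logic. A minimal, HBase-free sketch of that hook pattern, with every name invented for illustration:

import java.util.ArrayList;
import java.util.List;

// The base class owns the append flow; subclasses only override key creation,
// mirroring the protected makeKey hook added to HLog.
class BaseLogSketch {
  final List<String> entries = new ArrayList<String>();

  protected String makeKey(String region, String table, long seqnum, long now) {
    return region + "/" + table + "/" + seqnum + "@" + now;
  }

  void append(String region, String table, String edit, long now) {
    entries.add(makeKey(region, table, entries.size(), now) + " -> " + edit);
  }
}

// Stand-in for a contrib (e.g. transactional) log that tags its keys.
class TransactionalLogSketch extends BaseLogSketch {
  @Override
  protected String makeKey(String region, String table, long seqnum, long now) {
    return "txn:" + super.makeKey(region, table, seqnum, now);
  }
}

public class LogHookSketch {
  public static void main(String[] args) {
    BaseLogSketch log = new TransactionalLogSketch();
    log.append("region-1", "t1", "put row1/cf:q=v", System.currentTimeMillis());
    System.out.println(log.entries.get(0));   // txn:region-1/t1/0@<timestamp> -> put row1/cf:q=v
  }
}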

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java Mon Jun 29 03:03:59 2009
@@ -87,6 +87,10 @@
   public long getLogSeqNum() {
     return logSeqNum;
   }
+  
+  void setLogSeqNum(long logSeqNum) {
+    this.logSeqNum = logSeqNum;
+  }
 
   /**
    * @return the write time
@@ -156,4 +160,4 @@
   public long heapSize() {
     return this.regionName.length + this.tablename.length + HEAP_TAX;
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java Mon Jun 29 03:03:59 2009
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -311,6 +312,8 @@
       }
     }
 
+    // Play log if one.  Delete when done.
+    doReconstructionLog(oldLogFile, minSeqId, maxSeqId, reporter);
     if (fs.exists(oldLogFile)) {
       if (LOG.isDebugEnabled()) {
         LOG.debug("Deleting old log file: " + oldLogFile);
@@ -419,7 +422,7 @@
    * 
    * @throws IOException
    */
-  List<StoreFile> close(final boolean abort) throws IOException {
+  public List<StoreFile> close(final boolean abort) throws IOException {
     if (isClosed()) {
       LOG.warn("region " + this + " already closed");
       return null;
@@ -578,6 +581,7 @@
    * @throws IOException
    */
   HRegion [] splitRegion(final byte [] splitRow) throws IOException {
+    prepareToSplit();
     synchronized (splitLock) {
       if (closed.get()) {
         return null;
@@ -666,6 +670,10 @@
     }
   }
   
+  protected void prepareToSplit() {
+    // nothing
+  }
+  
   /*
    * @param dir
    * @return compaction directory for the passed in <code>dir</code>
@@ -1476,6 +1484,13 @@
   private boolean isFlushSize(final long size) {
     return size > this.memstoreFlushSize;
   }
+  
+  // Do any reconstruction needed from the log
+  protected void doReconstructionLog(Path oldLogFile, long minSeqId, long maxSeqId,
+    Progressable reporter)
+  throws UnsupportedEncodingException, IOException {
+    // Nothing to do (Replaying is done in HStores)
+  }
 
   protected Store instantiateHStore(Path baseDir, 
     HColumnDescriptor c, Path oldLogFile, Progressable reporter)

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Mon Jun 29 03:03:59 2009
@@ -1042,10 +1042,24 @@
         "running at " + this.serverInfo.getServerAddress().toString() +
         " because logdir " + logdir.toString() + " exists");
     }
+    HLog newlog = instantiateHLog(logdir);
+    return newlog;
+  }
+
+  // instantiate 
+  protected HLog instantiateHLog(Path logdir) throws IOException {
     HLog newlog = new HLog(fs, logdir, conf, hlogRoller);
     return newlog;
   }
 
+  
+  protected LogRoller getLogRoller() {
+    return hlogRoller;
+  }  
+  
+  /*
+   * @param interval Interval since last time metrics were called.
+   */
   protected void doMetrics() {
     try {
       metrics();
@@ -1576,7 +1590,7 @@
     getOutboundMsgs().add(new HMsg(HMsg.Type.MSG_REPORT_PROCESS_OPEN, hri));
   }
 
-  void closeRegion(final HRegionInfo hri, final boolean reportWhenCompleted)
+  protected void closeRegion(final HRegionInfo hri, final boolean reportWhenCompleted)
   throws IOException {
     HRegion region = this.removeFromOnlineRegions(hri);
     if (region != null) {

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/zoo.cfg
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/zoo.cfg?rev=789197&r1=789196&r2=789197&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/zoo.cfg (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/zoo.cfg Mon Jun 29 03:03:59 2009
@@ -9,6 +9,6 @@
 # the directory where the snapshot is stored.
 dataDir=${hbase.tmp.dir}/zookeeper
 # the port at which the clients will connect
-clientPort=2181
+clientPort=21810
 
 server.0=localhost:2888:3888


