hbase-commits mailing list archives

From st...@apache.org
Subject svn commit: r647989 - in /hadoop/hbase/trunk: CHANGES.txt src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java src/java/overview.html
Date Mon, 14 Apr 2008 20:33:07 GMT
Author: stack
Date: Mon Apr 14 13:33:03 2008
New Revision: 647989

URL: http://svn.apache.org/viewvc?rev=647989&view=rev
Log:
HBASE-573 HBase does not read hadoop-*.xml for dfs configuration after moving out hadoop/contrib

Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java
    hadoop/hbase/trunk/src/java/overview.html

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=647989&r1=647988&r2=647989&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Mon Apr 14 13:33:03 2008
@@ -1,8 +1,10 @@
 Hbase Change Log
   BUG FIXES
    HBASE-574   HBase does not load hadoop native libs (Rong-En Fan via Stack)
+   HBASE-573   HBase does not read hadoop-*.xml for dfs configuration after 
+               moving out hadoop/contrib
 
-Release 0.1.1 04/11/2008
+Release 0.1.1 - 04/11/2008
 
   INCOMPATIBLE CHANGES
    HBASE-521   Improve client scanner interface

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java?rev=647989&r1=647988&r2=647989&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java Mon Apr 14 13:33:03 2008
@@ -25,12 +25,9 @@
 import java.util.Map;
 
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.hbase.io.Cell;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
@@ -85,8 +82,6 @@
    * 
    * Pass the new key and value to reduce.
    * If any of the grouping columns are not found in the value, the record is skipped.
-   *
-   * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter)
    */
   @Override
   public void map(@SuppressWarnings("unused") Text key,

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java?rev=647989&r1=647988&r2=647989&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java Mon Apr 14 13:33:03 2008
@@ -21,13 +21,11 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.hbase.io.RowResult;
 
 /**
  * Pass the given key and record as-is to reduce
@@ -48,6 +46,7 @@
    * @param mapper mapper class
    * @param job job configuration
    */
+  @SuppressWarnings("unchecked")
   public static void initJob(String table, String columns,
     Class<? extends TableMap> mapper, JobConf job) {
     TableMap.initJob(table, columns, mapper, Text.class, RowResult.class, job);
@@ -55,8 +54,6 @@
 
   /**
    * Pass the key, value to reduce
-   *
-   * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter)
    */
   @Override
   public void map(Text key, RowResult value,
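
For orientation, here is a minimal, hypothetical sketch (not part of this commit) of how the initJob method shown in the hunk above might be wired into a job. The class name, the table name "mytable", the column specification "contents:", and the job name are illustrative placeholders.

    import org.apache.hadoop.hbase.mapred.IdentityTableMap;
    import org.apache.hadoop.mapred.JobConf;

    public class IdentityTableMapJobSetup {
      public static void main(String[] args) {
        // Build a job configuration for a pass over an HBase table.
        JobConf job = new JobConf(IdentityTableMapJobSetup.class);
        job.setJobName("identity-table-map-example");
        // Per the signature above, initJob registers the mapper and sets the
        // map output types to Text / RowResult for the named table and columns.
        IdentityTableMap.initJob("mytable", "contents:", IdentityTableMap.class, job);
        // A reducer, output format, and JobClient.runJob(job) call would follow.
      }
    }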

Modified: hadoop/hbase/trunk/src/java/overview.html
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/overview.html?rev=647989&r1=647988&r2=647989&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/overview.html (original)
+++ hadoop/hbase/trunk/src/java/overview.html Mon Apr 14 13:33:03 2008
@@ -22,23 +22,25 @@
     <title>HBase</title>
 </head>
 <body bgcolor="white">
-<a href="http://hbase.org">HBase</a> builds on <a href="http://hadoop.apache.org/core">Hadoop Core</a>
-to build a scalable, distributed database.
+<a href="http://hbase.org">HBase</a> is a scalable, distributed database built on <a href="http://hadoop.apache.org/core">Hadoop Core</a>.
 
 <h2><a name="requirements">Requirements</a></h2>
 <ul>
-<li>Java 1.5.x, preferably from <a href="http://www.java.com/en/download/">Sun</a>.</li>
+  <li>Java 1.5.x, preferably from <a href="http://www.java.com/en/download/">Sun</a>.
+  </li>
+  <li>
+    ssh must be installed and sshd must be running to use Hadoop's
+    scripts to manage remote Hadoop daemons.
+  </li>
 </ul>
 
 <h2><a name="getting_started" >Getting Started</a></h2>
 <p>
-What follows presumes you are installing HBase for the first time. If upgrading your
+What follows presumes you have obtained a copy of HBase and are installing
+for the first time. If upgrading your
 HBase instance, see <a href="#upgrading">Upgrading</a>.
 </p>
 <p>
-Start by defining the following environment variables for your convenience:
-</p>
-<p>
 <ul>
 <li><code>${HBASE_HOME}</code>: Set HBASE_HOME to the location of the HBase root: e.g. <code>/user/local/hbase</code>.
 </li>
@@ -62,8 +64,7 @@
 The former needs to be pointed at the running Hadoop DFS instance.  The latter file lists
 all the members of the HBase cluster.
 </p>
-<p>
-Use <code>hbase-site.xml</code> to override the properties defined in 
+<p>Use <code>hbase-site.xml</code> to override the properties defined in
 <code>${HBASE_HOME}/conf/hbase-default.xml</code> (<code>hbase-default.xml</code> itself
 should never be modified).  At a minimum the <code>hbase.master</code> and the
 <code>hbase.rootdir</code> properties should be redefined
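
As an aside (not part of this patch), a small sketch of how a client could confirm that the hbase-site.xml overrides described above are in effect; it assumes the HBaseConfiguration class of this codebase, which layers hbase-site.xml over hbase-default.xml when both are on the classpath. The class name is a placeholder.

    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class PrintHBaseProps {
      public static void main(String[] args) {
        // hbase-default.xml is read first, then hbase-site.xml, so values
        // redefined in hbase-site.xml win.
        HBaseConfiguration conf = new HBaseConfiguration();
        // hbase.master and hbase.rootdir are the two properties the text
        // above says should be redefined at a minimum.
        System.out.println("hbase.master  = " + conf.get("hbase.master"));
        System.out.println("hbase.rootdir = " + conf.get("hbase.rootdir"));
      }
    }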
@@ -97,6 +98,17 @@
 The <code>regionserver</code> file lists all the hosts running HRegionServers, one
 host per line  (This file is HBase synonym of the hadoop slaves file at 
 <code>${HADOOP_HOME}/conf/slaves</code>).
+</p>
+<p>Of note, if you have made <i>HDFS client configuration</i> changes on your hadoop cluster, hbase will not
+see this configuration unless you do one of the following:
+<ul>
+    <li>Add a pointer to your <code>HADOOP_CONF_DIR</code> to <code>CLASSPATH</code> in <code>hbase-env.sh</code>,</li>
+    <li>Add a copy of <code>hadoop-site.xml</code> to <code>${HBASE_HOME}/conf</code>, or</li>
+    <li>If you have only a small set of HDFS client configurations, add them to <code>hbase-site.xml</code>.</li>
+</ul>
+An example of such an HDFS client configuration is <code>dfs.replication</code>. If, for example,
+you want to run with a replication factor of 5, hbase will create files with
+the default replication factor of 3 unless you do one of the above to make the configuration available to hbase.
 </p>
 
 <h2><a name="runandconfirm">Running and Confirming Your Installation</a></h2>


