hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From j...@apache.org
Subject svn commit: r653941 - in /hadoop/hbase/trunk: CHANGES.txt src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
Date Tue, 06 May 2008 22:18:24 GMT
Author: jimk
Date: Tue May  6 15:18:24 2008
New Revision: 653941

URL: http://svn.apache.org/viewvc?rev=653941&view=rev
Log:
HBASE-405   TIF and TOF use log4j directly rather than apache commons-logging

Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=653941&r1=653940&r2=653941&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Tue May  6 15:18:24 2008
@@ -30,6 +30,7 @@
    HBASE-609   Master doesn't see regionserver edits because of clock skew
    HBASE-607   MultiRegionTable.makeMultiRegionTable is not deterministic enough
                for regression tests
+   HBASE-405   TIF and TOF use log4j directly rather than apache commons-logging
 
   IMPROVEMENTS
    HBASE-559   MR example job to count table rows

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java?rev=653941&r1=653940&r2=653941&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java Tue May  6 15:18:24 2008
@@ -1,121 +1,109 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.mapred;
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapred.FileAlreadyExistsException;
-import org.apache.hadoop.mapred.InvalidJobConfException;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.OutputFormatBase;
-import org.apache.hadoop.mapred.RecordWriter;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.util.Progressable;
-
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.io.BatchUpdate;
-
-import org.apache.log4j.Logger;
-
-/**
- * Convert Map/Reduce output and write it to an HBase table
- */
-public class TableOutputFormat
-  extends OutputFormatBase<Text, BatchUpdate> {
-
-  /** JobConf parameter that specifies the output table */
-  public static final String OUTPUT_TABLE = "hbase.mapred.outputtable";
-
-  static final Logger LOG = Logger.getLogger(TableOutputFormat.class.getName());
-
-  /** constructor */
-  public TableOutputFormat() {
-    super();
-  }
-
-  /**
-   * Convert Reduce output (key, value) to (HStoreKey, KeyedDataArrayWritable) 
-   * and write to an HBase table
-   */
-  protected class TableRecordWriter
-    implements RecordWriter<Text, BatchUpdate> {
-    private HTable m_table;
-
-    /**
-     * Instantiate a TableRecordWriter with the HBase HClient for writing.
-     * 
-     * @param table
-     */
-    public TableRecordWriter(HTable table) {
-      m_table = table;
-    }
-
-    /** {@inheritDoc} */
-    public void close(@SuppressWarnings("unused") Reporter reporter) {
-      // Nothing to do.
-    }
-
-    /** {@inheritDoc} */
-    public void write(Text key, BatchUpdate value) throws IOException {
-      m_table.commit(value);
-    }
-  }
-  
-  /** {@inheritDoc} */
-  @Override
-  @SuppressWarnings("unchecked")
-  public RecordWriter getRecordWriter(
-      @SuppressWarnings("unused") FileSystem ignored,
-      JobConf job,
-      @SuppressWarnings("unused") String name,
-      @SuppressWarnings("unused") Progressable progress) throws IOException {
-    
-    // expecting exactly one path
-    
-    Text tableName = new Text(job.get(OUTPUT_TABLE));
-    HTable table = null;
-    try {
-      table = new HTable(new HBaseConfiguration(job), tableName);
-    } catch(IOException e) {
-      LOG.error(e);
-      throw e;
-    }
-    return new TableRecordWriter(table);
-  }
-
-  /** {@inheritDoc} */
-  @Override
-  @SuppressWarnings("unused")
-  public void checkOutputSpecs(FileSystem ignored, JobConf job)
-  throws FileAlreadyExistsException, InvalidJobConfException, IOException {
-    
-    String tableName = job.get(OUTPUT_TABLE);
-    if(tableName == null) {
-      throw new IOException("Must specify table name");
-    }
-  }
-}
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.mapred;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.io.BatchUpdate;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileAlreadyExistsException;
+import org.apache.hadoop.mapred.InvalidJobConfException;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.OutputFormatBase;
+import org.apache.hadoop.mapred.RecordWriter;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.util.Progressable;
+
+/**
+ * Convert Map/Reduce output and write it to an HBase table
+ */
+public class TableOutputFormat extends OutputFormatBase<Text, BatchUpdate> {
+
+  /** JobConf parameter that specifies the output table */
+  public static final String OUTPUT_TABLE = "hbase.mapred.outputtable";
+  private final Log LOG = LogFactory.getLog(TableOutputFormat.class);
+
+  /**
+   * Convert Reduce output (key, value) to (HStoreKey, KeyedDataArrayWritable) 
+   * and write to an HBase table
+   */
+  protected class TableRecordWriter
+    implements RecordWriter<Text, BatchUpdate> {
+    private HTable m_table;
+
+    /**
+     * Instantiate a TableRecordWriter with the HBase HClient for writing.
+     * 
+     * @param table
+     */
+    public TableRecordWriter(HTable table) {
+      m_table = table;
+    }
+
+    /** {@inheritDoc} */
+    public void close(@SuppressWarnings("unused") Reporter reporter) {
+      // Nothing to do.
+    }
+
+    /** {@inheritDoc} */
+    public void write(Text key, BatchUpdate value) throws IOException {
+      m_table.commit(value);
+    }
+  }
+  
+  /** {@inheritDoc} */
+  @Override
+  @SuppressWarnings("unchecked")
+  public RecordWriter getRecordWriter(
+      @SuppressWarnings("unused") FileSystem ignored,
+      JobConf job,
+      @SuppressWarnings("unused") String name,
+      @SuppressWarnings("unused") Progressable progress) throws IOException {
+    
+    // expecting exactly one path
+    
+    Text tableName = new Text(job.get(OUTPUT_TABLE));
+    HTable table = null;
+    try {
+      table = new HTable(new HBaseConfiguration(job), tableName);
+    } catch(IOException e) {
+      LOG.error(e);
+      throw e;
+    }
+    return new TableRecordWriter(table);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  @SuppressWarnings("unused")
+  public void checkOutputSpecs(FileSystem ignored, JobConf job)
+  throws FileAlreadyExistsException, InvalidJobConfException, IOException {
+    
+    String tableName = job.get(OUTPUT_TABLE);
+    if(tableName == null) {
+      throw new IOException("Must specify table name");
+    }
+  }
+}



Mime
View raw message