hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From st...@apache.org
Subject svn commit: r612016 [2/4] - in /lucene/hadoop/trunk/src/contrib/hbase: ./ conf/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/generated/master/ src/java/org/apache/hadoop/hbase/hql/ src/java/org/apache/hadoop/hbase/hql/formatter/ s...
Date Tue, 15 Jan 2008 05:14:37 GMT
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/HelpCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/HelpCommand.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/HelpCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/HelpCommand.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,183 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql;
+
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.UnsupportedEncodingException;
+import java.io.Writer;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+
+public class HelpCommand extends BasicCommand {
+  private String argument;
+  private static final String[] HEADER = new String[] { "Command",
+      "Description", "Example" };
+
+  /** application name */
+  public static final String APP_NAME = "Hbase Shell";
+
+  /** version of the code */
+  public static final String APP_VERSION = "0.0.2";
+
+  /** help contents map */
+  public final Map<String, String[]> help = new HashMap<String, String[]>();
+
+  private final TableFormatter formatter;
+
+  public HelpCommand(final Writer o, final TableFormatter f) {
+    super(o);
+    this.help.putAll(load());
+    this.formatter = f;
+  }
+
+  public ReturnMsg execute(@SuppressWarnings("unused")
+  HBaseConfiguration conf) {
+    try {
+      printHelp(this.argument);
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    return null;
+  }
+
+  public void setArgument(String argument) {
+    this.argument = argument;
+  }
+
+  /**
+   * add help contents
+   */
+  private Map<? extends String, ? extends String[]> load() {
+    Map<String, String[]> load = new HashMap<String, String[]>();
+    load.put("SHOW", new String[] { "Show information about selected title",
+        "SHOW TABLES [or substitution variable name];" });
+
+    load.put("FS", new String[] {
+        "Hadoop FsShell; entering a lone 'FS;' " + "will emit usage",
+        "FS [-option] arguments..;" });
+
+    load.put("JAR", new String[] { "Hadoop RunJar util",
+        "JAR jarFile [mainClass] arguments...;" });
+    load.put("CLEAR", new String[] { "Clear the screen", "CLEAR;" });
+
+    load.put("DESCRIBE", new String[] { "Print table information",
+        "[DESCRIBE|DESC] table_name;" });
+
+    load
+        .put(
+            "CREATE",
+            new String[] {
+                "Create tables",
+                "CREATE TABLE table_name (column_family_name [MAX_VERSIONS=n] "
+                    + "[MAX_LENGTH=n] [COMPRESSION=NONE|RECORD|BLOCK] [IN_MEMORY] "
+                    + "[BLOOMFILTER=NONE|BLOOMFILTER|COUNTING_BLOOMFILTER|RETOUCHED_BLOOMFILTER "
+                    + "VECTOR_SIZE=n NUM_HASH=n], " + "...)" });
+    load.put("DROP", new String[] { "Drop tables",
+        "DROP TABLE table_name [, table_name] ...;" });
+
+    load.put("INSERT", new String[] {
+        "Insert values into table",
+        "INSERT INTO table_name (column_name, ...) "
+            + "VALUES ('value', ...) WHERE row='row_key'" +
+            " [TIMESTAMP 'timestamp'];" });
+
+    load.put("DELETE", new String[] {
+        "Delete table data",
+        "DELETE {column_name, [, column_name] ... | *} FROM table_name "
+            + "WHERE row='row-key';" });
+
+    load.put("SELECT", new String[] {
+        "Select values from table",
+        "SELECT {column_name, [, column_name] ... | expr[alias] | * } FROM table_name "
+            + "[WHERE row='row_key' | STARTING FROM 'row-key' [UNTIL 'stop-key']] "
+            + "[NUM_VERSIONS = version_count] " + "[TIMESTAMP 'timestamp'] "
+            + "[LIMIT = row_count] " + "[INTO FILE 'file_name'];" });
+
+    load.put("ALTER", new String[] {
+        "Alter structure of table",
+        "ALTER TABLE table_name ADD column_spec | "
+            + "ADD (column_spec, column_spec, ...) | "
+            + "CHANGE column_family column_spec | "
+            + "DROP column_family_name | " + "CHANGE column_spec;" });
+
+    load.put("TRUNCATE", new String[] {
+         "Truncate table is used to clean all data from a table",
+         "TRUNCATE TABLE table_name;" });
+    
+    load.put("EXIT", new String[] { "Exit shell", "EXIT;" });
+
+    return load;
+  }
+
+  /**
+   * Print out the program version.
+   * 
+   * @throws IOException
+   */
+  public void printVersion() throws IOException {
+    println(APP_NAME + ", " + APP_VERSION + " version.\n"
+        + "Copyright (c) 2007 by udanax, "
+        + "licensed to Apache Software Foundation.\n"
+        + "Type 'help;' for usage.\n");
+  }
+
+  public void printHelp(final String cmd) throws IOException {
+    if (cmd.equals("")) {
+      println("Type 'help COMMAND;' to see command-specific usage.");
+      printHelp(this.help);
+    } else {
+      if (this.help.containsKey(cmd.toUpperCase())) {
+        final Map<String, String[]> m = new HashMap<String, String[]>();
+        m.put(cmd.toUpperCase(), this.help.get(cmd.toUpperCase()));
+        printHelp(m);
+      } else {
+        println("Unknown Command : Type 'help;' for usage.");
+      }
+    }
+  }
+
+  private void printHelp(final Map<String, String[]> m) throws IOException {
+    this.formatter.header(HEADER);
+    for (Map.Entry<String, String[]> e : m.entrySet()) {
+      String[] value = e.getValue();
+      if (value.length == 2) {
+        this.formatter.row(new String[] { e.getKey().toUpperCase(), value[0],
+            value[1] });
+      } else {
+        throw new IOException("Value has too many elements:" + value);
+      }
+    }
+    this.formatter.footer();
+  }
+
+  public static void main(String[] args) throws UnsupportedEncodingException {
+    HBaseConfiguration conf = new HBaseConfiguration();
+    Writer out = new OutputStreamWriter(System.out, "UTF-8");
+    TableFormatterFactory tff = new TableFormatterFactory(out, conf);
+    HelpCommand cmd = new HelpCommand(out, tff.get());
+    cmd.setArgument("");
+    cmd.execute(conf);
+    cmd.setArgument("select");
+    cmd.execute(conf);
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/InsertCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/InsertCommand.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/InsertCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/InsertCommand.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,120 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql;
+
+import java.io.IOException;
+import java.io.Writer;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConnection;
+import org.apache.hadoop.hbase.HConnectionManager;
+import org.apache.hadoop.hbase.HTable;
+import org.apache.hadoop.io.Text;
+
+/**
+ * Inserts values into tables.
+ */
+public class InsertCommand extends BasicCommand {
+  private Text tableName;
+  private List<String> columnfamilies;
+  private List<String> values;
+  private String rowKey;
+  private String timestamp = null;
+
+  public InsertCommand(Writer o) {
+    super(o);
+  }
+
+  public ReturnMsg execute(HBaseConfiguration conf) {
+    if (tableName == null || values == null || rowKey == null)
+      return new ReturnMsg(0, "Syntax error : Please check 'Insert' syntax.");
+
+    HConnection conn = HConnectionManager.getConnection(conf);
+    if (!conn.tableExists(tableName)) {
+      return new ReturnMsg(0, "'" + tableName + "'" + TABLE_NOT_FOUND);
+    }
+
+    if (columnfamilies.size() != values.size())
+      return new ReturnMsg(0,
+          "Mismatch between values list and columnfamilies list.");
+
+    try {
+      HTable table = new HTable(conf, tableName);
+      long lockId = table.startUpdate(getRow());
+
+      for (int i = 0; i < values.size(); i++) {
+        Text column = null;
+        if (getColumn(i).toString().contains(":"))
+          column = getColumn(i);
+        else
+          column = new Text(getColumn(i) + ":");
+        table.put(lockId, column, getValue(i));
+      }
+      
+      if(timestamp != null) 
+        table.commit(lockId, Long.parseLong(timestamp));
+      else
+        table.commit(lockId);
+
+      return new ReturnMsg(1, "1 row inserted successfully.");
+    } catch (IOException e) {
+      String[] msg = e.getMessage().split("[\n]");
+      return new ReturnMsg(0, msg[0]);
+    }
+  }
+
+  public void setTable(String table) {
+    this.tableName = new Text(table);
+  }
+
+  public void setColumnfamilies(List<String> columnfamilies) {
+    this.columnfamilies = columnfamilies;
+  }
+
+  public void setValues(List<String> values) {
+    this.values = values;
+  }
+
+  public void setRow(String row) {
+    this.rowKey = row;
+  }
+
+  public Text getRow() {
+    return new Text(this.rowKey);
+  }
+
+  public Text getColumn(int i) {
+    return new Text(this.columnfamilies.get(i));
+  }
+
+  public byte[] getValue(int i) {
+    return this.values.get(i).getBytes();
+  }
+  
+  public void setTimestamp(String timestamp) {
+    this.timestamp = timestamp;
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.INSERT;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/JarCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/JarCommand.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/JarCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/JarCommand.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,156 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.Writer;
+import java.lang.reflect.Array;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.jar.JarFile;
+import java.util.jar.Manifest;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.util.RunJar;
+
+/**
+ * Run hadoop jar commands.
+ */
+public class JarCommand extends BasicCommand {
+  private List<String> query;
+
+  public JarCommand(Writer o) {
+    super(o);
+  }
+
+  @SuppressWarnings("deprecation")
+  public ReturnMsg execute(@SuppressWarnings("unused")
+  HBaseConfiguration conf) {
+
+    try {
+      String[] args = getQuery();
+      String usage = "JAR jarFile [mainClass] args...;\n";
+
+      if (args.length < 1) {
+        return new ReturnMsg(0, usage);
+      }
+
+      int firstArg = 0;
+      String fileName = args[firstArg++];
+      File file = new File(fileName);
+      String mainClassName = null;
+
+      JarFile jarFile;
+      try {
+        jarFile = new JarFile(fileName);
+      } catch (IOException io) {
+        throw new IOException("Error opening job jar: " + fileName + "\n")
+            .initCause(io);
+      }
+
+      Manifest manifest = jarFile.getManifest();
+      if (manifest != null) {
+        mainClassName = manifest.getMainAttributes().getValue("Main-Class");
+      }
+      jarFile.close();
+
+      if (mainClassName == null) {
+        if (args.length < 2) {
+          return new ReturnMsg(0, usage);
+        }
+        mainClassName = args[firstArg++];
+      }
+      mainClassName = mainClassName.replaceAll("/", ".");
+
+      File tmpDir = new File(new Configuration().get("hadoop.tmp.dir"));
+      tmpDir.mkdirs();
+      if (!tmpDir.isDirectory()) {
+        return new ReturnMsg(0, "Mkdirs failed to create " + tmpDir + "\n");
+      }
+      final File workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
+      workDir.delete();
+      workDir.mkdirs();
+      if (!workDir.isDirectory()) {
+        return new ReturnMsg(0, "Mkdirs failed to create " + workDir + "\n");
+      }
+
+      Runtime.getRuntime().addShutdownHook(new Thread() {
+        public void run() {
+          try {
+            FileUtil.fullyDelete(workDir);
+          } catch (IOException e) {
+            e.printStackTrace();
+          }
+        }
+      });
+
+      RunJar.unJar(file, workDir);
+
+      ArrayList<URL> classPath = new ArrayList<URL>();
+      classPath.add(new File(workDir + "/").toURL());
+      classPath.add(file.toURL());
+      classPath.add(new File(workDir, "classes/").toURL());
+      File[] libs = new File(workDir, "lib").listFiles();
+      if (libs != null) {
+        for (int i = 0; i < libs.length; i++) {
+          classPath.add(libs[i].toURL());
+        }
+      }
+      ClassLoader loader = new URLClassLoader(classPath.toArray(new URL[0]));
+
+      Thread.currentThread().setContextClassLoader(loader);
+      Class<?> mainClass = Class.forName(mainClassName, true, loader);
+      Method main = mainClass.getMethod("main", new Class[] { Array.newInstance(
+          String.class, 0).getClass() });
+      String[] newArgs = Arrays.asList(args).subList(firstArg, args.length)
+          .toArray(new String[0]);
+      try {
+        main.invoke(null, new Object[] { newArgs });
+      } catch (InvocationTargetException e) {
+        throw e.getTargetException();
+      }
+    } catch (Throwable e) {
+      e.printStackTrace();
+    }
+
+    return null;
+  }
+
+  public void setQuery(List<String> query) {
+    this.query = query;
+  }
+
+  private String[] getQuery() {
+    return query.toArray(new String[] {});
+  }
+
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.SHELL;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/ReturnMsg.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/ReturnMsg.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/ReturnMsg.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/ReturnMsg.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,54 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+
+/**
+ * Message returned when a {@link Command} is
+ * {@link Command#execute(HBaseConfiguration)}'ed.
+ */
/**
 * Result of executing a shell {@link Command}: an integer status type plus
 * an optional human-readable message.
 */
public class ReturnMsg {
  private final int type;
  private final String msg;

  public ReturnMsg(int i, String string) {
    this.type = i;
    this.msg = string;
  }

  /** Message-less variant; the message defaults to the empty string. */
  public ReturnMsg(int i) {
    this(i, "");
  }

  public int getType() {
    return type;
  }

  public String getMsg() {
    return msg;
  }

  @Override
  public String toString() {
    return getMsg();
  }
}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,110 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql;
+
+import java.io.Writer;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.BloomFilterDescriptor;
+import org.apache.hadoop.hbase.BloomFilterDescriptor.BloomFilterType;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.io.Text;
+
+/**
+ * The base class of schema modification commands, CreateCommand and Alter
+ * Command. Provides utility methods for alteration operations.
+ */
+public abstract class SchemaModificationCommand extends BasicCommand {
+  protected int maxVersions;
+  protected int maxLength;
+  protected HColumnDescriptor.CompressionType compression;
+  protected boolean inMemory;
+  protected BloomFilterDescriptor bloomFilterDesc;
+  protected BloomFilterType bloomFilterType;
+  protected int vectorSize;
+  protected int numHash;
+  protected int numEntries;
+
+  public SchemaModificationCommand(Writer o) {
+    super(o);
+  }
+
+  protected void initOptions() {
+    maxVersions = HColumnDescriptor.DEFAULT_N_VERSIONS;
+    maxLength = HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH;
+    compression = HColumnDescriptor.DEFAULT_COMPRESSION_TYPE;
+    inMemory = HColumnDescriptor.DEFAULT_IN_MEMORY;
+    bloomFilterDesc = HColumnDescriptor.DEFAULT_BLOOM_FILTER_DESCRIPTOR;
+  }
+
+  /**
+   * Given a column name and column spec, returns an instance of
+   * HColumnDescriptor representing the column spec.
+   */
+  protected HColumnDescriptor getColumnDescriptor(String column,
+      Map<String, Object> columnSpec) throws IllegalArgumentException {
+    initOptions();
+
+    Set<String> specs = columnSpec.keySet();
+    for (String spec : specs) {
+      spec = spec.toUpperCase();
+
+      if (spec.equals("MAX_VERSIONS")) {
+        maxVersions = (Integer) columnSpec.get(spec);
+      } else if (spec.equals("MAX_LENGTH")) {
+        maxLength = (Integer) columnSpec.get(spec);
+      } else if (spec.equals("COMPRESSION")) {
+        compression = HColumnDescriptor.CompressionType
+            .valueOf(((String) columnSpec.get(spec)).toUpperCase());
+      } else if (spec.equals("IN_MEMORY")) {
+        inMemory = (Boolean) columnSpec.get(spec);
+      } else if (spec.equals("BLOOMFILTER")) {
+        bloomFilterType = BloomFilterType.valueOf(((String) columnSpec.get(spec))
+            .toUpperCase());
+      } else if (spec.equals("VECTOR_SIZE")) {
+        vectorSize = (Integer) columnSpec.get(spec);
+      } else if (spec.equals("NUM_HASH")) {
+        numHash = (Integer) columnSpec.get(spec);
+      } else if (spec.equals("NUM_ENTRIES")) {
+        numEntries = (Integer) columnSpec.get(spec);
+      } else {
+        throw new IllegalArgumentException("Invalid option: " + spec);
+      }
+    }
+
+    // Now we gather all the specified options for this column.
+    if (bloomFilterType != null) {
+      if (specs.contains("NUM_ENTRIES")) {
+        bloomFilterDesc = new BloomFilterDescriptor(bloomFilterType, numEntries);
+      } else {
+        bloomFilterDesc = new BloomFilterDescriptor(bloomFilterType, vectorSize,
+            numHash);
+      }
+    }
+
+    column = appendDelimiter(column);
+
+    HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
+        maxVersions, compression, inMemory, maxLength, bloomFilterDesc);
+
+    return columnDesc;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,385 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql;
+
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConnection;
+import org.apache.hadoop.hbase.HConnectionManager;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HScannerInterface;
+import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.HTable;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.Shell;
+import org.apache.hadoop.hbase.filter.RowFilterInterface;
+import org.apache.hadoop.hbase.filter.StopRowFilter;
+import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
+import org.apache.hadoop.hbase.hql.generated.HQLParser;
+import org.apache.hadoop.hbase.util.Writables;
+import org.apache.hadoop.io.Text;
+
+/**
+ * Selects values from tables.
+ */
+public class SelectCommand extends BasicCommand {
+  private Text tableName;
+  private Text rowKey = new Text("");
+  private Text stopRow = new Text("");
+  private List<String> columns;
+  private long timestamp;
+  private int limit;
+  // Count of versions to return.
+  private int version;
+  private boolean countFunction = false;
+  private boolean whereClause = false;
+  private static final String[] HEADER_ROW_CELL = new String[] { "Row", "Cell" };
+  private static final String[] HEADER_COLUMN_CELL = new String[] { "Column", "Cell" };
+  private static final String[] HEADER = new String[] { "Row", "Column", "Cell" };
+  private static final String ASTERISK = "*";
+
+  private final TableFormatter formatter;
+
+  // Not instantiable
+  @SuppressWarnings("unused")
+  private SelectCommand() {
+    this(null, null);
+  }
+
+  /**
+   * @param o writer query output is printed to
+   * @param f formatter used to lay out result tables
+   */
+  public SelectCommand(final Writer o, final TableFormatter f) {
+    super(o);
+    this.formatter = f;
+  }
+
+  public ReturnMsg execute(final HBaseConfiguration conf) {
+    if (tableName.equals("") || rowKey == null || columns.size() == 0) {
+      return new ReturnMsg(0, "Syntax error : Please check 'Select' syntax.");
+    }
+    try {
+      HConnection conn = HConnectionManager.getConnection(conf);
+      if (!conn.tableExists(tableName) && !isMetaTable()) {
+        return new ReturnMsg(0, "'" + tableName + "'" + TABLE_NOT_FOUND);
+      }
+
+      HTable table = new HTable(conf, tableName);
+      HBaseAdmin admin = new HBaseAdmin(conf);
+      int count = 0;
+      if (whereClause) {
+        if (countFunction) {
+          count = 1;
+        } else {
+          count = compoundWherePrint(table, admin);
+        }
+      } else {
+        count = scanPrint(table, admin);
+      }
+      return new ReturnMsg(1, Integer.toString(count) + " row(s) in set.");
+    } catch (IOException e) {
+      String[] msg = e.getMessage().split("[,]");
+      return new ReturnMsg(0, msg[0]);
+    }
+  }
+
+  private boolean isMetaTable() {
+    return (tableName.equals(HConstants.ROOT_TABLE_NAME) || tableName
+        .equals(HConstants.META_TABLE_NAME)) ? true : false;
+  }
+
+  /**
+   * Prints the cells selected by a WHERE row='...' clause for a single row,
+   * optionally fetching multiple versions per column.
+   *
+   * NOTE(review): this always returns 1 even though the local count tallies
+   * printed cells -- presumably "one row matched"; confirm against
+   * execute(), which reports the return value as the row count.
+   */
+  private int compoundWherePrint(HTable table, HBaseAdmin admin) {
+    int count = 0;
+    try {
+      if (version != 0) {
+        // A number of versions has been specified.
+        byte[][] result = null;
+        ParsedColumns parsedColumns = getColumns(admin, false);
+        boolean multiple = parsedColumns.isMultiple() || version > 1;
+        for (Text column : parsedColumns.getColumns()) {
+          // Emit the table header once, before the first cell is printed.
+          if (count == 0) {
+            formatter.header(multiple ? HEADER_COLUMN_CELL : null);
+          }
+          if (timestamp != 0) {
+            result = table.get(rowKey, column, timestamp, version);
+          } else {
+            result = table.get(rowKey, column, version);
+          }
+          for (int ii = 0; result != null && ii < result.length; ii++) {
+            if (multiple) {
+              formatter.row(new String[] { column.toString(),
+                  toString(column, result[ii]) });
+            } else {
+              formatter.row(new String[] { toString(column, result[ii]) });
+            }
+            count++;
+          }
+        }
+      } else {
+        // Single-version path: fetch the whole row and filter to the
+        // requested columns ('*' selects everything).
+        for (Map.Entry<Text, byte[]> e : table.getRow(rowKey).entrySet()) {
+          if (count == 0) {
+            formatter.header(isMultiple() ? HEADER_COLUMN_CELL : null);
+          }
+          Text key = e.getKey();
+          String keyStr = key.toString();
+          if (!columns.contains(ASTERISK) && !columns.contains(keyStr)) {
+            continue;
+          }
+          String cellData = toString(key, e.getValue());
+          if (isMultiple()) {
+            formatter.row(new String[] { key.toString(), cellData });
+          } else {
+            formatter.row(new String[] { cellData });
+          }
+          count++;
+        }
+      }
+
+      // HTML output still needs a header even when nothing matched.
+      if (count == 0 && Shell.HTML_OPTION != null) {
+        formatter.header(isMultiple() ? HEADER_COLUMN_CELL : null);
+      }
+      formatter.footer();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    return 1;
+  }
+
+  private String toString(final Text columnName, final byte[] cell)
+      throws IOException {
+    String result = null;
+    if (columnName.equals(HConstants.COL_REGIONINFO)
+        || columnName.equals(HConstants.COL_SPLITA)
+        || columnName.equals(HConstants.COL_SPLITA)) {
+      result = Writables.getHRegionInfoOrNull(cell).toString();
+    } else if (columnName.equals(HConstants.COL_STARTCODE)) {
+      result = Long.toString(Writables.bytesToLong(cell));
+    } else {
+      result = Writables.bytesToString(cell);
+    }
+    return result;
+  }
+
+  /**
+   * Holder for the column list parsed out of a query, plus a flag recording
+   * whether the query can yield more than one column.
+   */
+  class ParsedColumns {
+    private final List<Text> columnList;
+    private final boolean multipleColumns;
+
+    /**
+     * Convenience constructor; assumes the query may return many columns.
+     * @param columns Columns named by the query.
+     */
+    ParsedColumns(final List<Text> columns) {
+      this(columns, true);
+    }
+
+    /**
+     * @param columns Columns named by the query.
+     * @param isMultiple True when more than one column may come back.
+     */
+    ParsedColumns(final List<Text> columns, final boolean isMultiple) {
+      this.columnList = columns;
+      this.multipleColumns = isMultiple;
+    }
+
+    /** @return The columns to use scanning. */
+    public List<Text> getColumns() {
+      return this.columnList;
+    }
+
+    /** @return True if the scan may return more than one column. */
+    public boolean isMultiple() {
+      return this.multipleColumns;
+    }
+  }
+
+  /**
+   * Runs a scan over the table and prints each row through the formatter.
+   * When <code>countFunction</code> is set, rows are only counted, not
+   * printed.
+   *
+   * @param table Table to scan.
+   * @param admin Admin used to resolve the column list.
+   * @return Number of rows scanned.
+   */
+  private int scanPrint(HTable table, HBaseAdmin admin) {
+    int count = 0;
+    HScannerInterface scan = null;
+    try {
+      ParsedColumns parsedColumns = getColumns(admin, true);
+      Text[] cols = parsedColumns.getColumns().toArray(new Text[] {});
+      // Obtain exactly one scanner.  Previously a plain scanner was opened
+      // first and, when a stop row was supplied, a second scanner was opened
+      // on top of it, leaking the first one.
+      if (this.stopRow.toString().length() > 0) {
+        // TODO(review): the stop-row path ignores any supplied timestamp;
+        // confirm whether an obtainScanner overload taking both exists.
+        RowFilterInterface filter = new WhileMatchRowFilter(new StopRowFilter(
+            stopRow));
+        scan = table.obtainScanner(cols, rowKey, filter);
+      } else if (timestamp == 0) {
+        scan = table.obtainScanner(cols, rowKey);
+      } else {
+        scan = table.obtainScanner(cols, rowKey, timestamp);
+      }
+
+      HStoreKey key = new HStoreKey();
+      TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
+      // If only one column in query, then don't print out the column.
+      while (scan.next(key, results) && checkLimit(count)) {
+        if (count == 0 && !countFunction) {
+          formatter.header((parsedColumns.isMultiple()) ? HEADER : HEADER_ROW_CELL);
+        }
+
+        Text r = key.getRow();
+
+        if (!countFunction) {
+          for (Text columnKey : results.keySet()) {
+            String cellData = toString(columnKey, results.get(columnKey));
+            if (parsedColumns.isMultiple()) {
+              formatter.row(new String[] { r.toString(), columnKey.toString(),
+                  cellData });
+            } else {
+              // Don't print out the column since only one specified in query.
+              formatter.row(new String[] { r.toString(), cellData });
+            }
+            if (limit > 0 && count >= limit) {
+              break;
+            }
+          }
+        }
+
+        count++;
+        // Clear results else subsequent results polluted w/ previous finds.
+        results.clear();
+      }
+
+      if (count == 0 && Shell.HTML_OPTION != null && !countFunction) {
+        formatter.header((parsedColumns.isMultiple()) ? HEADER : HEADER_ROW_CELL);
+      }
+
+      formatter.footer();
+    } catch (IOException e) {
+      e.printStackTrace();
+    } finally {
+      // Always release the scanner, even when iteration fails part way;
+      // previously close() was skipped on any IOException above.
+      if (scan != null) {
+        try {
+          scan.close();
+        } catch (IOException e) {
+          e.printStackTrace();
+        }
+      }
+    }
+    return count;
+  }
+
+  /**
+   * Make sense of the supplied list of columns.
+   *
+   * @param admin Admin to use.
+   * @param scanning True when the columns will be handed to a scanner.
+   * @return Interpretation of supplied list of columns; may be null when the
+   * table is not found or listing tables fails.
+   */
+  public ParsedColumns getColumns(final HBaseAdmin admin, final boolean scanning) {
+    ParsedColumns result = null;
+    try {
+      if (columns.contains(ASTERISK)) {
+        if (tableName.equals(HConstants.ROOT_TABLE_NAME)
+            || tableName.equals(HConstants.META_TABLE_NAME)) {
+          // The catalog tables have a fixed set of column families.
+          result = new ParsedColumns(Arrays.asList(HConstants.COLUMN_FAMILY_ARRAY));
+        } else {
+          // Wildcard: expand to every family the table declares.
+          for (HTableDescriptor descriptor : admin.listTables()) {
+            if (descriptor.getName().equals(tableName)) {
+              result = new ParsedColumns(new ArrayList<Text>(descriptor.families()
+                  .keySet()));
+              break;
+            }
+          }
+        }
+      } else {
+        // Explicit column list.  When scanning, append '$' so the scanner's
+        // regex does an exact match on the supplied column name; a bare
+        // family name additionally gets a ':' so the default behavior of
+        // matching every column in that family applies.
+        List<Text> parsed = new ArrayList<Text>();
+        String suffix = scanning ? "$" : "";
+        for (String name : columns) {
+          if (name.contains(":")) {
+            parsed.add(new Text(name + suffix));
+          } else {
+            parsed.add(new Text(name + ":" + suffix));
+          }
+        }
+        result = new ParsedColumns(parsed, parsed.size() > 1);
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    return result;
+  }
+
+  /*
+   * @return True when the query names more than one column or uses the
+   * '*' wildcard.
+   */
+  private boolean isMultiple() {
+    if (this.columns.size() > 1) {
+      return true;
+    }
+    return this.columns.contains(ASTERISK);
+  }
+
+  /**
+   * @param count Rows emitted so far.
+   * @return True when another row may be emitted: either no limit is set
+   * (limit == 0) or the limit has not yet been reached.
+   */
+  private boolean checkLimit(int count) {
+    // Replaces the nested '? true : ... : false' ternary with the boolean
+    // expression it computed.
+    return this.limit == 0 || count < this.limit;
+  }
+
+  /** @param table Name of the table to select against. */
+  public void setTable(String table) {
+    this.tableName = new Text(table);
+  }
+
+  /** @param limit Maximum number of rows to return; 0 means no limit. */
+  public void setLimit(int limit) {
+    this.limit = limit;
+  }
+
+  /**
+   * Records that the query carried a WHERE clause.  The flag only ever
+   * latches to true; passing false leaves it unchanged.
+   * @param isWhereClause True when a where clause was parsed.
+   */
+  public void setWhere(boolean isWhereClause) {
+    this.whereClause = this.whereClause || isWhereClause;
+  }
+
+  /**
+   * @param timestamp Timestamp to select at, as a decimal string.  Note a
+   * non-numeric value makes parseLong throw NumberFormatException.
+   */
+  public void setTimestamp(String timestamp) {
+    this.timestamp = Long.parseLong(timestamp);
+  }
+
+  /** @param columns Column names from the query; may contain the '*' wildcard. */
+  public void setColumns(List<String> columns) {
+    this.columns = columns;
+  }
+
+  public void setRowKey(String rowKey) {
+    if (rowKey == null)
+      this.rowKey = null;
+    else
+      this.rowKey = new Text(rowKey);
+  }
+
+  /**
+   * @param countFunction True when the query only counts rows; scanPrint
+   * then suppresses per-row output.
+   */
+  public void setCountFunction(boolean countFunction) {
+    this.countFunction = countFunction;
+  }
+
+  /**
+   * @param stopRow Row at which scanning stops (used to build the stop-row
+   * filter in scanPrint).
+   */
+  public void setStopRow(String stopRow) {
+    this.stopRow = new Text(stopRow);
+  }
+
+  /**
+   * @param version Set maximum versions for this selection.  NOTE(review):
+   * the field is not consumed anywhere in this visible portion of the class;
+   * confirm the version limit is actually applied.
+   */
+  public void setVersion(int version) {
+    this.version = version;
+  }
+
+  /**
+   * Debugging entry point: parses and executes a canned select statement,
+   * writing its result to stdout as UTF-8.
+   * @param args Unused.
+   * @throws Exception on any parse or execution failure.
+   */
+  public static void main(String[] args) throws Exception {
+    Writer out = new OutputStreamWriter(System.out, "UTF-8");
+    HBaseConfiguration conf = new HBaseConfiguration();
+    // For debugging
+    TableFormatterFactory factory = new TableFormatterFactory(out, conf);
+    HQLParser parser = new HQLParser("select * from 'x' where row='x';", out, factory.get());
+    ReturnMsg msg = parser.terminatedCommand().execute(conf);
+    if (msg != null) {
+      out.write(msg.toString());
+    }
+    out.flush();
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/ShowCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/ShowCommand.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/ShowCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/ShowCommand.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,81 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql;
+
+import java.io.IOException;
+import java.io.Writer;
+
+import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HTableDescriptor;
+
+/**
+ * Shows all available tables.
+ */
+public class ShowCommand extends BasicCommand {
+  private static final String[] HEADER = new String[] { "Name", "Descriptor" };
+  private String command;
+  private final TableFormatter formatter;
+
+  // Not instantiable
+  @SuppressWarnings("unused")
+  private ShowCommand() {
+    this(null, null);
+  }
+
+  public ShowCommand(final Writer o, final TableFormatter f) {
+    this(o, f, null);
+  }
+
+  public ShowCommand(final Writer o, final TableFormatter f,
+      final String argument) {
+    super(o);
+    this.formatter = f;
+    this.command = argument;
+  }
+
+  /**
+   * Lists every table known to the master, one formatted row per table.
+   * @param conf Configuration used to reach the cluster.
+   * @return Message carrying the table count, or an error message.
+   */
+  public ReturnMsg execute(final HBaseConfiguration conf) {
+    if (command == null) {
+      return new ReturnMsg(0, "Syntax error : Please check 'Show' syntax.");
+    }
+    try {
+      HTableDescriptor[] tables = new HBaseAdmin(conf).listTables();
+      if (tables.length == 0) {
+        return new ReturnMsg(0, "No tables found.");
+      }
+      formatter.header(HEADER);
+      for (HTableDescriptor table : tables) {
+        formatter.row(new String[] { table.getName().toString(),
+            table.toString() });
+      }
+      formatter.footer();
+      return new ReturnMsg(1, tables.length + " table(s) in set.");
+    } catch (IOException e) {
+      return new ReturnMsg(0, "error msg : " + e.toString());
+    }
+  }
+
+  public void setArgument(String argument) {
+    this.command = argument;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TableFormatter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TableFormatter.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TableFormatter.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TableFormatter.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,63 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql;
+
+import java.io.IOException;
+import java.io.Writer;
+
+import org.apache.hadoop.hbase.hql.formatter.AsciiTableFormatter;
+
+/**
+ * Interface implemented by table formatters outputting select results.
+ * Implementations must have a constructor that takes a Writer (see
+ * TableFormatterFactory, which instantiates formatters reflectively via
+ * that constructor).
+ * 
+ * @see AsciiTableFormatter
+ */
+public interface TableFormatter {
+  /**
+   * Output header.
+   * 
+   * @param titles Titles to emit.
+   * @throws IOException If writing the header fails.
+   */
+  public void header(final String[] titles) throws IOException;
+
+  /**
+   * Output footer.
+   * 
+   * @throws IOException If writing the footer fails.
+   */
+  public void footer() throws IOException;
+
+  /**
+   * Output a row.
+   * 
+   * @param cells Cell values for one row, in column order.
+   * @throws IOException If writing the row fails.
+   */
+  public void row(final String[] cells) throws IOException;
+
+  /**
+   * @return Output stream being used (This is in interface to enforce fact that
+   *         formatters use Writers -- that they operate on character streams
+   *         rather than on byte streams).
+   */
+  public Writer getOut();
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TableFormatterFactory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TableFormatterFactory.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TableFormatterFactory.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TableFormatterFactory.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,83 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql;
+
+import java.io.Writer;
+import java.lang.reflect.Constructor;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.hql.formatter.AsciiTableFormatter;
+
+/**
+ * Table formatter. Specify formatter by setting "hbaseshell.formatter" property
+ * in <code>hbase-site.xml</code> or by setting system property
+ * <code>hbaseshell.formatter</code>. System property setting prevails over
+ * all other configurations. Outputs UTF-8 encoded Strings even if original data
+ * is binary.
+ * <p>
+ * TODO: Mysql has --skip-column-names and --silent which inserts a tab as
+ * separator. Also has --html and --xml.
+ * <p>
+ * To use the html formatter, currently set HBASE_OPTS as in:
+ * <code>$ HBASE_OPTS="-Dhbaseshell.formatter=org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter" ./bin/hbase shell</code>
+ * (the javadoc previously pointed at the old org.apache.hadoop.hbase.shell
+ * package and claimed a System.out-replacing static initializer that this
+ * class does not have).
+ * </p>
+ */
+public class TableFormatterFactory {
+  private static final Log LOG = LogFactory.getLog(TableFormatterFactory.class
+      .getName());
+  private static final String FORMATTER_KEY = "hbaseshell.formatter";
+  private final TableFormatter formatter;
+
+  /**
+   * Not instantiable
+   */
+  @SuppressWarnings( { "unchecked", "unused" })
+  private TableFormatterFactory() {
+    this(null, null);
+  }
+
+  /**
+   * @param out Writer passed to the formatter's constructor.
+   * @param c Configuration consulted for the formatter class name when the
+   * system property is unset; defaults to AsciiTableFormatter.
+   */
+  @SuppressWarnings("unchecked")
+  public TableFormatterFactory(final Writer out, final Configuration c) {
+    // System property wins over configuration.
+    String className = System.getProperty(FORMATTER_KEY);
+    if (className == null) {
+      className = c.get(FORMATTER_KEY, AsciiTableFormatter.class.getName());
+    }
+    LOG.debug("Table formatter class: " + className);
+    try {
+      // Formatter implementations must expose a (Writer) constructor.
+      Class<TableFormatter> clazz = (Class<TableFormatter>) Class
+          .forName(className);
+      Constructor<?> constructor = clazz.getConstructor(Writer.class);
+      this.formatter = (TableFormatter) constructor.newInstance(out);
+    } catch (Exception e) {
+      throw new RuntimeException("Failed instantiation of " + className, e);
+    }
+  }
+
+  /**
+   * @return The table formatter instance
+   */
+  public TableFormatter get() {
+    // Dropped a pointless @SuppressWarnings("unchecked"): this getter
+    // performs no unchecked operation.
+    return this.formatter;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TruncateCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TruncateCommand.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TruncateCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/TruncateCommand.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,86 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql;
+
+import java.io.IOException;
+import java.io.Writer;
+
+import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConnection;
+import org.apache.hadoop.hbase.HConnectionManager;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.io.Text;
+
+/**
+ * Truncate table is used to clean all data from a table.
+ */
+public class TruncateCommand extends BasicCommand {
+  private Text tableName;
+
+  public TruncateCommand(Writer o) {
+    super(o);
+  }
+
+  /**
+   * Truncates by deleting the table and re-creating it with the same column
+   * families.  NOTE(review): not atomic -- if the re-create fails after the
+   * delete, the table is lost; confirm this risk is acceptable.
+   *
+   * @param conf Configuration used to reach the cluster.
+   * @return Status message for the shell.
+   */
+  public ReturnMsg execute(final HBaseConfiguration conf) {
+    if (this.tableName == null)
+      return new ReturnMsg(0, "Syntax error : Please check 'Truncate' syntax.");
+
+    try {
+      HConnection conn = HConnectionManager.getConnection(conf);
+      HBaseAdmin admin = new HBaseAdmin(conf);
+
+      if (!conn.tableExists(tableName)) {
+        return new ReturnMsg(0, "Table not found.");
+      }
+
+      HTableDescriptor[] tables = conn.listTables();
+      HColumnDescriptor[] columns = null;
+      for (int i = 0; i < tables.length; i++) {
+        if (tables[i].getName().equals(tableName)) {
+          columns = tables[i].getFamilies().values().toArray(
+              new HColumnDescriptor[] {});
+          break;
+        }
+      }
+      if (columns == null) {
+        // Guard: table vanished between tableExists() and listTables();
+        // previously this fell through to a NullPointerException below.
+        return new ReturnMsg(0, "Table not found.");
+      }
+      println("Truncating a '" + tableName + "' table ... Please wait.");
+
+      admin.deleteTable(tableName); // delete the table
+      HTableDescriptor tableDesc = new HTableDescriptor(tableName.toString());
+      for (int i = 0; i < columns.length; i++) {
+        tableDesc.addFamily(columns[i]);
+      }
+      admin.createTable(tableDesc); // re-create the table
+    } catch (IOException e) {
+      return new ReturnMsg(0, "error msg : " + e.toString());
+    }
+    return new ReturnMsg(0, "'" + tableName + "' is successfully truncated.");
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = new Text(tableName);
+  }
+
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.DDL;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/formatter/AsciiTableFormatter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/formatter/AsciiTableFormatter.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/formatter/AsciiTableFormatter.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/formatter/AsciiTableFormatter.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql.formatter;
+
+import java.io.IOException;
+import java.io.Writer;
+
+import org.apache.hadoop.hbase.hql.TableFormatter;
+
+
+/**
+ * Formatter that outputs data inside an ASCII table.
+ * If only a single cell result, then no formatting is done.  Presumption is
+ * that client manages serial access outputting tables. Does not close passed
+ * {@link Writer}.
+ */
+public class AsciiTableFormatter implements TableFormatter {
+  private static final String COLUMN_DELIMITER = "| ";
+  private static final String COLUMN_CLOSER = "|";
+  private static final int DEFAULT_COLUMN_WIDTH = 26;
+  // Width is a line of content + delimiter
+  private int columnWidth = DEFAULT_COLUMN_WIDTH;
+  // Amount of width to use for a line of content.
+  private int columnContentWidth =
+    DEFAULT_COLUMN_WIDTH - COLUMN_DELIMITER.length();
+  // COLUMN_LINE is put at head and foot of a column and per column, is drawn
+  // as row delimiter
+  private String columnHorizLine;
+  private final String COLUMN_HORIZ_LINE_CLOSER = "+";
+  // Used padding content to fill column
+  private final String PADDING_CHAR = " ";
+  // True if we are to output no formatting.
+  private boolean noFormatting = false;
+  private final Writer out;
+  private final String LINE_SEPARATOR = System.getProperty("line.separator");
+
+  // Not instantiable
+  @SuppressWarnings("unused")
+  private AsciiTableFormatter() {
+    this(null);
+  }
+  
+  public AsciiTableFormatter(final Writer o) {
+    this.out = o;
+  }
+  
+  public Writer getOut() {
+    return this.out;
+  }
+  
+  /**
+   * @param titles List of titles.  Pass null if no formatting (i.e.
+   * no header, no footer, etc.
+   * @throws IOException 
+   */
+  public void header(String[] titles) throws IOException {
+    if (titles == null) {
+      // print nothing.
+      setNoFormatting(true);
+      return;
+    }
+    // Calculate width of columns.
+    this.columnWidth = titles.length == 1? 3 * DEFAULT_COLUMN_WIDTH:
+      titles.length == 2? 39: DEFAULT_COLUMN_WIDTH;
+    this.columnContentWidth = this.columnWidth - COLUMN_DELIMITER.length();
+    // Create the horizontal line to draw across the top of each column.
+    this.columnHorizLine = calculateColumnHorizLine(this.columnWidth);
+    // Print out a column topper per column.
+    printRowDelimiter(titles.length);
+    row(titles);
+  }
+
+  /**
+   * Emits one logical row.  A cell longer than the column content width
+   * wraps onto subsequent physical lines; shorter cells are space-padded.
+   * @param cells Cell values, one per column.
+   * @throws IOException If the write fails.
+   */
+  public void row(String [] cells) throws IOException {
+    if (isNoFormatting()) {
+      getOut().write(cells[0]);
+      getOut().flush();
+      return;
+    }
+    // Ok.  Output cells a line at a time w/ delimiters between cells.
+    int [] indexes = new int[cells.length];
+    boolean [] finished = new boolean[cells.length];
+    int finishedCount = 0;
+    while (finishedCount < cells.length) {
+      StringBuffer sb = new StringBuffer();
+      for (int i = 0; i < cells.length; i++) {
+        sb.append(COLUMN_DELIMITER);
+        int offset = indexes[i];
+        if (offset + this.columnContentWidth >= cells[i].length()) {
+          // Tail (possibly empty) of this cell fits on this line; pad out
+          // the remainder of the column.
+          String substr = cells[i].substring(offset);
+          sb.append(substr);
+          for (int j = 0; j < this.columnContentWidth - substr.length(); j++) {
+            sb.append(PADDING_CHAR);
+          }
+          indexes[i] = cells[i].length();
+          if (!finished[i]) {
+            // Count each column exactly once.  The previous version only
+            // counted a column when it emitted a NON-empty tail, so an empty
+            // cell was never counted and this loop spun forever.
+            finished[i] = true;
+            finishedCount++;
+          }
+        } else {
+          String substr = cells[i].substring(indexes[i],
+            indexes[i] + this.columnContentWidth);
+          indexes[i] += this.columnContentWidth;
+          sb.append(substr);
+        }
+      }
+      sb.append(COLUMN_CLOSER);
+      getOut().write(sb.toString());
+      getOut().write(LINE_SEPARATOR);
+      getOut().flush();
+    }
+    printRowDelimiter(cells.length);
+  }
+
+  public void footer() throws IOException {
+    if (isNoFormatting()) {
+      // If no formatting, output a newline to delimit cell and the
+      // result summary output at end of every command.
+      getOut().write(LINE_SEPARATOR);
+      getOut().flush();
+    }
+    // We're done. Clear flag.
+    setNoFormatting(false);
+  }
+  
+  // Draw one horizontal segment per column plus the closing '+'.
+  private void printRowDelimiter(final int columnCount) throws IOException {
+    for (int i = 0; i < columnCount; i++) {
+      getOut().write(this.columnHorizLine);
+    }
+    getOut().write(COLUMN_HORIZ_LINE_CLOSER);
+    getOut().write(LINE_SEPARATOR);
+    getOut().flush();
+  }
+  
+  // Build "+---...-" of the given total width (closer added by caller).
+  private String calculateColumnHorizLine(final int width) {
+    StringBuffer sb = new StringBuffer();
+    sb.append("+");
+    for (int i = 1; i < width; i++) {
+      sb.append("-");
+    }
+    return sb.toString();
+  }
+  
+  public boolean isNoFormatting() {
+    return this.noFormatting;
+  }
+
+  public void setNoFormatting(boolean noFormatting) {
+    this.noFormatting = noFormatting;
+  }
+}
\ No newline at end of file

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/formatter/HtmlTableFormatter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/formatter/HtmlTableFormatter.java?rev=612016&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/formatter/HtmlTableFormatter.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/hql/formatter/HtmlTableFormatter.java Mon Jan 14 21:14:34 2008
@@ -0,0 +1,183 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hql.formatter;
+
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.UnsupportedEncodingException;
+import java.io.Writer;
+import java.util.Locale;
+
+import org.apache.hadoop.hbase.hql.TableFormatter;
+
+import org.znerd.xmlenc.InvalidXMLException;
+import org.znerd.xmlenc.LineBreak;
+import org.znerd.xmlenc.XMLEncoder;
+import org.znerd.xmlenc.XMLOutputter;
+
+/**
+ * Formatter that outputs data inside an HTML table. If only a single cell
+ * result, then no formatting is done.  Presumption is that client manages
+ * serial access outputting tables.  Does not close passed {@link Writer}.
+ * Since hbase columns have no typing, the formatter presumes a type of
+ * UTF-8 String.  If cells contain images, etc., this formatter will mangle
+ * their display.
+ * <p>TODO: Uses xmlenc. Hopefully it flushes every so often (Claims its a 
+ * stream-based outputter).  Verify.
+ */
+public class HtmlTableFormatter implements TableFormatter {
+  private final XMLOutputter outputter;
+  private boolean noFormatting = false;
+  private final Writer out;
+  
+  // Uninstantiable
+  @SuppressWarnings("unused")
+  private HtmlTableFormatter() {
+    this(null);
+  }
+
+  /*
+   * An encoder that replaces illegal XML characters with the '@' sign.
+   */
+  private static class HbaseXMLEncoder extends XMLEncoder {
+    @SuppressWarnings("deprecation")
+    public HbaseXMLEncoder()
+    throws IllegalArgumentException, UnsupportedEncodingException {
+      super("UTF-8");
+    }
+    
+    @Override
+    public void text(Writer w, char c, boolean escape)
+    throws InvalidXMLException, IOException {
+      super.text(w, legalize(c), escape);
+    }
+    
+    @Override
+    public void text(Writer w, char[] cs, int start, int length, boolean b)
+        throws NullPointerException, IndexOutOfBoundsException,
+        InvalidXMLException, IOException {
+      for (int i = start; i < start + length; i++) {
+        cs[i] = legalize(cs[i]);
+      }
+      super.text(w, cs, start, length, b);
+    }
+    
+    /**
+     * If character is in range A, C, or E, then replace with '@'
+     * <pre>
+     * A   0-8     Control characters   -- Not allowed in XML 1.0 --
+     * B   9-10    Normal characters    Never needed
+     * C   11-12   Control characters   -- Not allowed in XML 1.0 --
+     * D   13      Normal character     Never needed
+     * E   14-31   Control characters   -- Not allowed in XML 1.0 --
+     * </pre>
+     * @param c Character to look at.
+     * @return
+     */
+    private char legalize(final char c) {
+      return (c <= 8 || c == 11 || c == 12 || (c >= 14 && c <= 31))? '@': c;
+    }
+  }
+  
+  public HtmlTableFormatter(final Writer o) {
+    this.out = o;
+    try {
+      // Looking at the xmlenc source, there should be no issue w/ wrapping
+      // the stream -- i.e. no hanging resources.
+      this.outputter = new XMLOutputter(this.out, new HbaseXMLEncoder());
+      String os = System.getProperty("os.name").toLowerCase();
+      // Shell likes the DOS output.
+      this.outputter.setLineBreak(os.contains("windows")?
+        LineBreak.DOS: LineBreak.UNIX);
+      this.outputter.setIndentation(" ");
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+  
+
+  /**
+   * @param titles List of titles.  Pass null if no formatting (i.e.
+   * no header, no footer, etc.
+   * @throws IOException 
+   */
+  public void header(String[] titles) throws IOException {
+    if (titles == null) {
+      // print nothing.
+      setNoFormatting(true);
+      return;
+    }
+    // Can't add a 'border=1' attribute because its included on the end in
+    
+    this.outputter.startTag("table");
+    this.outputter.startTag("tr");
+    for (int i = 0; i < titles.length; i++) {
+      this.outputter.startTag("th");
+      this.outputter.pcdata(titles[i]);
+      this.outputter.endTag();
+    }
+    this.outputter.endTag();
+  }
+
+  public void row(String [] cells) throws IOException{
+    if (isNoFormatting()) {
+      getOut().write(cells[0]);
+      return;
+    }
+    this.outputter.startTag("tr");
+    for (int i = 0; i < cells.length; i++) {
+      this.outputter.startTag("td");
+      this.outputter.pcdata(cells[i]);
+      this.outputter.endTag();
+    }
+    this.outputter.endTag();
+  }
+
+  public void footer() throws IOException {
+    if (!isNoFormatting()) {
+      // To close the table
+      this.outputter.endTag();
+      this.outputter.endDocument();
+    }
+    // We're done. Clear flag.
+    this.setNoFormatting(false);
+    // If no formatting, output a newline to delimit cell and the
+    // result summary output at end of every command.  If html, also emit a
+    // newline to delimit html and summary line.
+    getOut().write(System.getProperty("line.separator"));
+    getOut().flush();
+  }
+
+  public Writer getOut() {
+    return this.out;
+  }
+  
+  public boolean isNoFormatting() {
+    return this.noFormatting;
+  }
+
+  public void setNoFormatting(boolean noFormatting) {
+    this.noFormatting = noFormatting;
+  }
+  
+  public static void main(String[] args) throws IOException {
+    HtmlTableFormatter f =
+      new HtmlTableFormatter(new OutputStreamWriter(System.out, "UTF-8"));
+    f.header(new String [] {"a", "b"});
+    f.row(new String [] {"a", "b"});
+    f.footer();
+  }
+}
\ No newline at end of file



Mime
View raw message