From: stack@apache.org
To: hadoop-commits@lucene.apache.org
Reply-To: hadoop-dev@lucene.apache.org
Subject: svn commit: r603304 - in /lucene/hadoop/trunk/src/contrib/hbase: ./ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/shell/
Date: Tue, 11 Dec 2007 17:07:15 -0000
Message-Id: <20071211170717.0B3791A9832@eris.apache.org>
X-Mailer: svnmailer-1.0.8

Author: stack
Date: Tue Dec 11 09:07:12 2007
New Revision: 603304

URL: http://svn.apache.org/viewvc?rev=603304&view=rev
Log:
HADOOP-2395 Implement "ALTER TABLE ... CHANGE column" operation

Modified:
    lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMasterInterface.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java

Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?rev=603304&r1=603303&r2=603304&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Tue Dec 11 09:07:12 2007
@@ -15,6 +15,8 @@
    HADOOP-1550 No means of deleting a 'row' (Bryan Duxbury via Stack)
    HADOOP-2384 Delete all members of a column family on a specific row
                (Bryan Duxbury via Stack)
+   HADOOP-2395 Implement "ALTER TABLE ... CHANGE column" operation
+               (Bryan Duxbury via Stack)
 
   OPTIMIZATIONS
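For context, the new shell grammar (documented in HelpCommand.java below) is
"ALTER TABLE table_name CHANGE column_family column_spec;". A hypothetical
invocation, with made-up table and family names, and assuming the key=value
option keys that AlterCommand.getColumnDescriptor() accepts (MAX_VERSIONS,
MAX_LENGTH, COMPRESSION, IN_MEMORY, BLOOMFILTER, ...), might look like:

    ALTER TABLE webtable CHANGE anchor MAX_VERSIONS=5;

The command disables the table, merges the supplied options into the family's
existing descriptor, pushes the result to the master through the new
HBaseAdmin.modifyColumn() call, and re-enables the table.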
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java?rev=603304&r1=603303&r2=603304&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java Tue Dec 11 09:07:12 2007
@@ -479,6 +479,31 @@
       throw RemoteExceptionHandler.decodeRemoteException(e);
     }
   }
+
+  /**
+   * Modify an existing column family on a table
+   *
+   * @param tableName name of table
+   * @param columnName name of column to be modified
+   * @param descriptor new column descriptor to use
+   * @throws IOException
+   */
+  public void modifyColumn(Text tableName, Text columnName,
+    HColumnDescriptor descriptor)
+  throws IOException {
+    if (this.master == null) {
+      throw new MasterNotRunningException("master has been shut down");
+    }
+
+    checkReservedTableName(tableName);
+    try {
+      this.master.modifyColumn(tableName, columnName, descriptor);
+
+    } catch (RemoteException e) {
+      throw RemoteExceptionHandler.decodeRemoteException(e);
+    }
+  }
+
   /**
    * Shuts down the HBase instance
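A minimal client-side sketch of the new call path (not part of the patch): the
modifyColumn() signature and the six-argument HColumnDescriptor constructor match
the code in this diff, while the HBaseAdmin construction, the disableTable() and
enableTable() calls, and the table and family names are assumptions made for
illustration only.

    import java.io.IOException;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.hbase.HBaseAdmin;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;

    /** Hypothetical driver for the new modifyColumn() call; names are made up. */
    public class ModifyColumnExample {
      public static void main(String[] args) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(new HBaseConfiguration());

        Text tableName = new Text("webtable");
        Text columnName = new Text("anchor:");  // family names carry a trailing ':'

        // New descriptor for the family: bump max versions, keep defaults elsewhere.
        // Same constructor and DEFAULT_* constants the patch itself uses.
        HColumnDescriptor newDesc = new HColumnDescriptor(columnName,
            5,                                              // maxVersions
            HColumnDescriptor.DEFAULT_COMPRESSION_TYPE,
            false,                                          // inMemory
            HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH,
            null);                                          // no bloom filter

        admin.disableTable(tableName);   // the shell disables the table first
        admin.modifyColumn(tableName, columnName, newDesc);
        admin.enableTable(tableName);    // and re-enables it afterwards
      }
    }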
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java?rev=603304&r1=603303&r2=603304&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java Tue Dec 11 09:07:12 2007
@@ -2636,6 +2636,13 @@
   }
 
   /** {@inheritDoc} */
+  public void modifyColumn(Text tableName, Text columnName,
+    HColumnDescriptor descriptor)
+  throws IOException {
+    new ModifyColumn(tableName, columnName, descriptor).process();
+  }
+
+  /** {@inheritDoc} */
   public void deleteColumn(Text tableName, Text columnName) throws IOException {
     new DeleteColumn(tableName, HStoreKey.extractFamily(columnName)).process();
   }
@@ -3075,6 +3082,41 @@
       }
     }
   }
+
+  /** Instantiated to modify an existing column family on a table */
+  private class ModifyColumn extends ColumnOperation {
+    private HColumnDescriptor descriptor;
+    private Text columnName;
+
+    ModifyColumn(Text tableName, Text columnName, HColumnDescriptor _descriptor)
+      throws IOException {
+      super(tableName);
+      this.descriptor = _descriptor;
+      this.columnName = columnName;
+    }
+
+    @Override
+    protected void postProcessMeta(MetaRegion m, HRegionInterface server)
+      throws IOException {
+
+      for (HRegionInfo i: unservedRegions) {
+        // get the column families map from the table descriptor
+        Map<Text, HColumnDescriptor> families = i.getTableDesc().families();
+
+        // if the table already has this column, then put the new descriptor
+        // version.
+        if (families.get(columnName) != null) {
+          families.put(columnName, descriptor);
+          updateRegionInfo(server, m.getRegionName(), i);
+        }
+        else { // otherwise, we have an error.
+          throw new IOException("Column family '" + columnName +
+            "' doesn't exist, so cannot be modified.");
+        }
+      }
+    }
+  }
+
   /*
    * Managing leases

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMasterInterface.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMasterInterface.java?rev=603304&r1=603303&r2=603304&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMasterInterface.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMasterInterface.java Tue Dec 11 09:07:12 2007
@@ -61,6 +61,18 @@
   public void addColumn(Text tableName, HColumnDescriptor column) throws IOException;
 
   /**
+   * Modifies an existing column on the specified table
+   * @param tableName
+   * @param columnName name of the column to edit
+   * @param descriptor new column descriptor
+   * @throws IOException
+   */
+  public void modifyColumn(Text tableName, Text columnName,
+    HColumnDescriptor descriptor)
+  throws IOException;
+
+
+  /**
    * Deletes a column from the specified table
    * @param tableName
    * @param columnName

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java?rev=603304&r1=603303&r2=603304&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java Tue Dec 11 09:07:12 2007
@@ -28,10 +28,14 @@
 import org.apache.hadoop.hbase.HBaseAdmin;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.HConnection;
 import org.apache.hadoop.hbase.HConnectionManager;
 import org.apache.hadoop.io.Text;
 
+import org.apache.hadoop.hbase.BloomFilterDescriptor;
+import org.apache.hadoop.hbase.BloomFilterDescriptor.BloomFilterType;
+
 /**
  * Alters tables.
  */
@@ -79,8 +83,29 @@
       enableTable(admin, tableName);
       break;
     case CHANGE:
-      // Not yet supported
-      return new ReturnMsg(0, "" + operationType + " is not yet supported.");
+      disableTable(admin, tableName);
+
+      Map.Entry<String, Map<String, Object>> columnEntry =
+        (Map.Entry<String, Map<String, Object>>) columnSpecMap.entrySet().toArray()[0];
+
+      // add the : if there isn't one
+      Text columnName = new Text(columnEntry.getKey().endsWith(":") ?
+        columnEntry.getKey() : columnEntry.getKey() + ":");
+
+      // get the table descriptor so we can get the old column descriptor
+      HTableDescriptor tDesc = getTableDescByName(admin, tableName);
+      HColumnDescriptor oldColumnDesc = tDesc.families().get(columnName);
+
+      // combine the options specified in the shell with the options
+      // from the existing descriptor to produce the new descriptor
+      columnDesc = getColumnDescriptor(columnName.toString(),
+        columnEntry.getValue(), oldColumnDesc);
+
+      // send the changes out to the master
+      admin.modifyColumn(new Text(tableName), columnName, columnDesc);
+
+      enableTable(admin, tableName);
+      break;
     case NOOP:
       return new ReturnMsg(0, "Invalid operation type.");
     }
@@ -141,4 +166,84 @@
   public CommandType getCommandType() {
     return CommandType.DDL;
   }
+
+  private HTableDescriptor getTableDescByName(HBaseAdmin admin, String tableName)
+    throws IOException {
+    HTableDescriptor[] tables = admin.listTables();
+    for (HTableDescriptor tDesc : tables) {
+      if (tDesc.getName().toString().equals(tableName)) {
+        return tDesc;
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Given a column name, column spec, and original descriptor, returns an
+   * instance of HColumnDescriptor representing the column spec, with empty
+   * values drawn from the original as defaults
+   */
+  protected HColumnDescriptor getColumnDescriptor(String column,
+    Map<String, Object> columnSpec, HColumnDescriptor original)
+    throws IllegalArgumentException {
+    initOptions(original);
+
+    Set<String> specs = columnSpec.keySet();
+    for (String spec : specs) {
+      spec = spec.toUpperCase();
+
+      if (spec.equals("MAX_VERSIONS")) {
+        maxVersions = (Integer) columnSpec.get(spec);
+      } else if (spec.equals("MAX_LENGTH")) {
+        maxLength = (Integer) columnSpec.get(spec);
+      } else if (spec.equals("COMPRESSION")) {
+        compression = HColumnDescriptor.CompressionType
+          .valueOf(((String) columnSpec.get(spec)).toUpperCase());
+      } else if (spec.equals("IN_MEMORY")) {
+        inMemory = (Boolean) columnSpec.get(spec);
+      } else if (spec.equals("BLOOMFILTER")) {
+        bloomFilterType = BloomFilterType.valueOf(((String) columnSpec.get(spec))
+          .toUpperCase());
+      } else if (spec.equals("VECTOR_SIZE")) {
+        vectorSize = (Integer) columnSpec.get(spec);
+      } else if (spec.equals("NUM_HASH")) {
+        numHash = (Integer) columnSpec.get(spec);
+      } else if (spec.equals("NUM_ENTRIES")) {
+        numEntries = (Integer) columnSpec.get(spec);
+      } else {
+        throw new IllegalArgumentException("Invalid option: " + spec);
+      }
+    }
+
+    // Now we gather all the specified options for this column.
+    if (bloomFilterType != null) {
+      if (specs.contains("NUM_ENTRIES")) {
+        bloomFilterDesc = new BloomFilterDescriptor(bloomFilterType, numEntries);
+      } else {
+        bloomFilterDesc = new BloomFilterDescriptor(bloomFilterType, vectorSize,
+          numHash);
+      }
+    }
+
+    column = appendDelimiter(column);
+
+    HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
+      maxVersions, compression, inMemory, maxLength, bloomFilterDesc);
+
+    return columnDesc;
+  }
+
+  private void initOptions(HColumnDescriptor original) {
+    if (original == null) {
+      initOptions();
+      return;
+    }
+    maxVersions = original.getMaxVersions();
+    maxLength = original.getMaxValueLength();
+    compression = original.getCompression();
+    inMemory = original.isInMemory();
+    bloomFilterDesc = original.getBloomFilter();
+  }
+
 }
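The CHANGE path above is essentially a read-modify-write of the family descriptor:
the existing HColumnDescriptor supplies the defaults and the shell options override
individual fields. A rough standalone equivalent of that flow, using only the
listTables()/families() lookups, getters, and constructor that appear in this patch
(table name, family name, and the option being changed are hypothetical):

    import java.io.IOException;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.hbase.HBaseAdmin;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;

    /** Sketch of ALTER TABLE ... CHANGE done directly against the admin API. */
    public class ChangeColumnSketch {
      public static void main(String[] args) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(new HBaseConfiguration());
        Text tableName = new Text("webtable");
        Text columnName = new Text("anchor:");

        // Locate the table's descriptor, as getTableDescByName() does above.
        HTableDescriptor tableDesc = null;
        for (HTableDescriptor d : admin.listTables()) {
          if (d.getName().toString().equals(tableName.toString())) {
            tableDesc = d;
          }
        }
        // Assumes the table and family exist.
        HColumnDescriptor old = tableDesc.families().get(columnName);

        // Carry the existing settings over (same getters initOptions(original)
        // uses) and change only the in-memory flag.
        HColumnDescriptor changed = new HColumnDescriptor(columnName,
            old.getMaxVersions(),
            old.getCompression(),
            true,                                  // new in-memory setting
            old.getMaxValueLength(),
            old.getBloomFilter());

        admin.disableTable(tableName);
        admin.modifyColumn(tableName, columnName, changed);
        admin.enableTable(tableName);
      }
    }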
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java?rev=603304&r1=603303&r2=603304&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java Tue Dec 11 09:07:12 2007
@@ -116,6 +116,7 @@
         "Alter structure of table",
         "ALTER TABLE table_name ADD column_spec | " +
         "ADD (column_spec, column_spec, ...) | " +
+        "CHANGE column_family column_spec | " +
         "DROP column_family_name | " +
         "CHANGE column_spec;" });
     load.put("EXIT", new String[] { "Exit shell", "EXIT;" });

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java?rev=603304&r1=603303&r2=603304&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java Tue Dec 11 09:07:12 2007
@@ -33,21 +33,21 @@
  * Command. Provides utility methods for alteration operations.
  */
 public abstract class SchemaModificationCommand extends BasicCommand {
-  private int maxVersions;
-  private int maxLength;
-  private HColumnDescriptor.CompressionType compression;
-  private boolean inMemory;
-  private BloomFilterDescriptor bloomFilterDesc;
-  private BloomFilterType bloomFilterType;
-  private int vectorSize;
-  private int numHash;
-  private int numEntries;
+  protected int maxVersions;
+  protected int maxLength;
+  protected HColumnDescriptor.CompressionType compression;
+  protected boolean inMemory;
+  protected BloomFilterDescriptor bloomFilterDesc;
+  protected BloomFilterType bloomFilterType;
+  protected int vectorSize;
+  protected int numHash;
+  protected int numEntries;
 
   public SchemaModificationCommand(Writer o) {
     super(o);
   }
 
-  private void initOptions() {
+  protected void initOptions() {
     maxVersions = HColumnDescriptor.DEFAULT_N_VERSIONS;
     maxLength = HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH;
     compression = HColumnDescriptor.DEFAULT_COMPRESSION_TYPE;