phoenix-commits mailing list archives

From jeffr...@apache.org
Subject [38/50] git commit: merge tip of master to 4.0
Date Mon, 10 Mar 2014 06:20:55 GMT
merge tip of master to 4.0


Project: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/commit/662cabfe
Tree: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/tree/662cabfe
Diff: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/diff/662cabfe

Branch: refs/heads/master
Commit: 662cabfe8a25b81d7542167b06c1893ac9052241
Parents: b3a330c 6228d0b
Author: Jeffrey Zhong <jzhong@JZhongs-MacBook-Pro.local>
Authored: Tue Mar 4 18:30:59 2014 -0800
Committer: Jeffrey Zhong <jzhong@JZhongs-MacBook-Pro.local>
Committed: Wed Mar 5 11:07:17 2014 -0800

----------------------------------------------------------------------
 bin/performance.py                              |   2 +-
 bin/readme.txt                                  |   8 +-
 examples/STOCK_SYMBOL.csv                       |   9 +
 examples/STOCK_SYMBOL.sql                       |   5 +
 examples/WEB_STAT.csv                           |  39 +
 examples/WEB_STAT.sql                           |  10 +
 examples/WEB_STAT_QUERIES.sql                   |  17 +
 examples/stock_symbol.csv                       |   9 -
 examples/stock_symbol.sql                       |   5 -
 examples/web_stat.csv                           |  39 -
 examples/web_stat.sql                           |  10 -
 examples/web_stat_queries.sql                   |  17 -
 phoenix-assembly/src/build/all.xml              |   8 +
 phoenix-assembly/src/build/client.xml           |   3 +
 phoenix-core/pom.xml                            |  22 +-
 .../java/org/apache/commons/csv/Assertions.java |  36 +
 .../java/org/apache/commons/csv/CSVFormat.java  | 884 +++++++++++++++++++
 .../java/org/apache/commons/csv/CSVParser.java  | 470 ++++++++++
 .../java/org/apache/commons/csv/CSVPrinter.java | 429 +++++++++
 .../java/org/apache/commons/csv/CSVRecord.java  | 225 +++++
 .../java/org/apache/commons/csv/Constants.java  |  68 ++
 .../commons/csv/ExtendedBufferedReader.java     | 178 ++++
 .../main/java/org/apache/commons/csv/Lexer.java | 431 +++++++++
 .../main/java/org/apache/commons/csv/Quote.java |  48 +
 .../main/java/org/apache/commons/csv/Token.java |  75 ++
 .../org/apache/commons/csv/package-info.java    |  82 ++
 .../apache/phoenix/compile/FromCompiler.java    |   7 +-
 .../apache/phoenix/compile/OrderByCompiler.java |  11 +-
 .../phoenix/compile/ProjectionCompiler.java     |  63 +-
 .../phoenix/exception/SQLExceptionCode.java     |   2 +-
 .../apache/phoenix/execute/MutationState.java   |   2 +-
 .../expression/ComparisonExpression.java        |  11 +-
 .../phoenix/iterate/ParallelIterators.java      |  65 +-
 .../apache/phoenix/jdbc/PhoenixConnection.java  |   4 +
 .../apache/phoenix/map/reduce/MapReduceJob.java |  20 +-
 .../query/ConnectionQueryServicesImpl.java      | 372 ++++----
 .../query/ConnectionlessQueryServicesImpl.java  |   4 +
 .../apache/phoenix/query/QueryConstants.java    |   2 +-
 .../org/apache/phoenix/query/StatsManager.java  |   2 +
 .../apache/phoenix/query/StatsManagerImpl.java  |   6 +-
 .../apache/phoenix/schema/MetaDataClient.java   |  59 +-
 .../apache/phoenix/schema/PArrayDataType.java   |   2 +-
 .../org/apache/phoenix/schema/PDataType.java    |  21 +-
 .../apache/phoenix/util/CSVCommonsLoader.java   | 419 +++++++++
 .../java/org/apache/phoenix/util/CSVLoader.java | 247 ------
 .../org/apache/phoenix/util/ColumnInfo.java     |   6 +
 .../org/apache/phoenix/util/PhoenixRuntime.java |   7 +-
 .../phoenix/compile/QueryCompilerTest.java      |  71 +-
 .../org/apache/phoenix/end2end/ArrayTest.java   | 175 ++++
 ...BaseParallelIteratorsRegionSplitterTest.java |  92 ++
 .../phoenix/end2end/CSVCommonsLoaderTest.java   | 560 ++++++++++++
 .../apache/phoenix/end2end/CSVLoaderTest.java   | 341 -------
 ...aultParallelIteratorsRegionSplitterTest.java | 214 +----
 .../phoenix/end2end/StatsManagerTest.java       | 195 ++++
 .../end2end/UpsertSelectAutoCommitTest.java     |  49 +
 .../phoenix/end2end/index/MutableIndexTest.java |  14 +
 pom.xml                                         |  20 +-
 57 files changed, 4972 insertions(+), 1220 deletions(-)
----------------------------------------------------------------------
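
[Editor's note] The diffstat above shows two notable changes beyond the upper-casing of the example files: a snapshot of Apache Commons CSV (CSVFormat, CSVParser, CSVRecord, and friends) is bundled under org.apache.commons.csv, and the old CSVLoader/CSVLoaderTest are replaced by CSVCommonsLoader/CSVCommonsLoaderTest built on top of it. Below is a minimal sketch of the Commons CSV parsing loop the new loader relies on, assuming the bundled snapshot exposes the same API as released org.apache.commons.csv; the sample data is merely shaped like examples/STOCK_SYMBOL.csv, not taken from it.

    import java.io.StringReader;
    import org.apache.commons.csv.CSVFormat;
    import org.apache.commons.csv.CSVParser;
    import org.apache.commons.csv.CSVRecord;

    public class CsvParseSketch {
        public static void main(String[] args) throws Exception {
            // Two rows in (symbol, company) shape, illustrative only.
            String csv = "AAPL,APPLE Inc.\nCRM,SALESFORCE\n";
            // CSVFormat.DEFAULT: comma-separated, double-quote quoting.
            CSVParser parser = new CSVParser(new StringReader(csv), CSVFormat.DEFAULT);
            for (CSVRecord record : parser) {
                // Fields are addressed by position, or by name when a header is configured.
                System.out.println(record.get(0) + " -> " + record.get(1));
            }
        }
    }
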


http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/662cabfe/phoenix-assembly/src/build/all.xml
----------------------------------------------------------------------
diff --cc phoenix-assembly/src/build/all.xml
index 13c7f2d,1f57730..e0c9ac3
--- a/phoenix-assembly/src/build/all.xml
+++ b/phoenix-assembly/src/build/all.xml
@@@ -35,6 -35,27 +35,11 @@@
        <!-- Enable access to all projects in the current multimodule build. Eclipse 
          says this is an error, but builds from the command line just fine. -->
        <useAllReactorProjects>true</useAllReactorProjects>
 -       <!-- Include all the sources in the top directory -->
 -      <sources>
 -         <fileSets>
 -          <fileSet>
 -            <!-- Make sure this excludes is same as the phoenix-hadoop2-compat
 -                 excludes below -->
 -            <excludes>
 -              <exclude>target/</exclude>
 -              <exclude>test/</exclude>
 -              <exclude>.classpath</exclude>
 -              <exclude>.project</exclude>
 -              <exclude>.settings/</exclude>
 -            </excludes>
 -          </fileSet>
 -        </fileSets>
 -      </sources>
+       <!-- Binaries for the dependencies also go in the lib directory -->
+       <binaries>
+         <outputDirectory>lib</outputDirectory>
+         <unpack>false</unpack>
+       </binaries>
      </moduleSet>
    </moduleSets>
  
@@@ -171,6 -143,6 +176,9 @@@
          <include>org.apache.hbase:hbase*</include>
          <include>net.sf.opencsv:opencsv</include>
          <include>org.antlr:antlr</include>
++        <include>org.cloudera.htrace:htrace-core</include>
++        <include>io.netty:netty</include>
++        <include>commons-codec:commons-codec</include>
        </includes>
      </dependencySet>
      <!-- Separate dependency set to just pull in the jackson stuff since its test 

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/662cabfe/phoenix-assembly/src/build/client.xml
----------------------------------------------------------------------
diff --cc phoenix-assembly/src/build/client.xml
index 64aa707,64aa707..0af0bf2
--- a/phoenix-assembly/src/build/client.xml
+++ b/phoenix-assembly/src/build/client.xml
@@@ -46,6 -46,6 +46,9 @@@
          <include>jline:jline</include>
          <include>sqlline:sqlline</include>
          <include>org.apache.hbase:hbase*</include>
++        <include>org.cloudera.htrace:htrace-core</include>
++        <include>io.netty:netty</include>
++        <include>commons-codec:commons-codec</include>
        </includes>
      </dependencySet>
  

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/662cabfe/phoenix-core/pom.xml
----------------------------------------------------------------------
diff --cc phoenix-core/pom.xml
index 6e82cc5,1e83ad6..90a8b89
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@@ -242,60 -251,6 +238,66 @@@
        <groupId>org.mockito</groupId>
        <artifactId>mockito-all</artifactId>
      </dependency>
 +    <dependency>
 +      <groupId>com.google.protobuf</groupId>
 +      <artifactId>protobuf-java</artifactId>
 +      <version>${protobuf-java.version}</version>
 +    </dependency>
 +    <dependency>
 +      <groupId>org.apache.httpcomponents</groupId>
 +      <artifactId>httpclient</artifactId>
 +      <version>4.0.1</version>
 +    </dependency>
 +    <dependency>
 +      <groupId>org.jruby</groupId>
 +      <artifactId>jruby-complete</artifactId>
 +      <version>${jruby.version}</version>
 +    </dependency>
 +    <dependency>
 +      <groupId>log4j</groupId>
 +      <artifactId>log4j</artifactId>
 +      <version>${log4j.version}</version>
 +    </dependency>
 +    <dependency>
 +      <groupId>org.slf4j</groupId>
 +      <artifactId>slf4j-api</artifactId>
 +      <version>${slf4j.version}</version>
 +    </dependency>
 +    <dependency>
 +      <groupId>org.xerial.snappy</groupId>
 +      <artifactId>snappy-java</artifactId>
 +      <version>${snappy.version}</version>
 +    </dependency>
 +    <dependency>
 +      <groupId>com.github.stephenc.high-scale-lib</groupId>
 +      <artifactId>high-scale-lib</artifactId>
 +      <version>1.1.1</version>
 +      <scope>test</scope>
 +    </dependency>
 +    <dependency>
 +      <groupId>com.yammer.metrics</groupId>
 +      <artifactId>metrics-core</artifactId>
 +      <version>2.1.2</version>
 +      <scope>test</scope>
 +    </dependency>
 +    <dependency>
-       <groupId>org.cloudera.htrace</groupId>
-       <artifactId>htrace-core</artifactId>
-       <version>2.04</version>
-       <scope>test</scope>
-     </dependency>
-     <dependency>
 +      <groupId>com.lmax</groupId>
 +      <artifactId>disruptor</artifactId>
 +      <version>3.2.0</version>
 +      <scope>test</scope>
 +    </dependency>
++    <dependency>
++      <groupId>org.cloudera.htrace</groupId>
++      <artifactId>htrace-core</artifactId>
++    </dependency>
++    <dependency>
++      <groupId>io.netty</groupId>
++      <artifactId>netty</artifactId>
++    </dependency>
++    <dependency>
++      <groupId>commons-codec</groupId>
++      <artifactId>commons-codec</artifactId>
++    </dependency>    
    </dependencies>
  
    <profiles>
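
[Editor's note] This pom change promotes htrace-core, netty, and commons-codec from test scope to compile scope (their versions are supplied by the parent pom's dependencyManagement, added at the end of this commit), matching the new <include> entries in the assembly descriptors above, presumably because the HBase 0.96 client pulls these jars in at runtime. A hypothetical illustration, not Phoenix code, of one of them (commons-codec) being exercised on the runtime classpath:

    import org.apache.commons.codec.binary.Hex;

    public class CodecSketch {
        public static void main(String[] args) throws Exception {
            byte[] rowKey = {0x0A, (byte) 0xFF, 0x42};
            // Round-trip a row key through its hex representation.
            String hex = Hex.encodeHexString(rowKey);        // "0aff42"
            byte[] back = Hex.decodeHex(hex.toCharArray());  // throws DecoderException on bad input
            System.out.println(hex + " round-trips: " + java.util.Arrays.equals(rowKey, back));
        }
    }
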

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/662cabfe/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/662cabfe/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
----------------------------------------------------------------------
diff --cc phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index dff2a11,a124faf..ba45477
--- a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@@ -67,6 -65,6 +65,7 @@@ import java.util.concurrent.ConcurrentHashMap;
  import java.util.concurrent.ConcurrentMap;
  
  import org.apache.hadoop.conf.Configuration;
++import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.HColumnDescriptor;
  import org.apache.hadoop.hbase.HConstants;
@@@ -224,18 -207,16 +226,9 @@@ public class ConnectionQueryServicesImpl
          // Without making a copy of the configuration we cons up, we lose some of our properties
          // on the server side during testing.
          this.config = HBaseConfiguration.create(config);
 +        // set replication required parameter
 +        ConfigUtil.setReplicationConfigIfAbsent(this.config);
          this.props = new ReadOnlyProps(this.config.iterator());
--        try {
--            this.connection = HBaseFactoryProvider.getHConnectionFactory().createConnection(this.config);
-         } catch (IOException e) {
-             throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION)
-                 .setRootCause(e).build().buildException();
-         }
-         if (this.connection.isClosed()) { // TODO: why the heck doesn't this throw above?
-             throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION).build().buildException();
-         }
 -        } catch (ZooKeeperConnectionException e) {
 -            throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION)
 -                .setRootCause(e).build().buildException();
 -        }
 -        if (this.connection.isClosed()) { // TODO: why the heck doesn't this throw above?
 -            throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION).build().buildException();
 -        }
          this.latestMetaData = newEmptyMetaData();
          // TODO: should we track connection wide memory usage or just org-wide usage?
          // If connection-wide, create a MemoryManager here, otherwise just use the one from the delegate
@@@ -247,6 -228,6 +240,9 @@@
          // find the HBase version and use that to determine the KeyValueBuilder that should be used
          String hbaseVersion = VersionInfo.getVersion();
          this.kvBuilder = KeyValueBuilder.get(hbaseVersion);
++        
++        // connection is initialized inside init()
++        connection = null;
      }
  
      @Override
@@@ -531,44 -506,7 +528,49 @@@
      private static final String OLD_PACKAGE = "com.salesforce.";
      private static final String NEW_PACKAGE = "org.apache.";
      
 +    private HTableDescriptor generateTableDescriptor(byte[] tableName, HTableDescriptor existingDesc, PTableType tableType, Map<String,Object> tableProps, List<Pair<byte[],Map<String,Object>>> families, byte[][] splits) throws SQLException {
 +        String defaultFamilyName = (String)tableProps.remove(PhoenixDatabaseMetaData.DEFAULT_COLUMN_FAMILY_NAME);
 +        HTableDescriptor descriptor = (existingDesc != null) ? new HTableDescriptor(existingDesc) :
 +          new HTableDescriptor(TableName.valueOf(tableName));
 +        for (Entry<String,Object> entry : tableProps.entrySet()) {
 +            String key = entry.getKey();
 +            Object value = entry.getValue();
 +            descriptor.setValue(key, value == null ? null : value.toString());
 +        }
 +        if (families.isEmpty()) {
 +            if (tableType != PTableType.VIEW) {
 +                byte[] defaultFamilyByes = defaultFamilyName == null ? QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES : Bytes.toBytes(defaultFamilyName);
 +                // Add dummy column family so we have key values for tables that 
 +                HColumnDescriptor columnDescriptor = generateColumnFamilyDescriptor(new Pair<byte[],Map<String,Object>>(defaultFamilyByes,Collections.<String,Object>emptyMap()), tableType);
 +                descriptor.addFamily(columnDescriptor);
 +            }
 +        } else {
 +            for (Pair<byte[],Map<String,Object>> family : families) {
 +                // If family is only in phoenix description, add it. otherwise, modify its property accordingly.
 +                byte[] familyByte = family.getFirst();
 +                if (descriptor.getFamily(familyByte) == null) {
 +                    if (tableType == PTableType.VIEW) {
 +                        String fullTableName = Bytes.toString(tableName);
 +                        throw new ReadOnlyTableException(
 +                                "The HBase column families for a read-only table must already exist",
 +                                SchemaUtil.getSchemaNameFromFullName(fullTableName),
 +                                SchemaUtil.getTableNameFromFullName(fullTableName),
 +                                Bytes.toString(familyByte));
 +                    }
 +                    HColumnDescriptor columnDescriptor = generateColumnFamilyDescriptor(family, tableType);
 +                    descriptor.addFamily(columnDescriptor);
 +                } else {
 +                    if (tableType != PTableType.VIEW) {
 +                        modifyColumnFamilyDescriptor(descriptor.getFamily(familyByte), family);
 +                    }
 +                }
 +            }
 +        }
++        addCoprocessors(tableName, descriptor, tableType);
++        return descriptor;
++    }
++
+     private void addCoprocessors(byte[] tableName, HTableDescriptor descriptor, PTableType tableType) throws SQLException {
          // The phoenix jar must be available on HBase classpath
          try {
              if (!descriptor.hasCoprocessor(ScanRegionObserver.class.getName())) {
@@@ -621,8 -563,48 +627,7 @@@
          } catch (IOException e) {
              throw ServerUtil.parseServerException(e);
          }
-         return descriptor;
      }
 -    
 -    private HTableDescriptor generateTableDescriptor(byte[] tableName, HTableDescriptor existingDesc, PTableType tableType, Map<String,Object> tableProps, List<Pair<byte[],Map<String,Object>>> families, byte[][] splits) throws SQLException {
 -        String defaultFamilyName = (String)tableProps.remove(PhoenixDatabaseMetaData.DEFAULT_COLUMN_FAMILY_NAME);
 -        HTableDescriptor descriptor = (existingDesc != null) ? new HTableDescriptor(existingDesc) : new HTableDescriptor(tableName);
 -        for (Entry<String,Object> entry : tableProps.entrySet()) {
 -            String key = entry.getKey();
 -            Object value = entry.getValue();
 -            descriptor.setValue(key, value == null ? null : value.toString());
 -        }
 -        if (families.isEmpty()) {
 -            if (tableType != PTableType.VIEW) {
 -                byte[] defaultFamilyByes = defaultFamilyName == null ? QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES : Bytes.toBytes(defaultFamilyName);
 -                // Add dummy column family so we have key values for tables that 
 -                HColumnDescriptor columnDescriptor = generateColumnFamilyDescriptor(new Pair<byte[],Map<String,Object>>(defaultFamilyByes,Collections.<String,Object>emptyMap()), tableType);
 -                descriptor.addFamily(columnDescriptor);
 -            }
 -        } else {
 -            for (Pair<byte[],Map<String,Object>> family : families) {
 -                // If family is only in phoenix description, add it. otherwise, modify its property accordingly.
 -                byte[] familyByte = family.getFirst();
 -                if (descriptor.getFamily(familyByte) == null) {
 -                    if (tableType == PTableType.VIEW) {
 -                        String fullTableName = Bytes.toString(tableName);
 -                        throw new ReadOnlyTableException(
 -                                "The HBase column families for a read-only table must already exist",
 -                                SchemaUtil.getSchemaNameFromFullName(fullTableName),
 -                                SchemaUtil.getTableNameFromFullName(fullTableName),
 -                                Bytes.toString(familyByte));
 -                    }
 -                    HColumnDescriptor columnDescriptor = generateColumnFamilyDescriptor(family, tableType);
 -                    descriptor.addFamily(columnDescriptor);
 -                } else {
 -                    if (tableType != PTableType.VIEW) {
 -                        modifyColumnFamilyDescriptor(descriptor.getFamily(familyByte), family);
 -                    }
 -                }
 -            }
 -        }
 -        addCoprocessors(tableName, descriptor, tableType);
 -        return descriptor;
 -    }
  
      private void ensureFamilyCreated(byte[] tableName, PTableType tableType , Pair<byte[],Map<String,Object>> family) throws SQLException {
          HBaseAdmin admin = null;
@@@ -1347,19 -1248,9 +1340,19 @@@
      
      @Override
      public void init(String url, Properties props) throws SQLException {
 +        try {
-             this.connection = HConnectionManager.createConnection(this.config);
-         } catch (IOException ioe) {
++          this.connection = HBaseFactoryProvider.getHConnectionFactory().createConnection(this.config);
++        } catch (IOException e) {
 +            throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION)
-                     .setRootCause(ioe).build().buildException();
++                .setRootCause(e).build().buildException();
 +        }
 +        if (this.connection.isClosed()) { // TODO: why the heck doesn't this throw above?
 +            throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION).build().buildException();
 +        }
-     	
-         props = new Properties(props);
-         props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
-         PhoenixConnection metaConnection = new PhoenixConnection(this, url, props, newEmptyMetaData());
++        
+         Properties scnProps = new Properties(props);
+         scnProps.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
+         PhoenixConnection metaConnection = new PhoenixConnection(this, url, scnProps, newEmptyMetaData());
          SQLException sqlE = null;
          try {
              try {
@@@ -2086,31 -1999,51 +2120,51 @@@
              filter.setFilterIfMissing(true);
              // Add filter so that we only get the table row and not the column rows
              scan.setFilter(filter);
-             HTableInterface table = HBaseFactoryProvider.getHTableFactory().getTable(SYSTEM_TABLE_NAME_BYTES, connection, getExecutor());
-             ResultScanner scanner = table.getScanner(scan);
-             Result result = null;
-             while ((result = scanner.next()) != null) {
-                 byte[] rowKey = result.getRow();
-                 byte[][] rowKeyMetaData = new byte[2][];
-                 SchemaUtil.getVarChars(rowKey, rowKeyMetaData);
-                 byte[] schemaBytes = rowKeyMetaData[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
-                 byte[] tableBytes = rowKeyMetaData[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
-                 byte[] tableName = SchemaUtil.getTableNameAsBytes(schemaBytes, tableBytes);
-                 if (!SchemaUtil.isMetaTable(tableName) && whiteList.matches(tableName)) {
-                     if (logger.isInfoEnabled()) {
-                         logger.info("Upgrading coprocessors for: " + SchemaUtil.getTableName(schemaBytes, tableBytes));
+             HTableInterface table = null;
+             try {
+                 table = getTable(SYSTEM_TABLE_NAME_BYTES);
+                 ResultScanner scanner = table.getScanner(scan);
+                 Result result = null;
+                 while ((result = scanner.next()) != null) {
+                     byte[] rowKey = result.getRow();
+                     byte[][] rowKeyMetaData = new byte[2][];
+                     SchemaUtil.getVarChars(rowKey, rowKeyMetaData);
+                     byte[] schemaBytes = rowKeyMetaData[0];
+                     byte[] tableBytes = rowKeyMetaData[1];
+                     byte[] tableName = SchemaUtil.getTableNameAsBytes(schemaBytes, tableBytes);
 -                    KeyValue tableTypeKv = result.getColumnLatest(OLD_DEFAULT_COLUMN_FAMILY_BYTES, TABLE_TYPE_BYTES);
 -                    PTableType tableType = PTableType.fromSerializedValue(tableTypeKv.getBuffer()[tableTypeKv.getValueOffset()]);
 -                    KeyValue dataNameKv = result.getColumnLatest(OLD_DEFAULT_COLUMN_FAMILY_BYTES, DATA_TABLE_NAME_BYTES);
++                    Cell tableTypeKv = result.getColumnLatestCell(OLD_DEFAULT_COLUMN_FAMILY_BYTES, TABLE_TYPE_BYTES);
++                    PTableType tableType = PTableType.fromSerializedValue(tableTypeKv.getValueArray()[tableTypeKv.getValueOffset()]);
++                    Cell dataNameKv = result.getColumnLatestCell(OLD_DEFAULT_COLUMN_FAMILY_BYTES, DATA_TABLE_NAME_BYTES);
+                     // Update coprocessors if table is on white list or it's data table is on the white list
+                     if (tableType != PTableType.SYSTEM
+                             && (coprocUpgradeWhiteList.matches(tableName) || (dataNameKv != null
+                             && coprocUpgradeWhiteList.matches(SchemaUtil.getTableName(schemaBytes, dataNameKv.getValue()))))) {
+                         HTableDescriptor existingDesc = admin.getTableDescriptor(tableName);
+                         HTableDescriptor newDesc = new HTableDescriptor(existingDesc);
+                         addCoprocessors(tableName, newDesc, tableType);
+                         String fullTableName = Bytes.toString(tableName);
+                         if (forceUpgrade) {
+                             upgradedTables.add(fullTableName);
+                         }
+ 
+                         if (!existingDesc.equals(newDesc)) {
+                             if (logger.isInfoEnabled()) {
+                                 logger.info("Upgrading coprocessors for: " + SchemaUtil.getTableName(schemaBytes, tableBytes));
+                             }
+                             upgradedTables.add(fullTableName);
+                             admin.disableTable(tableName);
+                             admin.modifyTable(tableName, newDesc);
+                             admin.enableTable(tableName);
+                         }
                      }
-                     HTableDescriptor existingDesc = admin.getTableDescriptor(tableName);
-                     HTableDescriptor newDesc = generateTableDescriptor(tableName, existingDesc, PTableType.VIEW, Collections.<String,Object>emptyMap(), Collections.<Pair<byte[],Map<String,Object>>>emptyList(), null);
-                     admin.disableTable(tableName);
-                     admin.modifyTable(tableName, newDesc);
-                     admin.enableTable(tableName);
                  }
+                 return new WhiteList(upgradedTables);
+             } catch (TableNotFoundException ignore) {
+                 return new WhiteList(); // No SYSTEM.TABLE, so nothing to do
+             } finally {
+                 if (table != null) Closeables.closeQuietly(table);
              }
          }
-         admin.disableTable(SYSTEM_CATALOG_NAME_BYTES);
-         admin.modifyTable(SYSTEM_CATALOG_NAME_BYTES, systemCatalogDesc);
-         admin.enableTable(SYSTEM_CATALOG_NAME_BYTES);
      }
          
  }
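
[Editor's note] Two patterns in the merged file are worth calling out. First, HConnection creation moves out of the constructor and into init(), so constructing ConnectionQueryServicesImpl can no longer fail on cluster connectivity; the failure instead surfaces as a SQLException from init(). A hedged, name-simplified sketch of that split (the HBase calls are stubbed out):

    import java.sql.SQLException;

    class DeferredInitSketch {
        private Object connection; // stand-in for org.apache.hadoop.hbase.client.HConnection

        DeferredInitSketch() {
            // connection is initialized inside init(), per the committed comment
            this.connection = null;
        }

        void init() throws SQLException {
            try {
                this.connection = openConnection(); // stand-in for HConnectionFactory.createConnection
            } catch (Exception e) {
                throw new SQLException("Cannot establish connection", e);
            }
        }

        private Object openConnection() throws Exception {
            return new Object(); // hypothetical; the real code talks to ZooKeeper/HBase
        }
    }

Second, the coprocessor-upgrade loop switches from the 0.94-era KeyValue accessors (getColumnLatest, getBuffer) to the Cell interface exposed by HBase 0.96, where a value is an (array, offset, length) slice. A minimal sketch of the new read idiom; the helper name and semantics are illustrative, not from the Phoenix source:

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.client.Result;

    class CellReadSketch {
        // First byte of the latest value for family:qualifier, or -1 if the column is absent.
        static int firstValueByte(Result result, byte[] family, byte[] qualifier) {
            Cell cell = result.getColumnLatestCell(family, qualifier);
            if (cell == null || cell.getValueLength() == 0) {
                return -1;
            }
            // Equivalent of the old kv.getBuffer()[kv.getValueOffset()] pattern.
            return cell.getValueArray()[cell.getValueOffset()];
        }
    }
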

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/662cabfe/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionlessQueryServicesImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/662cabfe/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/662cabfe/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/662cabfe/pom.xml
----------------------------------------------------------------------
diff --cc pom.xml
index 84dbaad,f3d9103..465adcf
--- a/pom.xml
+++ b/pom.xml
@@@ -74,7 -85,6 +74,10 @@@
      <findbugs.version>1.3.2</findbugs.version>
      <jline.version>2.11</jline.version>
      <snappy.version>1.1.0.1</snappy.version>
 +    <jruby.version>1.6.8</jruby.version>
++    <netty.version>3.6.6.Final</netty.version>
++    <commons-codec.version>1.7</commons-codec.version>
++    <htrace.version>2.04</htrace.version>
  
      <!-- Test Dependencies -->
      <mockito-all.version>1.8.5</mockito-all.version>
@@@ -147,7 -157,7 +150,7 @@@
              <excludes>
                <exclude>CHANGES.txt</exclude>
                <exclude>README.md</exclude>
--	      <exclude>dev/phoenix.importorder</exclude>
++	            <exclude>dev/phoenix.importorder</exclude>
                <exclude>**/target/**</exclude>
                <exclude>**/*.versionsBackup</exclude>
                <!-- exclude docs -->
@@@ -378,31 -416,6 +381,46 @@@
          <version>${mockito-all.version}</version>
          <scope>test</scope>
        </dependency>
 +      <dependency>
 +        <groupId>com.google.protobuf</groupId>
 +        <artifactId>protobuf-java</artifactId>
 +        <version>${protobuf-java.version}</version>
 +      </dependency>
 +      <dependency>
 +        <groupId>org.apache.httpcomponents</groupId>
 +        <artifactId>httpclient</artifactId>
 +        <version>4.0.1</version>
 +      </dependency>
 +      <dependency>
 +        <groupId>org.jruby</groupId>
 +        <artifactId>jruby-complete</artifactId>
 +        <version>${jruby.version}</version>
 +      </dependency>
 +      <dependency>
 +        <groupId>log4j</groupId>
 +        <artifactId>log4j</artifactId>
 +        <version>${log4j.version}</version>
 +      </dependency>
 +      <dependency>
 +        <groupId>org.slf4j</groupId>
 +        <artifactId>slf4j-api</artifactId>
 +        <version>${slf4j.version}</version>
 +      </dependency>
++      <dependency>
++        <groupId>org.cloudera.htrace</groupId>
++        <artifactId>htrace-core</artifactId>
++        <version>${htrace.version}</version>
++      </dependency>
++      <dependency>
++        <groupId>io.netty</groupId>
++        <artifactId>netty</artifactId>
++        <version>${netty.version}</version>
++      </dependency>
++      <dependency>
++        <groupId>commons-codec</groupId>
++        <artifactId>commons-codec</artifactId>
++        <version>${commons-codec.version}</version>
++      </dependency>
      </dependencies>
    </dependencyManagement>
  

