hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From st...@apache.org
Subject svn commit: r1501908 [1/4] - in /hbase/branches/0.95: hbase-client/ hbase-client/src/main/java/org/apache/hadoop/hbase/ hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/ hbase-client/src/main/java/org/apache/hadoop/hbase/client/ hbase-client/...
Date Wed, 10 Jul 2013 18:49:36 GMT
Author: stack
Date: Wed Jul 10 18:49:33 2013
New Revision: 1501908

URL: http://svn.apache.org/r1501908
Log:
HBASE-8918 Removes redundant identifiers from interfaces; REVERT -- PREMATURE APPLICATION

Modified:
    hbase/branches/0.95/hbase-client/pom.xml
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Abortable.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Server.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Stoppable.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnection.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterAdminKeepAliveConnection.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeers.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java
    hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
    hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java
    hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
    hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
    hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
    hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java
    hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
    hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java
    hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java
    hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
    hbase/branches/0.95/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
    hbase/branches/0.95/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/metrics/BaseSource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsEditsReplaySource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWALSource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/rest/MetricsRESTSource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSource.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactory.java
    hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/MetricHistogram.java
    hbase/branches/0.95/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/HadoopShims.java
    hbase/branches/0.95/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelper.java
    hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTokenizerData.java
    hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnData.java
    hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java
    hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/InterProcessLock.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/InterProcessReadWriteLock.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionListener.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionSnare.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TotesHRegionInfo.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinatorRpcs.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMemberRpcs.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/SubprocedureFactory.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ColumnTracker.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionRequestor.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LastSequenceId.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LeaseListener.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/OnlineRegions.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationService.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSourceService.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreConfigInformation.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileManager.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Dictionary.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceInterface.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/Constants.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ProtobufMessageHandler.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CodeToClassAndBackFor96Migration.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescerMBean.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CancelableProgressable.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/KeyRange.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
    hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
    hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java
    hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
    hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
    hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java

Modified: hbase/branches/0.95/hbase-client/pom.xml
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/pom.xml?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/pom.xml (original)
+++ hbase/branches/0.95/hbase-client/pom.xml Wed Jul 10 18:49:33 2013
@@ -1,45 +1,46 @@
 <?xml version="1.0"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <!--
-    /**
-     * Licensed to the Apache Software Foundation (ASF) under one
-     * or more contributor license agreements.  See the NOTICE file
-     * distributed with this work for additional information
-     * regarding copyright ownership.  The ASF licenses this file
-     * to you under the Apache License, Version 2.0 (the
-     * "License"); you may not use this file except in compliance
-     * with the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-    -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <artifactId>hbase</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>0.95.2-SNAPSHOT</version>
-        <relativePath>..</relativePath>
-    </parent>
-
-    <artifactId>hbase-client</artifactId>
-    <name>HBase - Client</name>
-    <description>Client of HBase</description>
-
-    <build>
-        <plugins>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-site-plugin</artifactId>
-          <configuration>
-            <skip>true</skip>
-          </configuration>
-        </plugin>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <!--
+  /**
+   * Licensed to the Apache Software Foundation (ASF) under one
+   * or more contributor license agreements.  See the NOTICE file
+   * distributed with this work for additional information
+   * regarding copyright ownership.  The ASF licenses this file
+   * to you under the Apache License, Version 2.0 (the
+   * "License"); you may not use this file except in compliance
+   * with the License.  You may obtain a copy of the License at
+   *
+   *     http://www.apache.org/licenses/LICENSE-2.0
+   *
+   * Unless required by applicable law or agreed to in writing, software
+   * distributed under the License is distributed on an "AS IS" BASIS,
+   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   * See the License for the specific language governing permissions and
+   * limitations under the License.
+   */
+  -->
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>0.95.2-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+
+  <artifactId>hbase-client</artifactId>
+  <name>HBase - Client</name>
+  <description>Client of HBase</description>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
       <plugin>
         <!--Make it so assembly:single does nothing in here-->
         <artifactId>maven-assembly-plugin</artifactId>
@@ -48,141 +49,170 @@
           <skipAssembly>true</skipAssembly>
         </configuration>
       </plugin>
-        <plugin>
-          <artifactId>maven-surefire-plugin</artifactId>
-          <!-- Always skip the second part executions, since we only run
-            simple unit tests in this module -->
-          <executions>
-            <execution>
-              <id>secondPartTestsExecution</id>
-              <phase>test</phase>
-              <goals>
+      <plugin>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <!-- Always skip the second part executions, since we only run
+          simple unit tests in this module -->
+        <executions>
+          <execution>
+            <id>secondPartTestsExecution</id>
+            <phase>test</phase>
+            <goals>
               <goal>test</goal>
-              </goals>
-              <configuration>
+            </goals>
+            <configuration>
               <skip>true</skip>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
-        <!-- Make a jar and put the sources in the jar -->
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-source-plugin</artifactId>
-        </plugin>
-      </plugins>
-    </build>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <!-- Make a jar and put the sources in the jar -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
 
-    <dependencies>
-        <!-- Intra-project dependencies -->
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-        </dependency>
+  <dependencies>
+    <!-- Intra-project dependencies -->
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-protocol</artifactId>
+    </dependency>
+    <!-- General dependencies -->
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.cloudera.htrace</groupId>
+      <artifactId>htrace</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <!-- Skip the tests in this module -->
+    <profile>
+      <id>skipClientTests</id>
+      <activation>
+        <property>
+          <name>skipClientTests</name>
+        </property>
+      </activation>
+      <properties>
+        <surefire.skipFirstPart>true</surefire.skipFirstPart>
+      </properties>
+    </profile>
+    <!-- profile against Hadoop 1.0.x: This is the default. It has to have the same
+ activation property as the parent Hadoop 1.0.x profile to make sure it gets run at
+ the same time. -->
+    <profile>
+      <id>hadoop-1.0</id>
+      <activation>
+        <property>
+          <name>!hadoop.profile</name>
+        </property>
+      </activation>
+      <dependencies>
         <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <type>test-jar</type>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-protocol</artifactId>
-        </dependency>
-        <!-- General dependencies -->
-        <dependency>
-            <groupId>com.google.protobuf</groupId>
-            <artifactId>protobuf-java</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.zookeeper</groupId>
-            <artifactId>zookeeper</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.cloudera.htrace</groupId>
-            <artifactId>htrace</artifactId>
+      </dependencies>
+    </profile>
+
+    <!--
+      profile for building against Hadoop 2.0.0-alpha. Activate using:
+       mvn -Dhadoop.profile=2.0
+    -->
+    <profile>
+      <id>hadoop-2.0</id>
+      <activation>
+        <property>
+          <name>hadoop.profile</name>
+          <value>2.0</value>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
         </dependency>
         <dependency>
-            <groupId>io.netty</groupId>
-            <artifactId>netty</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-annotations</artifactId>
         </dependency>
-    </dependencies>
+      </dependencies>
+    </profile>
 
-    <profiles>
-        <!-- Skip the tests in this module -->
-        <profile>
-            <id>skipClientTests</id>
-            <activation>
-                <property>
-                    <name>skipClientTests</name>
-                </property>
-            </activation>
-            <properties>
-                <surefire.skipFirstPart>true</surefire.skipFirstPart>
-            </properties>
-        </profile>
-        <!-- profile against Hadoop 1.0.x: This is the default. It has to have the same
-     activation property as the parent Hadoop 1.0.x profile to make sure it gets run at
-     the same time. -->
-        <profile>
-            <id>hadoop-1.0</id>
-            <activation>
-                <property>
-                    <name>!hadoop.profile</name>
-                </property>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-core</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-
-        <!--
-          profile for building against Hadoop 2.0.0-alpha. Activate using:
-           mvn -Dhadoop.profile=2.0
-        -->
-        <profile>
-            <id>hadoop-2.0</id>
-            <activation>
-                <property>
-                    <name>hadoop.profile</name>
-                    <value>2.0</value>
-                </property>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-client</artifactId>
-                </dependency>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-annotations</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-
-        <!--
-          profile for building against Hadoop 3.0.x. Activate using:
-           mvn -Dhadoop.profile=3.0
-        -->
-        <profile>
-            <id>hadoop-3.0</id>
-            <activation>
-                <property>
-                    <name>hadoop.profile</name>
-                    <value>3.0</value>
-                </property>
-            </activation>
-            <properties>
-                <hadoop.version>3.0-SNAPSHOT</hadoop.version>
-            </properties>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-common</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-    </profiles>
+    <!--
+      profile for building against Hadoop 3.0.x. Activate using:
+       mvn -Dhadoop.profile=3.0
+    -->
+    <profile>
+      <id>hadoop-3.0</id>
+      <activation>
+        <property>
+          <name>hadoop.profile</name>
+          <value>3.0</value>
+        </property>
+      </activation>
+      <properties>
+        <hadoop.version>3.0-SNAPSHOT</hadoop.version>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Abortable.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Abortable.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Abortable.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Abortable.java Wed Jul 10 18:49:33 2013
@@ -35,11 +35,11 @@ public interface Abortable {
    * @param why Why we're aborting.
    * @param e Throwable that caused abort. Can be null.
    */
-  void abort(String why, Throwable e);
+  public void abort(String why, Throwable e);
   
   /**
    * Check if the server or client was aborted. 
    * @return true if the server or client was aborted, false otherwise
    */
-  boolean isAborted();
+  public boolean isAborted();
 }

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Coprocessor.java Wed Jul 10 18:49:33 2013
@@ -26,21 +26,21 @@ import java.io.IOException;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public interface Coprocessor {
-  int VERSION = 1;
+  static final int VERSION = 1;
 
   /** Highest installation priority */
-  int PRIORITY_HIGHEST = 0;
+  static final int PRIORITY_HIGHEST = 0;
   /** High (system) installation priority */
-  int PRIORITY_SYSTEM = Integer.MAX_VALUE / 4;
+  static final int PRIORITY_SYSTEM = Integer.MAX_VALUE / 4;
   /** Default installation priority for user coprocessors */
-  int PRIORITY_USER = Integer.MAX_VALUE / 2;
+  static final int PRIORITY_USER = Integer.MAX_VALUE / 2;
   /** Lowest installation priority */
-  int PRIORITY_LOWEST = Integer.MAX_VALUE;
+  static final int PRIORITY_LOWEST = Integer.MAX_VALUE;
 
   /**
    * Lifecycle state of a given coprocessor instance.
    */
-  enum State {
+  public enum State {
     UNINSTALLED,
     INSTALLED,
     STARTING,

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java Wed Jul 10 18:49:33 2013
@@ -30,26 +30,26 @@ import java.io.IOException;
 public interface CoprocessorEnvironment {
 
   /** @return the Coprocessor interface version */
-  int getVersion();
+  public int getVersion();
 
   /** @return the HBase version as a string (e.g. "0.21.0") */
-  String getHBaseVersion();
+  public String getHBaseVersion();
 
   /** @return the loaded coprocessor instance */
-  Coprocessor getInstance();
+  public Coprocessor getInstance();
 
   /** @return the priority assigned to the loaded coprocessor */
-  int getPriority();
+  public int getPriority();
 
   /** @return the load sequence number */
-  int getLoadSequence();
+  public int getLoadSequence();
 
   /** @return the configuration */
-  Configuration getConfiguration();
+  public Configuration getConfiguration();
 
   /**
    * @return an interface for accessing the given table
    * @throws IOException
    */
-  HTableInterface getTable(byte[] tableName) throws IOException;
+  public HTableInterface getTable(byte[] tableName) throws IOException;
 }

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Server.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Server.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Server.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Server.java Wed Jul 10 18:49:33 2013
@@ -32,20 +32,20 @@ public interface Server extends Abortabl
   /**
    * Gets the configuration object for this server.
    */
-  Configuration getConfiguration();
+  public Configuration getConfiguration();
 
   /**
    * Gets the ZooKeeper instance for this server.
    */
-  ZooKeeperWatcher getZooKeeper();
+  public ZooKeeperWatcher getZooKeeper();
 
   /**
    * @return Master's instance of {@link CatalogTracker}
    */
-  CatalogTracker getCatalogTracker();
+  public CatalogTracker getCatalogTracker();
 
   /**
    * @return The unique server name for this server.
    */
-  ServerName getServerName();
+  public ServerName getServerName();
 }

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Stoppable.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Stoppable.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Stoppable.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/Stoppable.java Wed Jul 10 18:49:33 2013
@@ -29,10 +29,10 @@ public interface Stoppable {
    * Stop this service.
    * @param why Why we're stopping.
    */
-  void stop(String why);
+  public void stop(String why);
 
   /**
   * @return True if {@link #stop(String)} has been invoked.
    */
-  boolean isStopped();
+  public boolean isStopped();
 }

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java Wed Jul 10 18:49:33 2013
@@ -558,7 +558,7 @@ public class MetaReader {
      * @return True if we are to proceed scanning the table, else false if
      * we are to stop now.
      */
-    boolean visit(final Result r) throws IOException;
+    public boolean visit(final Result r) throws IOException;
   }
 
   /**

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java Wed Jul 10 18:49:33 2013
@@ -117,7 +117,7 @@ class AsyncProcess<CResult> {
    * </li>
    * </list>
    */
-  interface AsyncProcessCallback<CResult> {
+  static interface AsyncProcessCallback<CResult> {
 
     /**
      * Called on success. originalIndex holds the index in the action list.

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java Wed Jul 10 18:49:33 2013
@@ -34,18 +34,18 @@ public interface Attributes {
    * @param name attribute name
    * @param value attribute value
    */
-  void setAttribute(String name, byte[] value);
+  public void setAttribute(String name, byte[] value);
 
   /**
    * Gets an attribute
    * @param name attribute name
    * @return attribute value if attribute is set, <tt>null</tt> otherwise
    */
-  byte[] getAttribute(String name);
+  public byte[] getAttribute(String name);
 
   /**
    * Gets all attributes
    * @return unmodifiable map of all attributes
    */
-  Map<String, byte[]> getAttributesMap();
+  public Map<String, byte[]> getAttributesMap();
 }

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java Wed Jul 10 18:49:33 2013
@@ -82,19 +82,19 @@ class ClusterStatusListener implements C
      *
      * @param sn - the server name
      */
-    void newDead(ServerName sn);
+    public void newDead(ServerName sn);
   }
 
 
   /**
   * The interface to be implemented by a listener of a cluster status event.
    */
-  interface Listener extends Closeable {
+  static interface Listener extends Closeable {
     /**
      * Called to close the resources, if any. Cannot throw an exception.
      */
     @Override
-    void close();
+    public void close();
 
     /**
      * Called to connect.
@@ -102,7 +102,7 @@ class ClusterStatusListener implements C
      * @param conf Configuration to use.
      * @throws IOException
      */
-    void connect(Configuration conf) throws IOException;
+    public void connect(Configuration conf) throws IOException;
   }
 
   public ClusterStatusListener(DeadServerHandler dsh, Configuration conf,

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnection.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnection.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnection.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnection.java Wed Jul 10 18:49:33 2013
@@ -62,10 +62,10 @@ public interface HConnection extends Abo
   /**
    * @return Configuration instance being used by this HConnection instance.
    */
-  Configuration getConfiguration();
+  public Configuration getConfiguration();
 
   /** @return - true if the master server is running */
-  boolean isMasterRunning()
+  public boolean isMasterRunning()
   throws MasterNotRunningException, ZooKeeperConnectionException;
 
   /**
@@ -76,21 +76,21 @@ public interface HConnection extends Abo
    * @return true if the table is enabled, false otherwise
    * @throws IOException if a remote or network exception occurs
    */
-  boolean isTableEnabled(byte[] tableName) throws IOException;
+  public boolean isTableEnabled(byte[] tableName) throws IOException;
 
   /**
    * @param tableName table name
    * @return true if the table is disabled, false otherwise
    * @throws IOException if a remote or network exception occurs
    */
-  boolean isTableDisabled(byte[] tableName) throws IOException;
+  public boolean isTableDisabled(byte[] tableName) throws IOException;
 
   /**
    * @param tableName table name
    * @return true if all regions of the table are available, false otherwise
    * @throws IOException if a remote or network exception occurs
    */
-  boolean isTableAvailable(byte[] tableName) throws IOException;
+  public boolean isTableAvailable(byte[] tableName) throws IOException;
 
   /**
    * Use this api to check if the table has been created with the specified number of
@@ -104,7 +104,7 @@ public interface HConnection extends Abo
    * @throws IOException
    *           if a remote or network exception occurs
    */
-  boolean isTableAvailable(byte[] tableName, byte[][] splitKeys) throws IOException;
+  public boolean isTableAvailable(byte[] tableName, byte[][] splitKeys) throws IOException;
 
   /**
    * List all the userspace tables.  In other words, scan the META table.
@@ -116,14 +116,14 @@ public interface HConnection extends Abo
    * @return - returns an array of HTableDescriptors
    * @throws IOException if a remote or network exception occurs
    */
-  HTableDescriptor[] listTables() throws IOException;
+  public HTableDescriptor[] listTables() throws IOException;
 
   /**
    * @param tableName table name
    * @return table metadata
    * @throws IOException if a remote or network exception occurs
    */
-  HTableDescriptor getHTableDescriptor(byte[] tableName)
+  public HTableDescriptor getHTableDescriptor(byte[] tableName)
   throws IOException;
 
   /**
@@ -135,15 +135,14 @@ public interface HConnection extends Abo
    * question
    * @throws IOException if a remote or network exception occurs
    */
-  HRegionLocation locateRegion(
-    final byte[] tableName, final byte[] row
-  )
+  public HRegionLocation locateRegion(final byte [] tableName,
+      final byte [] row)
   throws IOException;
 
   /**
    * Allows flushing the region cache.
    */
-  void clearRegionCache();
+  public void clearRegionCache();
 
   /**
    * Allows flushing the region cache of all locations that pertain to
@@ -151,13 +150,13 @@ public interface HConnection extends Abo
    * @param tableName Name of the table whose regions we are to remove from
    * cache.
    */
-  void clearRegionCache(final byte[] tableName);
+  public void clearRegionCache(final byte [] tableName);
 
   /**
    * Deletes cached locations for the specific region.
    * @param location The location object for the region, to be purged from cache.
    */
-  void deleteCachedRegionLocation(final HRegionLocation location);
+  public void deleteCachedRegionLocation(final HRegionLocation location);
 
   /**
    * Find the location of the region of <i>tableName</i> that <i>row</i>
@@ -168,9 +167,8 @@ public interface HConnection extends Abo
    * question
    * @throws IOException if a remote or network exception occurs
    */
-  HRegionLocation relocateRegion(
-    final byte[] tableName, final byte[] row
-  )
+  public HRegionLocation relocateRegion(final byte [] tableName,
+      final byte [] row)
   throws IOException;
 
   /**
@@ -181,9 +179,8 @@ public interface HConnection extends Abo
    * @param exception the exception if any. Can be null.
    * @param source the previous location
    */
-  void updateCachedLocations(
-    byte[] tableName, byte[] rowkey, Object exception, HRegionLocation source
-  );
+  public void updateCachedLocations(byte[] tableName, byte[] rowkey,
+                                    Object exception, HRegionLocation source);
 
   /**
    * Gets the location of the region of <i>regionName</i>.
@@ -192,7 +189,7 @@ public interface HConnection extends Abo
    * question
    * @throws IOException if a remote or network exception occurs
    */
-  HRegionLocation locateRegion(final byte[] regionName)
+  public HRegionLocation locateRegion(final byte [] regionName)
   throws IOException;
 
   /**
@@ -201,7 +198,7 @@ public interface HConnection extends Abo
    * @return list of region locations for all regions of table
    * @throws IOException
    */
-  List<HRegionLocation> locateRegions(final byte[] tableName)
+  public List<HRegionLocation> locateRegions(final byte[] tableName)
   throws IOException;
 
   /**
@@ -213,19 +210,18 @@ public interface HConnection extends Abo
    * @return list of region locations for all regions of table
    * @throws IOException
    */
-  List<HRegionLocation> locateRegions(
-    final byte[] tableName, final boolean useCache, final boolean offlined
-  ) throws IOException;
+  public List<HRegionLocation> locateRegions(final byte[] tableName, final boolean useCache,
+      final boolean offlined) throws IOException;
 
   /**
    * Returns a {@link MasterAdminKeepAliveConnection} to the active master
    */
-  MasterAdminService.BlockingInterface getMasterAdmin() throws IOException;
+  public MasterAdminService.BlockingInterface getMasterAdmin() throws IOException;
 
   /**
    * Returns an {@link MasterMonitorKeepAliveConnection} to the active master
    */
-  MasterMonitorService.BlockingInterface getMasterMonitor() throws IOException;
+  public MasterMonitorService.BlockingInterface getMasterMonitor() throws IOException;
 
   /**
    * Establishes a connection to the region server at the specified address.
@@ -233,7 +229,7 @@ public interface HConnection extends Abo
    * @return proxy for HRegionServer
    * @throws IOException if a remote or network exception occurs
    */
-  AdminService.BlockingInterface getAdmin(final ServerName serverName) throws IOException;
+  public AdminService.BlockingInterface getAdmin(final ServerName serverName) throws IOException;
 
   /**
    * Establishes a connection to the region server at the specified address, and returns
@@ -244,7 +240,7 @@ public interface HConnection extends Abo
    * @throws IOException if a remote or network exception occurs
    *
    */
-  ClientService.BlockingInterface getClient(final ServerName serverName) throws IOException;
+  public ClientService.BlockingInterface getClient(final ServerName serverName) throws IOException;
 
   /**
    * Establishes a connection to the region server at the specified address.
@@ -254,7 +250,7 @@ public interface HConnection extends Abo
    * @throws IOException if a remote or network exception occurs
    * @deprecated You can pass master flag but nothing special is done.
    */
-  AdminService.BlockingInterface getAdmin(final ServerName serverName, boolean getMaster)
+  public AdminService.BlockingInterface getAdmin(final ServerName serverName, boolean getMaster)
       throws IOException;
 
   /**
@@ -281,7 +277,7 @@ public interface HConnection extends Abo
    * @throws RuntimeException other unspecified error
    */
   @Deprecated
-  <T> T getRegionServerWithRetries(ServerCallable<T> callable)
+  public <T> T getRegionServerWithRetries(ServerCallable<T> callable)
   throws IOException, RuntimeException;
 
   /**
@@ -294,7 +290,7 @@ public interface HConnection extends Abo
    * @throws RuntimeException other unspecified error
    */
   @Deprecated
-  <T> T getRegionServerWithoutRetries(ServerCallable<T> callable)
+  public <T> T getRegionServerWithoutRetries(ServerCallable<T> callable)
   throws IOException, RuntimeException;
 
   /**
@@ -313,9 +309,8 @@ public interface HConnection extends Abo
    * @deprecated since 0.96 - Use {@link HTableInterface#batch} instead
    */
   @Deprecated
-  void processBatch(
-    List<? extends Row> actions, final byte[] tableName, ExecutorService pool, Object[] results
-  )
+  public void processBatch(List<? extends Row> actions, final byte[] tableName,
+      ExecutorService pool, Object[] results)
       throws IOException, InterruptedException;
 
   /**
@@ -324,13 +319,11 @@ public interface HConnection extends Abo
    * @deprecated since 0.96 - Use {@link HTableInterface#batchCallback} instead
    */
   @Deprecated
-  <R> void processBatchCallback(
-    List<? extends Row> list,
-    byte[] tableName,
-    ExecutorService pool,
-    Object[] results,
-    Batch.Callback<R> callback
-  ) throws IOException, InterruptedException;
+  public <R> void processBatchCallback(List<? extends Row> list,
+      byte[] tableName,
+      ExecutorService pool,
+      Object[] results,
+      Batch.Callback<R> callback) throws IOException, InterruptedException;
 
   /**
    * Enable or disable region cache prefetch for the table. It will be
@@ -339,9 +332,8 @@ public interface HConnection extends Abo
    * @param tableName name of table to configure.
    * @param enable Set to true to enable region cache prefetch.
    */
-  void setRegionCachePrefetch(
-    final byte[] tableName, final boolean enable
-  );
+  public void setRegionCachePrefetch(final byte[] tableName,
+      final boolean enable);
 
   /**
    * Check whether region cache prefetch is enabled or not.
@@ -349,34 +341,34 @@ public interface HConnection extends Abo
    * @return true if table's region cache prefetch is enabled. Otherwise
    * it is disabled.
    */
-  boolean getRegionCachePrefetch(final byte[] tableName);
+  public boolean getRegionCachePrefetch(final byte[] tableName);
 
   /**
    * @return the number of region servers that are currently running
    * @throws IOException if a remote or network exception occurs
    * @deprecated This method will be changed from public to package protected.
    */
-  int getCurrentNrHRS() throws IOException;
+  public int getCurrentNrHRS() throws IOException;
 
   /**
    * @param tableNames List of table names
    * @return HTD[] table metadata
    * @throws IOException if a remote or network exception occurs
    */
-  HTableDescriptor[] getHTableDescriptors(List<String> tableNames)
+  public HTableDescriptor[] getHTableDescriptors(List<String> tableNames)
   throws IOException;
 
   /**
    * @return true if this connection is closed
    */
-  boolean isClosed();
+  public boolean isClosed();
 
 
   /**
    * Clear any caches that pertain to server name <code>sn</code>.
    * @param sn A server name
    */
-  void clearCaches(final ServerName sn);
+  public void clearCaches(final ServerName sn);
 
   /**
    * This function allows HBaseAdmin and potentially others to get a shared MasterMonitor
@@ -385,7 +377,7 @@ public interface HConnection extends Abo
    * @throws MasterNotRunningException
    */
   // TODO: Why is this in the public interface when the returned type is shutdown package access?
-  MasterMonitorKeepAliveConnection getKeepAliveMasterMonitorService()
+  public MasterMonitorKeepAliveConnection getKeepAliveMasterMonitorService()
   throws MasterNotRunningException;
 
   /**
@@ -395,11 +387,11 @@ public interface HConnection extends Abo
    * @throws MasterNotRunningException
    */
   // TODO: Why is this in the public interface when the returned type is shutdown package access?
-  MasterAdminKeepAliveConnection getKeepAliveMasterAdminService() throws MasterNotRunningException;
+  public MasterAdminKeepAliveConnection getKeepAliveMasterAdminService() throws MasterNotRunningException;
 
   /**
    * @param serverName
    * @return true if the server is known as dead, false otherwise.
    */
-  boolean isDeadServer(ServerName serverName);
+  public boolean isDeadServer(ServerName serverName);
 }

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java Wed Jul 10 18:49:33 2013
@@ -127,9 +127,8 @@ public interface HTableInterface extends
    * Same as {@link #batch(List, Object[])}, but with a callback.
    * @since 0.96.0
    */
-  <R> void batchCallback(
-    final List<? extends Row> actions, final Object[] results, final Batch.Callback<R> callback
-  )
+  public <R> void batchCallback(
+    final List<? extends Row> actions, final Object[] results, final Batch.Callback<R> callback)
     throws IOException, InterruptedException;
 
 
@@ -137,9 +136,8 @@ public interface HTableInterface extends
    * Same as {@link #batch(List)}, but with a callback.
    * @since 0.96.0
    */
-  <R> Object[] batchCallback(
-    List<? extends Row> actions, Batch.Callback<R> callback
-  ) throws IOException,
+  public <R> Object[] batchCallback(
+    List<? extends Row> actions, Batch.Callback<R> callback) throws IOException,
     InterruptedException;
 
   /**
@@ -311,7 +309,7 @@ public interface HTableInterface extends
    * @param rm object that specifies the set of mutations to perform atomically
    * @throws IOException
    */
-  void mutateRow(final RowMutations rm) throws IOException;
+  public void mutateRow(final RowMutations rm) throws IOException;
 
   /**
    * Appends values to one or more columns within a single row.
@@ -326,7 +324,7 @@ public interface HTableInterface extends
    * @throws IOException e
    * @return values of columns after the append operation (maybe null)
    */
-  Result append(final Append append) throws IOException;
+  public Result append(final Append append) throws IOException;
 
   /**
    * Increments one or more columns within a single row.
@@ -341,7 +339,7 @@ public interface HTableInterface extends
    * @throws IOException e
    * @return values of columns after the increment
    */
-  Result increment(final Increment increment) throws IOException;
+  public Result increment(final Increment increment) throws IOException;
 
   /**
    * See {@link #incrementColumnValue(byte[], byte[], byte[], long, Durability)}
@@ -495,7 +493,7 @@ public interface HTableInterface extends
    * @param autoFlush
    *        Whether or not to enable 'auto-flush'.
    */
-  void setAutoFlush(boolean autoFlush);
+  public void setAutoFlush(boolean autoFlush);
 
   /**
    * Turns 'auto-flush' on or off.
@@ -524,7 +522,7 @@ public interface HTableInterface extends
    *        Whether to keep Put failures in the writeBuffer
    * @see #flushCommits
    */
-  void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail);
+  public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail);
 
   /**
    * Returns the maximum size in bytes of the write buffer for this HTable.
@@ -533,7 +531,7 @@ public interface HTableInterface extends
    * {@code hbase.client.write.buffer}.
    * @return The size of the write buffer in bytes.
    */
-  long getWriteBufferSize();
+  public long getWriteBufferSize();
 
   /**
    * Sets the size of the buffer in bytes.
@@ -543,5 +541,5 @@ public interface HTableInterface extends
    * @param writeBufferSize The new write buffer size, in bytes.
    * @throws IOException if a remote or network exception occurs.
    */
-  void setWriteBufferSize(long writeBufferSize) throws IOException;
+  public void setWriteBufferSize(long writeBufferSize) throws IOException;
 }

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterAdminKeepAliveConnection.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterAdminKeepAliveConnection.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterAdminKeepAliveConnection.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterAdminKeepAliveConnection.java Wed Jul 10 18:49:33 2013
@@ -40,5 +40,5 @@ extends MasterAdminProtos.MasterAdminSer
    */
   // The Closeable Interface wants to throw an IOE out of a close.
   //  Thats a PITA.  Do this below instead of Closeable.
-  void close();
-}
+  public void close();
+}
\ No newline at end of file

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java Wed Jul 10 18:49:33 2013
@@ -297,7 +297,7 @@ public class MetaScanner {
      * @return A boolean to know if it should continue to loop in the region
      * @throws IOException e
      */
-    boolean processRow(Result rowResult) throws IOException;
+    public boolean processRow(Result rowResult) throws IOException;
   }
 
   public static abstract class MetaScannerVisitorBase implements MetaScannerVisitor {

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java Wed Jul 10 18:49:33 2013
@@ -38,17 +38,17 @@ public interface ResultScanner extends C
    * exhausted.
    * @throws IOException e
    */
-  Result next() throws IOException;
+  public Result next() throws IOException;
 
   /**
    * @param nbRows number of rows to return
    * @return Between zero and <param>nbRows</param> Results
    * @throws IOException e
    */
-  Result [] next(int nbRows) throws IOException;
+  public Result [] next(int nbRows) throws IOException;
 
   /**
    * Closes the scanner and releases any resources it has allocated
    */
-  void close();
+  public void close();
 }

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java Wed Jul 10 18:49:33 2013
@@ -30,5 +30,5 @@ public interface Row extends Comparable<
   /**
    * @return The row.
    */
-  byte [] getRow();
-}
+  public byte [] getRow();
+}
\ No newline at end of file

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java Wed Jul 10 18:49:33 2013
@@ -50,8 +50,8 @@ public abstract class Batch {
    * {@link Batch.Call#call(Object)}
    * @param <R> the return type from {@link Batch.Call#call(Object)}
    */
-  public interface Call<T,R> {
-    R call(T instance) throws IOException;
+  public static interface Call<T,R> {
+    public R call(T instance) throws IOException;
   }
 
   /**
@@ -68,7 +68,7 @@ public abstract class Batch {
    * @param <R> the return type from the associated {@link Batch.Call#call(Object)}
    * @see org.apache.hadoop.hbase.client.HTable#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
    */
-  public interface Callback<R> {
-    void update(byte[] region, byte[] row, R result);
+  public static interface Callback<R> {
+    public void update(byte[] region, byte[] row, R result);
   }
-}
+}
\ No newline at end of file

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeers.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeers.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeers.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeers.java Wed Jul 10 18:49:33 2013
@@ -46,7 +46,7 @@ public interface ReplicationPeers {
    * Initialize the ReplicationPeers interface.
    * @throws KeeperException
    */
-  void init() throws IOException, KeeperException;
+  public void init() throws IOException, KeeperException;
 
   /**
    * Add a new remote slave cluster for replication.
@@ -54,65 +54,65 @@ public interface ReplicationPeers {
    * @param clusterKey the concatenation of the slave cluster's:
    *          hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
    */
-  void addPeer(String peerId, String clusterKey) throws IOException;
+  public void addPeer(String peerId, String clusterKey) throws IOException;
 
   /**
    * Removes a remote slave cluster and stops the replication to it.
    * @param peerId a short that identifies the cluster
    */
-  void removePeer(String peerId) throws IOException;
+  public void removePeer(String peerId) throws IOException;
 
   /**
    * Restart the replication to the specified remote slave cluster.
    * @param peerId a short that identifies the cluster
    */
-  void enablePeer(String peerId) throws IOException;
+  public void enablePeer(String peerId) throws IOException;
 
   /**
    * Stop the replication to the specified remote slave cluster.
    * @param peerId a short that identifies the cluster
    */
-  void disablePeer(String peerId) throws IOException;
+  public void disablePeer(String peerId) throws IOException;
 
   /**
    * Get the replication status for the specified connected remote slave cluster.
    * @param peerId a short that identifies the cluster
    * @return true if replication is enabled, false otherwise.
    */
-  boolean getStatusOfConnectedPeer(String peerId);
+  public boolean getStatusOfConnectedPeer(String peerId);
 
   /**
    * Get a set of all connected remote slave clusters.
    * @return set of peer ids
    */
-  Set<String> getConnectedPeers();
+  public Set<String> getConnectedPeers();
 
   /**
    * List the cluster keys of all remote slave clusters (whether they are enabled/disabled or
    * connected/disconnected).
    * @return A map of peer ids to peer cluster keys
    */
-  Map<String, String> getAllPeerClusterKeys();
+  public Map<String, String> getAllPeerClusterKeys();
 
   /**
    * List the peer ids of all remote slave clusters (whether they are enabled/disabled or
    * connected/disconnected).
    * @return A list of peer ids
    */
-  List<String> getAllPeerIds();
+  public List<String> getAllPeerIds();
 
   /**
    * Attempt to connect to a new remote slave cluster.
    * @param peerId a short that identifies the cluster
    * @return true if a new connection was made, false if no new connection was made.
    */
-  boolean connectToPeer(String peerId) throws IOException, KeeperException;
+  public boolean connectToPeer(String peerId) throws IOException, KeeperException;
 
   /**
    * Disconnect from a remote slave cluster.
    * @param peerId a short that identifies the cluster
    */
-  void disconnectFromPeer(String peerId);
+  public void disconnectFromPeer(String peerId);
 
   /**
    * Returns all region servers from given connected remote slave cluster.
@@ -120,19 +120,19 @@ public interface ReplicationPeers {
    * @return addresses of all region servers in the peer cluster. Returns an empty list if the peer
    *         cluster is unavailable or there are no region servers in the cluster.
    */
-  List<ServerName> getRegionServersOfConnectedPeer(String peerId);
+  public List<ServerName> getRegionServersOfConnectedPeer(String peerId);
 
   /**
    * Returns the UUID of the provided peer id.
    * @param peerId the peer's ID that will be converted into a UUID
    * @return a UUID or null if the peer cluster does not exist or is not connected.
    */
-  UUID getPeerUUID(String peerId);
+  public UUID getPeerUUID(String peerId);
 
   /**
    * Returns the configuration needed to talk to the remote slave cluster.
    * @param peerId a short that identifies the cluster
    * @return the configuration for the peer cluster, null if it was unable to get the configuration
    */
-  Configuration getPeerConf(String peerId) throws KeeperException;
-}
+  public Configuration getPeerConf(String peerId) throws KeeperException;
+}
\ No newline at end of file

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueues.java Wed Jul 10 18:49:33 2013
@@ -37,13 +37,13 @@ public interface ReplicationQueues {
    * @param serverName The server name of the region server that owns the replication queues this
    *          interface manages.
    */
-  void init(String serverName) throws KeeperException;
+  public void init(String serverName) throws KeeperException;
 
   /**
    * Remove a replication queue.
    * @param queueId a String that identifies the queue.
    */
-  void removeQueue(String queueId);
+  public void removeQueue(String queueId);
 
   /**
    * Add a new HLog file to the given queue. If the queue does not exist it is created.
@@ -51,14 +51,14 @@ public interface ReplicationQueues {
    * @param filename name of the HLog
    * @throws KeeperException
    */
-  void addLog(String queueId, String filename) throws KeeperException;
+  public void addLog(String queueId, String filename) throws KeeperException;
 
   /**
    * Remove an HLog file from the given queue.
    * @param queueId a String that identifies the queue.
    * @param filename name of the HLog
    */
-  void removeLog(String queueId, String filename);
+  public void removeLog(String queueId, String filename);
 
   /**
    * Set the current position for a specific HLog in a given queue.
@@ -66,7 +66,7 @@ public interface ReplicationQueues {
    * @param filename name of the HLog
    * @param position the current position in the file
    */
-  void setLogPosition(String queueId, String filename, long position);
+  public void setLogPosition(String queueId, String filename, long position);
 
   /**
    * Get the current position for a specific HLog in a given queue.
@@ -74,25 +74,25 @@ public interface ReplicationQueues {
    * @param filename name of the HLog
    * @return the current position in the file
    */
-  long getLogPosition(String queueId, String filename) throws KeeperException;
+  public long getLogPosition(String queueId, String filename) throws KeeperException;
 
   /**
    * Remove all replication queues for this region server.
    */
-  void removeAllQueues();
+  public void removeAllQueues();
 
   /**
    * Get a list of all HLogs in the given queue.
    * @param queueId a String that identifies the queue
    * @return a list of HLogs, null if this region server is dead and has no outstanding queues
    */
-  List<String> getLogsInQueue(String queueId);
+  public List<String> getLogsInQueue(String queueId);
 
   /**
    * Get a list of all queues for this region server.
    * @return a list of queueIds, null if this region server is dead and has no outstanding queues
    */
-  List<String> getAllQueues();
+  public List<String> getAllQueues();
 
   /**
    * Take ownership for the set of queues belonging to a dead region server.
@@ -100,12 +100,12 @@ public interface ReplicationQueues {
    * @return A SortedMap of the queues that have been claimed, including a SortedSet of HLogs in
    *         each queue. Returns an empty map if no queues were failed-over.
    */
-  SortedMap<String, SortedSet<String>> claimQueues(String regionserver);
+  public SortedMap<String, SortedSet<String>> claimQueues(String regionserver);
 
   /**
    * Get a list of all region servers that have outstanding replication queues. These servers could
    * be alive, dead or from a previous run of the cluster.
    * @return a list of server names
    */
-  List<String> getListOfReplicators();
-}
+  public List<String> getListOfReplicators();
+}
\ No newline at end of file

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java Wed Jul 10 18:49:33 2013
@@ -31,7 +31,7 @@ public interface ReplicationQueuesClient
    * be alive, dead or from a previous run of the cluster.
    * @return a list of server names
    */
-  List<String> getListOfReplicators();
+  public List<String> getListOfReplicators();
 
   /**
    * Get a list of all HLogs in the given queue on the given region server.
@@ -39,12 +39,12 @@ public interface ReplicationQueuesClient
    * @param queueId a String that identifies the queue
    * @return a list of HLogs, null if this region server is dead and has no outstanding queues
    */
-  List<String> getLogsInQueue(String serverName, String queueId);
+  public List<String> getLogsInQueue(String serverName, String queueId);
 
   /**
    * Get a list of all queues for the specified region server.
    * @param serverName the server name of the region server that owns the set of queues
    * @return a list of queueIds, null if this region server is not a replicator.
    */
-  List<String> getAllQueues(String serverName);
-}
+  public List<String> getAllQueues(String serverName);
+}
\ No newline at end of file

Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java Wed Jul 10 18:49:33 2013
@@ -216,17 +216,17 @@ public class PoolMap<K, V> implements Ma
   }
 
   protected interface Pool<R> {
-    R get();
+    public R get();
 
-    R put(R resource);
+    public R put(R resource);
 
-    boolean remove(R resource);
+    public boolean remove(R resource);
 
-    void clear();
+    public void clear();
 
-    Collection<R> values();
+    public Collection<R> values();
 
-    int size();
+    public int size();
   }
 
   public enum PoolType {

Modified: hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java (original)
+++ hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java Wed Jul 10 18:49:33 2013
@@ -24,6 +24,9 @@ import java.nio.ByteBuffer;
 import java.util.Arrays;
 
 import org.apache.commons.lang.time.StopWatch;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
@@ -39,15 +42,16 @@ import org.apache.hadoop.hbase.util.Clas
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.log4j.Level;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.mortbay.log.Log;
-import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.log4j.Level;
 
 @Category(SmallTests.class) 
 public class TestIPCUtil {
+
+  public static final Log LOG = LogFactory.getLog(IPCUtil.class);
+
   IPCUtil util;
   @Before
   public void before() {
@@ -148,7 +152,7 @@ public class TestIPCUtil {
       timerTest(util, timer, count, size, codec, compressor, false);
     }
     timer.stop();
-    Log.info("Codec=" + codec + ", compression=" + compressor + ", sized=" + false +
+    LOG.info("Codec=" + codec + ", compression=" + compressor + ", sized=" + false +
         ", count=" + count + ", size=" + size + ", + took=" + timer.getTime() + "ms");
     timer.reset();
     timer.start();
@@ -156,7 +160,7 @@ public class TestIPCUtil {
       timerTest(util, timer, count, size, codec, compressor, true);
     }
     timer.stop();
-    Log.info("Codec=" + codec + ", compression=" + compressor + ", sized=" + true +
+    LOG.info("Codec=" + codec + ", compression=" + compressor + ", sized=" + true +
       ", count=" + count + ", size=" + size + ", + took=" + timer.getTime() + "ms");
   }
 
@@ -189,4 +193,4 @@ public class TestIPCUtil {
     timerTests(util, count, size,  new KeyValueCodec(), new DefaultCodec());
     timerTests(util, count, size,  new KeyValueCodec(), new GzipCodec());
   }
-}
\ No newline at end of file
+}

Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java Wed Jul 10 18:49:33 2013
@@ -66,7 +66,7 @@ public class CompoundConfiguration exten
 
   // Devs: these APIs are the same contract as their counterparts in
   // Configuration.java
-  private interface ImmutableConfigMap extends Iterable<Map.Entry<String,String>> {
+  private static interface ImmutableConfigMap extends Iterable<Map.Entry<String,String>> {
     String get(String key);
     String getRaw(String key);
     Class<?> getClassByName(String name) throws ClassNotFoundException;

Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java Wed Jul 10 18:49:33 2013
@@ -2514,14 +2514,14 @@ public class KeyValue implements Cell, H
   /**
    * Avoids redundant comparisons for better performance.
    */
-  public interface SamePrefixComparator<T> {
+  public static interface SamePrefixComparator<T> {
     /**
      * Compare two keys assuming that the first n bytes are the same.
      * @param commonPrefix How many bytes are the same.
      */
-    int compareIgnoringPrefix(
-      int commonPrefix, T left, int loffset, int llength, T right, int roffset, int rlength
-    );
+    public int compareIgnoringPrefix(int commonPrefix,
+        T left, int loffset, int llength,
+        T right, int roffset, int rlength);
   }
 
   /**

Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java Wed Jul 10 18:49:33 2013
@@ -37,14 +37,14 @@ public interface Codec {
    * Call flush when done.  Some encoders may not put anything on the stream until flush is called.
    * On flush, let go of any resources used by the encoder.
    */
-  interface Encoder extends CellOutputStream {}
+  public interface Encoder extends CellOutputStream {}
 
   /**
    * Implementations should implicitly clean up any resources allocated when the
    * Decoder/CellScanner runs off the end of the cell block. Do this rather than require the user
    * call close explicitly.
    */
-  interface Decoder extends CellScanner {};
+  public interface Decoder extends CellScanner {};
 
   Decoder getDecoder(InputStream is);
   Encoder getEncoder(OutputStream os);

Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java Wed Jul 10 18:49:33 2013
@@ -45,5 +45,5 @@ public interface HeapSize {
    * @return Approximate 'exclusive deep size' of implementing object.  Includes
    * count of payload and hosting object sizings.
   */
-  long heapSize();
-}
+  public long heapSize();
+}
\ No newline at end of file

Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java Wed Jul 10 18:49:33 2013
@@ -57,9 +57,9 @@ public interface DataBlockEncoder {
    * @throws IOException
    *           If there is an error writing to output stream.
    */
-  void encodeKeyValues(
-    ByteBuffer in, boolean includesMemstoreTS, HFileBlockEncodingContext encodingContext
-  ) throws IOException;
+  public void encodeKeyValues(
+      ByteBuffer in, boolean includesMemstoreTS,
+      HFileBlockEncodingContext encodingContext) throws IOException;
 
   /**
    * Decode.
@@ -69,9 +69,8 @@ public interface DataBlockEncoder {
    * @return Uncompressed block of KeyValues.
    * @throws IOException If there is an error in source.
    */
-  ByteBuffer decodeKeyValues(
-    DataInputStream source, boolean includesMemstoreTS
-  ) throws IOException;
+  public ByteBuffer decodeKeyValues(DataInputStream source,
+      boolean includesMemstoreTS) throws IOException;
 
   /**
    * Uncompress.
@@ -83,9 +82,8 @@ public interface DataBlockEncoder {
    * @return Uncompressed block of KeyValues.
    * @throws IOException If there is an error in source.
    */
-  ByteBuffer decodeKeyValues(
-    DataInputStream source, int allocateHeaderLength, int skipLastBytes, boolean includesMemstoreTS
-  )
+  public ByteBuffer decodeKeyValues(DataInputStream source,
+      int allocateHeaderLength, int skipLastBytes, boolean includesMemstoreTS)
       throws IOException;
 
   /**
@@ -96,7 +94,7 @@ public interface DataBlockEncoder {
    * @param block encoded block we want index, the position will not change
    * @return First key in block.
    */
-  ByteBuffer getFirstKeyInBlock(ByteBuffer block);
+  public ByteBuffer getFirstKeyInBlock(ByteBuffer block);
 
   /**
    * Create a HFileBlock seeker which find KeyValues within a block.
@@ -105,9 +103,8 @@ public interface DataBlockEncoder {
    *          key-value pair
    * @return A newly created seeker.
    */
-  EncodedSeeker createSeeker(
-    RawComparator<byte[]> comparator, boolean includesMemstoreTS
-  );
+  public EncodedSeeker createSeeker(RawComparator<byte[]> comparator,
+      boolean includesMemstoreTS);
 
   /**
    * Creates a encoder specific encoding context
@@ -122,9 +119,9 @@ public interface DataBlockEncoder {
    *          is unknown
    * @return a newly created encoding context
    */
-  HFileBlockEncodingContext newDataBlockEncodingContext(
-    Algorithm compressionAlgorithm, DataBlockEncoding encoding, byte[] headerBytes
-  );
+  public HFileBlockEncodingContext newDataBlockEncodingContext(
+      Algorithm compressionAlgorithm, DataBlockEncoding encoding,
+      byte[] headerBytes);
 
   /**
    * Creates an encoder specific decoding context, which will prepare the data
@@ -134,9 +131,8 @@ public interface DataBlockEncoder {
    *          compression algorithm used if the data needs to be decompressed
    * @return a newly created decoding context
    */
-  HFileBlockDecodingContext newDataBlockDecodingContext(
-    Algorithm compressionAlgorithm
-  );
+  public HFileBlockDecodingContext newDataBlockDecodingContext(
+      Algorithm compressionAlgorithm);
 
   /**
    * An interface which enable to seek while underlying data is encoded.
@@ -144,19 +140,19 @@ public interface DataBlockEncoder {
    * It works on one HFileBlock, but it is reusable. See
    * {@link #setCurrentBuffer(ByteBuffer)}.
    */
-  interface EncodedSeeker {
+  public static interface EncodedSeeker {
     /**
      * Set on which buffer there will be done seeking.
      * @param buffer Used for seeking.
      */
-    void setCurrentBuffer(ByteBuffer buffer);
+    public void setCurrentBuffer(ByteBuffer buffer);
 
     /**
      * Does a deep copy of the key at the current position. A deep copy is
      * necessary because buffers are reused in the decoder.
      * @return key at current position
      */
-    ByteBuffer getKeyDeepCopy();
+    public ByteBuffer getKeyDeepCopy();
 
     /**
      * Does a shallow copy of the value at the current position. A shallow
@@ -164,25 +160,25 @@ public interface DataBlockEncoder {
      * of the original encoded buffer.
      * @return value at current position
      */
-    ByteBuffer getValueShallowCopy();
+    public ByteBuffer getValueShallowCopy();
 
     /** @return key value at current position with position set to limit */
-    ByteBuffer getKeyValueBuffer();
+    public ByteBuffer getKeyValueBuffer();
 
     /**
      * @return the KeyValue object at the current position. Includes memstore
      *         timestamp.
      */
-    KeyValue getKeyValue();
+    public KeyValue getKeyValue();
 
     /** Set position to beginning of given block */
-    void rewind();
+    public void rewind();
 
     /**
      * Move to next position
      * @return true on success, false if there is no more positions.
      */
-    boolean next();
+    public boolean next();
 
     /**
      * Moves the seeker position within the current block to:
@@ -201,8 +197,7 @@ public interface DataBlockEncoder {
      *          of an exact match. Does not matter in case of an inexact match.
      * @return 0 on exact match, 1 on inexact match.
      */
-    int seekToKeyInBlock(
-      byte[] key, int offset, int length, boolean seekBefore
-    );
+    public int seekToKeyInBlock(byte[] key, int offset, int length,
+        boolean seekBefore);
   }
 }

Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java Wed Jul 10 18:49:33 2013
@@ -32,7 +32,7 @@ public interface HFileBlockDecodingConte
   /**
    * @return the compression algorithm used by this decoding context
    */
-  Compression.Algorithm getCompression();
+  public Compression.Algorithm getCompression();
 
   /**
    * Perform all actions that need to be done before the encoder's real decoding process.
@@ -47,12 +47,7 @@ public interface HFileBlockDecodingConte
    * @param offset data start offset in onDiskBlock
    * @throws IOException
    */
-  void prepareDecoding(
-    int onDiskSizeWithoutHeader,
-    int uncompressedSizeWithoutHeader,
-    ByteBuffer blockBufferWithoutHeader,
-    byte[] onDiskBlock,
-    int offset
-  ) throws IOException;
+  public void prepareDecoding(int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
+      ByteBuffer blockBufferWithoutHeader, byte[] onDiskBlock, int offset) throws IOException;
 
 }

Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java Wed Jul 10 18:49:33 2013
@@ -34,39 +34,39 @@ public interface HFileBlockEncodingConte
   /**
    * @return OutputStream to which encoded data is written
    */
-  OutputStream getOutputStreamForEncoder();
+  public OutputStream getOutputStreamForEncoder();
 
   /**
    * @return encoded and compressed bytes with header which are ready to write
    *         out to disk
    */
-  byte[] getOnDiskBytesWithHeader();
+  public byte[] getOnDiskBytesWithHeader();
 
   /**
    * @return encoded but not heavily compressed bytes with header which can be
    *         cached in block cache
    */
-  byte[] getUncompressedBytesWithHeader();
+  public byte[] getUncompressedBytesWithHeader();
 
   /**
    * @return the block type after encoding
    */
-  BlockType getBlockType();
+  public BlockType getBlockType();
 
   /**
    * @return the compression algorithm used by this encoding context
    */
-  Compression.Algorithm getCompression();
+  public Compression.Algorithm getCompression();
 
   /**
    * sets the dummy header bytes
    */
-  void setDummyHeader(byte[] headerBytes);
+  public void setDummyHeader(byte[] headerBytes);
 
   /**
    * @return the {@link DataBlockEncoding} encoding used
    */
-  DataBlockEncoding getDataBlockEncoding();
+  public DataBlockEncoding getDataBlockEncoding();
 
   /**
    * Do any action that needs to be performed after the encoding.
@@ -76,11 +76,11 @@ public interface HFileBlockEncodingConte
    * @param blockType
    * @throws IOException
    */
-  void postEncoding(BlockType blockType) throws IOException;
+  public void postEncoding(BlockType blockType) throws IOException;
 
   /**
    * Releases the resources used.
    */
-  void close();
+  public void close();
 
 }

Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java?rev=1501908&r1=1501907&r2=1501908&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java Wed Jul 10 18:49:33 2013
@@ -1000,9 +1000,8 @@ public class Bytes {
   }
 
   interface Comparer<T> {
-    int compareTo(
-      T buffer1, int offset1, int length1, T buffer2, int offset2, int length2
-    );
+    abstract public int compareTo(T buffer1, int offset1, int length1,
+        T buffer2, int offset2, int length2);
   }
 
   @VisibleForTesting



Mime
View raw message