hawq-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nh...@apache.org
Subject [2/2] incubator-hawq git commit: HAWQ-165. Change PXF logger to be private static final
Date Mon, 11 Jan 2016 21:58:56 GMT
HAWQ-165. Change PXF logger to be private static final

Includes many minor indentation and doc fixes.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/127cac3e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/127cac3e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/127cac3e

Branch: refs/heads/master
Commit: 127cac3e23fb502bfedab5cf0910564636c284bd
Parents: e48a07b
Author: Noa Horn <nhorn@pivotal.io>
Authored: Mon Jan 11 13:01:37 2016 -0800
Committer: Noa Horn <nhorn@pivotal.io>
Committed: Mon Jan 11 13:01:37 2016 -0800

----------------------------------------------------------------------
 .../hawq-hadoop/hawq-mapreduce-tool/.gitignore  |   1 +
 .../org/apache/hawq/pxf/api/FragmentsStats.java |  10 +-
 .../hawq/pxf/api/utilities/ProfilesConf.java    |  17 +--
 .../apache/hawq/pxf/api/FragmentsStatsTest.java |   4 +-
 .../pxf/api/utilities/ProfilesConfTest.java     | 142 +++++++++++++------
 .../hbase/utilities/HBaseLookupTable.java       |  62 ++++----
 .../pxf/plugins/hdfs/LineBreakAccessor.java     |  10 +-
 .../pxf/plugins/hdfs/SequenceFileAccessor.java  |  14 +-
 .../plugins/hdfs/utilities/HdfsUtilities.java   |  10 +-
 .../hdfs/utilities/RecordkeyAdapter.java        | 123 ++++++++--------
 .../plugins/hdfs/StringPassResolverTest.java    | 111 ++++++++-------
 .../hdfs/utilities/RecordkeyAdapterTest.java    |  20 +--
 .../hawq/pxf/plugins/hive/HiveAccessor.java     |   5 +-
 .../pxf/plugins/hive/HiveDataFragmenter.java    |  11 +-
 .../hawq/pxf/plugins/hive/HiveResolver.java     |  17 +--
 .../hawq/pxf/service/BridgeOutputBuilder.java   |  19 ++-
 .../hawq/pxf/service/FragmentsResponse.java     |  30 ++--
 .../pxf/service/FragmentsResponseFormatter.java |  49 ++++---
 .../pxf/service/MetadataResponseFormatter.java  |   8 +-
 .../org/apache/hawq/pxf/service/ReadBridge.java |  15 +-
 .../hawq/pxf/service/ReadSamplingBridge.java    |   9 +-
 .../hawq/pxf/service/io/GPDBWritable.java       |  10 +-
 .../org/apache/hawq/pxf/service/io/Text.java    |   8 +-
 .../hawq/pxf/service/rest/BridgeResource.java   |  29 ++--
 .../pxf/service/rest/ClusterNodesResource.java  |  87 +++++++-----
 .../pxf/service/rest/FragmenterResource.java    |  15 +-
 .../pxf/service/rest/InvalidPathResource.java   |   4 +-
 .../hawq/pxf/service/rest/MetadataResource.java |  72 +++++-----
 .../hawq/pxf/service/rest/RestResource.java     |  25 ++--
 .../service/rest/ServletLifecycleListener.java  |  16 +--
 .../hawq/pxf/service/rest/VersionResource.java  |   2 +-
 .../pxf/service/utilities/AnalyzeUtils.java     |  13 +-
 .../pxf/service/utilities/Log4jConfigure.java   |  43 +++---
 .../hawq/pxf/service/utilities/SecureLogin.java |  55 +++----
 .../hawq/pxf/service/utilities/SecuredHDFS.java |  21 +--
 .../hawq/pxf/service/utilities/Utilities.java   |  10 +-
 .../hawq/pxf/service/io/GPDBWritableTest.java   |  14 +-
 37 files changed, 602 insertions(+), 509 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/contrib/hawq-hadoop/hawq-mapreduce-tool/.gitignore
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-tool/.gitignore b/contrib/hawq-hadoop/hawq-mapreduce-tool/.gitignore
index 314002f..9e2a5d7 100644
--- a/contrib/hawq-hadoop/hawq-mapreduce-tool/.gitignore
+++ b/contrib/hawq-hadoop/hawq-mapreduce-tool/.gitignore
@@ -1,2 +1,3 @@
 target/
 test-data/*/output
+lib/

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FragmentsStats.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FragmentsStats.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FragmentsStats.java
index fad1a2d..425922c 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FragmentsStats.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FragmentsStats.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.api;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.api;
  * under the License.
  */
 
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.codehaus.jackson.map.ObjectMapper;
@@ -37,7 +36,7 @@ public class FragmentsStats {
      */
     public static final long DEFAULT_FRAGMENT_SIZE = 67108864L;
 
-    private static Log Log = LogFactory.getLog(FragmentsStats.class);
+    private static final Log LOG = LogFactory.getLog(FragmentsStats.class);
 
     // number of fragments
     private long fragmentsNumber;
@@ -139,8 +138,7 @@ public class FragmentsStats {
     /**
      * Given a {@link FragmentsStats}, serialize it in JSON to be used as the
      * result string for HAWQ. An example result is as follows:
-     * <code>{"PXFFragmentsStats":{"fragmentsNumber"
-     * :3,"firstFragmentSize":67108864,"totalSize":200000000}}</code>
+     * <code>{"PXFFragmentsStats":{"fragmentsNumber":3,"firstFragmentSize":{"size"=67108864,"unit":"B"},"totalSize":{"size"=200000000,"unit"="B"}}}</code>
      *
      * @param stats the data to be serialized
      * @return the result in json format

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ProfilesConf.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ProfilesConf.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ProfilesConf.java
index 607f28b..2c20ab7 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ProfilesConf.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ProfilesConf.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.api.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -41,8 +41,9 @@ import static org.apache.hawq.pxf.api.utilities.ProfileConfException.MessageForm
  * It exposes a public static method getProfilePluginsMap(String plugin) which returns the requested profile plugins
  */
 public enum ProfilesConf {
-    INSTANCE;
-    private Log log = LogFactory.getLog(ProfilesConf.class);
+    INSTANCE; // enum singleton
+    // not necessary to declare LOG as static final, because this is a singleton
+    private Log LOG = LogFactory.getLog(ProfilesConf.class);
     private Map<String, Map<String, String>> profilesMap;
     private final static String EXTERNAL_PROFILES = "pxf-profiles.xml";
     private final static String INTERNAL_PROFILES = "pxf-profiles-default.xml";
@@ -59,7 +60,7 @@ public enum ProfilesConf {
         if (profilesMap.isEmpty()) {
             throw new ProfileConfException(PROFILES_FILE_NOT_FOUND, EXTERNAL_PROFILES);
         }
-        log.info("PXF profiles loaded: " + profilesMap.keySet());
+        LOG.info("PXF profiles loaded: " + profilesMap.keySet());
     }
 
     /**
@@ -88,7 +89,7 @@ public enum ProfilesConf {
     private void loadConf(String fileName, boolean isMandatory) {
         URL url = getClassLoader().getResource(fileName);
         if (url == null) {
-            log.warn(fileName + " not found in the classpath");
+            LOG.warn(fileName + " not found in the classpath");
             if (isMandatory) {
                 throw new ProfileConfException(PROFILES_FILE_NOT_FOUND, fileName);
             }
@@ -105,14 +106,14 @@ public enum ProfilesConf {
     private void loadMap(XMLConfiguration conf) {
         String[] profileNames = conf.getStringArray("profile.name");
         if (profileNames.length == 0) {
-            log.warn("Profile file: " + conf.getFileName() + " is empty");
+            LOG.warn("Profile file: " + conf.getFileName() + " is empty");
             return;
         }
         Map<String, Map<String, String>> profileMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
         for (int profileIdx = 0; profileIdx < profileNames.length; profileIdx++) {
             String profileName = profileNames[profileIdx];
             if (profileMap.containsKey(profileName)) {
-                log.warn("Duplicate profile definition found in " + conf.getFileName() + " for: " + profileName);
+                LOG.warn("Duplicate profile definition found in " + conf.getFileName() + " for: " + profileName);
                 continue;
             }
             Configuration profileSubset = conf.subset("profile(" + profileIdx + ").plugins");

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FragmentsStatsTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FragmentsStatsTest.java b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FragmentsStatsTest.java
index e139ce6..6248f9d 100644
--- a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FragmentsStatsTest.java
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FragmentsStatsTest.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.api;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/utilities/ProfilesConfTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/utilities/ProfilesConfTest.java b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/utilities/ProfilesConfTest.java
index 495e48c..0631cb2 100644
--- a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/utilities/ProfilesConfTest.java
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/utilities/ProfilesConfTest.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.api.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.api.utilities;
  * under the License.
  */
 
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.Before;
@@ -44,14 +43,18 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 /**
- * Base test class for all ProfilesConf tests.
- * Each test case is encapsulated inside its own inner class to force reloading of ProfilesConf enum singleton
+ * Base test class for all ProfilesConf tests. Each test case is encapsulated
+ * inside its own inner class to force reloading of ProfilesConf enum singleton
  */
 @RunWith(PowerMockRunner.class)
-@PrepareForTest({ProfilesConf.class, Log.class, LogFactory.class, ClassLoader.class})
+@PrepareForTest({
+        ProfilesConf.class,
+        Log.class,
+        LogFactory.class,
+        ClassLoader.class })
 public class ProfilesConfTest {
     static ClassLoader classLoader;
-    static Log log;
+    static Log LOG;
     String mandatoryFileName = "mandatory.xml";
     String optionalFileName = "optional.xml";
     File mandatoryFile;
@@ -65,10 +68,12 @@ public class ProfilesConfTest {
         mandatoryFile = testFolder.newFile(mandatoryFileName);
         optionalFile = testFolder.newFile(optionalFileName);
         PowerMockito.mockStatic(LogFactory.class);
-        log = mock(Log.class);
-        when(LogFactory.getLog(ProfilesConf.class)).thenReturn(log);
+        LOG = mock(Log.class);
+        when(LogFactory.getLog(ProfilesConf.class)).thenReturn(LOG);
         classLoader = mock(ClassLoader.class);
-        PowerMockito.stub(PowerMockito.method(ProfilesConf.class, "getClassLoader")).toReturn(classLoader);
+        PowerMockito.stub(
+                PowerMockito.method(ProfilesConf.class, "getClassLoader")).toReturn(
+                classLoader);
     }
 
     void writeFile(File file, String content) throws IOException {
@@ -79,36 +84,53 @@ public class ProfilesConfTest {
 class ProfilesConfTestDefinedProfile extends ProfilesConfTest {
     @Test
     public void definedProfile() throws Exception {
-        writeFile(mandatoryFile, "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1><plugin2>XX</plugin2></plugins></profile></profiles>");
-        writeFile(optionalFile, "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                mandatoryFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1><plugin2>XX</plugin2></plugins></profile></profiles>");
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
 
         Map<String, String> hbaseProfile = ProfilesConf.getProfilePluginsMap("HBase");
         assertEquals(2, hbaseProfile.keySet().size());
         assertEquals(hbaseProfile.get("X-GP-PLUGIN1"), "X");
         assertEquals(hbaseProfile.get("X-GP-PLUGIN2"), "XX");
 
-        Map<String, String> hiveProfile = ProfilesConf.getProfilePluginsMap("hIVe");// case insensitive profile name
+        Map<String, String> hiveProfile = ProfilesConf.getProfilePluginsMap("hIVe");// case
+                                                                                    // insensitive
+                                                                                    // profile
+                                                                                    // name
         assertEquals(1, hiveProfile.keySet().size());
         assertEquals(hiveProfile.get("X-GP-PLUGIN1"), "Y");
 
-        Mockito.verify(log).info("PXF profiles loaded: [HBase, Hive]");
+        Mockito.verify(LOG).info("PXF profiles loaded: [HBase, Hive]");
     }
 }
 
 class ProfilesConfTestUndefinedProfile extends ProfilesConfTest {
     @Test
     public void undefinedProfile() throws Exception {
-        writeFile(mandatoryFile, "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1></plugins></profile></profiles>");
-        writeFile(optionalFile, "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                mandatoryFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1></plugins></profile></profiles>");
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
         try {
             ProfilesConf.getProfilePluginsMap("UndefinedProfile");
             fail("undefined profile should have thrown exception");
         } catch (ProfileConfException pce) {
-            assertEquals(pce.getMessage(), String.format(NO_PROFILE_DEF.getFormat(), "UndefinedProfile", "pxf-profiles.xml"));
+            assertEquals(pce.getMessage(), String.format(
+                    NO_PROFILE_DEF.getFormat(), "UndefinedProfile",
+                    "pxf-profiles.xml"));
         }
     }
 }
@@ -116,22 +138,36 @@ class ProfilesConfTestUndefinedProfile extends ProfilesConfTest {
 class ProfilesConfTestDuplicateProfileDefinition extends ProfilesConfTest {
     @Test
     public void duplicateProfileDefinition() throws Exception {
-        writeFile(mandatoryFile, "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1><plugin1>YY</plugin1></plugins></profile><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        writeFile(optionalFile, "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                mandatoryFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1><plugin1>YY</plugin1></plugins></profile><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
         ProfilesConf.getProfilePluginsMap("HBase");
-        Mockito.verify(log).warn("Duplicate profile definition found in " + mandatoryFileName + " for: HBase");
+        Mockito.verify(LOG).warn(
+                "Duplicate profile definition found in " + mandatoryFileName
+                        + " for: HBase");
     }
 }
 
 class ProfilesConfTestOverrideProfile extends ProfilesConfTest {
     @Test
     public void overrideProfile() throws Exception {
-        writeFile(mandatoryFile, "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1></plugins></profile></profiles>");
-        writeFile(optionalFile, "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1><plugin2>YY</plugin2></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                mandatoryFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1></plugins></profile></profiles>");
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1><plugin2>YY</plugin2></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
         Map profile = ProfilesConf.getProfilePluginsMap("HBase");
         assertEquals(2, profile.keySet().size());
         assertEquals(profile.get("X-GP-PLUGIN1"), "Y");
@@ -143,11 +179,16 @@ class ProfilesConfTestEmptyProfileFile extends ProfilesConfTest {
     @Test
     public void emptyProfileFile() throws Exception {
         writeFile(mandatoryFile, "<profiles/>");
-        writeFile(optionalFile, "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
         ProfilesConf.getProfilePluginsMap("HBase");
-        Mockito.verify(log).warn("Profile file: " + mandatoryFileName + " is empty");
+        Mockito.verify(LOG).warn(
+                "Profile file: " + mandatoryFileName + " is empty");
     }
 }
 
@@ -155,14 +196,20 @@ class ProfilesConfTestMalformedProfileFile extends ProfilesConfTest {
     @Test
     public void malformedProfileFile() throws Exception {
         writeFile(mandatoryFile, "I'm a malford x.m.l@#$#<%");
-        writeFile(optionalFile, "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
         try {
             ProfilesConf.getProfilePluginsMap("HBase");
             fail("malformed profile file should have thrown exception");
         } catch (ExceptionInInitializerError pce) {
-            assertTrue(pce.getCause().getMessage().contains(mandatoryFileName + " could not be loaded: org.xml.sax.SAXParseException"));
+            assertTrue(pce.getCause().getMessage().contains(
+                    mandatoryFileName
+                            + " could not be loaded: org.xml.sax.SAXParseException"));
         }
     }
 }
@@ -170,13 +217,17 @@ class ProfilesConfTestMalformedProfileFile extends ProfilesConfTest {
 class ProfilesConfTestMissingMandatoryProfileFile extends ProfilesConfTest {
     @Test
     public void missingMandatoryProfileFile() throws Exception {
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(null);
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                null);
         try {
             ProfilesConf.getProfilePluginsMap("HBase");
             fail("missing mandatory profile file should have thrown exception");
         } catch (ExceptionInInitializerError pce) {
-            Mockito.verify(log).warn("pxf-profiles-default.xml not found in the classpath");
-            assertEquals(pce.getCause().getMessage(), String.format(PROFILES_FILE_NOT_FOUND.getFormat(), "pxf-profiles-default.xml"));
+            Mockito.verify(LOG).warn(
+                    "pxf-profiles-default.xml not found in the classpath");
+            assertEquals(pce.getCause().getMessage(), String.format(
+                    PROFILES_FILE_NOT_FOUND.getFormat(),
+                    "pxf-profiles-default.xml"));
         }
     }
 }
@@ -184,11 +235,14 @@ class ProfilesConfTestMissingMandatoryProfileFile extends ProfilesConfTest {
 class ProfilesConfTestMissingOptionalProfileFile extends ProfilesConfTest {
     @Test
     public void missingOptionalProfileFile() throws Exception {
-        writeFile(mandatoryFile, "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
+        writeFile(
+                mandatoryFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
         when(classLoader.getResource("pxf-profiles.xml")).thenReturn(null);
         Map<String, String> hbaseProfile = ProfilesConf.getProfilePluginsMap("HBase");
         assertEquals("Y", hbaseProfile.get("X-GP-PLUGIN1"));
-        Mockito.verify(log).warn("pxf-profiles.xml not found in the classpath");
+        Mockito.verify(LOG).warn("pxf-profiles.xml not found in the classpath");
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/utilities/HBaseLookupTable.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/utilities/HBaseLookupTable.java b/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/utilities/HBaseLookupTable.java
index f68c3c2..1515687 100644
--- a/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/utilities/HBaseLookupTable.java
+++ b/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/utilities/HBaseLookupTable.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hbase.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.plugins.hbase.utilities;
  * under the License.
  */
 
-
 import org.apache.commons.collections.MapUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -36,19 +35,23 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- * HBaseLookupTable will load a table's lookup information
- * from HBase pxflookup table if exists.<br>
- * This table holds mappings between HAWQ column names (key) and HBase column names (value).<br>
- * E.g. for an HBase table "hbase_table", mappings between HAWQ column names and HBase column names,
- * when <code>"hawq1"</code> is mapped to <code>"cf1:hbase1"</code> and
- * <code>"hawq2"</code> is mapped to <code>"cf1:hbase2"</code>, will be:<br>
+ * HBaseLookupTable will load a table's lookup information from HBase pxflookup
+ * table if exists.<br>
+ * This table holds mappings between HAWQ column names (key) and HBase column
+ * names (value).<br>
+ * E.g. for an HBase table "hbase_table", mappings between HAWQ column names and
+ * HBase column names, when <code>"hawq1"</code> is mapped to
+ * <code>"cf1:hbase1"</code> and <code>"hawq2"</code> is mapped to
+ * <code>"cf1:hbase2"</code>, will be:<br>
+ *
  * <pre>
  * 	ROW                     COLUMN+CELL
  *  hbase_table             column=mapping:hawq1, value=cf1:hbase1
  *  hbase_table             column=mapping:hawq2, value=cf1:hbase2
  * </pre>
  *
- * Data is returned as a map of string and byte array from {@link #getMappings(String)}.
+ * Data is returned as a map of string and byte array from
+ * {@link #getMappings(String)}.
  * <p>
  * Once created, {@link #close()} MUST be called to cleanup resources.
  */
@@ -65,8 +68,8 @@ public class HBaseLookupTable implements Closeable {
     private Table lookupTable;
 
     /**
-     * Constructs a connector to HBase lookup table.
-     * Requires calling {@link #close()} to close {@link HBaseAdmin} instance.
+     * Constructs a connector to HBase lookup table. Requires calling
+     * {@link #close()} to close {@link HBaseAdmin} instance.
      *
      * @param conf HBase configuration
      * @throws IOException when initializing HBaseAdmin fails
@@ -76,14 +79,14 @@ public class HBaseLookupTable implements Closeable {
         connection = ConnectionFactory.createConnection(hbaseConfiguration);
         admin = connection.getAdmin();
         ClusterStatus cs = admin.getClusterStatus();
-        LOG.debug("HBase cluster has " + cs.getServersSize() + " region servers " +
-                "(" + cs.getDeadServers() + " dead)");
+        LOG.debug("HBase cluster has " + cs.getServersSize()
+                + " region servers " + "(" + cs.getDeadServers() + " dead)");
     }
 
     /**
      * Returns mappings for given table name between its HAWQ column names and
-     * HBase column names.
-     * If lookup table doesn't exist or no mappings for the table exist, returns null.
+     * HBase column names. If lookup table doesn't exist or no mappings for the
+     * table exist, returns null.
      * <p>
      * All HAWQ column names are returns in low case.
      *
@@ -119,12 +122,12 @@ public class HBaseLookupTable implements Closeable {
      * @return whether lookup table is valid
      */
     private boolean lookupTableValid() throws IOException {
-        return (HBaseUtilities.isTableAvailable(admin, LOOKUPTABLENAME) &&
-                lookupHasCorrectStructure());
+        return (HBaseUtilities.isTableAvailable(admin, LOOKUPTABLENAME) && lookupHasCorrectStructure());
     }
 
     /**
-     * Returns true if {@link #LOOKUPTABLENAME} has {@value #LOOKUPCOLUMNFAMILY} family.
+     * Returns true if {@link #LOOKUPTABLENAME} has {@value #LOOKUPCOLUMNFAMILY}
+     * family.
      *
      * @return whether lookup has expected column family name
      */
@@ -145,22 +148,21 @@ public class HBaseLookupTable implements Closeable {
     }
 
     /**
-     * Returns true if lookup table has no relevant mappings.
-     * Should be called after {@link #loadMappingMap(String)}.
+     * Returns true if lookup table has no relevant mappings. Should be called
+     * after {@link #loadMappingMap(String)}.
      */
     private boolean tableHasNoMappings() {
         return MapUtils.isEmpty(rawTableMapping);
     }
 
     /**
-     * Returns a map of mappings between HAWQ and HBase column names,
-     * with the HAWQ column values in lower case.
+     * Returns a map of mappings between HAWQ and HBase column names, with the
+     * HAWQ column values in lower case.
      */
     private Map<String, byte[]> lowerCaseMappings() {
         Map<String, byte[]> lowCaseKeys = new HashMap<String, byte[]>();
         for (Map.Entry<byte[], byte[]> entry : rawTableMapping.entrySet()) {
-            lowCaseKeys.put(lowerCase(entry.getKey()),
-                    entry.getValue());
+            lowCaseKeys.put(lowerCase(entry.getKey()), entry.getValue());
         }
 
         return lowCaseKeys;
@@ -174,8 +176,9 @@ public class HBaseLookupTable implements Closeable {
     }
 
     /**
-     * Loads mappings for given table name from the lookup table {@link #LOOKUPTABLENAME}.
-     * The table name should be in the row key, and the family name should be {@link #LOOKUPCOLUMNFAMILY}.
+     * Loads mappings for given table name from the lookup table
+     * {@link #LOOKUPTABLENAME}. The table name should be in the row key, and
+     * the family name should be {@link #LOOKUPCOLUMNFAMILY}.
      *
      * @param tableName HBase table name
      * @throws IOException when HBase operations fail
@@ -188,8 +191,9 @@ public class HBaseLookupTable implements Closeable {
 
         row = lookupTable.get(lookupRow);
         rawTableMapping = row.getFamilyMap(LOOKUPCOLUMNFAMILY);
-        LOG.debug("lookup table mapping for " + tableName +
-                " has " + (rawTableMapping == null ? 0 : rawTableMapping.size()) + " entries");
+        LOG.debug("lookup table mapping for " + tableName + " has "
+                + (rawTableMapping == null ? 0 : rawTableMapping.size())
+                + " entries");
     }
 
     private void closeLookupTable() throws IOException {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/LineBreakAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/LineBreakAccessor.java b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/LineBreakAccessor.java
index 92853a3..40ca2fa 100644
--- a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/LineBreakAccessor.java
+++ b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/LineBreakAccessor.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -46,7 +46,7 @@ public class LineBreakAccessor extends HdfsSplittableDataAccessor implements
     private Configuration conf;
     private FileSystem fs;
     private Path file;
-    private static Log Log = LogFactory.getLog(LineBreakAccessor.class);
+    private static final Log LOG = LogFactory.getLog(LineBreakAccessor.class);
 
     /**
      * Constructs a LineReaderAccessor.
@@ -93,7 +93,7 @@ public class LineBreakAccessor extends HdfsSplittableDataAccessor implements
         org.apache.hadoop.fs.Path parent = file.getParent();
         if (!fs.exists(parent)) {
             fs.mkdirs(parent);
-            Log.debug("Created new dir " + parent.toString());
+            LOG.debug("Created new dir " + parent.toString());
         }
 
         // create output stream - do not allow overwriting existing file
@@ -132,7 +132,7 @@ public class LineBreakAccessor extends HdfsSplittableDataAccessor implements
     @Override
     public void closeForWrite() throws Exception {
         if ((dos != null) && (fsdos != null)) {
-            Log.debug("Closing writing stream for path " + file);
+            LOG.debug("Closing writing stream for path " + file);
             dos.flush();
             /*
              * From release 0.21.0 sync() is deprecated in favor of hflush(),

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/SequenceFileAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/SequenceFileAccessor.java b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/SequenceFileAccessor.java
index aef063b..a395d09 100644
--- a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/SequenceFileAccessor.java
+++ b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/SequenceFileAccessor.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -56,7 +56,7 @@ public class SequenceFileAccessor extends HdfsSplittableDataAccessor implements
     private SequenceFile.Writer writer;
     private LongWritable defaultKey; // used when recordkey is not defined
 
-    private static Log Log = LogFactory.getLog(SequenceFileAccessor.class);;
+    private static final Log LOG = LogFactory.getLog(SequenceFileAccessor.class);
 
     /**
      * Constructs a SequenceFileAccessor.
@@ -99,7 +99,7 @@ public class SequenceFileAccessor extends HdfsSplittableDataAccessor implements
         parent = file.getParent();
         if (!fs.exists(parent)) {
             fs.mkdirs(parent);
-            Log.debug("Created new dir " + parent);
+            LOG.debug("Created new dir " + parent);
         }
 
         writer = null;
@@ -136,7 +136,7 @@ public class SequenceFileAccessor extends HdfsSplittableDataAccessor implements
                         "Compression type must be defined");
             }
 
-            Log.debug("Compression ON: " + "compression codec: "
+            LOG.debug("Compression ON: " + "compression codec: "
                     + userCompressCodec + ", compression type: "
                     + compressionType);
         }
@@ -178,7 +178,7 @@ public class SequenceFileAccessor extends HdfsSplittableDataAccessor implements
         if (codec != null) {
             fileName += codec.getDefaultExtension();
         }
-        Log.debug("File name for write: " + fileName);
+        LOG.debug("File name for write: " + fileName);
         return fileName;
     }
 
@@ -202,7 +202,7 @@ public class SequenceFileAccessor extends HdfsSplittableDataAccessor implements
         try {
             writer.append((key == null) ? defaultKey : key, value);
         } catch (IOException e) {
-            Log.error("Failed to write data to file: " + e.getMessage());
+            LOG.error("Failed to write data to file: " + e.getMessage());
             return false;
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/HdfsUtilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/HdfsUtilities.java b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/HdfsUtilities.java
index 68f0a94..aa8c4b4 100644
--- a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/HdfsUtilities.java
+++ b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/HdfsUtilities.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -49,7 +49,7 @@ import java.util.List;
  * HdfsUtilities class exposes helper methods for PXF classes.
  */
 public class HdfsUtilities {
-    private static Log Log = LogFactory.getLog(HdfsUtilities.class);
+    private static final Log LOG = LogFactory.getLog(HdfsUtilities.class);
     private static Configuration config = new Configuration();
     private static CompressionCodecFactory factory = new CompressionCodecFactory(
             config);
@@ -107,7 +107,7 @@ public class HdfsUtilities {
         if (codec != null) {
             codecClass = codec.getClass();
         }
-        Log.debug((codecClass == null ? "No codec" : "Codec " + codecClass)
+        LOG.debug((codecClass == null ? "No codec" : "Codec " + codecClass)
                 + " was found for file " + path);
         return codecClass;
     }
@@ -191,7 +191,7 @@ public class HdfsUtilities {
             FileSplit fileSplit = new FileSplit(new Path(
                     inputData.getDataSource()), start, end, hosts);
 
-            Log.debug("parsed file split: path " + inputData.getDataSource()
+            LOG.debug("parsed file split: path " + inputData.getDataSource()
                     + ", start " + start + ", end " + end + ", hosts "
                     + ArrayUtils.toString(hosts));
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapter.java b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapter.java
index 1016f72..2c189a2 100644
--- a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapter.java
+++ b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapter.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.plugins.hdfs.utilities;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.OneField;
 import org.apache.hawq.pxf.api.OneRow;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
@@ -31,16 +30,21 @@ import org.apache.hadoop.io.*;
 import java.util.List;
 
 /**
- * Adapter used for adding a recordkey field to the records output {@code List<OneField>}.
+ * Adapter used for adding a recordkey field to the records output
+ * {@code List<OneField>}.
  */
 public class RecordkeyAdapter {
-    private Log Log;
+
+    private static final Log LOG = LogFactory.getLog(RecordkeyAdapter.class);
+
+    // private Log LOG = LogFactory.getLog(RecordkeyAdapter.class);
 
     /*
-     * We need to transform Record keys to java primitive types.
-     * Since the type of the key is the same throughout the file we do the type resolution
-     * in the first call (for the first record) and then use a "Java variation on Function pointer"
-     * to do the extraction for the rest of the records.
+     * We need to transform Record keys to java primitive types. Since the type
+     * of the key is the same throughout the file we do the type resolution in
+     * the first call (for the first record) and then use a
+     * "Java variation on Function pointer" to do the extraction for the rest of
+     * the records.
      */
     private interface ValExtractor {
         public Object get(Object key);
@@ -58,58 +62,59 @@ public class RecordkeyAdapter {
      * Constructs a RecordkeyAdapter.
      */
     public RecordkeyAdapter() {
-        Log = LogFactory.getLog(RecordkeyAdapter.class);
     }
 
     /**
-     *  Adds the recordkey to the end of the passed in recFields list.
-     *  <p>
-     *  This method also verifies cases in which record keys are not supported
-     *  by the underlying source type, and therefore "illegally" requested.
+     * Adds the recordkey to the end of the passed in recFields list.
+     * <p>
+     * This method also verifies cases in which record keys are not supported by
+     * the underlying source type, and therefore "illegally" requested.
      *
-     * @param recFields existing list of record (non-key) fields and their values.
+     * @param recFields existing list of record (non-key) fields and their
+     *            values.
      * @param input all input parameters coming from the client request
-     * @param onerow a row object which is used here in order to find out if
-     *        the given type supports recordkeys or not.
+     * @param onerow a row object which is used here in order to find out if the
+     *            given type supports recordkeys or not.
      * @return 0 if record key not needed, or 1 if record key was appended
      * @throws NoSuchFieldException when the given record type does not support
-     *         recordkeys
+     *             recordkeys
      */
-    public int appendRecordkeyField(List<OneField> recFields,
-                                    InputData input,
+    public int appendRecordkeyField(List<OneField> recFields, InputData input,
                                     OneRow onerow) throws NoSuchFieldException {
 
-		/*
-		 * user did not request the recordkey field in the
-		 * "create external table" statement
-		 */
+        /*
+         * user did not request the recordkey field in the
+         * "create external table" statement
+         */
         ColumnDescriptor recordkeyColumn = input.getRecordkeyColumn();
         if (recordkeyColumn == null) {
             return 0;
         }
 
-		/*
-		 * The recordkey was filled in the fileAccessor during execution of
-		 * method readNextObject. The current accessor implementations are
-		 * SequenceFileAccessor, LineBreakAccessor and AvroFileAccessor from
-		 * HdfsSplittableDataAccessor and QuotedLineBreakAccessor from
-		 * HdfsAtomicDataAccessor. For SequenceFileAccessor, LineBreakAccessor
-		 * the recordkey is set, since it is returned by the
-		 * SequenceFileRecordReader or LineRecordReader(for text file). But Avro
-		 * files do not have keys, so the AvroRecordReader will not return a key
-		 * and in this case recordkey will be null. If the user specified a
-		 * recordkey attribute in the CREATE EXTERNAL TABLE statement and he
-		 * reads from an AvroFile, we will throw an exception since the Avro
-		 * file does not have keys In the future, additional implementations of
-		 * FileAccessors will have to set recordkey during readNextObject().
-		 * Otherwise it is null by default and we will throw an exception here,
-		 * that is if we get here... a careful user will not specify recordkey
-		 * in the CREATE EXTERNAL statement and then we will leave this function
-		 * one line above.
-		 */
+        /*
+         * The recordkey was filled in the fileAccessor during execution of
+         * method readNextObject. The current accessor implementations are
+         * SequenceFileAccessor, LineBreakAccessor and AvroFileAccessor from
+         * HdfsSplittableDataAccessor and QuotedLineBreakAccessor from
+         * HdfsAtomicDataAccessor. For SequenceFileAccessor, LineBreakAccessor
+         * the recordkey is set, since it is returned by the
+         * SequenceFileRecordReader or LineRecordReader(for text file). But Avro
+         * files do not have keys, so the AvroRecordReader will not return a key
+         * and in this case recordkey will be null. If the user specified a
+         * recordkey attribute in the CREATE EXTERNAL TABLE statement and he
+         * reads from an AvroFile, we will throw an exception since the Avro
+     * file does not have keys. In the future, additional implementations of
+         * FileAccessors will have to set recordkey during readNextObject().
+         * Otherwise it is null by default and we will throw an exception here,
+         * that is if we get here... a careful user will not specify recordkey
+         * in the CREATE EXTERNAL statement and then we will leave this function
+         * one line above.
+         */
         Object recordkey = onerow.getKey();
         if (recordkey == null) {
-            throw new NoSuchFieldException("Value for field \"recordkey\" was requested but the queried HDFS resource type does not support key");
+            throw new NoSuchFieldException(
+                    "Value for field \"recordkey\" was requested but the "
+                            + "queried HDFS resource type does not support key");
         }
 
         OneField oneField = new OneField();
@@ -120,11 +125,11 @@ public class RecordkeyAdapter {
     }
 
     /*
-	 * Extracts a java primitive type value from the recordkey. If the key is a
-	 * Writable implementation we extract the value as a Java primitive. If the
-	 * key is already a Java primitive we returned it as is If it is an unknown
-	 * type we throw an exception
-	 */
+     * Extracts a java primitive type value from the recordkey. If the key is a
+     * Writable implementation we extract the value as a Java primitive. If the
+     * key is already a Java primitive we return it as is. If it is an unknown
+     * type we throw an exception
+     */
     private Object extractVal(Object key) {
         if (extractor == null) {
             extractor = InitializeExtractor(key);
@@ -197,17 +202,19 @@ public class RecordkeyAdapter {
             return new ValExtractor() {
                 @Override
                 public Object get(Object key) {
-                    throw new UnsupportedOperationException("Unsupported recordkey data type " + key.getClass().getName());
+                    throw new UnsupportedOperationException(
+                            "Unsupported recordkey data type "
+                                    + key.getClass().getName());
                 }
             };
         }
     }
 
     /**
-     * Converts given key object to its matching Writable.
-     * Supported types: Integer, Byte, Boolean, Double, Float, Long, String.
-     * The type is only checked once based on the key, all consequent calls
-     * must be of the same type.
+     * Converts given key object to its matching Writable. Supported types:
+     * Integer, Byte, Boolean, Double, Float, Long, String. The type is only
+     * checked once based on the key, all consequent calls must be of the same
+     * type.
      *
      * @param key object to convert
      * @return Writable object matching given key
@@ -215,8 +222,8 @@ public class RecordkeyAdapter {
     public Writable convertKeyValue(Object key) {
         if (converter == null) {
             converter = initializeConverter(key);
-            Log.debug("converter initialized for type " + key.getClass() +
-                    " (key value: " + key + ")");
+            LOG.debug("converter initialized for type " + key.getClass()
+                    + " (key value: " + key + ")");
         }
 
         return converter.get(key);
@@ -277,7 +284,9 @@ public class RecordkeyAdapter {
             return new ValConverter() {
                 @Override
                 public Writable get(Object key) {
-                    throw new UnsupportedOperationException("Unsupported recordkey data type " + key.getClass().getName());
+                    throw new UnsupportedOperationException(
+                            "Unsupported recordkey data type "
+                                    + key.getClass().getName());
                 }
             };
         }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/StringPassResolverTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/StringPassResolverTest.java b/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/StringPassResolverTest.java
index 826920f..d03cec8 100644
--- a/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/StringPassResolverTest.java
+++ b/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/StringPassResolverTest.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,48 +19,59 @@ package org.apache.hawq.pxf.plugins.hdfs;
  * under the License.
  */
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
 
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.logging.LogFactory;
 import org.apache.hawq.pxf.api.OneField;
 import org.apache.hawq.pxf.api.OneRow;
 import org.apache.hawq.pxf.api.OutputFormat;
 import org.apache.hawq.pxf.service.BridgeInputBuilder;
 import org.apache.hawq.pxf.service.io.Text;
 import org.apache.hawq.pxf.service.utilities.ProtocolData;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.powermock.api.mockito.PowerMockito;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.util.Arrays;
-import java.util.List;
-
-import static org.junit.Assert.*;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.*;
-
 @RunWith(PowerMockRunner.class)
-@PrepareForTest({Text.class, BridgeInputBuilder.class, ProtocolData.class, LogFactory.class})
+@PrepareForTest({
+        Text.class,
+        BridgeInputBuilder.class,
+        ProtocolData.class,
+        LogFactory.class })
 public class StringPassResolverTest {
     ProtocolData mockProtocolData;
-    Log mockLog;
-    
+
     @Test
     /*
      * Test the setFields method: small \n terminated input
-	 */
+     */
     public void testSetFields() throws Exception {
         StringPassResolver resolver = buildResolver();
 
-        byte[] data = new byte[]{(int) 'a', (int) 'b', (int) 'c', (int) 'd', (int) '\n',
-                (int) 'n', (int) 'o', (int) '\n'};
-
-        DataInputStream inputStream = new DataInputStream(new ByteArrayInputStream(data));
-        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(mockProtocolData);
+        byte[] data = new byte[] {
+                (int) 'a',
+                (int) 'b',
+                (int) 'c',
+                (int) 'd',
+                (int) '\n',
+                (int) 'n',
+                (int) 'o',
+                (int) '\n' };
+
+        DataInputStream inputStream = new DataInputStream(
+                new ByteArrayInputStream(data));
+        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(
+                mockProtocolData);
         List<OneField> record = inputBuilder.makeInput(inputStream);
 
         OneRow oneRow = resolver.setFields(record);
@@ -74,7 +85,7 @@ public class StringPassResolverTest {
     @Test
     /*
      * Test the setFields method: input > buffer size, \n terminated
-	 */
+     */
     public void testSetFieldsBigArray() throws Exception {
 
         StringPassResolver resolver = buildResolver();
@@ -85,8 +96,10 @@ public class StringPassResolverTest {
         }
         bigArray[1999] = (byte) '\n';
 
-        DataInputStream inputStream = new DataInputStream(new ByteArrayInputStream(bigArray));
-        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(mockProtocolData);
+        DataInputStream inputStream = new DataInputStream(
+                new ByteArrayInputStream(bigArray));
+        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(
+                mockProtocolData);
         List<OneField> record = inputBuilder.makeInput(inputStream);
 
         OneRow oneRow = resolver.setFields(record);
@@ -97,60 +110,56 @@ public class StringPassResolverTest {
     @Test
     /*
      * Test the setFields method: input > buffer size, no \n
-	 */
+     */
     public void testSetFieldsBigArrayNoNewLine() throws Exception {
 
-    	PowerMockito.mockStatic(LogFactory.class);
-        mockLog = mock(Log.class);
-        PowerMockito.when(LogFactory.getLog(any(Class.class))).thenReturn(mockLog);
-
-    	StringPassResolver resolver = buildResolver();
+        StringPassResolver resolver = buildResolver();
 
         byte[] bigArray = new byte[2000];
         for (int i = 0; i < 2000; ++i) {
             bigArray[i] = (byte) (i % 10 + 60);
         }
 
-        DataInputStream inputStream = new DataInputStream(new ByteArrayInputStream(bigArray));
-        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(mockProtocolData);
+        DataInputStream inputStream = new DataInputStream(
+                new ByteArrayInputStream(bigArray));
+        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(
+                mockProtocolData);
         List<OneField> record = inputBuilder.makeInput(inputStream);
 
         OneRow oneRow = resolver.setFields(record);
 
         verifyOneRow(oneRow, bigArray);
-
-        //verify(mockLog, atLeastOnce()).info(anyString());
-        //Mockito.verify(mockLog).warn("Stream ended without line breaksdfljsldkj");
-        //verifyWarning();
     }
 
     @Test
     /*
-	 * Test the setFields method: empty stream (returns -1)
-	 */
+     * Test the setFields method: empty stream (returns -1)
+     */
     public void testSetFieldsEmptyStream() throws Exception {
 
         StringPassResolver resolver = buildResolver();
 
         byte[] empty = new byte[0];
 
-        DataInputStream inputStream = new DataInputStream(new ByteArrayInputStream(empty));
-        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(mockProtocolData);
+        DataInputStream inputStream = new DataInputStream(
+                new ByteArrayInputStream(empty));
+        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(
+                mockProtocolData);
         List<OneField> record = inputBuilder.makeInput(inputStream);
 
         OneRow oneRow = resolver.setFields(record);
 
         assertNull(oneRow);
     }
-	
-	/*
-	 * helpers functions
-	 */
-    private StringPassResolver buildResolver()
-            throws Exception {
- 
+
+    /*
+     * helper functions
+     */
+    private StringPassResolver buildResolver() throws Exception {
+
         mockProtocolData = mock(ProtocolData.class);
-        PowerMockito.when(mockProtocolData.outputFormat()).thenReturn(OutputFormat.TEXT);
+        PowerMockito.when(mockProtocolData.outputFormat()).thenReturn(
+                OutputFormat.TEXT);
 
         return new StringPassResolver(mockProtocolData);
     }
@@ -162,8 +171,4 @@ public class StringPassResolverTest {
         assertEquals(result.length, expected.length);
         assertTrue(Arrays.equals(result, expected));
     }
-
-//    private void verifyWarning() {
-//        Mockito.verify(Log).warn("Stream ended without line break");
-//    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapterTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapterTest.java b/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapterTest.java
index b0e220c..304f14f 100644
--- a/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapterTest.java
+++ b/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapterTest.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -26,19 +26,20 @@ import org.apache.hadoop.io.*;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.Mockito;
-import org.powermock.api.mockito.PowerMockito;
 import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
 import org.powermock.modules.junit4.PowerMockRunner;
+import org.powermock.reflect.Whitebox;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 @RunWith(PowerMockRunner.class)
+@SuppressStaticInitializationFor("RecordkeyAdapter")
 @PrepareForTest({RecordkeyAdapter.class, LogFactory.class})
 public class RecordkeyAdapterTest {
-    Log Log;
+    Log LOG;
     RecordkeyAdapter recordkeyAdapter;
 
     /**
@@ -159,16 +160,15 @@ public class RecordkeyAdapterTest {
     }
 
     private void mockLog() {
-        PowerMockito.mockStatic(LogFactory.class);
-        Log = mock(Log.class);
-        when(LogFactory.getLog(RecordkeyAdapter.class)).thenReturn(Log);
+        LOG = mock(Log.class);
+        Whitebox.setInternalState(RecordkeyAdapter.class, LOG);
     }
 
     private void verifyLog(String msg) {
-        Mockito.verify(Log).debug(msg);
+        Mockito.verify(LOG).debug(msg);
     }
 
     private void verifyLogOnlyOnce() {
-        Mockito.verify(Log, Mockito.times(1)).debug(Mockito.any());
+        Mockito.verify(LOG, Mockito.times(1)).debug(Mockito.any());
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
index c1b9c6b..ab40b3c 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hive;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.plugins.hive;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.FilterParser;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
index 79260bd..af1a666 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hive;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.plugins.hive;
  * under the License.
  */
 
-
 import java.io.ByteArrayOutputStream;
 import java.util.List;
 import java.util.ListIterator;
@@ -55,8 +54,7 @@ import org.apache.hawq.pxf.plugins.hdfs.utilities.HdfsUtilities;
 import org.apache.hawq.pxf.plugins.hive.utilities.HiveUtilities;
 
 /**
- * Fragmenter class for HIVE tables.
- * <br>
+ * Fragmenter class for HIVE tables. <br>
  * Given a Hive table and its partitions divide the data into fragments (here a
  * data fragment is actually a HDFS file block) and return a list of them. Each
  * data fragment will contain the following information:
@@ -469,6 +467,7 @@ public class HiveDataFragmenter extends Fragmenter {
      */
     @Override
     public FragmentsStats getFragmentsStats() throws Exception {
-        throw new UnsupportedOperationException("ANALYZE for Hive plugin is not supported");
+        throw new UnsupportedOperationException(
+                "ANALYZE for Hive plugin is not supported");
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
index 103ae4d..59245d0 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hive;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.plugins.hive;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.*;
 import org.apache.hawq.pxf.api.io.DataType;
 import org.apache.hawq.pxf.api.utilities.InputData;
@@ -144,7 +143,10 @@ public class HiveResolver extends Plugin implements ReadResolver {
                 : input.getUserProperty("MAPKEY_DELIM");
     }
 
-    /* Gets and init the deserializer for the records of this Hive data fragment. */
+    /*
+     * Gets and init the deserializer for the records of this Hive data
+     * fragment.
+     */
     void initSerde(InputData inputData) throws Exception {
         Properties serdeProperties;
 
@@ -588,10 +590,9 @@ public class HiveResolver extends Plugin implements ReadResolver {
 
         String userDelim = input.getUserProperty("DELIMITER");
 
-	if (userDelim == null) {
-            throw new IllegalArgumentException(
-                    "DELIMITER is a required option" ) ;
-	}
+        if (userDelim == null) {
+            throw new IllegalArgumentException("DELIMITER is a required option");
+        }
 
         final int VALID_LENGTH = 1;
         final int VALID_LENGTH_HEX = 4;

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
index 1ebb66d..c59fbea 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.BadRecordException;
 import org.apache.hawq.pxf.api.OneField;
 import org.apache.hawq.pxf.api.OutputFormat;
@@ -281,13 +280,13 @@ public class BridgeOutputBuilder {
     /**
      * Breaks raw bytes into lines. Used only for sampling.
      *
-     * When sampling a data source, we have to make sure that
-     * we deal with actual rows (lines) and not bigger chunks of
-     * data such as used by LineBreakAccessor for performance.
-     * The input byte array is broken into lines, each one stored in
-     * the outputList. In case the read data doesn't end with a line delimiter,
-     * which can happen when reading chunks of bytes, the partial line is
-     * stored separately, and is being completed when reading the next chunk of data.
+     * When sampling a data source, we have to make sure that we deal with
+     * actual rows (lines) and not bigger chunks of data such as used by
+     * LineBreakAccessor for performance. The input byte array is broken into
+     * lines, each one stored in the outputList. In case the read data doesn't
+     * end with a line delimiter, which can happen when reading chunks of bytes,
+     * the partial line is stored separately, and is being completed when
+     * reading the next chunk of data.
      *
      * @param val input raw data to break into lines
      */

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponse.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponse.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponse.java
index 515ee61..d6efcae 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponse.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponse.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service;
  * under the License.
  */
 
-
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -35,14 +34,14 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.apache.hawq.pxf.api.Fragment;
 
 /**
- * Class for serializing fragments metadata in JSON format.
- * The class implements {@link StreamingOutput} so the serialization will be
- * done in a stream and not in one bulk, this in order to avoid running
- * out of memory when processing a lot of fragments.
+ * Class for serializing fragments metadata in JSON format. The class implements
+ * {@link StreamingOutput} so the serialization will be done in a stream and not
+ * in one bulk, this in order to avoid running out of memory when processing a
+ * lot of fragments.
  */
 public class FragmentsResponse implements StreamingOutput {
 
-    private static Log Log = LogFactory.getLog(FragmentsResponse.class);
+    private static final Log Log = LogFactory.getLog(FragmentsResponse.class);
 
     private List<Fragment> fragments;
 
@@ -56,10 +55,17 @@ public class FragmentsResponse implements StreamingOutput {
     }
 
     /**
-     * Serializes a fragments list in JSON,
-     * To be used as the result string for HAWQ.
-     * An example result is as follows:
-     * &lt;code&gt;{"PXFFragments":[{"replicas":["sdw1.corp.emc.com","sdw3.corp.emc.com","sdw8.corp.emc.com"],"sourceName":"text2.csv", "index":"0", "metadata":"&lt;base64 metadata for fragment&gt;", "userData":"&lt;data_specific_to_third_party_fragmenter&gt;"},{"replicas":["sdw2.corp.emc.com","sdw4.corp.emc.com","sdw5.corp.emc.com"],"sourceName":"text_data.csv","index":"0","metadata":"&lt;base64 metadata for fragment&gt;","userData":"&lt;data_specific_to_third_party_fragmenter&gt;"}]}&lt;/code&gt;
+     * Serializes a fragments list in JSON, To be used as the result string for
+     * HAWQ. An example result is as follows:
+     * <code>{"PXFFragments":[{"replicas":
+     * ["sdw1.corp.emc.com","sdw3.corp.emc.com","sdw8.corp.emc.com"],
+     * "sourceName":"text2.csv", "index":"0","metadata":"&lt;base64 metadata for fragment&gt;",
+     * "userData":"&lt;data_specific_to_third_party_fragmenter&gt;"
+     * },{"replicas":["sdw2.corp.emc.com","sdw4.corp.emc.com","sdw5.corp.emc.com"
+     * ],"sourceName":"text_data.csv","index":"0","metadata":
+     * "&lt;base64 metadata for fragment&gt;"
+     * ,"userData":"&lt;data_specific_to_third_party_fragmenter&gt;"
+     * }]}</code>
      */
     @Override
     public void write(OutputStream output) throws IOException,

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponseFormatter.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponseFormatter.java
index 1148078..14e87f9 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponseFormatter.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.Fragment;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -30,23 +29,25 @@ import java.util.HashMap;
 import java.util.List;
 
 /**
- * Utility class for converting Fragments into a {@link FragmentsResponse}
- * that will serialize them into JSON format.
+ * Utility class for converting Fragments into a {@link FragmentsResponse} that
+ * will serialize them into JSON format.
  */
 public class FragmentsResponseFormatter {
 
-    private static Log LOG = LogFactory.getLog(FragmentsResponseFormatter.class);
+    private static final Log LOG = LogFactory.getLog(FragmentsResponseFormatter.class);
 
     /**
-     * Converts Fragments list to FragmentsResponse
-     * after replacing host name by their respective IPs.
+     * Converts Fragments list to FragmentsResponse after replacing host name by
+     * their respective IPs.
      *
      * @param fragments list of fragments
      * @param data data (e.g. path) related to the fragments
      * @return FragmentsResponse with given fragments
      * @throws UnknownHostException if converting host names to IP fails
      */
-    public static FragmentsResponse formatResponse(List<Fragment> fragments, String data) throws UnknownHostException   {
+    public static FragmentsResponse formatResponse(List<Fragment> fragments,
+                                                   String data)
+            throws UnknownHostException {
         /* print the raw fragment list to log when in debug level */
         if (LOG.isDebugEnabled()) {
             LOG.debug("Fragments before conversion to IP list:");
@@ -58,7 +59,7 @@ public class FragmentsResponseFormatter {
 
         updateFragmentIndex(fragments);
 
-	/* print the fragment list to log when in debug level */
+        /* print the fragment list to log when in debug level */
         if (LOG.isDebugEnabled()) {
             FragmentsResponseFormatter.printList(fragments, data);
         }
@@ -92,7 +93,8 @@ public class FragmentsResponseFormatter {
      *
      * @throws UnknownHostException if converting host name to IP fails
      */
-    private static void convertHostsToIPs(List<Fragment> fragments) throws UnknownHostException {
+    private static void convertHostsToIPs(List<Fragment> fragments)
+            throws UnknownHostException {
         /* host converted to IP map. Used to limit network calls. */
         HashMap<String, String> hostToIpMap = new HashMap<String, String>();
 
@@ -123,29 +125,30 @@ public class FragmentsResponseFormatter {
 
     /*
      * Converts a fragments list to a readable string and prints it to the log.
-     * Intended for debugging purposes only.
-     * 'datapath' is the data path part of the original URI (e.g., table name, *.csv, etc).
-	 */
+     * Intended for debugging purposes only. 'datapath' is the data path part of
+     * the original URI (e.g., table name, *.csv, etc).
+     */
     private static void printList(List<Fragment> fragments, String datapath) {
-        LOG.debug("List of " +
-                (fragments.isEmpty() ? "no" : fragments.size()) + "fragments for \"" +
-                 datapath + "\"");
+        LOG.debug("List of " + (fragments.isEmpty() ? "no" : fragments.size())
+                + "fragments for \"" + datapath + "\"");
 
         int i = 0;
         for (Fragment fragment : fragments) {
             StringBuilder result = new StringBuilder();
-            result.append("Fragment #").append(++i).append(": [")
-                .append("Source: ").append(fragment.getSourceName())
-                .append(", Index: ").append(fragment.getIndex())
-                .append(", Replicas:");
+            result.append("Fragment #").append(++i).append(": [").append(
+                    "Source: ").append(fragment.getSourceName()).append(
+                    ", Index: ").append(fragment.getIndex()).append(
+                    ", Replicas:");
             for (String host : fragment.getReplicas()) {
                 result.append(" ").append(host);
             }
 
-            result.append(", Metadata: ").append(new String(fragment.getMetadata()));
+            result.append(", Metadata: ").append(
+                    new String(fragment.getMetadata()));
 
             if (fragment.getUserData() != null) {
-                result.append(", User Data: ").append(new String(fragment.getUserData()));
+                result.append(", User Data: ").append(
+                        new String(fragment.getUserData()));
             }
             result.append("] ");
             LOG.debug(result);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
index 4f710ed..eb83627 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -34,7 +34,7 @@ import org.apache.hawq.pxf.api.Metadata;
  */
 public class MetadataResponseFormatter {
 
-    private static Log Log = LogFactory.getLog(MetadataResponseFormatter.class);
+    private static final Log LOG = LogFactory.getLog(MetadataResponseFormatter.class);
 
     /**
      * Converts {@link Metadata} to JSON String format.
@@ -45,7 +45,7 @@ public class MetadataResponseFormatter {
      */
     public static String formatResponseString(Metadata metadata) throws IOException {
         /* print the metadata before serialization */
-        Log.debug(MetadataResponseFormatter.metadataToString(metadata));
+        LOG.debug(MetadataResponseFormatter.metadataToString(metadata));
 
         return MetadataResponseFormatter.metadataToJSON(metadata);
     }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadBridge.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadBridge.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadBridge.java
index ed764d6..0f3c968 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadBridge.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadBridge.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.BadRecordException;
 import org.apache.hawq.pxf.api.OneRow;
 import org.apache.hawq.pxf.api.ReadAccessor;
@@ -51,7 +50,7 @@ public class ReadBridge implements Bridge {
     BridgeOutputBuilder outputBuilder = null;
     LinkedList<Writable> outputQueue = null;
 
-    private static final Log Log = LogFactory.getLog(ReadBridge.class);
+    private static final Log LOG = LogFactory.getLog(ReadBridge.class);
 
     /**
      * C'tor - set the implementation of the bridge.
@@ -94,7 +93,7 @@ public class ReadBridge implements Bridge {
                     fileAccessor.closeForRead();
                     output = outputBuilder.getPartialLine();
                     if (output != null) {
-                        Log.warn("A partial record in the end of the fragment");
+                        LOG.warn("A partial record in the end of the fragment");
                     }
                     // if there is a partial line, return it now, otherwise it
                     // will return null
@@ -121,10 +120,10 @@ public class ReadBridge implements Bridge {
                 row_info = onerow.toString();
             }
             if (ex.getCause() != null) {
-                Log.debug("BadRecordException " + ex.getCause().toString()
+                LOG.debug("BadRecordException " + ex.getCause().toString()
                         + ": " + row_info);
             } else {
-                Log.debug(ex.toString() + ": " + row_info);
+                LOG.debug(ex.toString() + ": " + row_info);
             }
             output = outputBuilder.getErrorOutput(ex);
         } catch (Exception ex) {
@@ -174,7 +173,7 @@ public class ReadBridge implements Bridge {
     public boolean isThreadSafe() {
         boolean result = ((Plugin) fileAccessor).isThreadSafe()
                 && ((Plugin) fieldsResolver).isThreadSafe();
-        Log.debug("Bridge is " + (result ? "" : "not ") + "thread safe");
+        LOG.debug("Bridge is " + (result ? "" : "not ") + "thread safe");
         return result;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadSamplingBridge.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadSamplingBridge.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadSamplingBridge.java
index e7bfa4e..d5ae66a 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadSamplingBridge.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadSamplingBridge.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service;
  * under the License.
  */
 
-
 import java.io.DataInputStream;
 import java.util.BitSet;
 
@@ -52,7 +51,7 @@ public class ReadSamplingBridge implements Bridge {
     int sampleSize;
     int curIndex;
 
-    static private Log Log = LogFactory.getLog(ReadSamplingBridge.class);;
+    private static final Log LOG = LogFactory.getLog(ReadSamplingBridge.class);
 
     /**
      * C'tor - set the implementation of the bridge.
@@ -86,7 +85,7 @@ public class ReadSamplingBridge implements Bridge {
             bitSetSize /= 10;
             sampleSize /= 10;
         }
-        Log.debug("bit set size = " + bitSetSize + " sample size = "
+        LOG.debug("bit set size = " + bitSetSize + " sample size = "
                 + sampleSize);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
index da541b2..5bc26f1 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.io;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -46,7 +46,7 @@ public class GPDBWritable implements Writable {
      * For var length type, col val is <4 byte length><payload val>
 	 */
 
-    private static Log Log = LogFactory.getLog(GPDBWritable.class);
+    private static final Log LOG = LogFactory.getLog(GPDBWritable.class);
     private static final int EOF = -1;
 
     /*
@@ -156,11 +156,11 @@ public class GPDBWritable implements Writable {
         try {
             pktlen = in.readInt();
         } catch (EOFException e) {
-            Log.debug("Reached end of stream (EOFException)");
+            LOG.debug("Reached end of stream (EOFException)");
             return EOF;
         }
         if (pktlen == EOF) {
-            Log.debug("Reached end of stream (returned -1)");
+            LOG.debug("Reached end of stream (returned -1)");
         }
 
         return pktlen;



Mime
View raw message