asterixdb-commits mailing list archives

From: buyin...@apache.org
Subject: [3/4] incubator-asterixdb git commit: Clean up compilation warnings.
Date: Sat, 21 Nov 2015 21:21:24 GMT
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/HDFSAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/HDFSAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/HDFSAdapter.java
index 96e8393..df0926b 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/HDFSAdapter.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/HDFSAdapter.java
@@ -23,12 +23,6 @@ import java.io.InputStream;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.SequenceFileInputFormat;
-import org.apache.hadoop.mapred.TextInputFormat;
-
-import org.apache.asterix.external.adapter.factory.HDFSAdapterFactory;
 import org.apache.asterix.external.indexing.input.GenericFileAwareRecordReader;
 import org.apache.asterix.external.indexing.input.GenericRecordReader;
 import org.apache.asterix.external.indexing.input.TextualDataReader;
@@ -36,6 +30,10 @@ import org.apache.asterix.external.indexing.input.TextualFullScanDataReader;
 import org.apache.asterix.metadata.entities.ExternalFile;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.runtime.operators.file.AsterixTupleParserFactory;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;
@@ -76,10 +74,12 @@ public class HDFSAdapter extends FileSystemBasedAdapter {
      */
     @Override
     public InputStream getInputStream(int partition) throws IOException {
-        if ((conf.getInputFormat() instanceof TextInputFormat || conf.getInputFormat() instanceof SequenceFileInputFormat)
-                && (AsterixTupleParserFactory.FORMAT_ADM.equalsIgnoreCase((String) configuration
-                        .get(AsterixTupleParserFactory.KEY_FORMAT)) || AsterixTupleParserFactory.FORMAT_DELIMITED_TEXT
-                        .equalsIgnoreCase((String) configuration.get(AsterixTupleParserFactory.KEY_FORMAT)))) {
+        if ((conf.getInputFormat() instanceof TextInputFormat
+                || conf.getInputFormat() instanceof SequenceFileInputFormat)
+                && (AsterixTupleParserFactory.FORMAT_ADM
+                        .equalsIgnoreCase(configuration.get(AsterixTupleParserFactory.KEY_FORMAT))
+                        || AsterixTupleParserFactory.FORMAT_DELIMITED_TEXT
+                                .equalsIgnoreCase(configuration.get(AsterixTupleParserFactory.KEY_FORMAT)))) {
             if (files != null) {
                 return new TextualDataReader(inputSplits, readSchedule, nodeName, conf, executed, files);
             } else {

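For context, the condition re-wrapped in the HDFSAdapter hunk above gates the textual reader on two things at once: the Hadoop input format must be text-oriented, and the dataset's declared data format must be ADM or delimited text. A minimal standalone sketch of that dispatch pattern follows; the "format", "adm", and "delimited-text" literals and the helper name are illustrative stand-ins for the AsterixTupleParserFactory constants, not the project's actual API, and it assumes (as the removed casts suggest) that the configuration map is typed as Map<String, String>.

    import java.util.Map;

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.SequenceFileInputFormat;
    import org.apache.hadoop.mapred.TextInputFormat;

    class FormatDispatchSketch {
        // Hypothetical helper mirroring the check in HDFSAdapter.getInputStream():
        // a textual record reader only makes sense when both the Hadoop input format
        // and the Asterix data format are text-based. With a Map<String, String>
        // configuration, no (String) casts are needed on get().
        static boolean isTextualSource(JobConf conf, Map<String, String> configuration) {
            boolean textualInputFormat = conf.getInputFormat() instanceof TextInputFormat
                    || conf.getInputFormat() instanceof SequenceFileInputFormat;
            String format = configuration.get("format"); // stand-in for KEY_FORMAT
            return textualInputFormat
                    && ("adm".equalsIgnoreCase(format) || "delimited-text".equalsIgnoreCase(format));
        }
    }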
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/NCFileSystemAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/NCFileSystemAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/NCFileSystemAdapter.java
index 11bf2a5..64c62f7 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/NCFileSystemAdapter.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/NCFileSystemAdapter.java
@@ -25,7 +25,6 @@ import java.io.IOException;
 import java.io.InputStream;
 
 import org.apache.asterix.om.types.IAType;
-import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
@@ -61,7 +60,6 @@ public class NCFileSystemAdapter extends FileSystemBasedAdapter {
         }
     }
 
-   
     @Override
     public String getFilename(int partition) {
         final FileSplit fileSplit = fileSplits[partition];

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/PushBasedTwitterFeedClient.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/PushBasedTwitterFeedClient.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/PushBasedTwitterFeedClient.java
index 38e8a9e..bb40ac9 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/PushBasedTwitterFeedClient.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/PushBasedTwitterFeedClient.java
@@ -18,22 +18,22 @@
  */
 package org.apache.asterix.external.dataset.adapter;
 
+import java.util.concurrent.LinkedBlockingQueue;
+
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
 import org.apache.asterix.external.util.TweetProcessor;
 import org.apache.asterix.external.util.TwitterUtil;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+
 import twitter4j.FilterQuery;
-import twitter4j.Query;
 import twitter4j.StallWarning;
 import twitter4j.Status;
 import twitter4j.StatusDeletionNotice;
 import twitter4j.StatusListener;
 import twitter4j.TwitterStream;
 
-import java.util.concurrent.LinkedBlockingQueue;
-
 /**
  * An implementation of @see {PullBasedFeedClient} for the Twitter service. The
  * feed client fetches data from Twitter service by sending request at regular
@@ -41,14 +41,12 @@ import java.util.concurrent.LinkedBlockingQueue;
  */
 public class PushBasedTwitterFeedClient extends FeedClient {
 
-    private String keywords;
-    private Query query;
-
     private ARecordType recordType;
     private TweetProcessor tweetProcessor;
     private LinkedBlockingQueue<Status> inputQ;
 
-    public PushBasedTwitterFeedClient(IHyracksTaskContext ctx, ARecordType recordType, PushBasedTwitterAdapter adapter) throws AsterixException {
+    public PushBasedTwitterFeedClient(IHyracksTaskContext ctx, ARecordType recordType, PushBasedTwitterAdapter adapter)
+            throws AsterixException {
         this.recordType = recordType;
         this.tweetProcessor = new TweetProcessor(recordType);
         this.recordSerDe = new ARecordSerializerDeserializer(recordType);

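The PushBasedTwitterFeedClient above hands incoming tweets from the twitter4j listener callback to the feed runtime through a LinkedBlockingQueue<Status>. The sketch below shows that producer/consumer hand-off in isolation, with a plain String standing in for twitter4j.Status so the example stays self-contained.

    import java.util.concurrent.LinkedBlockingQueue;

    class PushQueueSketch {
        public static void main(String[] args) throws InterruptedException {
            // Stand-in for the client's inputQ field (LinkedBlockingQueue<Status>).
            final LinkedBlockingQueue<String> inputQ = new LinkedBlockingQueue<String>();

            // The listener thread plays the role of StatusListener.onStatus(...),
            // which presumably enqueues each status as it arrives.
            Thread listener = new Thread(new Runnable() {
                @Override
                public void run() {
                    inputQ.offer("status-1");
                }
            });
            listener.start();

            // The reader side blocks until a status is available.
            String next = inputQ.take();
            System.out.println(next);
            listener.join();
        }
    }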
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSIndexingParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSIndexingParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSIndexingParserFactory.java
index edc79fe..136df05 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSIndexingParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSIndexingParserFactory.java
@@ -20,16 +20,13 @@ package org.apache.asterix.external.indexing.dataflow;
 
 import java.util.Map;
 
-import org.apache.hadoop.mapred.JobConf;
-
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.adapter.factory.HDFSAdapterFactory;
 import org.apache.asterix.external.adapter.factory.HDFSIndexingAdapterFactory;
-import org.apache.asterix.external.adapter.factory.StreamBasedAdapterFactory;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.runtime.operators.file.ADMDataParser;
 import org.apache.asterix.runtime.operators.file.AsterixTupleParserFactory;
 import org.apache.asterix.runtime.operators.file.DelimitedDataParser;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.std.file.ITupleParser;
@@ -38,7 +35,6 @@ import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;
 /**
  * This is the parser factory for parsers used to do indexing
  */
-@SuppressWarnings("deprecation")
 public class HDFSIndexingParserFactory implements ITupleParserFactory {
 
     private static final long serialVersionUID = 1L;
@@ -59,8 +55,8 @@ public class HDFSIndexingParserFactory implements ITupleParserFactory {
     // adapter arguments
     private Map<String, String> arguments;
 
-    public HDFSIndexingParserFactory(ARecordType atype, String inputFormat, String format, char delimiter,
-                                     char quote, String parserClassName) {
+    public HDFSIndexingParserFactory(ARecordType atype, String inputFormat, String format, char delimiter, char quote,
+            String parserClassName) {
         this.inputFormat = inputFormat;
         this.format = format;
         this.parserClassName = parserClassName;
@@ -95,8 +91,7 @@ public class HDFSIndexingParserFactory implements ITupleParserFactory {
             return new AdmOrDelimitedIndexingTupleParser(ctx, atype, dataParser);
         } else if (format.equalsIgnoreCase(AsterixTupleParserFactory.FORMAT_DELIMITED_TEXT)) {
             // choice 3 with delimited data parser
-            DelimitedDataParser dataParser = HDFSIndexingAdapterFactory.getDelimitedDataParser(atype,
-                delimiter, quote); 
+            DelimitedDataParser dataParser = HDFSIndexingAdapterFactory.getDelimitedDataParser(atype, delimiter, quote);
             return new AdmOrDelimitedIndexingTupleParser(ctx, atype, dataParser);
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSLookupAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSLookupAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSLookupAdapter.java
index 7fc335c..c6013a9 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSLookupAdapter.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSLookupAdapter.java
@@ -23,12 +23,8 @@ import java.io.Serializable;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapred.JobConf;
-
 import org.apache.asterix.external.adapter.factory.HDFSAdapterFactory;
 import org.apache.asterix.external.adapter.factory.HDFSIndexingAdapterFactory;
-import org.apache.asterix.external.adapter.factory.StreamBasedAdapterFactory;
 import org.apache.asterix.external.indexing.input.RCFileLookupReader;
 import org.apache.asterix.external.indexing.input.SequenceFileLookupInputStream;
 import org.apache.asterix.external.indexing.input.SequenceFileLookupReader;
@@ -41,6 +37,8 @@ import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.runtime.operators.file.ADMDataParser;
 import org.apache.asterix.runtime.operators.file.AsterixTupleParserFactory;
 import org.apache.asterix.runtime.operators.file.DelimitedDataParser;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.value.INullWriterFactory;
@@ -86,7 +84,8 @@ public class HDFSLookupAdapter implements IControlledAdapter, Serializable {
         // Create the lookup reader and the controlled parser
         if (configuration.get(HDFSAdapterFactory.KEY_INPUT_FORMAT).equals(HDFSAdapterFactory.INPUT_FORMAT_RC)) {
             configureRCFile(jobConf, iNullWriterFactory);
-        } else if (configuration.get(AsterixTupleParserFactory.KEY_FORMAT).equals(AsterixTupleParserFactory.FORMAT_ADM)) {
+        } else if (configuration.get(AsterixTupleParserFactory.KEY_FORMAT)
+                .equals(AsterixTupleParserFactory.FORMAT_ADM)) {
             // create an adm parser
             ADMDataParser dataParser = new ADMDataParser();
             if (configuration.get(HDFSAdapterFactory.KEY_INPUT_FORMAT).equals(HDFSAdapterFactory.INPUT_FORMAT_TEXT)) {
@@ -100,7 +99,8 @@ public class HDFSLookupAdapter implements IControlledAdapter, Serializable {
                 parser = new AdmOrDelimitedControlledTupleParser(ctx, (ARecordType) atype, in, propagateInput,
                         inRecDesc, dataParser, propagatedFields, ridFields, retainNull, iNullWriterFactory);
             }
-        } else if (configuration.get(AsterixTupleParserFactory.KEY_FORMAT).equals(AsterixTupleParserFactory.FORMAT_DELIMITED_TEXT)) {
+        } else if (configuration.get(AsterixTupleParserFactory.KEY_FORMAT)
+                .equals(AsterixTupleParserFactory.FORMAT_DELIMITED_TEXT)) {
             // create a delimited text parser
             char delimiter = AsterixTupleParserFactory.getDelimiter(configuration);
             char quote = AsterixTupleParserFactory.getQuote(configuration, delimiter);
@@ -152,8 +152,8 @@ public class HDFSLookupAdapter implements IControlledAdapter, Serializable {
         // Do nothing
     }
 
-    private void configureRCFile(Configuration jobConf, INullWriterFactory iNullWriterFactory) throws IOException,
-            Exception {
+    private void configureRCFile(Configuration jobConf, INullWriterFactory iNullWriterFactory)
+            throws IOException, Exception {
         // RCFileLookupReader
         RCFileLookupReader reader = new RCFileLookupReader(fileIndexAccessor,
                 HDFSAdapterFactory.configureJobConf(configuration));
@@ -169,8 +169,8 @@ public class HDFSLookupAdapter implements IControlledAdapter, Serializable {
             objectParser = new HiveObjectParser();
         } else {
             try {
-                objectParser = (IAsterixHDFSRecordParser) Class.forName(
-                        configuration.get(HDFSAdapterFactory.KEY_PARSER)).newInstance();
+                objectParser = (IAsterixHDFSRecordParser) Class
+                        .forName(configuration.get(HDFSAdapterFactory.KEY_PARSER)).newInstance();
             } catch (Exception e) {
                 throw new HyracksDataException("Unable to create object parser", e);
             }

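The last hunk above keeps the reflective parser construction, Class.forName(configuration.get(KEY_PARSER)).newInstance(), and only re-wraps its line breaks; a related hunk later in this commit (ExternalFunction) tightens a raw Class to Class<?> for the same warning category. A minimal sketch of that reflective-construction pattern, with a hypothetical parser interface and class name standing in for IAsterixHDFSRecordParser and the KEY_PARSER value:

    class ReflectiveParserSketch {
        // Hypothetical stand-in for the record-parser interface.
        interface RecordParser {
        }

        static RecordParser createParser(String parserClassName) throws Exception {
            // Class<?> rather than a raw Class avoids the rawtypes warning this
            // commit is cleaning up. newInstance() (the 2015-era API used here)
            // needs a public no-argument constructor, and callers typically catch
            // Exception and rethrow, as the hunk above does with HyracksDataException.
            Class<?> clazz = Class.forName(parserClassName);
            return (RecordParser) clazz.newInstance();
        }
    }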
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/IndexingScheduler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/IndexingScheduler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/IndexingScheduler.java
index 371414c..2a51380 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/IndexingScheduler.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/IndexingScheduler.java
@@ -33,7 +33,6 @@ import java.util.logging.Logger;
 
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.mapred.InputSplit;
-
 import org.apache.hyracks.api.client.HyracksConnection;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.client.NodeControllerInfo;
@@ -41,7 +40,6 @@ import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.hdfs.scheduler.Scheduler;
 
-@SuppressWarnings("deprecation")
 public class IndexingScheduler {
     private static final Logger LOGGER = Logger.getLogger(Scheduler.class.getName());
 
@@ -59,7 +57,7 @@ public class IndexingScheduler {
 
     /**
      * The constructor of the scheduler.
-     * 
+     *
      * @param ncNameToNcInfos
      * @throws HyracksException
      */
@@ -77,7 +75,7 @@ public class IndexingScheduler {
      * Set location constraints for a file scan operator with a list of file
      * splits. It tries to assign splits to their local machines fairly
      * Locality is more important than fairness
-     * 
+     *
      * @throws HyracksDataException
      */
     public String[] getLocationConstraints(InputSplit[] splits) throws HyracksException {
@@ -125,17 +123,16 @@ public class IndexingScheduler {
              * push non-data-local upper-bounds slots to each machine
              */
             locationToNumOfAssignement.clear();
-            for(String nc: NCs){
+            for (String nc : NCs) {
                 locationToNumOfAssignement.put(nc, 0);
             }
-            for(int i=0; i< scheduled.length;i++){
-                if(scheduled[i])
-                {
-                    locationToNumOfAssignement.put(locations[i], locationToNumOfAssignement.get(locations[i])+1);
+            for (int i = 0; i < scheduled.length; i++) {
+                if (scheduled[i]) {
+                    locationToNumOfAssignement.put(locations[i], locationToNumOfAssignement.get(locations[i]) + 1);
                 }
             }
-            
-            scheduleNonLocalSlots(splits, workloads, locations, upperBoundSlots, scheduled,locationToNumOfAssignement);
+
+            scheduleNonLocalSlots(splits, workloads, locations, upperBoundSlots, scheduled, locationToNumOfAssignement);
             return locations;
         } catch (IOException e) {
             throw new HyracksException(e);
@@ -144,7 +141,7 @@ public class IndexingScheduler {
 
     /**
      * Schedule non-local slots to each machine
-     * 
+     *
      * @param splits
      *            The HDFS file splits.
      * @param workloads
@@ -155,11 +152,12 @@ public class IndexingScheduler {
      *            The maximum slots of each machine.
      * @param scheduled
      *            Indicate which slot is scheduled.
-     * @param locationToNumOfAssignement 
+     * @param locationToNumOfAssignement
      */
     private void scheduleNonLocalSlots(InputSplit[] splits, final int[] workloads, String[] locations, int slotLimit,
-            boolean[] scheduled, final HashMap<String,Integer> locationToNumOfAssignement) throws IOException, UnknownHostException {
-        
+            boolean[] scheduled, final HashMap<String, Integer> locationToNumOfAssignement)
+                    throws IOException, UnknownHostException {
+
         PriorityQueue<String> scheduleCadndiates = new PriorityQueue<String>(NCs.length, new Comparator<String>() {
             @Override
             public int compare(String s1, String s2) {
@@ -168,8 +166,7 @@ public class IndexingScheduler {
 
         });
 
-        
-        for(String nc:NCs){
+        for (String nc : NCs) {
             scheduleCadndiates.add(nc);
         }
         /**
@@ -193,7 +190,7 @@ public class IndexingScheduler {
 
     /**
      * Schedule data-local slots to each machine.
-     * 
+     *
      * @param splits
      *            The HDFS file splits.
      * @param workloads
@@ -216,8 +213,8 @@ public class IndexingScheduler {
         PriorityQueue<String> scheduleCadndiates = new PriorityQueue<String>(3, new Comparator<String>() {
             @Override
             public int compare(String s1, String s2) {
-                int assignmentDifference = locationToNumOfAssignement.get(s1).compareTo(
-                        locationToNumOfAssignement.get(s2));
+                int assignmentDifference = locationToNumOfAssignement.get(s1)
+                        .compareTo(locationToNumOfAssignement.get(s2));
                 if (assignmentDifference != 0) {
                     return assignmentDifference;
                 }
@@ -267,8 +264,8 @@ public class IndexingScheduler {
                                 locations[i] = nc;
                                 workloads[pos]++;
                                 scheduled[i] = true;
-                                locationToNumOfAssignement
-                                        .put(candidate, locationToNumOfAssignement.get(candidate) + 1);
+                                locationToNumOfAssignement.put(candidate,
+                                        locationToNumOfAssignement.get(candidate) + 1);
                                 break;
                             }
                         }
@@ -287,7 +284,7 @@ public class IndexingScheduler {
 
     /**
      * Scan the splits once and build a popularity map
-     * 
+     *
      * @param splits
      *            the split array
      * @param locationToNumOfSplits
@@ -311,7 +308,7 @@ public class IndexingScheduler {
 
     /**
      * Load the IP-address-to-NC map from the NCNameToNCInfoMap
-     * 
+     *
      * @param ncNameToNcInfos
      * @throws HyracksException
      */

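The IndexingScheduler hunks above reformat, but do not change, a scheduling loop that keeps node controllers in a PriorityQueue ordered by how many splits each has been assigned so far (locationToNumOfAssignement). The sketch below shows that least-loaded-first selection in isolation; the node names and counts are made-up example data.

    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.PriorityQueue;

    class LeastLoadedPickerSketch {
        public static void main(String[] args) {
            // Assignment counts per node controller (example data).
            final HashMap<String, Integer> assignments = new HashMap<String, Integer>();
            assignments.put("nc1", 2);
            assignments.put("nc2", 0);
            assignments.put("nc3", 1);

            // Candidates ordered by current assignment count, mirroring the
            // anonymous Comparator in scheduleNonLocalSlots().
            PriorityQueue<String> candidates = new PriorityQueue<String>(assignments.size(),
                    new Comparator<String>() {
                        @Override
                        public int compare(String s1, String s2) {
                            return assignments.get(s1).compareTo(assignments.get(s2));
                        }
                    });
            candidates.addAll(assignments.keySet());

            // Assign one more split: poll the least-loaded node, bump its count,
            // and re-insert so the queue re-orders around the new count.
            String target = candidates.poll();
            assignments.put(target, assignments.get(target) + 1);
            candidates.add(target);
            System.out.println("assigned to " + target); // prints: assigned to nc2
        }
    }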
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/AbstractHDFSLookupInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/AbstractHDFSLookupInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/AbstractHDFSLookupInputStream.java
index 7ca5b72..2bd2a95 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/AbstractHDFSLookupInputStream.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/AbstractHDFSLookupInputStream.java
@@ -22,19 +22,17 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
+import org.apache.asterix.metadata.entities.ExternalFile;
+import org.apache.asterix.metadata.external.ExternalFileIndexAccessor;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 
-import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
-import org.apache.asterix.metadata.entities.ExternalFile;
-import org.apache.asterix.metadata.external.ExternalFileIndexAccessor;
-
 /*
  * This class is used for seek and read of external data of format adm or delimited text in sequence of text input format
  */
-@SuppressWarnings("deprecation")
 public abstract class AbstractHDFSLookupInputStream extends InputStream {
 
     protected String pendingValue = null;
@@ -45,7 +43,8 @@ public abstract class AbstractHDFSLookupInputStream extends InputStream {
     protected ExternalFile file = new ExternalFile(null, null, 0, null, null, 0, ExternalFilePendingOp.PENDING_NO_OP);
     protected ExternalFileIndexAccessor filesIndexAccessor;
 
-    public AbstractHDFSLookupInputStream(ExternalFileIndexAccessor filesIndexAccessor, JobConf conf) throws IOException {
+    public AbstractHDFSLookupInputStream(ExternalFileIndexAccessor filesIndexAccessor, JobConf conf)
+            throws IOException {
         this.filesIndexAccessor = filesIndexAccessor;
         fs = FileSystem.get(conf);
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/AbstractHDFSReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/AbstractHDFSReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/AbstractHDFSReader.java
index 577e9b2..65bfcf3 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/AbstractHDFSReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/AbstractHDFSReader.java
@@ -27,12 +27,11 @@ import org.apache.hadoop.mapred.Reporter;
 /***
  * an abstract class to be used for reading hdfs based datasets one record at a time <- used for indexing->
  */
-@SuppressWarnings("deprecation")
 public abstract class AbstractHDFSReader extends InputStream {
 
     /***
      * This function should be called once to do initial setup before starting to read records
-     * 
+     *
      * @return true if ready for reading
      */
     abstract public boolean initialize() throws Exception;
@@ -96,6 +95,7 @@ public abstract class AbstractHDFSReader extends InputStream {
             public void progress() {
             }
 
+            @Override
             public float getProgress() {
                 return 0.0f;
             }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/GenericFileAwareRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/GenericFileAwareRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/GenericFileAwareRecordReader.java
index 04e43eb..6dbf464 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/GenericFileAwareRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/GenericFileAwareRecordReader.java
@@ -21,6 +21,7 @@ package org.apache.asterix.external.indexing.input;
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.asterix.metadata.entities.ExternalFile;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -28,17 +29,15 @@ import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 
-import org.apache.asterix.metadata.entities.ExternalFile;
 /**
  * This is a generic reader used for indexing external dataset or for performing full scan for external dataset with
  * a stored snapshot
- * @author alamouda
  *
+ * @author alamouda
  */
 
-@SuppressWarnings("deprecation")
-public class GenericFileAwareRecordReader extends GenericRecordReader{
-    
+public class GenericFileAwareRecordReader extends GenericRecordReader {
+
     private List<ExternalFile> files;
     private FileSystem hadoopFS;
     private long recordOffset = 0L;
@@ -71,16 +70,15 @@ public class GenericFileAwareRecordReader extends GenericRecordReader{
                 /**
                  * read the split
                  */
-                try{
-                    String fileName = ((FileSplit) (inputSplits[currentSplitIndex])).getPath()
-                            .toUri().getPath();
+                try {
+                    String fileName = ((FileSplit) (inputSplits[currentSplitIndex])).getPath().toUri().getPath();
                     FileStatus fileStatus = hadoopFS.getFileStatus(new Path(fileName));
                     //skip if not the same file stored in the files snapshot
-                    if(fileStatus.getModificationTime() != files.get(currentSplitIndex).getLastModefiedTime().getTime())
+                    if (fileStatus.getModificationTime() != files.get(currentSplitIndex).getLastModefiedTime()
+                            .getTime())
                         continue;
                     reader = getRecordReader(currentSplitIndex);
-                }
-                catch(Exception e){
+                } catch (Exception e) {
                     continue;
                 }
                 key = reader.createKey();
@@ -90,7 +88,7 @@ public class GenericFileAwareRecordReader extends GenericRecordReader{
         }
         return false;
     }
-    
+
     @SuppressWarnings("unchecked")
     @Override
     public Object readNext() throws IOException {
@@ -125,5 +123,5 @@ public class GenericFileAwareRecordReader extends GenericRecordReader{
     public int getFileNumber() throws Exception {
         return files.get(currentSplitIndex).getFileNumber();
     }
-    
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/GenericRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/GenericRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/GenericRecordReader.java
index 681e4bc..ab050a7 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/GenericRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/GenericRecordReader.java
@@ -19,6 +19,7 @@
 package org.apache.asterix.external.indexing.input;
 
 import java.io.IOException;
+
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
@@ -28,7 +29,7 @@ import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
  * This class can be used by any input format to perform full scan operations
  */
 
-@SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
+@SuppressWarnings({ "rawtypes", "unchecked" })
 public class GenericRecordReader extends AbstractHDFSReader {
 
     protected RecordReader reader;
@@ -86,8 +87,7 @@ public class GenericRecordReader extends AbstractHDFSReader {
     }
 
     protected RecordReader getRecordReader(int slitIndex) throws IOException {
-        RecordReader reader = conf.getInputFormat().getRecordReader(
-                (org.apache.hadoop.mapred.FileSplit) inputSplits[slitIndex], conf, getReporter());
+        RecordReader reader = conf.getInputFormat().getRecordReader(inputSplits[slitIndex], conf, getReporter());
         return reader;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/RCFileDataReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/RCFileDataReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/RCFileDataReader.java
index d44359b..4b89f59 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/RCFileDataReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/RCFileDataReader.java
@@ -21,6 +21,7 @@ package org.apache.asterix.external.indexing.input;
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.asterix.metadata.entities.ExternalFile;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -28,15 +29,13 @@ import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
-
-import org.apache.asterix.metadata.entities.ExternalFile;
 import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
 
 //Used in two cases:
 //1. building an index over a dataset
 //2. performing full scan over a dataset that has built index (to provide consistent view) with RCFile format
 
-@SuppressWarnings({ "rawtypes", "deprecation" })
+@SuppressWarnings("rawtypes")
 public class RCFileDataReader extends AbstractHDFSReader {
 
     private RecordReader reader;
@@ -121,10 +120,9 @@ public class RCFileDataReader extends AbstractHDFSReader {
 
     private RecordReader getRecordReader(int slitIndex) throws IOException {
         RecordReader reader;
-        try{
-        reader = conf.getInputFormat().getRecordReader(
-                (org.apache.hadoop.mapred.FileSplit) inputSplits[slitIndex], conf, getReporter());
-        } catch(Exception e){
+        try {
+            reader = conf.getInputFormat().getRecordReader(inputSplits[slitIndex], conf, getReporter());
+        } catch (Exception e) {
             e.printStackTrace();
             throw e;
         }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/TextualDataReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/TextualDataReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/TextualDataReader.java
index 3bf024a..797b961 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/TextualDataReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/input/TextualDataReader.java
@@ -21,6 +21,7 @@ package org.apache.asterix.external.indexing.input;
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.asterix.metadata.entities.ExternalFile;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -31,15 +32,13 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
-
-import org.apache.asterix.metadata.entities.ExternalFile;
 import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
 
 // Used in two cases:
 // 1. building an index over a dataset
 // 2. performing full scan over a dataset that has built index (to provide consistent view)
 
-@SuppressWarnings({ "rawtypes", "deprecation" })
+@SuppressWarnings("rawtypes")
 public class TextualDataReader extends AbstractHDFSReader {
 
     private RecordReader<Object, Text> reader;
@@ -151,24 +150,21 @@ public class TextualDataReader extends AbstractHDFSReader {
                     continue;
                 }
                 key = reader.createKey();
-                value = (Text) reader.createValue();
+                value = reader.createValue();
                 return true;
             }
         }
         return false;
     }
 
-    
     private RecordReader getRecordReader(int splitIndex) throws IOException {
         RecordReader reader;
         if (conf.getInputFormat() instanceof SequenceFileInputFormat) {
             SequenceFileInputFormat format = (SequenceFileInputFormat) conf.getInputFormat();
-            reader = format.getRecordReader((org.apache.hadoop.mapred.FileSplit) inputSplits[splitIndex], conf,
-                    getReporter());
+            reader = format.getRecordReader(inputSplits[splitIndex], conf, getReporter());
         } else {
             TextInputFormat format = (TextInputFormat) conf.getInputFormat();
-            reader = format.getRecordReader((org.apache.hadoop.mapred.FileSplit) inputSplits[splitIndex], conf,
-                    getReporter());
+            reader = format.getRecordReader(inputSplits[splitIndex], conf, getReporter());
         }
         return reader;
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
index 9808f8f..e769ad1 100755
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
@@ -63,7 +63,7 @@ public abstract class ExternalFunction implements IExternalFunction {
         String dataverse = finfo.getFunctionIdentifier().getNamespace();
         ClassLoader libraryClassLoader = ExternalLibraryManager.getLibraryClassLoader(dataverse, functionLibary);
         String classname = finfo.getFunctionBody().trim();
-        Class clazz;
+        Class<?> clazz;
         try {
             clazz = Class.forName(classname, true, libraryClassLoader);
             externalFunctionFactory = (IFunctionFactory) clazz.newInstance();
@@ -88,8 +88,8 @@ public abstract class ExternalFunction implements IExternalFunction {
 
             // Type-cast the source array based on the input type that this function wants to receive.
             ATypeTag targetTypeTag = finfo.getParamList().get(i).getTypeTag();
-            ATypeTag sourceTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[inputVal
-                    .getStartOffset()]);
+            ATypeTag sourceTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
+                    .deserialize(inputVal.getByteArray()[inputVal.getStartOffset()]);
             if (sourceTypeTag != targetTypeTag) {
                 castBuffer.reset();
                 ATypeHierarchy.convertNumericTypeByteArray(inputVal.getByteArray(), inputVal.getStartOffset(),

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java
index b3a1130..be9f9c1 100755
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java
@@ -46,10 +46,9 @@ public class ExternalFunctionDescriptorProvider {
 }
 
 class ExternalScalarFunctionDescriptor extends AbstractScalarFunctionDynamicDescriptor implements IFunctionDescriptor {
-
+    private static final long serialVersionUID = 1L;
     private final IFunctionInfo finfo;
     private ICopyEvaluatorFactory evaluatorFactory;
-    private ICopyEvaluatorFactory[] args;
 
     @Override
     public ICopyEvaluatorFactory createEvaluatorFactory(ICopyEvaluatorFactory[] args) throws AlgebricksException {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
index 3c5c20f..d0d44e3 100755
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
@@ -44,7 +44,6 @@ public class ExternalFunctionProvider {
 }
 
 class ExternalScalarFunction extends ExternalFunction implements IExternalScalarFunction, ICopyEvaluator {
-    private final static byte SER_RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
     private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
 
     public ExternalScalarFunction(IExternalFunctionInfo finfo, ICopyEvaluatorFactory args[],
@@ -69,6 +68,7 @@ class ExternalScalarFunction extends ExternalFunction implements IExternalScalar
         }
     }
 
+    @Override
     public void evaluate(IFunctionHelper argumentProvider) throws Exception {
         ((IExternalScalarFunction) externalFunction).evaluate(argumentProvider);
         /*
@@ -82,5 +82,4 @@ class ExternalScalarFunction extends ExternalFunction implements IExternalScalar
         }
     }
 
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/library/JTypeObjectFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/JTypeObjectFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/JTypeObjectFactory.java
index 649cf81..677ed76 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/JTypeObjectFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/JTypeObjectFactory.java
@@ -144,6 +144,8 @@ public class JTypeObjectFactory implements IObjectFactory<IJObject, IAType> {
                     }
                 }
                 return retValue = itemObject;
+            default:
+                break;
         }
         return retValue;
     }

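A recurring fix in this commit (JTypeObjectFactory above, and ResultCollector and JObjectAccessors below) is adding a default: break; arm to switch statements over type tags, which silences the compiler's incomplete-switch warning without changing behaviour. A tiny self-contained illustration, using a hypothetical enum in place of ATypeTag:

    class SwitchDefaultSketch {
        // Hypothetical enum standing in for ATypeTag.
        enum Tag {
            INT32, STRING, RECORD
        }

        static String describe(Tag tag) {
            String result = "other";
            switch (tag) {
                case INT32:
                    result = "integer";
                    break;
                case STRING:
                    result = "text";
                    break;
                default:
                    // Explicit default arm: behaviour is identical to falling
                    // through, but the "switch does not cover all cases" warning
                    // is gone.
                    break;
            }
            return result;
        }
    }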
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/library/ResultCollector.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ResultCollector.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ResultCollector.java
index 39c5116..2671f13 100755
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ResultCollector.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ResultCollector.java
@@ -19,7 +19,6 @@
 package org.apache.asterix.external.library;
 
 import java.io.DataOutput;
-import java.nio.ByteBuffer;
 
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
@@ -42,7 +41,6 @@ import org.apache.hyracks.data.std.api.IDataOutputProvider;
 public class ResultCollector implements IResultCollector {
 
     private IAObject reusableResultObjectHolder;
-    private ByteBuffer reusableResultBinaryHolder;
     private IDataOutputProvider outputProvider;
     private IExternalFunctionInfo finfo;
 
@@ -50,7 +48,6 @@ public class ResultCollector implements IResultCollector {
         this.finfo = finfo;
         IAType returnType = finfo.getReturnType();
         reusableResultObjectHolder = allocateResultObjectHolder(returnType);
-        reusableResultBinaryHolder = allocateResultBinaryHolder(returnType);
         this.outputProvider = outputProvider;
     }
 
@@ -73,24 +70,8 @@ public class ResultCollector implements IResultCollector {
                     fieldObjects[i] = allocateResultObjectHolder(fieldType[i]);
                 }
                 return new AMutableRecord((ARecordType) type, fieldObjects);
-        }
-        return null;
-    }
-
-    private ByteBuffer allocateResultBinaryHolder(IAType type) {
-        switch (type.getTypeTag()) {
-            case INT32:
-                return ByteBuffer.allocate(4);
-            case FLOAT:
-                return ByteBuffer.allocate(4);
-            case DOUBLE:
-                return ByteBuffer.allocate(8);
-            case STRING:
-                return ByteBuffer.allocate(32 * 1024);
-            case ORDEREDLIST:
-                return ByteBuffer.allocate(32 * 1024);
-            case RECORD:
-                return ByteBuffer.allocate(32 * 1024);
+            default:
+                break;
         }
         return null;
     }
@@ -130,14 +111,15 @@ public class ResultCollector implements IResultCollector {
         serializeResult(list);
     }
 
+    @Override
     public IAObject getComplexTypeResultHolder() {
         return reusableResultObjectHolder;
     }
 
     private void serializeResult(IAObject object) throws AsterixException {
         try {
-            AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(finfo.getReturnType()).serialize(
-                    reusableResultObjectHolder, outputProvider.getDataOutput());
+            AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(finfo.getReturnType())
+                    .serialize(reusableResultObjectHolder, outputProvider.getDataOutput());
         } catch (HyracksDataException hde) {
             throw new AsterixException(hde);
         }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
index 6010e54..677e913 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
@@ -18,6 +18,11 @@
  */
 package org.apache.asterix.external.library.java;
 
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.util.LinkedHashMap;
+import java.util.List;
+
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.dataflow.data.nontagged.serde.ABooleanSerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
@@ -81,13 +86,6 @@ import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.util.container.IObjectPool;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.util.LinkedHashMap;
-import java.util.List;
 
 public class JObjectAccessors {
 
@@ -136,6 +134,8 @@ public class JObjectAccessors {
             case DURATION:
                 accessor = new JDurationAccessor();
                 break;
+            default:
+                break;
         }
         return accessor;
     }
@@ -236,8 +236,8 @@ public class JObjectAccessors {
             int l = pointable.getLength();
 
             String v = null;
-            v = aStringSerDer.deserialize(
-                    new DataInputStream(new ByteArrayInputStream(b, s + 1, l - 1))).getStringValue();
+            v = aStringSerDer.deserialize(new DataInputStream(new ByteArrayInputStream(b, s + 1, l - 1)))
+                    .getStringValue();
             JObjectUtil.getNormalizedString(v);
 
             IJObject jObject = objectPool.allocate(BuiltinType.ASTRING);
@@ -296,8 +296,8 @@ public class JObjectAccessors {
             byte[] b = pointable.getByteArray();
             int s = pointable.getStartOffset();
             int l = pointable.getLength();
-            ADuration duration = ADurationSerializerDeserializer.INSTANCE.deserialize(new DataInputStream(
-                    new ByteArrayInputStream(b, s, l)));
+            ADuration duration = ADurationSerializerDeserializer.INSTANCE
+                    .deserialize(new DataInputStream(new ByteArrayInputStream(b, s, l)));
             IJObject jObject = objectPool.allocate(BuiltinType.ADURATION);
             ((JDuration) jObject).setValue(duration.getMonths(), duration.getMilliseconds());
             return jObject;
@@ -348,8 +348,8 @@ public class JObjectAccessors {
             byte[] b = pointable.getByteArray();
             int s = pointable.getStartOffset();
             int l = pointable.getLength();
-            ACircle v = ACircleSerializerDeserializer.INSTANCE.deserialize(new DataInputStream(
-                    new ByteArrayInputStream(b, s, l)));
+            ACircle v = ACircleSerializerDeserializer.INSTANCE
+                    .deserialize(new DataInputStream(new ByteArrayInputStream(b, s, l)));
             JPoint jpoint = (JPoint) objectPool.allocate(BuiltinType.APOINT);
             jpoint.setValue(v.getP().getX(), v.getP().getY());
             IJObject jObject = objectPool.allocate(BuiltinType.ACIRCLE);
@@ -366,8 +366,8 @@ public class JObjectAccessors {
             byte[] b = pointable.getByteArray();
             int s = pointable.getStartOffset();
             int l = pointable.getLength();
-            APoint v = APointSerializerDeserializer.INSTANCE.deserialize(new DataInputStream(new ByteArrayInputStream(
-                    b, s, l)));
+            APoint v = APointSerializerDeserializer.INSTANCE
+                    .deserialize(new DataInputStream(new ByteArrayInputStream(b, s, l)));
             JPoint jObject = (JPoint) objectPool.allocate(BuiltinType.APOINT);
             jObject.setValue(v.getX(), v.getY());
             return jObject;
@@ -382,8 +382,8 @@ public class JObjectAccessors {
             byte[] b = pointable.getByteArray();
             int s = pointable.getStartOffset();
             int l = pointable.getLength();
-            APoint3D v = APoint3DSerializerDeserializer.INSTANCE.deserialize(new DataInputStream(
-                    new ByteArrayInputStream(b, s, l)));
+            APoint3D v = APoint3DSerializerDeserializer.INSTANCE
+                    .deserialize(new DataInputStream(new ByteArrayInputStream(b, s, l)));
             JPoint3D jObject = (JPoint3D) objectPool.allocate(BuiltinType.APOINT3D);
             jObject.setValue(v.getX(), v.getY(), v.getZ());
             return jObject;
@@ -398,8 +398,8 @@ public class JObjectAccessors {
             byte[] b = pointable.getByteArray();
             int s = pointable.getStartOffset();
             int l = pointable.getLength();
-            ALine v = ALineSerializerDeserializer.INSTANCE.deserialize(new DataInputStream(new ByteArrayInputStream(b,
-                    s, l)));
+            ALine v = ALineSerializerDeserializer.INSTANCE
+                    .deserialize(new DataInputStream(new ByteArrayInputStream(b, s, l)));
             JLine jObject = (JLine) objectPool.allocate(BuiltinType.ALINE);
             jObject.setValue(v.getP1(), v.getP2());
             return jObject;
@@ -414,8 +414,8 @@ public class JObjectAccessors {
             byte[] b = pointable.getByteArray();
             int s = pointable.getStartOffset();
             int l = pointable.getLength();
-            APolygon v = APolygonSerializerDeserializer.INSTANCE.deserialize(new DataInputStream(
-                    new ByteArrayInputStream(b, s, l)));
+            APolygon v = APolygonSerializerDeserializer.INSTANCE
+                    .deserialize(new DataInputStream(new ByteArrayInputStream(b, s, l)));
             JPolygon jObject = (JPolygon) objectPool.allocate(BuiltinType.APOLYGON);
             jObject.setValue(v.getPoints());
             return jObject;
@@ -430,8 +430,8 @@ public class JObjectAccessors {
             byte[] b = pointable.getByteArray();
             int s = pointable.getStartOffset();
             int l = pointable.getLength();
-            ARectangle v = ARectangleSerializerDeserializer.INSTANCE.deserialize(new DataInputStream(
-                    new ByteArrayInputStream(b, s, l)));
+            ARectangle v = ARectangleSerializerDeserializer.INSTANCE
+                    .deserialize(new DataInputStream(new ByteArrayInputStream(b, s, l)));
             JRectangle jObject = (JRectangle) objectPool.allocate(BuiltinType.ARECTANGLE);
             jObject.setValue(v.getP1(), v.getP2());
             return jObject;
@@ -461,7 +461,7 @@ public class JObjectAccessors {
             } catch (AlgebricksException e) {
                 throw new HyracksDataException(e);
             }
-            ARecordVisitablePointable recordPointable = (ARecordVisitablePointable) pointable;
+            ARecordVisitablePointable recordPointable = pointable;
             List<IVisitablePointable> fieldPointables = recordPointable.getFieldValues();
             List<IVisitablePointable> fieldTypeTags = recordPointable.getFieldTypeTags();
             List<IVisitablePointable> fieldNames = recordPointable.getFieldNames();
@@ -473,8 +473,8 @@ public class JObjectAccessors {
                     closedPart = index < recordType.getFieldTypes().length;
                     IVisitablePointable tt = fieldTypeTags.get(index);
                     IAType fieldType = closedPart ? recordType.getFieldTypes()[index] : null;
-                    ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(tt.getByteArray()[tt
-                            .getStartOffset()]);
+                    ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
+                            .deserialize(tt.getByteArray()[tt.getStartOffset()]);
                     IVisitablePointable fieldName = fieldNames.get(index);
                     typeInfo.reset(fieldType, typeTag);
                     switch (typeTag) {
@@ -487,8 +487,8 @@ public class JObjectAccessors {
                                 // value is null
                                 fieldObject = null;
                             } else {
-                                fieldObject = pointableVisitor
-                                        .visit((AListVisitablePointable) fieldPointable, typeInfo);
+                                fieldObject = pointableVisitor.visit((AListVisitablePointable) fieldPointable,
+                                        typeInfo);
                             }
                             break;
                         case ANY:
@@ -502,8 +502,9 @@ public class JObjectAccessors {
                         byte[] b = fieldName.getByteArray();
                         int s = fieldName.getStartOffset();
                         int l = fieldName.getLength();
-                        String v = aStringSerDer.deserialize(
-                                new DataInputStream(new ByteArrayInputStream(b, s + 1, l - 1))).getStringValue();
+                        String v = aStringSerDer
+                                .deserialize(new DataInputStream(new ByteArrayInputStream(b, s + 1, l - 1)))
+                                .getStringValue();
                         openFields.put(v, fieldObject);
                     }
                     index++;
@@ -538,8 +539,7 @@ public class JObjectAccessors {
 
         @Override
         public IJObject access(AListVisitablePointable pointable, IObjectPool<IJObject, IAType> objectPool,
-                IAType listType,
-                JObjectPointableVisitor pointableVisitor) throws HyracksDataException {
+                IAType listType, JObjectPointableVisitor pointableVisitor) throws HyracksDataException {
             List<IVisitablePointable> items = pointable.getItems();
             List<IVisitablePointable> itemTags = pointable.getItemTags();
             JList list = pointable.ordered() ? new JOrderedList(listType) : new JUnorderedList(listType);
@@ -549,8 +549,8 @@ public class JObjectAccessors {
 
                 for (IVisitablePointable itemPointable : items) {
                     IVisitablePointable itemTagPointable = itemTags.get(index);
-                    ATypeTag itemTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(itemTagPointable
-                            .getByteArray()[itemTagPointable.getStartOffset()]);
+                    ATypeTag itemTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
+                            .deserialize(itemTagPointable.getByteArray()[itemTagPointable.getStartOffset()]);
                     typeInfo.reset(listType.getType(), listType.getTypeTag());
                     switch (itemTypeTag) {
                         case RECORD:
@@ -561,17 +561,14 @@ public class JObjectAccessors {
                             listItem = pointableVisitor.visit((AListVisitablePointable) itemPointable, typeInfo);
                             break;
                         case ANY:
-                            throw new IllegalArgumentException("Cannot parse list item of type "
-                                    + listType.getTypeTag());
+                            throw new IllegalArgumentException(
+                                    "Cannot parse list item of type " + listType.getTypeTag());
                         default:
                             IAType itemType = ((AbstractCollectionType) listType).getItemType();
                             typeInfo.reset(itemType, itemType.getTypeTag());
                             listItem = pointableVisitor.visit((AFlatValuePointable) itemPointable, typeInfo);
 
                     }
-                    ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
-                            .deserialize(itemPointable.getByteArray()[itemPointable.getStartOffset()]);
-
                     list.add(listItem);
                 }
             } catch (AsterixException exception) {
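
The last hunk above deletes a local whose computed value was never read afterwards -- the dead store behind an "unused local variable" warning. A minimal sketch of that pattern, using hypothetical names rather than code from this patch:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class UnusedLocalExample {
        public static List<Integer> copyAll(List<Integer> values) {
            List<Integer> result = new ArrayList<Integer>();
            for (Integer v : values) {
                // Before the cleanup an extra local was computed here and never
                // read, e.g.  int tag = v.intValue();  -- a dead store that only
                // produces a warning. Deleting it changes no behaviour.
                result.add(v);
            }
            return result;
        }

        public static void main(String[] args) {
            System.out.println(copyAll(Arrays.asList(1, 2, 3)));
        }
    }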

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
index a0710ff..b5458e2 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
@@ -18,6 +18,10 @@
  */
 package org.apache.asterix.external.library.java;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
@@ -56,20 +60,16 @@ import org.apache.asterix.om.util.container.IObjectPool;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
 public class JObjectUtil {
 
     /**
-     *  Normalize an input string by removing linebreaks, and replace them with space
-     *  Also remove non-readable special characters
+     * Normalize an input string by removing linebreaks, and replace them with space
+     * Also remove non-readable special characters
      *
      * @param originalString
-     *      The input String
+     *            The input String
      * @return
-     *      String - the normalized string
+     *         String - the normalized string
      */
     public static String getNormalizedString(String originalString) {
         int len = originalString.length();
@@ -221,7 +221,7 @@ public class JObjectUtil {
                     p1.setValue(dis.readDouble(), dis.readDouble());
                     points.add(p1);
                 }
-                ((JPolygon) jObject).setValue(points.toArray(new APoint[]{}));
+                ((JPolygon) jObject).setValue(points.toArray(new APoint[] {}));
                 break;
             }
 
@@ -267,7 +267,7 @@ public class JObjectUtil {
                             dis.readInt();
                     }
                     for (int i = 0; i < numberOfitems; i++) {
-                        IJObject v = (IJObject) getJType(elementType.getTypeTag(), elementType, dis, objectPool);
+                        IJObject v = getJType(elementType.getTypeTag(), elementType, dis, objectPool);
                         ((JUnorderedList) jObject).add(v);
                     }
                 }
@@ -302,7 +302,7 @@ public class JObjectUtil {
                             dis.readInt();
                     }
                     for (int i = 0; i < numberOfitems; i++) {
-                        IJObject v = (IJObject) getJType(elementType.getTypeTag(), elementType, dis, objectPool);
+                        IJObject v = getJType(elementType.getTypeTag(), elementType, dis, objectPool);
                         ((JOrderedList) jObject).add(v);
                     }
                 }
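
The two hunks here drop a cast on getJType(...), which is evidently already declared to return IJObject, so the cast only earns an "unnecessary cast" warning. A small self-contained sketch of the same situation, with made-up names:

    public class RedundantCastExample {
        interface Value {
        }

        static class IntValue implements Value {
        }

        // The factory is already declared to return Value ...
        static Value create() {
            return new IntValue();
        }

        public static void main(String[] args) {
            // ... so writing  Value v = (Value) create();  casts to the declared
            // type and triggers the "unnecessary cast" warning. Dropping the
            // cast is a pure cleanup; the generated bytecode is identical.
            Value v = create();
            System.out.println(v.getClass().getSimpleName());
        }
    }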

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
index 02f7b4b..406d242 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
@@ -18,6 +18,18 @@
  */
 package org.apache.asterix.external.library.java;
 
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
 import org.apache.asterix.builders.IAsterixListBuilder;
 import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.builders.UnorderedListBuilder;
@@ -44,7 +56,6 @@ import org.apache.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeseriali
 import org.apache.asterix.om.base.ABoolean;
 import org.apache.asterix.om.base.ADouble;
 import org.apache.asterix.om.base.AFloat;
-import org.apache.asterix.om.base.AInt16;
 import org.apache.asterix.om.base.AInt32;
 import org.apache.asterix.om.base.AInt64;
 import org.apache.asterix.om.base.AInt8;
@@ -83,18 +94,6 @@ import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
 public class JObjects {
 
     public static abstract class JObject implements IJObject {
@@ -200,11 +199,11 @@ public class JObjects {
         }
 
         public void setValue(byte v) {
-            ((AMutableInt16) value).setValue(v);
+            value.setValue(v);
         }
 
         public short getValue() {
-            return ((AMutableInt16) value).getShortValue();
+            return value.getShortValue();
         }
 
         @Override
@@ -216,12 +215,12 @@ public class JObjects {
                     throw new HyracksDataException(e);
                 }
             }
-            AInt16SerializerDeserializer.INSTANCE.serialize((AInt16) value, dataOutput);
+            AInt16SerializerDeserializer.INSTANCE.serialize(value, dataOutput);
         }
 
         @Override
         public void reset() {
-            ((AMutableInt16) value).setValue((short) 0);
+            value.setValue((short) 0);
         }
 
     }
@@ -930,6 +929,7 @@ public class JObjects {
             this.jObjects = new ArrayList<IJObject>();
         }
 
+        @Override
         public void add(IJObject jObject) {
             jObjects.add(jObject);
         }
@@ -980,7 +980,6 @@ public class JObjects {
         private Map<String, IJObject> openFields;
         private final AStringSerializerDeserializer aStringSerDer = new AStringSerializerDeserializer();
 
-
         public JRecord(ARecordType recordType, IJObject[] fields) {
             this.recordType = recordType;
             this.fields = fields;
@@ -993,21 +992,6 @@ public class JObjects {
             this.openFields = openFields;
         }
 
-        private ARecordType getARecordType(String[] fieldNames, IJObject[] fields) throws AsterixException {
-            IAType[] fieldTypes = new IAType[fields.length];
-            int index = 0;
-            for (IJObject jObj : fields) {
-                fieldTypes[index++] = jObj.getIAObject().getType();
-            }
-            ARecordType recordType;
-            try {
-                recordType = new ARecordType(null, fieldNames, fieldTypes, false);
-            } catch (HyracksDataException e) {
-                throw new AsterixException(e);
-            }
-            return recordType;
-        }
-
         public void addField(String fieldName, IJObject fieldValue) throws AsterixException {
             int pos = getFieldPosByName(fieldName);
             if (pos >= 0) {
@@ -1086,6 +1070,7 @@ public class JObjects {
             return recordBuilder;
         }
 
+        @Override
         public void serialize(DataOutput output, boolean writeTypeTag) throws HyracksDataException {
             RecordBuilder recordBuilder = new RecordBuilder();
             recordBuilder.reset(recordType);
@@ -1128,6 +1113,7 @@ public class JObjects {
             return value;
         }
 
+        @Override
         public void reset() throws AlgebricksException {
             if (openFields != null && !openFields.isEmpty()) {
                 openFields.clear();
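
Several hunks in this file add @Override to methods that implement an interface, which is what the compiler's missing-override check asks for. A short sketch of why the annotation is worth having, with hypothetical names:

    public class OverrideExample {
        interface Resettable {
            void reset();
        }

        static class Counter implements Resettable {
            private int count = 3;

            // @Override on an interface implementation (legal since Java 6) asks
            // the compiler to verify the signature: a typo such as reset(int)
            // would now fail to compile instead of silently adding a new method.
            @Override
            public void reset() {
                count = 0;
            }

            int get() {
                return count;
            }
        }

        public static void main(String[] args) {
            Counter counter = new Counter();
            counter.reset();
            System.out.println(counter.get()); // prints 0
        }
    }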

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinAppendLength.java
----------------------------------------------------------------------
diff --git a/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinAppendLength.java b/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinAppendLength.java
index 138aee2..6778152 100644
--- a/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinAppendLength.java
+++ b/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinAppendLength.java
@@ -25,7 +25,6 @@ import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Collection;
-import java.util.HashMap;
 
 import org.apache.asterix.fuzzyjoin.tokenizer.Tokenizer;
 import org.apache.asterix.fuzzyjoin.tokenizer.TokenizerFactory;
@@ -44,11 +43,10 @@ public class FuzzyJoinAppendLength {
         int[] dataColumns = FuzzyJoinUtil.getDataColumns("2,3");
 
         String line;
-        HashMap<String, MutableInteger> tokenCount = new HashMap<String, MutableInteger>();
         while ((line = input.readLine()) != null) {
             String[] splits = line.split(FuzzyJoinConfig.RECORD_SEPARATOR_REGEX);
-            Collection<String> tokens = tokenizer.tokenize(FuzzyJoinUtil.getData(splits, dataColumns,
-                    FuzzyJoinConfig.TOKEN_SEPARATOR));
+            Collection<String> tokens = tokenizer
+                    .tokenize(FuzzyJoinUtil.getData(splits, dataColumns, FuzzyJoinConfig.TOKEN_SEPARATOR));
             output.write(splits[0] + FuzzyJoinConfig.RECORD_SEPARATOR + splits[1] + FuzzyJoinConfig.RECORD_SEPARATOR
                     + splits[2] + FuzzyJoinConfig.RECORD_SEPARATOR + splits[3] + FuzzyJoinConfig.RECORD_SEPARATOR
                     + tokens.size() + "\n");

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java
----------------------------------------------------------------------
diff --git a/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java b/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java
index f56d01e..62b34c0 100644
--- a/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java
+++ b/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java
@@ -39,7 +39,7 @@ import org.apache.asterix.fuzzyjoin.tokenorder.TokenRank;
 import org.apache.asterix.fuzzyjoin.tokenorder.TokenRankFrequency;
 
 public class FuzzyJoinTokenize {
-    public static class TokenCount implements Comparable {
+    public static class TokenCount implements Comparable<Object> {
         public String token;
         public MutableInteger count;
 
@@ -111,14 +111,14 @@ public class FuzzyJoinTokenize {
         tokenLoad.loadTokenRank();
 
         input = new BufferedReader(new FileReader(inputFileName));
-        LittleEndianIntOutputStream outputTokenized = new LittleEndianIntOutputStream(new BufferedOutputStream(
-                new FileOutputStream(tokenizedFileName)));
+        LittleEndianIntOutputStream outputTokenized = new LittleEndianIntOutputStream(
+                new BufferedOutputStream(new FileOutputStream(tokenizedFileName)));
         while ((line = input.readLine()) != null) {
             String splits[] = line.split(FuzzyJoinConfig.RECORD_SEPARATOR_REGEX);
             int rid = Integer.parseInt(splits[FuzzyJoinConfig.RECORD_KEY]);
             outputTokenized.writeInt(rid);
-            Collection<String> tokens = tokenizer.tokenize(FuzzyJoinUtil.getData(splits, dataColumns,
-                    FuzzyJoinConfig.TOKEN_SEPARATOR));
+            Collection<String> tokens = tokenizer
+                    .tokenize(FuzzyJoinUtil.getData(splits, dataColumns, FuzzyJoinConfig.TOKEN_SEPARATOR));
             Collection<Integer> tokensRanked = tokenRank.getTokenRanks(tokens);
             outputTokenized.writeInt(tokensRanked.size());
             for (Integer token : tokensRanked) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/MutableInteger.java
----------------------------------------------------------------------
diff --git a/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/MutableInteger.java b/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/MutableInteger.java
index 5083d3a..f7d7d35 100644
--- a/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/MutableInteger.java
+++ b/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/MutableInteger.java
@@ -19,7 +19,7 @@
 
 package org.apache.asterix.fuzzyjoin;
 
-public class MutableInteger implements Comparable {
+public class MutableInteger implements Comparable<Object> {
     private int v;
 
     public MutableInteger(int v) {
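
The one-line change here parameterises the raw Comparable, mirroring the TokenCount change above. A sketch of the trade-off, with hypothetical names:

    public class RawTypeExample {
        // Raw form, which draws the warning:  class Count implements Comparable { ... }
        // Parameterising with Object keeps the existing compareTo(Object) signature;
        // Comparable<Count> would be stricter but would require changing that signature.
        static class Count implements Comparable<Object> {
            final int value;

            Count(int value) {
                this.value = value;
            }

            @Override
            public int compareTo(Object other) {
                return Integer.compare(value, ((Count) other).value);
            }
        }

        public static void main(String[] args) {
            System.out.println(new Count(1).compareTo(new Count(2))); // -1
        }
    }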

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/FuzzyJoinTestUtil.java
----------------------------------------------------------------------
diff --git a/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/FuzzyJoinTestUtil.java b/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/FuzzyJoinTestUtil.java
index f4d1aeb..703db60 100644
--- a/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/FuzzyJoinTestUtil.java
+++ b/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/FuzzyJoinTestUtil.java
@@ -29,15 +29,12 @@ import org.junit.Assert;
 
 public class FuzzyJoinTestUtil {
 
-    public static void verifyDirectory(String pathTest, String pathCorrect)
-            throws IOException {
+    public static void verifyDirectory(String pathTest, String pathCorrect) throws IOException {
         verifyDirectory(pathTest, pathCorrect, false);
     }
 
-    public static void verifyDirectory(String pathTest, String pathCorrect,
-            boolean noDup) throws IOException {
-        int countTest = 0, countTestDedup = 0, countCorrect = 0;
-
+    public static void verifyDirectory(String pathTest, String pathCorrect, boolean noDup) throws IOException {
+        int countTestDedup = 0, countCorrect = 0;
         BufferedReader input;
         String line;
         HashSet<String> buffer = new HashSet<String>();
@@ -46,7 +43,6 @@ public class FuzzyJoinTestUtil {
         input = new BufferedReader(new FileReader(pathTest));
         while ((line = input.readLine()) != null) {
             buffer.add(line);
-            countTest++;
         }
         countTestDedup = buffer.size();
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-installer/src/main/java/org/apache/asterix/installer/error/OutputHandler.java
----------------------------------------------------------------------
diff --git a/asterix-installer/src/main/java/org/apache/asterix/installer/error/OutputHandler.java b/asterix-installer/src/main/java/org/apache/asterix/installer/error/OutputHandler.java
index 1da9ee4..4c83706 100644
--- a/asterix-installer/src/main/java/org/apache/asterix/installer/error/OutputHandler.java
+++ b/asterix-installer/src/main/java/org/apache/asterix/installer/error/OutputHandler.java
@@ -35,6 +35,7 @@ public class OutputHandler implements IOutputHandler {
 
     }
 
+    @Override
     public OutputAnalysis reportEventOutput(Event event, String output) {
 
         EventType eventType = EventType.valueOf(event.getType().toUpperCase());
@@ -83,6 +84,8 @@ public class OutputHandler implements IOutputHandler {
                     ignore = false;
                 }
                 break;
+            default:
+                break;
         }
         if (ignore) {
             return new OutputAnalysis(true, null);
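
The added default branch covers the enum constants the switch does not handle explicitly. A compact sketch with invented constants (the real EventType has its own):

    public class EnumSwitchExample {
        enum EventType {
            NODE_JOIN, NODE_FAILURE, CC_START, CC_FAILURE
        }

        static boolean shouldIgnore(EventType type) {
            boolean ignore = true;
            switch (type) {
                case NODE_FAILURE:
                case CC_FAILURE:
                    ignore = false;
                    break;
                // An explicit default silences the "switch does not cover all
                // enum constants" warning without changing behaviour.
                default:
                    break;
            }
            return ignore;
        }

        public static void main(String[] args) {
            System.out.println(shouldIgnore(EventType.NODE_JOIN));    // true
            System.out.println(shouldIgnore(EventType.NODE_FAILURE)); // false
        }
    }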

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixInstallerIntegrationUtil.java
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixInstallerIntegrationUtil.java b/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixInstallerIntegrationUtil.java
index 52ee4b4..1dd69df 100644
--- a/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixInstallerIntegrationUtil.java
+++ b/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixInstallerIntegrationUtil.java
@@ -25,7 +25,6 @@ import java.io.FilenameFilter;
 import java.io.IOException;
 import java.math.BigInteger;
 import java.util.Map;
-import java.util.logging.Logger;
 
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
@@ -53,11 +52,8 @@ public class AsterixInstallerIntegrationUtil {
     private static final int DEFAULT_HYRACKS_CC_CLIENT_PORT = 1098;
     private static final int zookeeperClientPort = 2900;
     private static final int zookeeperTestClientPort = 3945;
-
     private static IHyracksClientConnection hcc;
 
-    private static final Logger LOGGER = Logger.getLogger(AsterixInstallerIntegrationUtil.class.getName());
-
     public static void deinit() throws Exception {
         deleteInstance();
         stopZookeeper();
@@ -76,12 +72,12 @@ public class AsterixInstallerIntegrationUtil {
 
         })[0];
         managixHome = new File(installerTargetDir, managixHomeDirName).getAbsolutePath();
-        System.setProperty("log4j.configuration", managixHome + File.separator + "conf" + File.separator
-                + "log4j.properties");
+        System.setProperty("log4j.configuration",
+                managixHome + File.separator + "conf" + File.separator + "log4j.properties");
 
         managixHome = AsterixInstallerIntegrationUtil.getManagixHome();
-        clusterConfigurationPath = managixHome + File.separator + "clusters" + File.separator + "local"
-                + File.separator + "local.xml";
+        clusterConfigurationPath = managixHome + File.separator + "clusters" + File.separator + "local" + File.separator
+                + "local.xml";
 
         InstallerDriver.setManagixHome(managixHome);
 
@@ -206,7 +202,8 @@ public class AsterixInstallerIntegrationUtil {
         return managixHome;
     }
 
-    public static void installLibrary(String libraryName, String libraryDataverse, String libraryPath) throws Exception {
+    public static void installLibrary(String libraryName, String libraryDataverse, String libraryPath)
+            throws Exception {
         transformIntoRequiredState(State.INACTIVE);
         String command = "install -n " + ASTERIX_INSTANCE_NAME + " -d " + libraryDataverse + " -l " + libraryName
                 + " -p " + libraryPath;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
index d3d481c..df48269 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -62,7 +62,7 @@ public final class MetadataRecordTypes {
 
     /**
      * Create all metadata record types.
-     * 
+     *
      * @throws HyracksDataException
      */
     public static void init() throws MetadataException, HyracksDataException {
@@ -150,9 +150,10 @@ public final class MetadataRecordTypes {
 
     private static final ARecordType createDataverseRecordType() throws AsterixException {
         try {
-            return new ARecordType("DataverseRecordType", new String[] { "DataverseName", "DataFormat", "Timestamp",
-                    "PendingOp" }, new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
-                    BuiltinType.AINT32 }, true);
+            return new ARecordType("DataverseRecordType",
+                    new String[] { "DataverseName", "DataFormat", "Timestamp", "PendingOp" },
+                    new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32 },
+                    true);
         } catch (HyracksDataException e) {
             throw new AsterixException(e);
         }
@@ -203,12 +204,10 @@ public final class MetadataRecordTypes {
     public static final int EXTERNAL_DETAILS_ARECORD_TRANSACTION_STATE_FIELD_INDEX = 3;
 
     private static final ARecordType createExternalDetailsRecordType() throws AsterixException {
-
         AOrderedListType orderedPropertyListType = new AOrderedListType(DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE, null);
-        AOrderedListType compactionPolicyPropertyListType = new AOrderedListType(
-                COMPACTION_POLICY_PROPERTIES_RECORDTYPE, null);
         String[] fieldNames = { "DatasourceAdapter", "Properties", "LastRefreshTime", "TransactionState", };
-        IAType[] fieldTypes = { BuiltinType.ASTRING, orderedPropertyListType, BuiltinType.ADATETIME, BuiltinType.AINT32 };
+        IAType[] fieldTypes = { BuiltinType.ASTRING, orderedPropertyListType, BuiltinType.ADATETIME,
+                BuiltinType.AINT32 };
         try {
             return new ARecordType(null, fieldNames, fieldTypes, true);
         } catch (HyracksDataException e) {
@@ -388,8 +387,8 @@ public final class MetadataRecordTypes {
     private static final ARecordType createIndexRecordType() throws AsterixException {
         AOrderedListType olType = new AOrderedListType(BuiltinType.ASTRING, null);
         AOrderedListType ololType = new AOrderedListType(olType, null);
-        String[] fieldNames = { "DataverseName", "DatasetName", "IndexName", "IndexStructure", "SearchKey",
-                "IsPrimary", "Timestamp", "PendingOp" };
+        String[] fieldNames = { "DataverseName", "DatasetName", "IndexName", "IndexStructure", "SearchKey", "IsPrimary",
+                "Timestamp", "PendingOp" };
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
                 ololType, BuiltinType.ABOOLEAN, BuiltinType.ASTRING, BuiltinType.AINT32 };
         try {
@@ -518,7 +517,8 @@ public final class MetadataRecordTypes {
     public static final int FEED_TYPE_PRIMARY_ARECORD_ADAPTER_NAME_FIELD_INDEX = 0;
     public static final int FEED_TYPE_PRIMARY_ARECORD_ADAPTER_CONFIGURATION_FIELD_INDEX = 1;
 
-    private static final ARecordType createPrimaryFeedDetailsRecordType() throws AsterixException, HyracksDataException {
+    private static final ARecordType createPrimaryFeedDetailsRecordType()
+            throws AsterixException, HyracksDataException {
         AUnorderedListType unorderedAdaptorPropertyListType = new AUnorderedListType(
                 DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE, null);
 
@@ -529,8 +529,8 @@ public final class MetadataRecordTypes {
 
     public static final int FEED_TYPE_SECONDARY_ARECORD_SOURCE_FEED_NAME_FIELD_INDEX = 0;
 
-    private static final ARecordType createSecondaryFeedDetailsRecordType() throws AsterixException,
-            HyracksDataException {
+    private static final ARecordType createSecondaryFeedDetailsRecordType()
+            throws AsterixException, HyracksDataException {
         String[] fieldNames = { "SourceFeedName" };
         IAType[] fieldTypes = { BuiltinType.ASTRING };
         return new ARecordType(null, fieldNames, fieldTypes, true);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
index 946c950..cf7a95c 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
@@ -24,7 +24,7 @@ import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
 
 public class Function implements IMetadataEntity {
-
+    private static final long serialVersionUID = 1L;
     public static final String LANGUAGE_AQL = "AQL";
     public static final String LANGUAGE_JAVA = "JAVA";
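
The added serialVersionUID addresses the warning javac raises for a Serializable type (here presumably via IMetadataEntity) that declares none. A minimal sketch with hypothetical fields:

    import java.io.Serializable;

    public class SerialVersionExample {
        // A Serializable class without an explicit serialVersionUID gets a
        // compiler-computed id that shifts whenever the class shape changes,
        // plus a warning. Pinning it, as the hunk above does, keeps the id stable.
        static class Function implements Serializable {
            private static final long serialVersionUID = 1L;

            final String name;

            Function(String name) {
                this.name = name;
            }
        }

        public static void main(String[] args) {
            System.out.println(new Function("strlen").name);
        }
    }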
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/18a9dca5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
index 10895f2..e09928b 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
@@ -37,7 +37,6 @@ import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
 import org.apache.asterix.metadata.entities.FeedPolicy;
 import org.apache.asterix.om.base.AInt32;
-import org.apache.asterix.om.base.AMutableInt32;
 import org.apache.asterix.om.base.AMutableString;
 import org.apache.asterix.om.base.ARecord;
 import org.apache.asterix.om.base.AString;
@@ -66,13 +65,11 @@ public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolic
     @SuppressWarnings("unchecked")
     private ISerializerDeserializer<ARecord> recordSerDes = AqlSerializerDeserializerProvider.INSTANCE
             .getSerializerDeserializer(MetadataRecordTypes.FEED_POLICY_RECORDTYPE);
-    private AMutableInt32 aInt32;
     protected ISerializerDeserializer<AInt32> aInt32Serde;
 
     @SuppressWarnings("unchecked")
     public FeedPolicyTupleTranslator(boolean getTuple) {
         super(getTuple, MetadataPrimaryIndexes.FEED_POLICY_DATASET.getFieldCount());
-        aInt32 = new AMutableInt32(-1);
         aInt32Serde = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
     }
 
@@ -83,7 +80,7 @@ public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolic
         int recordLength = frameTuple.getFieldLength(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
         ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
         DataInput in = new DataInputStream(stream);
-        ARecord feedPolicyRecord = (ARecord) recordSerDes.deserialize(in);
+        ARecord feedPolicyRecord = recordSerDes.deserialize(in);
         return createFeedPolicyFromARecord(feedPolicyRecord);
     }
 
@@ -150,8 +147,8 @@ public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolic
         // write field 3 (properties)
         Map<String, String> properties = feedPolicy.getProperties();
         UnorderedListBuilder listBuilder = new UnorderedListBuilder();
-        listBuilder
-                .reset((AUnorderedListType) MetadataRecordTypes.FEED_POLICY_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX]);
+        listBuilder.reset((AUnorderedListType) MetadataRecordTypes.FEED_POLICY_RECORDTYPE
+                .getFieldTypes()[MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX]);
         for (Map.Entry<String, String> property : properties.entrySet()) {
             String name = property.getKey();
             String value = property.getValue();

