pirk-commits mailing list archives

From eawilli...@apache.org
Subject incubator-pirk git commit: [PIRK-48] - Pirk should pass all FindBugs tests - closes apache/incubator-pirk#56
Date Fri, 12 Aug 2016 13:18:31 GMT
Repository: incubator-pirk
Updated Branches:
  refs/heads/master 9233b19e2 -> c7fc6ec87


[PIRK-48] - Pirk should pass all FindBugs tests - closes apache/incubator-pirk#56


Project: http://git-wip-us.apache.org/repos/asf/incubator-pirk/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-pirk/commit/c7fc6ec8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-pirk/tree/c7fc6ec8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-pirk/diff/c7fc6ec8

Branch: refs/heads/master
Commit: c7fc6ec8719e7b1df5ff3f3baa8bab17643a8454
Parents: 9233b19
Author: tellison <tellison@apache.org>
Authored: Fri Aug 12 09:18:18 2016 -0400
Committer: eawilliams <eawilliams@apache.org>
Committed: Fri Aug 12 09:18:18 2016 -0400

----------------------------------------------------------------------
 findbugs-exclude.xml                            |  9 ++----
 .../pirk/benchmark/PaillierBenchmark.java       |  2 +-
 .../inputformat/hadoop/InputFormatConst.java    |  6 ++--
 .../pirk/querier/wideskies/QuerierProps.java    |  2 +-
 .../wideskies/decrypt/DecryptResponse.java      | 13 ++++-----
 .../decrypt/DecryptResponseRunnable.java        |  1 +
 .../org/apache/pirk/query/wideskies/Query.java  | 11 ++++++--
 .../responder/wideskies/ResponderProps.java     |  2 +-
 .../wideskies/common/ComputeEncryptedRow.java   | 25 +++++++++--------
 .../mapreduce/ComputeResponseTool.java          | 20 +++++++-------
 .../mapreduce/FinalResponseReducer.java         |  2 --
 .../HashSelectorsAndPartitionDataMapper.java    |  1 +
 .../wideskies/spark/ComputeResponse.java        | 13 +++++++--
 .../responder/wideskies/spark/EncRowCalc.java   |  1 +
 .../pirk/schema/query/QuerySchemaLoader.java    |  3 +-
 .../pirk/schema/query/filter/FilterFactory.java | 15 ++++++----
 .../pirk/schema/response/QueryResponseJSON.java |  6 ++--
 .../org/apache/pirk/test/utils/BaseTests.java   | 12 ++++----
 .../org/apache/pirk/test/utils/TestUtils.java   | 29 ++++++++++----------
 .../java/org/apache/pirk/utils/FileIOUtils.java |  8 ++----
 src/main/java/org/apache/pirk/utils/HDFS.java   | 20 ++++----------
 .../apache/pirk/utils/ISO8601DateParser.java    | 18 ++++++------
 .../org/apache/pirk/general/PaillierTest.java   | 13 ++++++++-
 .../apache/pirk/general/PartitionUtilsTest.java |  2 +-
 24 files changed, 124 insertions(+), 110 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/findbugs-exclude.xml
----------------------------------------------------------------------
diff --git a/findbugs-exclude.xml b/findbugs-exclude.xml
index e30d682..e763502 100644
--- a/findbugs-exclude.xml
+++ b/findbugs-exclude.xml
@@ -3,11 +3,8 @@
 
 <FindBugsFilter>
     <Match>
-        <Class name="org.apache.pirk.schema.data.DataSchema" />
-        <or>
-            <Field name="partitionerInstances" />
-            <Field name="textRep" />
-        </or>
-        <Bug pattern="SE_TRANSIENT_FIELD_NOT_RESTORED" />
+        <Class name="org.apache.pirk.test.utils.StandaloneQuery" />
+        <Method name="performStandaloneQuery" />
+        <Bug pattern="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE" />
     </Match>
 </FindBugsFilter>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/benchmark/PaillierBenchmark.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/benchmark/PaillierBenchmark.java b/src/main/java/org/apache/pirk/benchmark/PaillierBenchmark.java
index 7af3fcf..95f850d 100644
--- a/src/main/java/org/apache/pirk/benchmark/PaillierBenchmark.java
+++ b/src/main/java/org/apache/pirk/benchmark/PaillierBenchmark.java
@@ -66,7 +66,7 @@ public class PaillierBenchmark
 
       } catch (PIRException e)
       {
-        System.out.printf("Couldn't build pallier object!\n");
+        System.out.printf("Couldn't build pallier object!%n");
       }
 
       r1 = BigInteger.valueOf(3);
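
The change above swaps "\n" for "%n", presumably to satisfy FindBugs' format-string check: %n expands to the platform line separator, while \n is always a bare linefeed. A minimal standalone sketch of the difference (class name is illustrative, not part of the commit):

    public class NewlineExample
    {
      public static void main(String[] args)
      {
        System.out.printf("bare linefeed\n");            // the form FindBugs flags
        System.out.printf("platform line separator%n");  // portable form
      }
    }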

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/inputformat/hadoop/InputFormatConst.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/inputformat/hadoop/InputFormatConst.java b/src/main/java/org/apache/pirk/inputformat/hadoop/InputFormatConst.java
index 8c33b2c..e45f596 100644
--- a/src/main/java/org/apache/pirk/inputformat/hadoop/InputFormatConst.java
+++ b/src/main/java/org/apache/pirk/inputformat/hadoop/InputFormatConst.java
@@ -18,8 +18,10 @@
  */
 package org.apache.pirk.inputformat.hadoop;
 
-import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
 
 /**
  * Constants class for allowed input format types
@@ -30,5 +32,5 @@ public class InputFormatConst
 
   public static final String ES = "elasticsearch";
 
-  public static final ArrayList<String> ALLOWED_FORMATS = new ArrayList<>(Arrays.asList(BASE_FORMAT, ES));
+  public static final Set<String> ALLOWED_FORMATS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(BASE_FORMAT, ES)));
 }
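
The new constant is published as an unmodifiable Set rather than a mutable ArrayList, so callers can no longer alter the allowed formats. A minimal sketch of the same idiom outside Pirk (class, field, and values are illustrative):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    public final class Formats
    {
      // Mutating this view throws UnsupportedOperationException.
      public static final Set<String> ALLOWED =
          Collections.unmodifiableSet(new HashSet<>(Arrays.asList("base", "elasticsearch")));

      private Formats() {}
    }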

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/querier/wideskies/QuerierProps.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierProps.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierProps.java
index 38fb088..d703737 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/QuerierProps.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierProps.java
@@ -60,7 +60,7 @@ public class QuerierProps
   // Decryption properties
   public static final String QUERIERFILE = "querier.querierFile";
 
-  public static final List<String> PROPSLIST = Arrays.asList(ACTION, INPUTFILE, OUTPUTFILE, QUERYTYPE, NUMTHREADS, EMBEDQUERYSCHEMA, HASHBITSIZE, HASHKEY,
+  static final List<String> PROPSLIST = Arrays.asList(ACTION, INPUTFILE, OUTPUTFILE, QUERYTYPE, NUMTHREADS, EMBEDQUERYSCHEMA, HASHBITSIZE, HASHKEY,
       DATAPARTITIONSIZE, PAILLIERBITSIZE, BITSET, CERTAINTY, QUERYSCHEMAS, DATASCHEMAS, EMBEDSELECTOR, USEMEMLOOKUPTABLE, USEHDFSLOOKUPTABLE, SR_ALGORITHM,
       SR_PROVIDER);
 

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
index a9004fe..f4852f8 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
@@ -26,6 +26,7 @@ import java.io.OutputStreamWriter;
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.Map.Entry;
 import java.util.TreeMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -119,7 +120,7 @@ public class DecryptResponse
     {
       numThreads = selectors.size();
     }
-    int elementsPerThread = (int) (Math.floor(selectors.size() / numThreads));
+    int elementsPerThread = selectors.size() / numThreads; // Integral division.
 
     ArrayList<DecryptResponseRunnable> runnables = new ArrayList<>();
     for (int i = 0; i < numThreads; ++i)
@@ -183,10 +184,9 @@ public class DecryptResponse
   {
     FileOutputStream fout = new FileOutputStream(new File(filename));
     BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fout));
-    for (String selector : resultMap.keySet())
+    for (Entry<String,ArrayList<QueryResponseJSON>> entry : resultMap.entrySet())
     {
-      ArrayList<QueryResponseJSON> hits = resultMap.get(selector);
-      for (QueryResponseJSON hitJSON : hits)
+      for (QueryResponseJSON hitJSON : entry.getValue())
       {
         bw.write(hitJSON.getJSONString());
         bw.newLine();
@@ -203,10 +203,9 @@ public class DecryptResponse
   {
     FileOutputStream fout = new FileOutputStream(file);
     BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fout));
-    for (String selector : resultMap.keySet())
+    for (Entry<String,ArrayList<QueryResponseJSON>> entry : resultMap.entrySet())
     {
-      ArrayList<QueryResponseJSON> hits = resultMap.get(selector);
-      for (QueryResponseJSON hitJSON : hits)
+      for (QueryResponseJSON hitJSON : entry.getValue())
       {
         bw.write(hitJSON.getJSONString());
         bw.newLine();
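
Both loops above now iterate entrySet() instead of keySet(), avoiding a second map lookup per key (the pattern FindBugs reports as an inefficient map iterator). A minimal standalone sketch of the idiom (map contents are illustrative):

    import java.util.HashMap;
    import java.util.Map;

    public class EntrySetExample
    {
      public static void main(String[] args)
      {
        Map<String,Integer> hits = new HashMap<>();
        hits.put("a.b.c.com", 2);
        hits.put("x.y.net", 1);

        // One traversal; no get() call per key.
        for (Map.Entry<String,Integer> entry : hits.entrySet())
        {
          System.out.println(entry.getKey() + "," + entry.getValue());
        }
      }
    }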

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponseRunnable.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponseRunnable.java b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponseRunnable.java
index f062640..02f51b8 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponseRunnable.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponseRunnable.java
@@ -146,6 +146,7 @@ public class DecryptResponseRunnable implements Runnable
           } catch (Exception e)
           {
             e.printStackTrace();
+            throw new RuntimeException(e);
           }
           qrJOSN.setMapping(selectorName, selector);
           logger.debug("selector = " + selector + " qrJOSN = " + qrJOSN.getJSONString());

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/query/wideskies/Query.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java
index 2035d4b..c373454 100644
--- a/src/main/java/org/apache/pirk/query/wideskies/Query.java
+++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java
@@ -22,6 +22,7 @@ import java.io.Serializable;
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.Map.Entry;
 import java.util.TreeMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -170,7 +171,7 @@ public class Query implements Serializable, Storable
     // multithreaded case
     {
       ExecutorService es = Executors.newCachedThreadPool();
-      int elementsPerThread = (int) (Math.floor(queryElements.size() / numThreads));
+      int elementsPerThread = queryElements.size() / numThreads; // Integral division.
 
       ArrayList<ExpTableRunnable> runnables = new ArrayList<>();
       for (int i = 0; i < numThreads; ++i)
@@ -199,6 +200,10 @@ public class Query implements Serializable, Storable
       // Allow threads to complete
       es.shutdown(); // previously submitted tasks are executed, but no new tasks will be accepted
       boolean finished = es.awaitTermination(1, TimeUnit.DAYS); // waits until all tasks complete or until the specified timeout
+      if (!finished)
+      {
+        throw new InterruptedException("Operation timed out.");
+      }
 
       // Pull all decrypted elements and add to resultMap
       for (ExpTableRunnable runner : runnables)
@@ -207,9 +212,9 @@ public class Query implements Serializable, Storable
         expTable.putAll(expValues);
       }
       logger.debug("expTable.size() = " + expTable.keySet().size() + " NSqaured = " + NSquared.intValue()
+ " = " + NSquared.toString());
-      for (BigInteger key : expTable.keySet())
+      for (Entry<BigInteger,HashMap<Integer,BigInteger>> entry : expTable.entrySet())
       {
-        logger.debug("expTable for key = " + key.toString() + " = " + expTable.get(key).size());
+        logger.debug("expTable for key = " + entry.getKey().toString() + " = " + entry.getValue().size());
       }
     }
   }
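
The added check uses the boolean returned by awaitTermination, which is false when the timeout elapses before the pool drains; silently ignoring it is the sort of unchecked return value FindBugs flags. A minimal sketch of the pattern with an illustrative task and timeout:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    public class AwaitExample
    {
      public static void main(String[] args) throws InterruptedException
      {
        ExecutorService es = Executors.newCachedThreadPool();
        es.submit(() -> System.out.println("task ran"));
        es.shutdown(); // stop accepting new tasks
        if (!es.awaitTermination(1, TimeUnit.MINUTES))
        {
          throw new InterruptedException("Operation timed out.");
        }
      }
    }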

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/responder/wideskies/ResponderProps.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/ResponderProps.java b/src/main/java/org/apache/pirk/responder/wideskies/ResponderProps.java
index 1f2130b..b1d2828 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/ResponderProps.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/ResponderProps.java
@@ -66,7 +66,7 @@ public class ResponderProps
   public static final String COLMULTREDUCEBYKEY = "pir.colMultReduceByKey";
   public static final String ALLOWEMBEDDEDQUERYSCHEMAS = "pir.allowEmbeddedQuerySchemas";
 
-  public static final List<String> PROPSLIST = Arrays.asList(PLATFORM, QUERYINPUT, DATAINPUTFORMAT, INPUTDATA, BASEQUERY, ESRESOURCE, ESQUERY, OUTPUTFILE,
+  static final List<String> PROPSLIST = Arrays.asList(PLATFORM, QUERYINPUT, DATAINPUTFORMAT, INPUTDATA, BASEQUERY, ESRESOURCE, ESQUERY, OUTPUTFILE,
       BASEINPUTFORMAT, STOPLISTFILE, NUMREDUCETASKS, USELOCALCACHE, LIMITHITSPERSELECTOR, MAXHITSPERSELECTOR, MAPMEMORY, REDUCEMEMORY, MAPJAVAOPTS,
       REDUCEJAVAOPTS, QUERYSCHEMAS, DATASCHEMAS, NUMEXPLOOKUPPARTS, USEHDFSLOOKUPTABLE, NUMDATAPARTITIONS, NUMCOLMULTPARTITIONS, USEMODEXPJOIN,
       COLMULTREDUCEBYKEY, ALLOWEMBEDDEDQUERYSCHEMAS);

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java b/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
index fe8e4aa..5eed275 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
@@ -70,21 +70,22 @@ public class ComputeEncryptedRow
     logger.info("Loading cache from hdfsFileName = " + hdfsFileName);
 
     Path expPath = new Path(hdfsFileName);
-    InputStreamReader isr = new InputStreamReader(fs.open(expPath));
-    BufferedReader br = new BufferedReader(isr);
-    String line;
-    while ((line = br.readLine()) != null) // form: element_hash,<exponent>-<element^exponent mod N^2>
+    try (BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(expPath))))
     {
-      String[] rowValTokens = line.split(",");
-      BigInteger base = query.getQueryElement(Integer.parseInt(rowValTokens[0]));
+      String line;
+      while ((line = br.readLine()) != null) // form: element_hash,<exponent>-<element^exponent mod N^2>
+      {
+        String[] rowValTokens = line.split(",");
+        BigInteger base = query.getQueryElement(Integer.parseInt(rowValTokens[0]));
 
-      String[] expMod = rowValTokens[1].split("-");
-      BigInteger exponent = new BigInteger(expMod[0]);
-      BigInteger value = new BigInteger(expMod[1]);
+        String[] expMod = rowValTokens[1].split("-");
+        BigInteger exponent = new BigInteger(expMod[0]);
+        BigInteger value = new BigInteger(expMod[1]);
 
-      // Cache: <<base,exponent,NSquared>, base^exponent mod N^2>
-      Tuple3<BigInteger,BigInteger,BigInteger> key = new Tuple3<>(base, exponent, query.getNSquared());
-      expCache.put(key, value);
+        // Cache: <<base,exponent,NSquared>, base^exponent mod N^2>
+        Tuple3<BigInteger,BigInteger,BigInteger> key = new Tuple3<>(base, exponent, query.getNSquared());
+        expCache.put(key, value);
+      }
     }
   }
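
The rewrite above moves the HDFS read into try-with-resources, so the BufferedReader is closed even if a line fails to parse. A minimal local-file sketch of the construct (the path is illustrative):

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;

    public class ReadExample
    {
      public static void main(String[] args) throws IOException
      {
        // Closed automatically on normal exit or on exception.
        try (BufferedReader br = new BufferedReader(new FileReader("/tmp/example.txt")))
        {
          String line;
          while ((line = br.readLine()) != null)
          {
            System.out.println(line);
          }
        }
      }
    }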
 

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/ComputeResponseTool.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/ComputeResponseTool.java b/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/ComputeResponseTool.java
index 98f58c6..544e3ed 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/ComputeResponseTool.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/ComputeResponseTool.java
@@ -247,7 +247,7 @@ public class ComputeResponseTool extends Configured implements Tool
     ArrayList<Integer> keys = new ArrayList<>(queryElements.keySet());
 
     int numSplits = SystemConfiguration.getIntProperty("pir.expCreationSplits", 100);
-    int elementsPerSplit = (int) Math.floor(queryElements.size() / numSplits);
+    int elementsPerSplit = queryElements.size() / numSplits; // Integral division.
     logger.info("numSplits = " + numSplits + " elementsPerSplit = " + elementsPerSplit);
     for (int i = 0; i < numSplits; ++i)
     {
@@ -320,17 +320,17 @@ public class ComputeResponseTool extends Configured implements Tool
         logger.info("fstat.getPath().getName().toString() = " + fstat.getPath().getName());
         try
         {
-          InputStreamReader isr = new InputStreamReader(fs.open(fstat.getPath()));
-          BufferedReader br = new BufferedReader(isr);
-          String line;
-          while ((line = br.readLine()) != null)
+          try (BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(fstat.getPath()))))
           {
-            String[] rowValTokens = line.split(","); // form is element_index,reducerNumber
-            String fileName = fstat.getPath().getParent() + "/" + FileConst.EXP + "-r-" + rowValTokens[1];
-            logger.info("fileName = " + fileName);
-            expFileTable.put(Integer.parseInt(rowValTokens[0]), fileName);
+            String line;
+            while ((line = br.readLine()) != null)
+            {
+              String[] rowValTokens = line.split(","); // form is element_index,reducerNumber
+              String fileName = fstat.getPath().getParent() + "/" + FileConst.EXP + "-r-" + rowValTokens[1];
+              logger.info("fileName = " + fileName);
+              expFileTable.put(Integer.parseInt(rowValTokens[0]), fileName);
+            }
           }
-
         } catch (Exception e)
         {
           e.printStackTrace();

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/FinalResponseReducer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/FinalResponseReducer.java b/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/FinalResponseReducer.java
index 628c97c..6e19309 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/FinalResponseReducer.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/FinalResponseReducer.java
@@ -46,14 +46,12 @@ public class FinalResponseReducer extends Reducer<LongWritable,Text,LongWritable
   private Response response = null;
   private String outputFile = null;
   private HadoopFileSystemStore storage = null;
-  private QueryInfo queryInfo = null;
 
   @Override
   public void setup(Context ctx) throws IOException, InterruptedException
   {
     super.setup(ctx);
 
-    Text outputValue = new Text();
     mos = new MultipleOutputs<>(ctx);
 
     FileSystem fs = FileSystem.newInstance(ctx.getConfiguration());

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/HashSelectorsAndPartitionDataMapper.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/HashSelectorsAndPartitionDataMapper.java b/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/HashSelectorsAndPartitionDataMapper.java
index adfe5b7..dd20f87 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/HashSelectorsAndPartitionDataMapper.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/HashSelectorsAndPartitionDataMapper.java
@@ -141,6 +141,7 @@ public class HashSelectorsAndPartitionDataMapper extends Mapper<Text,MapWritable
       {
         logger.error("Error in partitioning data element value = " + StringUtils.mapWritableToString(value));
         e.printStackTrace();
+        throw new RuntimeException(e);
       }
 
       keyOut.set(returnTuple._1);
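
Rethrowing as a RuntimeException after logging makes the mapper fail the task instead of continuing with a half-built tuple. A generic sketch of that fail-fast wrapping, using a file read as the illustrative checked operation (method and path are not from Pirk):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class FailFastExample
    {
      public static String readConfig(String path)
      {
        try
        {
          return new String(Files.readAllBytes(Paths.get(path)));
        } catch (IOException e)
        {
          // Log, then wrap the checked exception so callers fail fast
          // rather than proceeding with a missing value.
          e.printStackTrace();
          throw new RuntimeException(e);
        }
      }
    }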

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java
index 2acd380..4a98e69 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.Map;
+import java.util.Map.Entry;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.io.MapWritable;
@@ -230,6 +231,11 @@ public class ComputeResponse
     {
       inputRDD = readDataES();
     }
+    else
+    {
+      throw new PIRException("Unknown data input format " + dataInputFormat);
+    }
+
     performQuery(inputRDD);
   }
 
@@ -380,10 +386,11 @@ public class ComputeResponse
     Map<Long,BigInteger> encColResults = encColRDD.collectAsMap();
     logger.debug("encColResults.size() = " + encColResults.size());
 
-    for (long colVal : encColResults.keySet())
+    for (Entry<Long,BigInteger> entry : encColResults.entrySet())
     {
-      response.addElement((int) colVal, encColResults.get(colVal));
-      logger.debug("colNum = " + colVal + " column = " + encColResults.get(colVal).toString());
+      int colVal = entry.getKey().intValue();
+      response.addElement(colVal, entry.getValue());
+      logger.debug("colNum = " + colVal + " column = " + entry.getValue().toString());
     }
 
     try

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java
index 7bf11bd..cc0a5ac 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java
@@ -91,6 +91,7 @@ public class EncRowCalc implements PairFlatMapFunction<Tuple2<Integer,Iterable<A
       } catch (IOException e)
       {
         e.printStackTrace();
+        throw e;
       }
       ComputeEncryptedRow.loadCacheFromHDFS(fs, query.getExpFile(rowIndex), query);
     }

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java b/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
index 78e5bc7..39a4fbb 100644
--- a/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
+++ b/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
@@ -300,7 +300,7 @@ public class QuerySchemaLoader
    */
   private Set<String> extractFilteredElementNames(Document doc) throws PIRException
   {
-    HashSet<String> filteredNamesSet = new HashSet<>();
+    Set<String> filteredNamesSet = new HashSet<>();
 
     NodeList filterNamesList = doc.getElementsByTagName("filterNames");
     if (filterNamesList.getLength() != 0)
@@ -311,7 +311,6 @@ public class QuerySchemaLoader
       }
 
       // Extract element names from the list.
-      Element foo = (Element) filterNamesList.item(0);
       NodeList filterNList = ((Element) filterNamesList.item(0)).getElementsByTagName("name");
       for (int i = 0; i < filterNList.getLength(); i++)
       {

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/schema/query/filter/FilterFactory.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/schema/query/filter/FilterFactory.java b/src/main/java/org/apache/pirk/schema/query/filter/FilterFactory.java
index c44e1e8..0a2d840 100644
--- a/src/main/java/org/apache/pirk/schema/query/filter/FilterFactory.java
+++ b/src/main/java/org/apache/pirk/schema/query/filter/FilterFactory.java
@@ -58,14 +58,19 @@ public class FilterFactory
         }
         else
         {
-          FileReader fr = new FileReader(new File(stopListFile));
-          br = new BufferedReader(fr);
+          br = new BufferedReader(new FileReader(new File(stopListFile)));
         }
 
-        String qLine;
-        while ((qLine = br.readLine()) != null)
+        try
         {
-          stopList.add(qLine);
+          String qLine;
+          while ((qLine = br.readLine()) != null)
+          {
+            stopList.add(qLine);
+          }
+        } finally
+        {
+          br.close();
         }
 
         obj = new StopListFilter(filteredElementNames, stopList);

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/schema/response/QueryResponseJSON.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/schema/response/QueryResponseJSON.java b/src/main/java/org/apache/pirk/schema/response/QueryResponseJSON.java
index d32d69b..488c51c 100644
--- a/src/main/java/org/apache/pirk/schema/response/QueryResponseJSON.java
+++ b/src/main/java/org/apache/pirk/schema/response/QueryResponseJSON.java
@@ -21,6 +21,7 @@ package org.apache.pirk.schema.response;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.hadoop.io.Text;
@@ -70,6 +71,7 @@ public class QueryResponseJSON implements Serializable
     if (queryInfo == null)
     {
       logger.info("queryInfo is null");
+      throw new NullPointerException("queryInfo is null");
     }
 
     QuerySchema qSchema = QuerySchemaRegistry.get(queryInfo.getQueryType());
@@ -187,9 +189,9 @@ public class QueryResponseJSON implements Serializable
 
   public void setAllFields(HashMap<String,String> dataMap)
   {
-    for (String key : dataMap.keySet())
+    for (Entry<String,String> entry : dataMap.entrySet())
     {
-      setMapping(key, dataMap.get(key));
+      setMapping(entry.getKey(), entry.getValue());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/test/utils/BaseTests.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/test/utils/BaseTests.java b/src/main/java/org/apache/pirk/test/utils/BaseTests.java
index e99744c..26ab2eb 100644
--- a/src/main/java/org/apache/pirk/test/utils/BaseTests.java
+++ b/src/main/java/org/apache/pirk/test/utils/BaseTests.java
@@ -47,18 +47,18 @@ public class BaseTests
 {
   private static final Logger logger = LoggerFactory.getLogger(BaseTests.class);
 
-  public static UUID queryIdentifier = UUID.randomUUID();
-  public static int dataPartitionBitSize = 8;
+  public static final UUID queryIdentifier = UUID.randomUUID();
+  public static final int dataPartitionBitSize = 8;
 
   // Selectors for domain and IP queries, queryIdentifier is the first entry for file generation
   private static ArrayList<String> selectorsDomain = new ArrayList<>(Arrays.asList("s.t.u.net", "d.e.com", "r.r.r.r", "a.b.c.com", "something.else", "x.y.net"));
   private static ArrayList<String> selectorsIP = new ArrayList<>(Arrays.asList("55.55.55.55", "5.6.7.8", "10.20.30.40", "13.14.15.16", "21.22.23.24"));
 
   // Encryption variables -- Paillier mechanisms are tested in the Paillier test code, so these are fixed...
-  public static int hashBitSize = 12;
-  public static String hashKey = "someKey";
-  public static int paillierBitSize = 384;
-  public static int certainty = 128;
+  public static final int hashBitSize = 12;
+  public static final String hashKey = "someKey";
+  public static final int paillierBitSize = 384;
+  public static final int certainty = 128;
 
   public static void testDNSHostnameQuery(ArrayList<JSONObject> dataElements, int numThreads, boolean testFalsePositive) throws Exception
   {

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/test/utils/TestUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/test/utils/TestUtils.java b/src/main/java/org/apache/pirk/test/utils/TestUtils.java
index c30e204..1ea01fb 100644
--- a/src/main/java/org/apache/pirk/test/utils/TestUtils.java
+++ b/src/main/java/org/apache/pirk/test/utils/TestUtils.java
@@ -76,22 +76,23 @@ public class TestUtils
   {
     Process proc = p.start();
 
-    BufferedReader stdInput = new BufferedReader(new InputStreamReader(proc.getInputStream()));
-    BufferedReader stdError = new BufferedReader(new InputStreamReader(proc.getErrorStream()));
-
-    // Read the output from the command
-    logger.info("Standard output of the command:\n");
-    String s;
-    while ((s = stdInput.readLine()) != null)
+    try (BufferedReader stdInput = new BufferedReader(new InputStreamReader(proc.getInputStream()));
+        BufferedReader stdError = new BufferedReader(new InputStreamReader(proc.getErrorStream())))
     {
-      logger.info(s);
-    }
+      // Read the output from the command
+      logger.info("Standard output of the command:\n");
+      String s;
+      while ((s = stdInput.readLine()) != null)
+      {
+        logger.info(s);
+      }
 
-    // Read any errors from the attempted command
-    logger.info("Standard error of the command (if any):\n");
-    while ((s = stdError.readLine()) != null)
-    {
-      logger.info(s);
+      // Read any errors from the attempted command
+      logger.info("Standard error of the command (if any):\n");
+      while ((s = stdError.readLine()) != null)
+      {
+        logger.info(s);
+      }
     }
   }
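
Both process streams are declared in a single try-with-resources header above; resources declared together are closed in reverse order of declaration when the block exits. A small sketch with an illustrative command:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;

    public class ProcessExample
    {
      public static void main(String[] args) throws IOException
      {
        Process proc = new ProcessBuilder("echo", "hello").start();

        // stdError is closed first, then stdInput.
        try (BufferedReader stdInput = new BufferedReader(new InputStreamReader(proc.getInputStream()));
            BufferedReader stdError = new BufferedReader(new InputStreamReader(proc.getErrorStream())))
        {
          String s;
          while ((s = stdInput.readLine()) != null)
          {
            System.out.println(s);
          }
          while ((s = stdError.readLine()) != null)
          {
            System.err.println(s);
          }
        }
      }
    }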
 

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/utils/FileIOUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/utils/FileIOUtils.java b/src/main/java/org/apache/pirk/utils/FileIOUtils.java
index 17b97ec..32cba40 100644
--- a/src/main/java/org/apache/pirk/utils/FileIOUtils.java
+++ b/src/main/java/org/apache/pirk/utils/FileIOUtils.java
@@ -96,12 +96,9 @@ public class FileIOUtils
       return null;
     }
 
-    try
+    // create buffered reader
+    try (BufferedReader br = new BufferedReader(new FileReader(file)))
     {
-      // create buffered reader
-      FileReader fr = new FileReader(file);
-      BufferedReader br = new BufferedReader(fr);
-
       // read through the file, line by line
       String line;
       while ((line = br.readLine()) != null)
@@ -112,7 +109,6 @@ public class FileIOUtils
           collection.add(item);
         }
       }
-
     } catch (Exception e)
     {
       logger.error("unable to read file");

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/utils/HDFS.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/utils/HDFS.java b/src/main/java/org/apache/pirk/utils/HDFS.java
index 32e0c05..9308196 100644
--- a/src/main/java/org/apache/pirk/utils/HDFS.java
+++ b/src/main/java/org/apache/pirk/utils/HDFS.java
@@ -29,6 +29,7 @@ import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -138,10 +139,9 @@ public class HDFS
       BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fs.create(filePath, true)));
 
       // write each element on a new line
-      for (String key : sortedMap.keySet())
+      for (Entry<String,Integer> entry : sortedMap.entrySet())
       {
-        bw.write(key + "," + sortedMap.get(key));
-        // bw.write(key);
+        bw.write(entry.getKey() + "," + entry.getValue());
         bw.newLine();
       }
       bw.close();
@@ -166,20 +166,14 @@ public class HDFS
 
   public static ArrayList<String> readFile(FileSystem fs, Path path)
   {
-
     ArrayList<String> rv = new ArrayList<>();
-    try
+    try (BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path))))
     {
-
-      InputStreamReader isr = new InputStreamReader(fs.open(path));
-      BufferedReader br = new BufferedReader(isr);
-
       String line;
       while ((line = br.readLine()) != null)
       {
         rv.add(line);
       }
-
     } catch (Exception e)
     {
       e.printStackTrace();
@@ -190,13 +184,9 @@ public class HDFS
 
   public static HashSet<String> readFileHashSet(FileSystem fs, Path path)
   {
-
     HashSet<String> rv = new HashSet<>();
-    try
+    try (BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path))))
     {
-      InputStreamReader isr = new InputStreamReader(fs.open(path));
-      BufferedReader br = new BufferedReader(isr);
-
       String line;
       while ((line = br.readLine()) != null)
       {

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/main/java/org/apache/pirk/utils/ISO8601DateParser.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/utils/ISO8601DateParser.java b/src/main/java/org/apache/pirk/utils/ISO8601DateParser.java
index 675fb21..575e2c5 100755
--- a/src/main/java/org/apache/pirk/utils/ISO8601DateParser.java
+++ b/src/main/java/org/apache/pirk/utils/ISO8601DateParser.java
@@ -23,16 +23,12 @@ import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.TimeZone;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * Class to parse a date in ISO86091 format
  * 
  */
 public class ISO8601DateParser
 {
-  private static final Logger logger = LoggerFactory.getLogger(ISO8601DateParser.class);
 
   static
   {
@@ -41,34 +37,36 @@ public class ISO8601DateParser
 
   private static SimpleDateFormat format;
 
-  private static void init()
+  private static synchronized void init()
   {
     format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
     format.setTimeZone(TimeZone.getTimeZone("UTC"));
   }
 
-  public static String parseDate(String date)
+  public static synchronized String parseDate(String date)
   {
     try
     {
       return format.parse(date).getTime() + "";
     } catch (Exception ignore)
-    {}
+    {
+      // Empty
+    }
 
     return null;
   }
 
-  public static Date getDate(String isoDate) throws ParseException
+  public static synchronized Date getDate(String isoDate) throws ParseException
   {
     return format.parse(isoDate);
   }
 
-  public static long getLongDate(String isoDate) throws ParseException
+  public static synchronized long getLongDate(String isoDate) throws ParseException
   {
     return format.parse(isoDate).getTime();
   }
 
-  public static String fromLongDate(long dateLongFormat)
+  public static synchronized String fromLongDate(long dateLongFormat)
   {
     Date date = new Date(dateLongFormat);
     return format.format(date);
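
The synchronized keywords are needed because the shared SimpleDateFormat is not thread-safe. An alternative (not what this commit does) is java.time's immutable DateTimeFormatter, which can be shared without locking; a rough sketch under that assumption:

    import java.time.Instant;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    public class Iso8601Example
    {
      // Immutable and safe to share across threads.
      private static final DateTimeFormatter FORMAT =
          DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withZone(ZoneOffset.UTC);

      public static String fromLongDate(long epochMillis)
      {
        return FORMAT.format(Instant.ofEpochMilli(epochMillis));
      }

      public static long getLongDate(String isoDate)
      {
        return Instant.from(FORMAT.parse(isoDate)).toEpochMilli();
      }
    }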

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/test/java/org/apache/pirk/general/PaillierTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/general/PaillierTest.java b/src/test/java/org/apache/pirk/general/PaillierTest.java
index 444332d..14347fa 100644
--- a/src/test/java/org/apache/pirk/general/PaillierTest.java
+++ b/src/test/java/org/apache/pirk/general/PaillierTest.java
@@ -19,6 +19,7 @@
 package org.apache.pirk.general;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.fail;
 
 import java.math.BigInteger;
@@ -79,12 +80,12 @@ public class PaillierTest
   }
 
   @Test
-  @SuppressWarnings("unused")
   public void testPIRExceptions()
   {
     try
     {
       Paillier paillier = new Paillier(BigInteger.valueOf(2), BigInteger.valueOf(2), 128);
+      assertNotNull(paillier);
       fail("Paillier constructor did not throw PIRException for p,q < 3");
     } catch (PIRException ignore)
     {}
@@ -92,6 +93,7 @@ public class PaillierTest
     try
     {
       Paillier paillier = new Paillier(BigInteger.valueOf(2), BigInteger.valueOf(3), 128);
+      assertNotNull(paillier);
       fail("Paillier constructor did not throw PIRException for p < 3");
     } catch (PIRException ignore)
     {}
@@ -99,6 +101,7 @@ public class PaillierTest
     try
     {
       Paillier paillier = new Paillier(BigInteger.valueOf(3), BigInteger.valueOf(2), 128);
+      assertNotNull(paillier);
       fail("Paillier constructor did not throw PIRException for q < 3");
     } catch (PIRException ignore)
     {}
@@ -106,6 +109,7 @@ public class PaillierTest
     try
     {
       Paillier paillier = new Paillier(BigInteger.valueOf(7), BigInteger.valueOf(7), 128);
+      assertNotNull(paillier);
       fail("Paillier constructor did not throw PIRException for p = q");
     } catch (PIRException ignore)
     {}
@@ -113,6 +117,7 @@ public class PaillierTest
     try
     {
       Paillier paillier = new Paillier(BigInteger.valueOf(8), BigInteger.valueOf(7), 128);
+      assertNotNull(paillier);
       fail("Paillier constructor did not throw PIRException for p not prime");
     } catch (PIRException ignore)
     {}
@@ -120,6 +125,7 @@ public class PaillierTest
     try
     {
       Paillier paillier = new Paillier(BigInteger.valueOf(7), BigInteger.valueOf(10), 128);
+      assertNotNull(paillier);
       fail("Paillier constructor did not throw PIRException for q not prime");
     } catch (PIRException ignore)
     {}
@@ -128,6 +134,7 @@ public class PaillierTest
     {
       int systemPrimeCertainty = SystemConfiguration.getIntProperty("pir.primeCertainty", 128);
       Paillier paillier = new Paillier(3072, systemPrimeCertainty - 10);
+      assertNotNull(paillier);
       fail("Paillier constructor did not throw PIRException for certainty less than system
default of " + systemPrimeCertainty);
     } catch (PIRException ignore)
     {}
@@ -136,6 +143,7 @@ public class PaillierTest
     {
       Paillier pailler = new Paillier(p, q, bitLength);
       BigInteger encM1 = pailler.encrypt(N);
+      assertNotNull(encM1);
       fail("Paillier encryption did not throw PIRException for message m = N");
     } catch (PIRException ignore)
     {}
@@ -144,6 +152,7 @@ public class PaillierTest
     {
       Paillier pailler = new Paillier(p, q, bitLength);
       BigInteger encM1 = pailler.encrypt(N.add(BigInteger.TEN));
+      assertNotNull(encM1);
       fail("Paillier encryption did not throw PIRException for message m > N");
     } catch (PIRException ignore)
     {}
@@ -151,6 +160,7 @@ public class PaillierTest
     try
     {
       Paillier pailler = new Paillier(bitLength, 128, bitLength);
+      assertNotNull(pailler);
       fail("Paillier constructor did not throw PIRException for ensureBitSet = bitLength");
     } catch (PIRException ignore)
     {}
@@ -158,6 +168,7 @@ public class PaillierTest
     try
     {
       Paillier pailler = new Paillier(bitLength, 128, bitLength + 1);
+      assertNotNull(pailler);
       fail("Paillier constructor did not throw PIRException for ensureBitSet > bitLength");
     } catch (PIRException ignore)
     {}
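
Each try block now asserts on the local before calling fail(), so the variable is used and the @SuppressWarnings("unused") annotation could be dropped. A compact sketch of the try/fail/catch idiom with a stand-in operation (not a Pirk class):

    import static org.junit.Assert.assertNotNull;
    import static org.junit.Assert.fail;

    import org.junit.Test;

    public class ExpectedExceptionExample
    {
      @Test
      public void rejectsNonNumericInput()
      {
        try
        {
          Object parsed = Integer.parseInt("not a number"); // expected to throw
          assertNotNull(parsed); // keeps the local "used" if no exception occurs
          fail("parseInt did not throw NumberFormatException");
        } catch (NumberFormatException ignore)
        {
          // expected
        }
      }
    }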

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/c7fc6ec8/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java b/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java
index e20215b..4c1495d 100644
--- a/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java
+++ b/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java
@@ -151,7 +151,7 @@ public class PartitionUtilsTest
     SystemConfiguration.setProperty("pir.stringBits", stringBits);
 
     // Test short
-    short shortTest = new Short("2456");
+    short shortTest = Short.valueOf("2456");
     ArrayList<BigInteger> partsShort = primitivePartitioner.toPartitions(shortTest, PrimitiveTypePartitioner.SHORT);
     assertEquals(2, partsShort.size());
     assertEquals(shortTest, primitivePartitioner.fromPartitions(partsShort, 0, PrimitiveTypePartitioner.SHORT));
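
Short.valueOf replaces the boxed constructor, which FindBugs flags as an inefficient Number constructor; the factory parses the string and may return a cached instance for small values. A one-method sketch of the equivalent calls:

    public class ValueOfExample
    {
      public static void main(String[] args)
      {
        short boxedThenUnboxed = Short.valueOf("2456");   // parse, box, auto-unbox
        short primitiveDirect = Short.parseShort("2456"); // parse straight to a primitive
        System.out.println(boxedThenUnboxed + " " + primitiveDirect);
      }
    }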


