accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mmil...@apache.org
Subject [accumulo-testing] branch master updated: Remove HdfsZooInstance & formatting
Date Thu, 30 Aug 2018 17:24:44 GMT
This is an automated email from the ASF dual-hosted git repository.

mmiller pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo-testing.git


The following commit(s) were added to refs/heads/master by this push:
     new d741f36  Remove HdfsZooInstance & formatting
d741f36 is described below

commit d741f36cbc78a7e8349ed8f5df57ce0c49efc888
Author: Mike Miller <mmiller@apache.org>
AuthorDate: Thu Aug 30 13:08:21 2018 -0400

    Remove HdfsZooInstance & formatting
---
 .../testing/core/ingest/BulkImportDirectory.java   | 30 ++++------------------
 .../core/performance/tests/ScanExecutorPT.java     | 21 +++++----------
 2 files changed, 12 insertions(+), 39 deletions(-)

diff --git a/core/src/main/java/org/apache/accumulo/testing/core/ingest/BulkImportDirectory.java b/core/src/main/java/org/apache/accumulo/testing/core/ingest/BulkImportDirectory.java
index c8988b8..a189eda 100644
--- a/core/src/main/java/org/apache/accumulo/testing/core/ingest/BulkImportDirectory.java
+++ b/core/src/main/java/org/apache/accumulo/testing/core/ingest/BulkImportDirectory.java
@@ -16,18 +16,12 @@
  */
 package org.apache.accumulo.testing.core.ingest;
 
-import static java.nio.charset.StandardCharsets.UTF_8;
-
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.server.cli.ClientOnRequiredTable;
-import org.apache.accumulo.server.client.HdfsZooInstance;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -40,29 +34,15 @@ public class BulkImportDirectory {
     String source = null;
     @Parameter(names = {"-f", "--failures"}, description = "directory to copy failures into: will be deleted before the bulk import")
     String failures = null;
-    @Parameter(description = "<username> <password> <tablename> <sourcedir> <failuredir>")
-    List<String> args = new ArrayList<>();
   }
 
   public static void main(String[] args) throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException {
     final FileSystem fs = FileSystem.get(new Configuration());
     Opts opts = new Opts();
-    if (args.length == 5) {
-      System.err.println("Deprecated syntax for BulkImportDirectory, please use the new style (see --help)");
-      final String user = args[0];
-      final byte[] pass = args[1].getBytes(UTF_8);
-      final String tableName = args[2];
-      final String dir = args[3];
-      final String failureDir = args[4];
-      final Path failureDirPath = new Path(failureDir);
-      fs.delete(failureDirPath, true);
-      fs.mkdirs(failureDirPath);
-      HdfsZooInstance.getInstance().getConnector(user, new PasswordToken(pass)).tableOperations().importDirectory(tableName, dir, failureDir, false);
-    } else {
-      opts.parseArgs(BulkImportDirectory.class.getName(), args);
-      fs.delete(new Path(opts.failures), true);
-      fs.mkdirs(new Path(opts.failures));
-      opts.getConnector().tableOperations().importDirectory(opts.getTableName(), opts.source, opts.failures, false);
-    }
+    System.err.println("Deprecated syntax for BulkImportDirectory, please use the new style (see --help)");
+    opts.parseArgs(BulkImportDirectory.class.getName(), args);
+    fs.delete(new Path(opts.failures), true);
+    fs.mkdirs(new Path(opts.failures));
+    opts.getConnector().tableOperations().importDirectory(opts.getTableName(), opts.source, opts.failures, false);
   }
 }
diff --git a/core/src/main/java/org/apache/accumulo/testing/core/performance/tests/ScanExecutorPT.java b/core/src/main/java/org/apache/accumulo/testing/core/performance/tests/ScanExecutorPT.java
index 3dbd5da..f604355 100644
--- a/core/src/main/java/org/apache/accumulo/testing/core/performance/tests/ScanExecutorPT.java
+++ b/core/src/main/java/org/apache/accumulo/testing/core/performance/tests/ScanExecutorPT.java
@@ -61,8 +61,7 @@ public class ScanExecutorPT implements PerformanceTest {
   private static final String TEST_DESC = "Scan Executor Test.  Test running lots of short scans "
       + "while long scans are running in the background.  Each short scan reads a random row and "
       + "family. Using execution hints, short scans are randomly either given a high priority or "
-      + "a dedicated executor.  If the scan prioritizer or dispatcher is not working properly, "
-      + "then the short scans will be orders of magnitude slower.";
+      + "a dedicated executor.  If the scan prioritizer or dispatcher is not working properly, " + "then the short scans will be orders of magnitude slower.";
 
   @Override
   public SystemConfiguration getConfiguration() {
@@ -70,14 +69,10 @@ public class ScanExecutorPT implements PerformanceTest {
 
     siteCfg.put(Property.TSERV_SCAN_MAX_OPENFILES.getKey(), "200");
     siteCfg.put(Property.TSERV_MINTHREADS.getKey(), "200");
-    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se1.threads",
-        SCAN_EXECUTOR_THREADS);
-    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se1.prioritizer",
-        SCAN_PRIORITIZER);
-    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se2.threads",
-        SCAN_EXECUTOR_THREADS);
-    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se2.prioritizer",
-        SCAN_PRIORITIZER);
+    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se1.threads", SCAN_EXECUTOR_THREADS);
+    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se1.prioritizer", SCAN_PRIORITIZER);
+    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se2.threads", SCAN_EXECUTOR_THREADS);
+    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se2.prioritizer", SCAN_PRIORITIZER);
 
     return new SystemConfiguration().setAccumuloConfig(siteCfg);
   }
@@ -139,8 +134,7 @@ public class ScanExecutorPT implements PerformanceTest {
     return builder.build();
   }
 
-  private static long scan(String tableName, Connector c, byte[] row, byte[] fam,
-      Map<String,String> hints) throws TableNotFoundException {
+  private static long scan(String tableName, Connector c, byte[] row, byte[] fam, Map<String,String> hints) throws TableNotFoundException {
     long t1 = System.currentTimeMillis();
     int count = 0;
     try (Scanner scanner = c.createScanner(tableName, Authorizations.EMPTY)) {
@@ -154,8 +148,7 @@ public class ScanExecutorPT implements PerformanceTest {
     return System.currentTimeMillis() - t1;
   }
 
-  private long scan(String tableName, Connector c, AtomicBoolean stop, Map<String,String> hints)
-      throws TableNotFoundException {
+  private long scan(String tableName, Connector c, AtomicBoolean stop, Map<String,String> hints) throws TableNotFoundException {
     long count = 0;
     while (!stop.get()) {
       try (Scanner scanner = c.createScanner(tableName, Authorizations.EMPTY)) {


Mime
View raw message