Return-Path: X-Original-To: apmail-phoenix-commits-archive@minotaur.apache.org Delivered-To: apmail-phoenix-commits-archive@minotaur.apache.org Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by minotaur.apache.org (Postfix) with SMTP id 9C06E173B2 for ; Thu, 10 Sep 2015 21:16:32 +0000 (UTC) Received: (qmail 84996 invoked by uid 500); 10 Sep 2015 21:16:32 -0000 Delivered-To: apmail-phoenix-commits-archive@phoenix.apache.org Received: (qmail 84960 invoked by uid 500); 10 Sep 2015 21:16:32 -0000 Mailing-List: contact commits-help@phoenix.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: dev@phoenix.apache.org Delivered-To: mailing list commits@phoenix.apache.org Received: (qmail 84951 invoked by uid 99); 10 Sep 2015 21:16:32 -0000 Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org) (140.211.11.23) by apache.org (qpsmtpd/0.29) with ESMTP; Thu, 10 Sep 2015 21:16:32 +0000 Received: by git1-us-west.apache.org (ASF Mail Server at git1-us-west.apache.org, from userid 33) id 51110E1072; Thu, 10 Sep 2015 21:16:32 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: codymarcel@apache.org To: commits@phoenix.apache.org Message-Id: X-Mailer: ASF-Git Admin Mailer Subject: phoenix git commit: PHOENIX-2184 - Pherf - Add plugable custom result writer Date: Thu, 10 Sep 2015 21:16:32 +0000 (UTC) Repository: phoenix Updated Branches: refs/heads/4.5-HBase-0.98 14280fef3 -> e345a36ee PHOENIX-2184 - Pherf - Add plugable custom result writer Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e345a36e Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e345a36e Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e345a36e Branch: refs/heads/4.5-HBase-0.98 Commit: e345a36ee90deeb70fc37e80113eacabce104514 Parents: 14280fe Author: Cody Marcel Authored: Mon Aug 3 
15:10:58 2015 -0700 Committer: Cody Marcel Committed: Thu Sep 10 14:15:44 2015 -0700 ---------------------------------------------------------------------- .../coprocessor/BaseScannerRegionObserver.java | 2 +- .../apache/phoenix/util/InstanceResolver.java | 26 ++- phoenix-pherf/config/pherf.properties | 3 +- .../org/apache/phoenix/pherf/DataIngestIT.java | 10 +- .../apache/phoenix/pherf/ResultBaseTestIT.java | 2 +- .../java/org/apache/phoenix/pherf/Pherf.java | 14 +- .../apache/phoenix/pherf/PherfConstants.java | 35 ++-- .../phoenix/pherf/jmx/MonitorManager.java | 14 +- .../phoenix/pherf/result/QueryResult.java | 31 +-- .../phoenix/pherf/result/ResultHandler.java | 6 + .../phoenix/pherf/result/ResultManager.java | 61 ++++-- .../apache/phoenix/pherf/result/ResultUtil.java | 34 ++- .../pherf/result/file/ResultFileDetails.java | 5 + .../pherf/result/impl/CSVFileResultHandler.java | 89 ++++++++ .../pherf/result/impl/CSVResultHandler.java | 108 ++-------- .../pherf/result/impl/DefaultResultHandler.java | 67 ++++++ .../pherf/result/impl/ImageResultHandler.java | 50 ++--- .../pherf/result/impl/XMLResultHandler.java | 44 ++-- .../apache/phoenix/pherf/util/PhoenixUtil.java | 209 ++++++++++--------- .../pherf/workload/MultiThreadedRunner.java | 5 +- .../phoenix/pherf/workload/QueryExecutor.java | 37 ++-- .../pherf/workload/WorkloadExecutor.java | 17 +- .../phoenix/pherf/workload/WriteWorkload.java | 3 +- .../org/apache/phoenix/pherf/PherfTest.java | 2 +- .../apache/phoenix/pherf/ResultBaseTest.java | 5 +- .../org/apache/phoenix/pherf/ResultTest.java | 97 +++++---- 26 files changed, 573 insertions(+), 403 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java ---------------------------------------------------------------------- diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java index 8e94c78..ab1b6b4 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java @@ -87,7 +87,7 @@ abstract public class BaseScannerRegionObserver extends BaseRegionObserver { public static final String ANALYZE_TABLE = "_ANALYZETABLE"; public static final String GUIDEPOST_WIDTH_BYTES = "_GUIDEPOST_WIDTH_BYTES"; public static final String GUIDEPOST_PER_REGION = "_GUIDEPOST_PER_REGION"; - public static final String UPGRADE_DESC_ROW_KEY = "_UPGRADE_DESC_ROW_KEY"; + public static final String UPGRADE_DESC_ROW_KEY = "_UPGRADE_DESC_ROW_KEY"; /** * Attribute name used to pass custom annotations in Scans and Mutations (later). Custom annotations http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java ---------------------------------------------------------------------- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java b/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java index e238c64..dd99d1e 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java @@ -17,6 +17,10 @@ */ package org.apache.phoenix.util; +import org.apache.commons.collections.IteratorUtils; + +import java.util.Iterator; +import java.util.List; import java.util.ServiceLoader; import java.util.concurrent.ConcurrentHashMap; @@ -52,7 +56,27 @@ public class InstanceResolver { } return (T)obj; } - + + /** + * Resolves all instances of a specified class and add it to the list of default implementations + * @param clazz Type of the instance to resolve + * 
@param defaultInstances {@link List} of instances that match the type clazz + * @param Type of class passed + * @return {@link List} of instance of the specified class. Newly found instances will be added + * to the existing contents of defaultInstances + */ + @SuppressWarnings("unchecked") + public static List get(Class clazz, List defaultInstances) { + Iterator iterator = ServiceLoader.load(clazz).iterator(); + if (defaultInstances != null) { + defaultInstances.addAll(IteratorUtils.toList(iterator)); + } else { + defaultInstances = IteratorUtils.toList(iterator); + } + + return defaultInstances; + } + private synchronized static T resolveSingleton(Class clazz, T defaultInstance) { ServiceLoader loader = ServiceLoader.load(clazz); // returns the first registered instance found http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/config/pherf.properties ---------------------------------------------------------------------- diff --git a/phoenix-pherf/config/pherf.properties b/phoenix-pherf/config/pherf.properties index 1142f9b5..152e09c 100644 --- a/phoenix-pherf/config/pherf.properties +++ b/phoenix-pherf/config/pherf.properties @@ -30,5 +30,4 @@ pherf.default.dataloader.threadpool=0 pherf.default.dataloader.batchsize=1000 # Directory where results from a scenario run will be written -pherf.default.results.dir=RESULTS - +pherf.default.results.dir=RESULTS \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java index 4fb5ccd..f2a1a07 100644 --- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java +++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java @@ -31,6 +31,8 @@ import 
java.util.ArrayList; import java.util.List; import java.util.Map; +import com.jcabi.jdbc.JdbcSession; +import com.jcabi.jdbc.Outcome; import org.apache.phoenix.pherf.PherfConstants.GeneratePhoenixStats; import org.apache.phoenix.pherf.configuration.Column; import org.apache.phoenix.pherf.configuration.DataModel; @@ -45,9 +47,6 @@ import org.apache.phoenix.pherf.workload.WriteWorkload; import org.junit.Before; import org.junit.Test; -import com.jcabi.jdbc.JdbcSession; -import com.jcabi.jdbc.Outcome; - public class DataIngestIT extends ResultBaseTestIT { @Before @@ -99,7 +98,7 @@ public class DataIngestIT extends ResultBaseTestIT { } // Run some queries - Workload query = new QueryExecutor(parser, util, executor.getPool()); + Workload query = new QueryExecutor(parser, util, executor); executor.add(query); executor.get(); @@ -119,8 +118,7 @@ public class DataIngestIT extends ResultBaseTestIT { dataModels.add(dataModel); QueryExecutor qe = - new QueryExecutor(parser, util, executor.getPool(), dataModels, null, false, - PherfConstants.RunMode.PERFORMANCE); + new QueryExecutor(parser, util, executor, dataModels, null, false); executor.add(qe); Scenario scenario = parser.getScenarioByName("testScenarioRW"); http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java index d2c5173..1841d71 100644 --- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java +++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java @@ -43,7 +43,7 @@ public class ResultBaseTestIT extends BaseHBaseManagedTimeIT { @BeforeClass public static void setUp() throws Exception { PherfConstants constants = PherfConstants.create(); - properties = 
constants.getProperties(PherfConstants.PHERF_PROPERTIES); + properties = constants.getProperties(PherfConstants.PHERF_PROPERTIES, false); String dir = properties.getProperty("pherf.default.results.dir"); String targetDir = "target/" + dir; properties.setProperty("pherf.default.results.dir", targetDir); http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java index 0421b6f..70fdb11 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java @@ -87,7 +87,7 @@ public class Pherf { private final String dropPherfTablesRegEx; private final boolean executeQuerySets; private final boolean exportCSV; - private final boolean diff; + private final boolean isFunctional; private final boolean monitor; private final int rowCountOverride; private final boolean listFiles; @@ -106,7 +106,7 @@ public class Pherf { System.exit(1); } - properties = PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES); + properties = PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES, false); dropPherfTablesRegEx = command.getOptionValue("drop", null); monitor = command.hasOption("m"); String @@ -123,7 +123,7 @@ public class Pherf { zookeeper = command.getOptionValue("z", "localhost"); queryHint = command.getOptionValue("hint", null); exportCSV = command.hasOption("export"); - diff = command.hasOption("diff"); + isFunctional = command.hasOption("diff"); listFiles = command.hasOption("listFiles"); applySchema = !command.hasOption("disableSchemaApply"); scenarioFile = @@ -158,7 +158,7 @@ public class Pherf { public void run() throws Exception { MonitorManager monitorManager = null; List workloads = new 
ArrayList<>(); - WorkloadExecutor workloadExecutor = new WorkloadExecutor(properties, workloads); + WorkloadExecutor workloadExecutor = new WorkloadExecutor(properties, workloads, !isFunctional); try { if (listFiles) { ResourceList list = new ResourceList(PherfConstants.RESOURCE_DATAMODEL); @@ -224,10 +224,8 @@ public class Pherf { logger.info("\nStarting to apply Execute Queries..."); workloadExecutor - .add(new QueryExecutor(parser, phoenixUtil, workloadExecutor.getPool(), - parser.getDataModels(), queryHint, exportCSV, diff ? - PherfConstants.RunMode.FUNCTIONAL : - PherfConstants.RunMode.PERFORMANCE)); + .add(new QueryExecutor(parser, phoenixUtil, workloadExecutor, parser.getDataModels(), queryHint, + isFunctional)); } else { logger.info( http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/PherfConstants.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/PherfConstants.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/PherfConstants.java index 12580d4..3acf5a5 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/PherfConstants.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/PherfConstants.java @@ -18,12 +18,18 @@ package org.apache.phoenix.pherf; +import java.io.IOException; import java.io.InputStream; import java.util.Properties; public class PherfConstants { + public enum GeneratePhoenixStats { + YES, + NO + } + private static PherfConstants instance = null; - private Properties properties = null; + private static Properties instanceProperties = null; public static final int DEFAULT_THREAD_POOL_SIZE = 10; public static final int DEFAULT_BATCH_SIZE = 1000; @@ -62,16 +68,6 @@ public class PherfConstants { public static final int MONITOR_FREQUENCY = 5000; public static final String MONITOR_FILE_NAME = "STATS_MONITOR"; - public static enum GeneratePhoenixStats { - YES, - NO - 
} - - public static enum RunMode { - PERFORMANCE, - FUNCTIONAL - } - private PherfConstants() { } @@ -82,12 +78,19 @@ public class PherfConstants { return instance; } - public Properties getProperties(final String fileName) throws Exception { - if (properties != null) { - return properties; + public Properties getProperties(final String fileName, boolean getDefault) throws Exception { + + if (instanceProperties == null) { + instanceProperties = loadProperties(fileName); + } else { + return getDefault ? loadProperties(fileName) : instanceProperties; } - properties = new Properties(); + return instanceProperties; + } + + private Properties loadProperties(String fileName) throws IOException{ + Properties properties = new Properties(); InputStream is = null; try { is = getClass().getClassLoader().getResourceAsStream(fileName); @@ -109,7 +112,7 @@ public class PherfConstants { public String getProperty(final String fileName, String property) { String value = null; try { - value = getProperties(fileName).getProperty(property); + value = getProperties(fileName, false).getProperty(property); } catch (Exception e) { e.printStackTrace(); } http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java index 5b39b2b..bb29902 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java @@ -24,7 +24,7 @@ import org.apache.phoenix.pherf.jmx.monitors.Monitor; import org.apache.phoenix.pherf.result.Result; import org.apache.phoenix.pherf.result.ResultHandler; import org.apache.phoenix.pherf.result.file.ResultFileDetails; -import 
org.apache.phoenix.pherf.result.impl.CSVResultHandler; +import org.apache.phoenix.pherf.result.impl.CSVFileResultHandler; import org.apache.phoenix.pherf.workload.Workload; import org.apache.phoenix.util.DateUtil; @@ -42,7 +42,6 @@ import java.util.concurrent.atomic.AtomicLong; * This class starts JMX stats for the configured monitors. * Monitors should be configured in MonitorDetails Enum. * Each stat implements {@link org.apache.phoenix.pherf.jmx.monitors.Monitor}. - * * For the duration of any Pherf run, when the configured * {@link org.apache.phoenix.pherf.PherfConstants#MONITOR_FREQUENCY} is reached a snapshot of * each monitor is taken and dumped out to a log file. @@ -83,8 +82,9 @@ public class MonitorManager implements Workload { } } rowCount = new AtomicLong(0); - this.resultHandler = - new CSVResultHandler(PherfConstants.MONITOR_FILE_NAME, ResultFileDetails.CSV); + this.resultHandler = new CSVFileResultHandler(); + this.resultHandler.setResultFileDetails(ResultFileDetails.CSV); + this.resultHandler.setResultFileName(PherfConstants.MONITOR_FILE_NAME); } @Override public synchronized void complete() { @@ -176,9 +176,9 @@ public class MonitorManager implements Workload { ResultHandler handler = null; try { if (resultHandler.isClosed()) { - handler = - new CSVResultHandler(PherfConstants.MONITOR_FILE_NAME, - ResultFileDetails.CSV); + handler = new CSVFileResultHandler(); + handler.setResultFileDetails(ResultFileDetails.CSV); + handler.setResultFileName(PherfConstants.MONITOR_FILE_NAME); return handler.read(); } else { return resultHandler.read(); http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/QueryResult.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/QueryResult.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/QueryResult.java index 1a682da..c0b4bf7 100644 --- 
a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/QueryResult.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/QueryResult.java @@ -18,8 +18,8 @@ package org.apache.phoenix.pherf.result; -import org.apache.phoenix.pherf.PherfConstants.RunMode; import org.apache.phoenix.pherf.configuration.Query; +import org.apache.phoenix.pherf.result.file.ResultFileDetails; import org.apache.phoenix.util.DateUtil; import java.util.ArrayList; @@ -27,7 +27,11 @@ import java.util.Date; import java.util.List; public class QueryResult extends Query { - private List threadTimes = new ArrayList(); + private List threadTimes = new ArrayList<>(); + + public QueryResult() { + super(); + } public synchronized List getThreadTimes() { return this.threadTimes; @@ -47,9 +51,6 @@ public class QueryResult extends Query { this.setId(query.getId()); } - @SuppressWarnings("unused") public QueryResult() { - } - public Date getStartTime() { Date startTime = null; for (ThreadTime tt : getThreadTimes()) { @@ -108,18 +109,10 @@ public class QueryResult extends Query { return rowValues; } - private int getRunCount() { - int totalRunCount = 0; - for (ThreadTime tt : getThreadTimes()) { - totalRunCount += tt.getRunCount(); - } - return totalRunCount; - } - - public List> getCsvDetailedRepresentation(ResultUtil util, RunMode runMode) { + public List> getCsvDetailedRepresentation(ResultUtil util, ResultFileDetails details) { List> rows = new ArrayList<>(); for (ThreadTime tt : getThreadTimes()) { - for (List runTime : runMode == RunMode.PERFORMANCE ? + for (List runTime : details.isPerformance() ? 
tt.getCsvPerformanceRepresentation(util) : tt.getCsvFunctionalRepresentation(util)) { List rowValues = new ArrayList<>(); @@ -134,6 +127,14 @@ public class QueryResult extends Query { return rows; } + private int getRunCount() { + int totalRunCount = 0; + for (ThreadTime tt : getThreadTimes()) { + totalRunCount += tt.getRunCount(); + } + return totalRunCount; + } + private String getStartTimeText() { return (null == this.getStartTime()) ? "" : http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultHandler.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultHandler.java index 5b71300..2d2acf7 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultHandler.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultHandler.java @@ -39,4 +39,10 @@ public interface ResultHandler { public boolean isClosed(); public ResultFileDetails getResultFileDetails(); + + public String getResultFileName(); + + public void setResultFileDetails(ResultFileDetails details); + + public void setResultFileName(String resultFileName); } http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java index 39d6a9c..6a79486 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java @@ -19,33 +19,52 @@ package org.apache.phoenix.pherf.result; import 
org.apache.phoenix.pherf.PherfConstants; -import org.apache.phoenix.pherf.PherfConstants.RunMode; import org.apache.phoenix.pherf.result.file.ResultFileDetails; -import org.apache.phoenix.pherf.result.impl.CSVResultHandler; +import org.apache.phoenix.pherf.result.impl.CSVFileResultHandler; import org.apache.phoenix.pherf.result.impl.ImageResultHandler; import org.apache.phoenix.pherf.result.impl.XMLResultHandler; +import org.apache.phoenix.util.InstanceResolver; -import java.util.Arrays; +import java.util.ArrayList; import java.util.List; public class ResultManager { private final List resultHandlers; private final ResultUtil util; - private final PherfConstants.RunMode runMode; + private static final List defaultHandlers; - public ResultManager(String fileNameSeed, PherfConstants.RunMode runMode) { - this(runMode, Arrays.asList(new XMLResultHandler(fileNameSeed, ResultFileDetails.XML), - new ImageResultHandler(fileNameSeed, ResultFileDetails.IMAGE), - new CSVResultHandler(fileNameSeed, runMode == RunMode.PERFORMANCE ? 
- ResultFileDetails.CSV_DETAILED_PERFORMANCE : - ResultFileDetails.CSV_DETAILED_FUNCTIONAL), - new CSVResultHandler(fileNameSeed, ResultFileDetails.CSV_AGGREGATE_PERFORMANCE))); + static { + defaultHandlers = new ArrayList<>(); + XMLResultHandler xmlResultHandler = new XMLResultHandler(); + xmlResultHandler.setResultFileDetails(ResultFileDetails.XML); + defaultHandlers.add(xmlResultHandler); + + ImageResultHandler imageResultHandler = new ImageResultHandler(); + imageResultHandler.setResultFileDetails(ResultFileDetails.IMAGE); + defaultHandlers.add(imageResultHandler); + + ResultHandler handlerAgg = new CSVFileResultHandler(); + handlerAgg.setResultFileDetails(ResultFileDetails.CSV_AGGREGATE_PERFORMANCE); + defaultHandlers.add(handlerAgg); + + ResultHandler handlerDet = new CSVFileResultHandler(); + handlerDet.setResultFileDetails(ResultFileDetails.CSV_DETAILED_PERFORMANCE); + defaultHandlers.add(handlerDet); + } + + public ResultManager(String fileNameSeed) { + this(fileNameSeed, InstanceResolver.get(ResultHandler.class, defaultHandlers)); } - public ResultManager(PherfConstants.RunMode runMode, List resultHandlers) { + public ResultManager(String fileNameSeed, List resultHandlers) { this.resultHandlers = resultHandlers; util = new ResultUtil(); - this.runMode = runMode; + + for (ResultHandler resultHandler : resultHandlers) { + if (resultHandler.getResultFileName() == null) { + resultHandler.setResultFileName(fileNameSeed); + } + } } /** @@ -59,7 +78,7 @@ public class ResultManager { util.ensureBaseResultDirExists(); final DataModelResult dataModelResultCopy = new DataModelResult(result); for (ResultHandler handler : resultHandlers) { - util.write(handler, dataModelResultCopy, runMode); + util.write(handler, dataModelResultCopy); } } finally { for (ResultHandler handler : resultHandlers) { @@ -84,13 +103,13 @@ public class ResultManager { public synchronized void write(List dataModelResults) throws Exception { util.ensureBaseResultDirExists(); - CSVResultHandler 
detailsCSVWriter = null; + CSVFileResultHandler detailsCSVWriter = null; try { - detailsCSVWriter = - new CSVResultHandler(PherfConstants.COMBINED_FILE_NAME, - ResultFileDetails.CSV_DETAILED_PERFORMANCE); + detailsCSVWriter = new CSVFileResultHandler(); + detailsCSVWriter.setResultFileDetails(ResultFileDetails.CSV_DETAILED_PERFORMANCE); + detailsCSVWriter.setResultFileName(PherfConstants.COMBINED_FILE_NAME); for (DataModelResult dataModelResult : dataModelResults) { - util.write(detailsCSVWriter, dataModelResult, runMode); + util.write(detailsCSVWriter, dataModelResult); } } finally { if (detailsCSVWriter != null) { @@ -99,4 +118,8 @@ public class ResultManager { } } } + + public List getResultHandlers() { + return resultHandlers; + } } http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java index 07dfa86..9a589f5 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java @@ -19,8 +19,8 @@ package org.apache.phoenix.pherf.result; import org.apache.phoenix.pherf.PherfConstants; -import org.apache.phoenix.pherf.PherfConstants.RunMode; import org.apache.phoenix.pherf.result.file.ResultFileDetails; +import org.apache.phoenix.pherf.result.impl.CSVFileResultHandler; import org.apache.phoenix.pherf.result.impl.CSVResultHandler; import org.apache.phoenix.pherf.util.PhoenixUtil; @@ -50,7 +50,10 @@ public class ResultUtil { CSVResultHandler writer = null; try { if (!dataLoadThreadTime.getThreadTime().isEmpty()) { - writer = new CSVResultHandler("Data_Load_Details", ResultFileDetails.CSV); + writer = new CSVFileResultHandler(); + 
writer.setResultFileName("Data_Load_Details"); + writer.setResultFileDetails(ResultFileDetails.CSV); + for (WriteThreadTime writeThreadTime : dataLoadThreadTime.getThreadTime()) { List rowValues = new ArrayList<>(); rowValues.add(new ResultValue(PhoenixUtil.getZookeeper())); @@ -82,7 +85,10 @@ public class ResultUtil { CSVResultHandler writer = null; ResultFileDetails resultFileDetails = ResultFileDetails.CSV_AGGREGATE_DATA_LOAD; try { - writer = new CSVResultHandler("Data_Load_Summary", ResultFileDetails.CSV); + writer = new CSVFileResultHandler(); + writer.setResultFileName("Data_Load_Summary"); + writer.setResultFileDetails(resultFileDetails); + for (TableLoadTime loadTime : dataLoadTime.getTableLoadTime()) { List rowValues = new ArrayList<>(); rowValues.add(new ResultValue(PhoenixUtil.getZookeeper())); @@ -101,8 +107,8 @@ public class ResultUtil { } } - public synchronized void write(ResultHandler resultHandler, DataModelResult dataModelResult, - RunMode runMode) throws Exception { + public synchronized void write(ResultHandler resultHandler, DataModelResult dataModelResult) + throws Exception { ResultFileDetails resultFileDetails = resultHandler.getResultFileDetails(); switch (resultFileDetails) { case CSV_AGGREGATE_PERFORMANCE: @@ -110,7 +116,7 @@ public class ResultUtil { case CSV_DETAILED_FUNCTIONAL: List> rowDetails = - getCSVResults(dataModelResult, resultFileDetails, runMode); + getCSVResults(dataModelResult, resultFileDetails); for (List row : rowDetails) { Result result = @@ -159,8 +165,20 @@ public class ResultUtil { return str; } + /** + * Used by custom ResultWriter out Pherf's normal code base + * + * @return Header field as a {@link Result} + */ + @SuppressWarnings("unused") + public Result getCSVHeaderAsResult(String row) { + List resultValues = new ArrayList<>(); + resultValues.add(new ResultValue(row)); + return new Result(ResultFileDetails.CSV, row, resultValues); + } + private List> getCSVResults(DataModelResult dataModelResult, - 
ResultFileDetails resultFileDetails, RunMode runMode) { + ResultFileDetails resultFileDetails) { List> rowList = new ArrayList<>(); for (ScenarioResult result : dataModelResult.getScenarioResult()) { @@ -175,7 +193,7 @@ public class ResultUtil { case CSV_DETAILED_FUNCTIONAL: List> detailedRows = - queryResult.getCsvDetailedRepresentation(this, runMode); + queryResult.getCsvDetailedRepresentation(this, resultFileDetails); for (List detailedRowList : detailedRows) { List valueList = new ArrayList<>(); valueList.add(new ResultValue(convertNull(result.getTableName()))); http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/file/ResultFileDetails.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/file/ResultFileDetails.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/file/ResultFileDetails.java index 63b6284..a85f830 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/file/ResultFileDetails.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/file/ResultFileDetails.java @@ -43,4 +43,9 @@ public enum ResultFileDetails { public Header getHeader() { return header; } + + public boolean isPerformance() { + return (this == ResultFileDetails.CSV_AGGREGATE_PERFORMANCE) + || (this == CSV_DETAILED_PERFORMANCE); + } } http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVFileResultHandler.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVFileResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVFileResultHandler.java new file mode 100644 index 0000000..8ddae67 --- /dev/null +++ 
b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVFileResultHandler.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.phoenix.pherf.result.impl; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVParser; +import org.apache.commons.csv.CSVPrinter; +import org.apache.commons.csv.CSVRecord; +import org.apache.phoenix.pherf.PherfConstants; +import org.apache.phoenix.pherf.result.Result; +import org.apache.phoenix.pherf.result.ResultValue; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.List; + +public class CSVFileResultHandler extends CSVResultHandler { + + public CSVFileResultHandler() { + super(); + } + + @Override + public synchronized void write(Result result) throws IOException{ + util.ensureBaseResultDirExists(); + open(result.getHeader()); + super.write(result); + } + + public synchronized List read() throws IOException { + CSVParser parser = null; + util.ensureBaseResultDirExists(); + try { + File file = new File(resultFileName); + parser = CSVParser.parse(file, Charset.defaultCharset(), CSVFormat.DEFAULT); 
+ List records = parser.getRecords(); + List results = new ArrayList<>(); + String header = null; + for (CSVRecord record : records) { + + // First record is the CSV Header + if (record.getRecordNumber() == 1) { + header = record.toString(); + continue; + } + List resultValues = new ArrayList<>(); + for (String val : record.toString().split(PherfConstants.RESULT_FILE_DELIMETER)) { + resultValues.add(new ResultValue(val)); + } + Result result = new Result(resultFileDetails, header, resultValues); + results.add(result); + } + return results; + } finally { + parser.close(); + } + } + + @Override + protected void open(String header) throws IOException { + // Check if already so we only open one writer + if (csvPrinter != null) { + return; + } + csvPrinter = new CSVPrinter(new PrintWriter(resultFileName), CSVFormat.DEFAULT); + Object[] records = header.split(PherfConstants.RESULT_FILE_DELIMETER); + csvPrinter.printRecord(records); + isClosed = false; + } +} http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVResultHandler.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVResultHandler.java index e69f600..41fadb1 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVResultHandler.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/CSVResultHandler.java @@ -15,72 +15,38 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.apache.phoenix.pherf.result.impl; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVParser; import org.apache.commons.csv.CSVPrinter; -import org.apache.commons.csv.CSVRecord; -import org.apache.phoenix.pherf.PherfConstants; import org.apache.phoenix.pherf.result.Result; -import org.apache.phoenix.pherf.result.ResultHandler; import org.apache.phoenix.pherf.result.ResultUtil; -import org.apache.phoenix.pherf.result.ResultValue; -import org.apache.phoenix.pherf.result.file.ResultFileDetails; -import java.io.File; import java.io.IOException; -import java.io.PrintWriter; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.List; - -/** - * TODO Doc this class. Note that each instance that has a non unique file name will overwrite the last - */ -public class CSVResultHandler implements ResultHandler { - - private final ResultUtil util; - private final ResultFileDetails resultFileDetails; - private final String resultFileName; - private volatile CSVPrinter csvPrinter = null; - private volatile boolean isClosed = true; - public CSVResultHandler(String resultFileName, ResultFileDetails resultFileDetails) { - this(resultFileName, resultFileDetails, true); - } +public abstract class CSVResultHandler extends DefaultResultHandler { + protected final ResultUtil util; + protected volatile CSVPrinter csvPrinter = null; + protected volatile boolean isClosed = true; - public CSVResultHandler(String resultFileName, ResultFileDetails resultFileDetails, - boolean generateFullFileName) { + public CSVResultHandler() { this.util = new ResultUtil(); - PherfConstants constants = PherfConstants.create(); - String resultDir = constants.getProperty("pherf.default.results.dir"); - - this.resultFileName = - generateFullFileName ? 
- resultDir + PherfConstants.PATH_SEPARATOR + PherfConstants.RESULT_PREFIX - + resultFileName + util.getSuffix() + resultFileDetails - .getExtension().toString() : - resultFileName; - this.resultFileDetails = resultFileDetails; } - @Override public synchronized void write(Result result) throws IOException { - util.ensureBaseResultDirExists(); - - open(result); + @Override + public synchronized void write(Result result) throws IOException { csvPrinter.printRecord(result.getResultValues()); flush(); } - @Override public synchronized void flush() throws IOException { + @Override + public synchronized void flush() throws IOException { if (csvPrinter != null) { csvPrinter.flush(); } } - @Override public synchronized void close() throws IOException { + @Override + public synchronized void close() throws IOException { if (csvPrinter != null) { csvPrinter.flush(); csvPrinter.close(); @@ -88,51 +54,15 @@ public class CSVResultHandler implements ResultHandler { } } - @Override public synchronized List read() throws IOException { - CSVParser parser = null; - util.ensureBaseResultDirExists(); - try { - File file = new File(resultFileName); - parser = CSVParser.parse(file, Charset.defaultCharset(), CSVFormat.DEFAULT); - List records = parser.getRecords(); - List results = new ArrayList<>(); - String header = null; - for (CSVRecord record : records) { - - // First record is the CSV Header - if (record.getRecordNumber() == 1) { - header = record.toString(); - continue; - } - List resultValues = new ArrayList<>(); - for (String val : record.toString().split(PherfConstants.RESULT_FILE_DELIMETER)) { - resultValues.add(new ResultValue(val)); - } - Result result = new Result(resultFileDetails, header, resultValues); - results.add(result); - } - return results; - } finally { - parser.close(); - } - } - - private void open(Result result) throws IOException { - // Check if already so we only open one writer - if (csvPrinter != null) { - return; - } - csvPrinter = new CSVPrinter(new 
PrintWriter(resultFileName), CSVFormat.DEFAULT); - Object[] records = result.getHeader().split(PherfConstants.RESULT_FILE_DELIMETER); - csvPrinter.printRecord(records); - isClosed = false; - } - - @Override public synchronized boolean isClosed() { + @Override + public synchronized boolean isClosed() { return isClosed; } - @Override public ResultFileDetails getResultFileDetails() { - return resultFileDetails; - } + /** + * This method is meant to open the connection to the target CSV location + * @param header {@link String} Comma separated list of header values for CSV + * @throws IOException + */ + protected abstract void open(String header) throws IOException; } http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/DefaultResultHandler.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/DefaultResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/DefaultResultHandler.java new file mode 100644 index 0000000..22fb625 --- /dev/null +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/DefaultResultHandler.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.pherf.result.impl; + +import org.apache.phoenix.pherf.PherfConstants; +import org.apache.phoenix.pherf.result.ResultHandler; +import org.apache.phoenix.pherf.result.ResultUtil; +import org.apache.phoenix.pherf.result.file.ResultFileDetails; + +public abstract class DefaultResultHandler implements ResultHandler{ + protected String resultFileName; + protected ResultFileDetails resultFileDetails; + protected final String resultDir; + protected final ResultUtil util; + + public DefaultResultHandler() { + util = new ResultUtil(); + PherfConstants constants = PherfConstants.create(); + this.resultDir = constants.getProperty("pherf.default.results.dir"); + } + + /** + * {@link DefaultResultHandler#setResultFileDetails(ResultFileDetails)} Must be called prior to + * setting the file name. Otherwise you will get a NPE. + * + * TODO Change this so NPE is not possible. 
Needs a bit of refactoring here + * + * @param resultFileName Base name of file + */ + @Override + public void setResultFileName(String resultFileName) { + this.resultFileName = + resultDir + PherfConstants.PATH_SEPARATOR + PherfConstants.RESULT_PREFIX + + resultFileName + util.getSuffix() + getResultFileDetails() + .getExtension().toString(); + } + + @Override + public void setResultFileDetails(ResultFileDetails details) { + this.resultFileDetails = details; + } + + @Override + public String getResultFileName() { + return resultFileName; + } + + @Override + public ResultFileDetails getResultFileDetails() { + return resultFileDetails; + } +} http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/ImageResultHandler.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/ImageResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/ImageResultHandler.java index 5c3eac1..f25ce4e 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/ImageResultHandler.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/ImageResultHandler.java @@ -18,7 +18,6 @@ package org.apache.phoenix.pherf.result.impl; -import org.apache.phoenix.pherf.PherfConstants; import org.apache.phoenix.pherf.result.*; import org.apache.phoenix.pherf.result.file.ResultFileDetails; import org.jfree.chart.ChartFactory; @@ -34,30 +33,14 @@ import java.io.File; import java.io.IOException; import java.util.List; -public class ImageResultHandler implements ResultHandler { - private final String resultFileName; - private final ResultFileDetails resultFileDetails; +public class ImageResultHandler extends DefaultResultHandler{ - public ImageResultHandler(String resultFileName, ResultFileDetails resultFileDetails) { - this(resultFileName, resultFileDetails, true); + public 
ImageResultHandler() { + super(); } - public ImageResultHandler(String resultFileName, ResultFileDetails resultFileDetails, - boolean generateFullFileName) { - ResultUtil util = new ResultUtil(); - PherfConstants constants = PherfConstants.create(); - String resultDir = constants.getProperty("pherf.default.results.dir"); - - this.resultFileName = - generateFullFileName ? - resultDir + PherfConstants.PATH_SEPARATOR + PherfConstants.RESULT_PREFIX - + resultFileName + util.getSuffix() + resultFileDetails - .getExtension().toString() : - resultFileName; - this.resultFileDetails = resultFileDetails; - } - - @Override public synchronized void write(Result result) throws Exception { + @Override + public synchronized void write(Result result) throws Exception { TimeSeriesCollection timeSeriesCollection = new TimeSeriesCollection(); int rowCount = 0; int maxLegendCount = 20; @@ -112,23 +95,32 @@ public class ImageResultHandler implements ResultHandler { } - @Override public synchronized void flush() throws Exception { + @Override + public synchronized void flush() throws Exception { } - @Override public synchronized void close() throws Exception { + @Override + public synchronized void close() throws Exception { } - @Override public List read() throws Exception { + @Override + public List read() throws Exception { return null; } - @Override public boolean isClosed() { - return false; + /** + * File is never left open. This impl always overwrites existing file. 
+ * @return + */ + @Override + public boolean isClosed() { + return true; } - @Override public ResultFileDetails getResultFileDetails() { - return resultFileDetails; + @Override + public void setResultFileDetails(ResultFileDetails details) { + super.setResultFileDetails(ResultFileDetails.IMAGE); } } http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/XMLResultHandler.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/XMLResultHandler.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/XMLResultHandler.java index 009ae21..990c9be 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/XMLResultHandler.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/impl/XMLResultHandler.java @@ -18,7 +18,6 @@ package org.apache.phoenix.pherf.result.impl; -import org.apache.phoenix.pherf.PherfConstants; import org.apache.phoenix.pherf.result.*; import org.apache.phoenix.pherf.result.file.ResultFileDetails; @@ -31,30 +30,14 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -public class XMLResultHandler implements ResultHandler { - private final String resultFileName; - private final ResultFileDetails resultFileDetails; +public class XMLResultHandler extends DefaultResultHandler{ - public XMLResultHandler(String resultFileName, ResultFileDetails resultFileDetails) { - this(resultFileName, resultFileDetails, true); + public XMLResultHandler() { + super(); } - public XMLResultHandler(String resultFileName, ResultFileDetails resultFileDetails, - boolean generateFullFileName) { - ResultUtil util = new ResultUtil(); - PherfConstants constants = PherfConstants.create(); - String resultDir = constants.getProperty("pherf.default.results.dir"); - - this.resultFileName = - generateFullFileName ? 
- resultDir + PherfConstants.PATH_SEPARATOR + PherfConstants.RESULT_PREFIX - + resultFileName + util.getSuffix() + resultFileDetails - .getExtension().toString() : - resultFileName; - this.resultFileDetails = resultFileDetails; - } - - @Override public synchronized void write(Result result) throws Exception { + @Override + public synchronized void write(Result result) throws Exception { FileOutputStream os = null; JAXBContext jaxbContext = JAXBContext.newInstance(DataModelResult.class); Marshaller jaxbMarshaller = jaxbContext.createMarshaller(); @@ -71,15 +54,18 @@ public class XMLResultHandler implements ResultHandler { } } - @Override public synchronized void flush() throws IOException { + @Override + public synchronized void flush() throws IOException { return; } - @Override public synchronized void close() throws IOException { + @Override + public synchronized void close() throws IOException { return; } - @Override public synchronized List read() throws Exception { + @Override + public synchronized List read() throws Exception { JAXBContext jaxbContext = JAXBContext.newInstance(DataModelResult.class); Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller(); @@ -91,11 +77,13 @@ public class XMLResultHandler implements ResultHandler { return results; } - @Override public boolean isClosed() { + @Override + public boolean isClosed() { return true; } - @Override public ResultFileDetails getResultFileDetails() { - return resultFileDetails; + @Override + public void setResultFileDetails(ResultFileDetails details) { + super.setResultFileDetails(ResultFileDetails.XML); } } http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java index db967fc..57858a3 
100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java @@ -18,34 +18,26 @@ package org.apache.phoenix.pherf.util; -import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME; -import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM; +import org.apache.phoenix.pherf.PherfConstants; +import org.apache.phoenix.pherf.configuration.*; +import org.apache.phoenix.pherf.jmx.MonitorManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.DriverManager; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; +import java.sql.*; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Properties; -import org.apache.phoenix.pherf.PherfConstants; -import org.apache.phoenix.pherf.configuration.Column; -import org.apache.phoenix.pherf.configuration.DataTypeMapping; -import org.apache.phoenix.pherf.configuration.Query; -import org.apache.phoenix.pherf.configuration.QuerySet; -import org.apache.phoenix.pherf.configuration.Scenario; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME; +import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM; // TODO This class needs to be cleanup up a bit. I just wanted to get an initial placeholder in. 
public class PhoenixUtil { - private static final Logger logger = LoggerFactory.getLogger(PhoenixUtil.class); - private static String zookeeper; - private static int rowCountOverride = 0; + private static final Logger logger = LoggerFactory.getLogger(PhoenixUtil.class); + private static String zookeeper; + private static int rowCountOverride = 0; private boolean testEnabled; private static PhoenixUtil instance; @@ -66,10 +58,10 @@ public class PhoenixUtil { return instance; } - public Connection getConnection() throws Exception{ - return getConnection(null); + public Connection getConnection() throws Exception { + return getConnection(null); } - + public Connection getConnection(String tenantId) throws Exception { return getConnection(tenantId, testEnabled); } @@ -104,15 +96,17 @@ public class PhoenixUtil { /** * Execute statement + * * @param sql * @param connection * @return * @throws SQLException */ - public boolean executeStatementThrowException(String sql, Connection connection) throws SQLException { - boolean result = false; - PreparedStatement preparedStatement = null; - try { + public boolean executeStatementThrowException(String sql, Connection connection) + throws SQLException { + boolean result = false; + PreparedStatement preparedStatement = null; + try { preparedStatement = connection.prepareStatement(sql); result = preparedStatement.execute(); connection.commit(); @@ -121,9 +115,9 @@ public class PhoenixUtil { } return result; } - + public boolean executeStatement(String sql, Connection connection) { - boolean result = false; + boolean result = false; PreparedStatement preparedStatement = null; try { preparedStatement = connection.prepareStatement(sql); @@ -143,7 +137,7 @@ public class PhoenixUtil { @SuppressWarnings("unused") public boolean executeStatement(PreparedStatement preparedStatement, Connection connection) { - boolean result = false; + boolean result = false; try { result = preparedStatement.execute(); connection.commit(); @@ -154,72 
+148,75 @@ public class PhoenixUtil { } /** - * Delete existing tables with schema name set as {@link PherfConstants#PHERF_SCHEMA_NAME} with regex comparison - * + * Delete existing tables with schema name set as {@link PherfConstants#PHERF_SCHEMA_NAME} with regex comparison + * * @param regexMatch * @throws SQLException * @throws Exception */ public void deleteTables(String regexMatch) throws Exception { - regexMatch = regexMatch.toUpperCase().replace("ALL", ".*"); - Connection conn = getConnection(); - try { - ResultSet resultSet = getTableMetaData(PherfConstants.PHERF_SCHEMA_NAME, null, conn); - while (resultSet.next()) { - String tableName = resultSet.getString(TABLE_SCHEM) == null ? resultSet - .getString(TABLE_NAME) : resultSet - .getString(TABLE_SCHEM) - + "." - + resultSet.getString(TABLE_NAME); - if (tableName.matches(regexMatch)) { - logger.info("\nDropping " + tableName); - try { - executeStatementThrowException("DROP TABLE " - + tableName + " CASCADE", conn); - } catch (org.apache.phoenix.schema.TableNotFoundException tnf) { - logger.error("Table might be already be deleted via cascade. Schema: " - + tnf.getSchemaName() - + " Table: " - + tnf.getTableName()); - } - } - } - } finally { - conn.close(); - } + regexMatch = regexMatch.toUpperCase().replace("ALL", ".*"); + Connection conn = getConnection(); + try { + ResultSet resultSet = getTableMetaData(PherfConstants.PHERF_SCHEMA_NAME, null, conn); + while (resultSet.next()) { + String tableName = resultSet.getString(TABLE_SCHEM) == null ? resultSet + .getString(TABLE_NAME) : resultSet + .getString(TABLE_SCHEM) + + "." + + resultSet.getString(TABLE_NAME); + if (tableName.matches(regexMatch)) { + logger.info("\nDropping " + tableName); + try { + executeStatementThrowException("DROP TABLE " + + tableName + " CASCADE", conn); + } catch (org.apache.phoenix.schema.TableNotFoundException tnf) { + logger.error("Table might be already be deleted via cascade. 
Schema: " + + tnf.getSchemaName() + + " Table: " + + tnf.getTableName()); + } + } + } + } finally { + conn.close(); + } } - - public ResultSet getTableMetaData(String schemaName, String tableName, Connection connection) throws SQLException { - DatabaseMetaData dbmd = connection.getMetaData(); - ResultSet resultSet = dbmd.getTables(null, schemaName, tableName, null); - return resultSet; + + public ResultSet getTableMetaData(String schemaName, String tableName, Connection connection) + throws SQLException { + DatabaseMetaData dbmd = connection.getMetaData(); + ResultSet resultSet = dbmd.getTables(null, schemaName, tableName, null); + return resultSet; } - - public ResultSet getColumnsMetaData(String schemaName, String tableName, Connection connection) throws SQLException { - DatabaseMetaData dbmd = connection.getMetaData(); - ResultSet resultSet = dbmd.getColumns(null, schemaName, tableName, null); - return resultSet; + + public ResultSet getColumnsMetaData(String schemaName, String tableName, Connection connection) + throws SQLException { + DatabaseMetaData dbmd = connection.getMetaData(); + ResultSet resultSet = dbmd.getColumns(null, schemaName, tableName, null); + return resultSet; } - - public synchronized List getColumnsFromPhoenix(String schemaName, String tableName, Connection connection) throws SQLException { - List columnList = new ArrayList(); - ResultSet resultSet = null; - try { - resultSet = getColumnsMetaData(schemaName, tableName, connection); - while (resultSet.next()) { - Column column = new Column(); - column.setName(resultSet.getString("COLUMN_NAME")); - column.setType(DataTypeMapping.valueOf(resultSet.getString("TYPE_NAME"))); - column.setLength(resultSet.getInt("COLUMN_SIZE")); - columnList.add(column); - } - } finally { - if (null != resultSet) { - resultSet.close(); - } - } - - return Collections.unmodifiableList(columnList); + + public synchronized List getColumnsFromPhoenix(String schemaName, String tableName, + Connection connection) throws 
SQLException { + List columnList = new ArrayList(); + ResultSet resultSet = null; + try { + resultSet = getColumnsMetaData(schemaName, tableName, connection); + while (resultSet.next()) { + Column column = new Column(); + column.setName(resultSet.getString("COLUMN_NAME")); + column.setType(DataTypeMapping.valueOf(resultSet.getString("TYPE_NAME"))); + column.setLength(resultSet.getInt("COLUMN_SIZE")); + columnList.add(column); + } + } finally { + if (null != resultSet) { + resultSet.close(); + } + } + + return Collections.unmodifiableList(columnList); } /** @@ -269,22 +266,22 @@ public class PhoenixUtil { } public static String getZookeeper() { - return zookeeper; - } + return zookeeper; + } + + public static void setZookeeper(String zookeeper) { + logger.info("Setting zookeeper: " + zookeeper); + PhoenixUtil.zookeeper = zookeeper; + } + + public static int getRowCountOverride() { + return rowCountOverride; + } + + public static void setRowCountOverride(int rowCountOverride) { + PhoenixUtil.rowCountOverride = rowCountOverride; + } - public static void setZookeeper(String zookeeper) { - logger.info("Setting zookeeper: " + zookeeper); - PhoenixUtil.zookeeper = zookeeper; - } - - public static int getRowCountOverride() { - return rowCountOverride; - } - - public static void setRowCountOverride(int rowCountOverride) { - PhoenixUtil.rowCountOverride = rowCountOverride; - } - /** * Update Phoenix table stats * @@ -295,4 +292,12 @@ public class PhoenixUtil { logger.info("Updating stats for " + tableName); executeStatement("UPDATE STATISTICS " + tableName, scenario); } + + public MonitorManager loadCustomMonitors(MonitorManager manager) throws Exception { + Properties + properties = + PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES, false); + + return manager; + } } http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java 
---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java index efb3da9..524724c 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/MultiThreadedRunner.java @@ -24,8 +24,6 @@ import java.sql.ResultSet; import java.util.Calendar; import java.util.Date; -import org.apache.phoenix.pherf.PherfConstants.RunMode; - import org.apache.phoenix.pherf.result.DataModelResult; import org.apache.phoenix.pherf.result.ResultManager; import org.apache.phoenix.pherf.result.RunTime; @@ -66,12 +64,13 @@ class MultiThreadedRunner implements Runnable { this.dataModelResult = dataModelResult; this.numberOfExecutions = numberOfExecutions; this.executionDurationInMs = executionDurationInMs; - this.resultManager = new ResultManager(dataModelResult.getName(), RunMode.PERFORMANCE); + this.resultManager = new ResultManager(dataModelResult.getName()); } /** * Executes run for a minimum of number of execution or execution duration */ + @Override public void run() { logger.info("\n\nThread Starting " + threadName + " ; " + query.getStatement() + " for " + numberOfExecutions + "times\n\n"); http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java index 4ab76c8..5a7c49f 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java @@ -21,7 
+21,6 @@ package org.apache.phoenix.pherf.workload; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.phoenix.pherf.PherfConstants.GeneratePhoenixStats; -import org.apache.phoenix.pherf.PherfConstants.RunMode; import org.apache.phoenix.pherf.configuration.*; import org.apache.phoenix.pherf.result.*; import org.apache.phoenix.pherf.util.PhoenixUtil; @@ -32,43 +31,42 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; public class QueryExecutor implements Workload { private static final Logger logger = LoggerFactory.getLogger(QueryExecutor.class); private List dataModels; private String queryHint; - private final RunMode runMode; private final boolean exportCSV; - private final ExecutorService pool; private final XMLConfigParser parser; private final PhoenixUtil util; + private final WorkloadExecutor workloadExecutor; - public QueryExecutor(XMLConfigParser parser, PhoenixUtil util, ExecutorService pool) { - this(parser, util, pool, parser.getDataModels(), null, false, RunMode.PERFORMANCE); + public QueryExecutor(XMLConfigParser parser, PhoenixUtil util, + WorkloadExecutor workloadExecutor) { + this(parser, util, workloadExecutor, parser.getDataModels(), null, false); } - public QueryExecutor(XMLConfigParser parser, PhoenixUtil util, ExecutorService pool, - List dataModels, String queryHint, boolean exportCSV, RunMode runMode) { + public QueryExecutor(XMLConfigParser parser, PhoenixUtil util, + WorkloadExecutor workloadExecutor, List dataModels, String queryHint, + boolean exportCSV) { this.parser = parser; this.queryHint = queryHint; this.exportCSV = exportCSV; - this.runMode = runMode; this.dataModels = dataModels; - this.pool = pool; this.util = util; + this.workloadExecutor = workloadExecutor; } - @Override public void complete() { - - } + @Override 
+ public void complete() {} /** * Calls in Multithreaded Query Executor for all datamodels * * @throws Exception */ + @Override public Runnable execute() throws Exception { Runnable runnable = null; for (DataModel dataModel : dataModels) { @@ -89,7 +87,8 @@ public class QueryExecutor implements Workload { */ protected Runnable exportAllScenarios(final DataModel dataModel) throws Exception { return new Runnable() { - @Override public void run() { + @Override + public void run() { try { List scenarios = dataModel.getScenarios(); @@ -124,7 +123,7 @@ public class QueryExecutor implements Workload { new DataModelResult(dataModel, PhoenixUtil.getZookeeper()); ResultManager resultManager = - new ResultManager(dataModelResult.getName(), QueryExecutor.this.runMode); + new ResultManager(dataModelResult.getName()); dataModelResults.add(dataModelResult); List scenarios = dataModel.getScenarios(); @@ -144,7 +143,7 @@ public class QueryExecutor implements Workload { logger.debug("Inserting write workload ( " + i + " ) of ( " + writerThreadCount + " )"); Workload writes = new WriteWorkload(PhoenixUtil.create(), parser, GeneratePhoenixStats.NO); - pool.submit(writes.execute()); + workloadExecutor.add(writes); } } @@ -193,7 +192,7 @@ public class QueryExecutor implements Workload { thread = executeRunner((i + 1) + "," + cr, dataModelResult, queryResult, querySetResult); - threads.add(pool.submit(thread)); + threads.add(workloadExecutor.getPool().submit(thread)); } for (Future thread : threads) { @@ -228,7 +227,7 @@ public class QueryExecutor implements Workload { thread = executeRunner((i + 1) + "," + cr, dataModelResult, queryResult, querySetResult); - threads.add(pool.submit(thread)); + threads.add(workloadExecutor.getPool().submit(thread)); } for (Future thread : threads) { @@ -259,7 +258,7 @@ public class QueryExecutor implements Workload { queryResult.setHint(this.queryHint); logger.info("\nExecuting query " + queryResult.getStatement()); Runnable thread; - if (this.runMode == 
RunMode.FUNCTIONAL) { + if (workloadExecutor.isPerformance()) { thread = new MultithreadedDiffer(threadTime.getThreadName(), queryResult, threadTime, querySet.getNumberOfExecutions(), querySet.getExecutionDurationInMs()); http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WorkloadExecutor.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WorkloadExecutor.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WorkloadExecutor.java index a65b4aa..3cde7ae 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WorkloadExecutor.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WorkloadExecutor.java @@ -31,6 +31,7 @@ import java.util.concurrent.*; public class WorkloadExecutor { private static final Logger logger = LoggerFactory.getLogger(WorkloadExecutor.class); private final int poolSize; + private final boolean isPerformance; // Jobs can be accessed by multiple threads private final Map jobs = new ConcurrentHashMap<>(); @@ -38,14 +39,15 @@ public class WorkloadExecutor { private final ExecutorService pool; public WorkloadExecutor() throws Exception { - this(PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES)); + this(PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES, false)); } public WorkloadExecutor(Properties properties) throws Exception { - this(properties, new ArrayList()); + this(properties, new ArrayList(), true); } - public WorkloadExecutor(Properties properties, List workloads) throws Exception { + public WorkloadExecutor(Properties properties, List workloads, boolean isPerformance) throws Exception { + this.isPerformance = isPerformance; this.poolSize = (properties.getProperty("pherf.default.threadpool") == null) ? 
PherfConstants.DEFAULT_THREAD_POOL_SIZE : @@ -102,10 +104,19 @@ public class WorkloadExecutor { pool.shutdownNow(); } + /** + * TODO This should be removed. Access to the pool should be restricted, and callers should use Workloads instead. + * + * @return {@link ExecutorService} Exposes the underlying thread pool + */ public ExecutorService getPool() { return pool; } + public boolean isPerformance() { + return isPerformance; + } + private void init(List workloads) throws Exception { for (Workload workload : workloads) { this.jobs.put(workload, pool.submit(workload.execute())); http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java index 6985abd..b38f875 100644 --- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java +++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java @@ -79,7 +79,8 @@ public class WriteWorkload implements Workload { public WriteWorkload(PhoenixUtil phoenixUtil, XMLConfigParser parser, Scenario scenario, GeneratePhoenixStats generateStatistics) throws Exception { - this(phoenixUtil, PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES), + this(phoenixUtil, PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES, + false), parser, scenario, generateStatistics); } http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/PherfTest.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/PherfTest.java b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/PherfTest.java index 731a57a..80ab971 100644 ---
a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/PherfTest.java +++ b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/PherfTest.java @@ -36,7 +36,7 @@ public class PherfTest { public void testUnknownOption() { String[] args = {"-drop", "all", "-q", "-m","-bsOption"}; - // Makes sure that System.exit(1) is called. Release is a required param. + // Makes sure that System.exit(1) is called. exit.expectSystemExitWithStatus(1); Pherf.main(args); } http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultBaseTest.java ---------------------------------------------------------------------- diff --git a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultBaseTest.java b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultBaseTest.java index 5c455fc..1497e77 100644 --- a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultBaseTest.java +++ b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultBaseTest.java @@ -24,6 +24,7 @@ import org.junit.BeforeClass; import java.util.Properties; public class ResultBaseTest { + protected static PherfConstants constants; private static boolean isSetUpDone = false; @BeforeClass @@ -33,8 +34,8 @@ public class ResultBaseTest { } ResultUtil util = new ResultUtil(); - PherfConstants constants = PherfConstants.create(); - Properties properties = constants.getProperties(PherfConstants.PHERF_PROPERTIES); + constants = PherfConstants.create(); + Properties properties = constants.getProperties(PherfConstants.PHERF_PROPERTIES, false); String dir = properties.getProperty("pherf.default.results.dir"); String targetDir = "target/" + dir; properties.setProperty("pherf.default.results.dir", targetDir); http://git-wip-us.apache.org/repos/asf/phoenix/blob/e345a36e/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java ---------------------------------------------------------------------- diff --git 
a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java index 4ccf95c..81d5fd0 100644 --- a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java +++ b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java @@ -25,12 +25,11 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import org.apache.phoenix.pherf.PherfConstants.RunMode; import org.apache.phoenix.pherf.jmx.MonitorManager; import org.apache.phoenix.pherf.result.file.Extension; import org.apache.phoenix.pherf.result.file.ResultFileDetails; -import org.apache.phoenix.pherf.result.impl.CSVResultHandler; +import org.apache.phoenix.pherf.result.impl.CSVFileResultHandler; import org.apache.phoenix.pherf.result.impl.XMLResultHandler; import org.apache.phoenix.pherf.result.*; import org.junit.Test; @@ -49,7 +48,9 @@ public class ResultTest extends ResultBaseTest { } try { - resultMonitorWriter = new CSVResultHandler(PherfConstants.MONITOR_FILE_NAME, ResultFileDetails.CSV_MONITOR); + resultMonitorWriter = new CSVFileResultHandler(); + resultMonitorWriter.setResultFileDetails(ResultFileDetails.CSV_MONITOR); + resultMonitorWriter.setResultFileName(PherfConstants.MONITOR_FILE_NAME); Result result = new Result(ResultFileDetails.CSV_MONITOR, ResultFileDetails.CSV_MONITOR.getHeader().toString(), resultValues); resultMonitorWriter.write(result); @@ -110,9 +111,56 @@ public class ResultTest extends ResultBaseTest { public void testResult() throws Exception { String filename = "testresult"; ResultHandler xmlResultHandler = null; - ResultManager resultManager = new ResultManager(filename, RunMode.PERFORMANCE); + ResultManager resultManager = new ResultManager(filename); + assertTrue("Default Handlers were not initialized.", resultManager.getResultHandlers().size() > 0); // write result to file + DataModelResult dataModelResult = 
setUpDataModelResult(); + resultManager.write(dataModelResult); + + // Put some stuff in a combined file + List modelResults = new ArrayList<>(); + modelResults.add(dataModelResult); + modelResults.add(dataModelResult); + resultManager.write(modelResults); + + // read result from file + xmlResultHandler = new XMLResultHandler(); + xmlResultHandler.setResultFileDetails(ResultFileDetails.XML); + xmlResultHandler.setResultFileName(filename); + + List resultList = xmlResultHandler.read(); + ResultValue resultValue = resultList.get(0).getResultValues().get(0); + DataModelResult dataModelResultFromFile = resultValue.getResultValue(); + + ScenarioResult scenarioResultFromFile = dataModelResultFromFile.getScenarioResult().get(0); + QuerySetResult querySetResultFromFile = scenarioResultFromFile.getQuerySetResult().get(0); + QueryResult queryResultFromFile = querySetResultFromFile.getQueryResults().get(0); + ThreadTime ttFromFile = queryResultFromFile.getThreadTimes().get(0); + + // thread level verification + assertEquals(10, (int) ttFromFile.getMinTimeInMs().getElapsedDurationInMs()); + assertEquals(30, (int) ttFromFile.getMaxTimeInMs().getElapsedDurationInMs()); + assertEquals(20, (int) ttFromFile.getAvgTimeInMs()); + + // 3rd runtime has the earliest start time, therefore that's what's expected. 
+ QueryResult + qr = + dataModelResult.getScenarioResult().get(0).getQuerySetResult().get(0) + .getQueryResults().get(0); + List runTimes = qr.getThreadTimes().get(0).getRunTimesInMs(); + assertEquals(runTimes.get(2).getStartTime(), ttFromFile.getStartTime()); + assertEquals(runTimes.get(0).getResultRowCount(), ttFromFile.getRunTimesInMs().get(0).getResultRowCount()); + assertEquals(runTimes.get(1).getResultRowCount(), ttFromFile.getRunTimesInMs().get(1).getResultRowCount()); + assertEquals(runTimes.get(2).getResultRowCount(), ttFromFile.getRunTimesInMs().get(2).getResultRowCount()); + + // query result level verification + assertEquals(10, queryResultFromFile.getAvgMinRunTimeInMs()); + assertEquals(30, queryResultFromFile.getAvgMaxRunTimeInMs()); + assertEquals(20, queryResultFromFile.getAvgRunTimeInMs()); + } + + private DataModelResult setUpDataModelResult() { DataModelResult dataModelResult = new DataModelResult(); dataModelResult.setZookeeper("mytestzk"); ScenarioResult scenarioResult = new ScenarioResult(); @@ -134,7 +182,8 @@ public class ResultTest extends ResultBaseTest { query.setTenantId("tennantID123"); query.setStatement("Select * \n" + "from FHA"); query2.setStatement("Select a, b, c * \n" + "from FHA2"); - assertEquals("Expected consecutive spaces to be normalized", "Select * from FHA", query.getStatement()); + assertEquals("Expected consecutive spaces to be normalized", "Select * from FHA", + query.getStatement()); QueryResult queryResult = new QueryResult(query); QueryResult queryResult2 = new QueryResult(query2); @@ -156,42 +205,6 @@ public class ResultTest extends ResultBaseTest { queryResult.getThreadTimes().add(tt); queryResult2.getThreadTimes().add(tt); - //resultUtil.writeResultToFile(dataModelResult, filename, RunMode.PERFORMANCE); - resultManager.write(dataModelResult); - - // Put some stuff in a combined file - List modelResults = new ArrayList<>(); - modelResults.add(dataModelResult); - modelResults.add(dataModelResult); - 
resultManager.write(modelResults); - - // read result from file - xmlResultHandler = new XMLResultHandler(filename, ResultFileDetails.XML); - List resultList = xmlResultHandler.read(); - ResultValue resultValue = resultList.get(0).getResultValues().get(0); - DataModelResult dataModelResultFromFile = resultValue.getResultValue(); - - ScenarioResult scenarioResultFromFile = dataModelResultFromFile.getScenarioResult().get(0); - QuerySetResult querySetResultFromFile = scenarioResultFromFile.getQuerySetResult().get(0); - QueryResult queryResultFromFile = querySetResultFromFile.getQueryResults().get(0); - ThreadTime ttFromFile = queryResultFromFile.getThreadTimes().get(0); - - // thread level verification - assertEquals(10, (int) ttFromFile.getMinTimeInMs().getElapsedDurationInMs()); - assertEquals(30, (int) ttFromFile.getMaxTimeInMs().getElapsedDurationInMs()); - assertEquals(20, (int) ttFromFile.getAvgTimeInMs()); - // 3rd runtime has the earliest start time, therefore that's what's expected. - assertEquals(runtime3.getStartTime(), ttFromFile.getStartTime()); - - assertEquals(runtime1.getResultRowCount(), ttFromFile.getRunTimesInMs().get(0).getResultRowCount()); - assertEquals(runtime2.getResultRowCount(), ttFromFile.getRunTimesInMs().get(1).getResultRowCount()); - assertEquals(runtime3.getResultRowCount(), ttFromFile.getRunTimesInMs().get(2).getResultRowCount()); - - // query result level verification - assertEquals(10, queryResultFromFile.getAvgMinRunTimeInMs()); - assertEquals(30, queryResultFromFile.getAvgMaxRunTimeInMs()); - assertEquals(20, queryResultFromFile.getAvgRunTimeInMs()); - // 3rd runtime has the earliest start time, therefore that's what's expected. - assertEquals(runtime3.getStartTime(), queryResultFromFile.getStartTime()); + return dataModelResult; } } \ No newline at end of file