asterixdb-commits mailing list archives

From ima...@apache.org
Subject [09/15] incubator-asterixdb git commit: Merge asterix-experiments to master
Date Wed, 16 Mar 2016 00:35:14 GMT
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/AbstractDynamicDataEvalReportBuilder.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/AbstractDynamicDataEvalReportBuilder.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/AbstractDynamicDataEvalReportBuilder.java
new file mode 100644
index 0000000..13d2694
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/AbstractDynamicDataEvalReportBuilder.java
@@ -0,0 +1,286 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.List;
+
+public abstract class AbstractDynamicDataEvalReportBuilder implements IDynamicDataEvalReportBuilder {
+
+    protected final static String INSTANTANEOUS_INSERT_STRING = "[TimeToInsert100000]";
+    protected final static int INSTANTAEOUS_INSERT_COUNT = 100000;
+    protected final static int ROUND_COUNT = 721;
+    protected final static int ROUND_INTERVAL = 5;
+    protected final String expHomePath;
+    protected final String expName;
+    protected final String runLogFilePath;
+    protected final String[] ncLogFilePaths;
+    protected BufferedReader br = null;
+    protected BufferedReader[] ncLogBrs = null;
+    protected final int ncLogFileCount;
+
+    protected final StringBuilder dataGenSb;
+    protected final StringBuilder queryGenSb;
+    protected final StringBuilder rsb;
+
+    protected AbstractDynamicDataEvalReportBuilder(String expHomePath, String expName, String runLogFileName,
+            boolean hasStatFile) {
+        this.expHomePath = expHomePath;
+        this.expName = expName;
+        this.runLogFilePath = new String(expHomePath + runLogFileName);
+        if (expName.contains("1A")) {
+            ncLogFileCount = 1;
+        } else if (expName.contains("1B")) {
+            ncLogFileCount = 2;
+        } else if (expName.contains("1C")) {
+            ncLogFileCount = 4;
+        } else /* if (expName.contains("1D") || other exps) */{
+            ncLogFileCount = 8;
+        }
+        ncLogFilePaths = new String[ncLogFileCount];
+        ncLogBrs = new BufferedReader[ncLogFileCount];
+        for (int i = 0; i < ncLogFileCount; i++) {
+            if (hasStatFile) {
+                ncLogFilePaths[i] = new String(expHomePath + expName + File.separator + "node" + (i + 1)
+                        + File.separator + "logs" + File.separator + "a1_node" + (i + 1) + ".log");
+            } else {
+                ncLogFilePaths[i] = new String(expHomePath + expName + File.separator + "logs" + File.separator
+                        + "a1_node" + (i + 1) + ".log");
+            }
+        }
+        dataGenSb = new StringBuilder();
+        queryGenSb = new StringBuilder();
+        rsb = new StringBuilder();
+    }
+
+    protected void openRunLog() throws IOException {
+        br = new BufferedReader(new FileReader(runLogFilePath));
+        for (int i = 0; i < ncLogFileCount; i++) {
+            ncLogBrs[i] = new BufferedReader(new FileReader(ncLogFilePaths[i]));
+        }
+    }
+
+    protected void closeRunLog() throws IOException {
+        if (br != null) {
+            br.close();
+        }
+        if (ncLogBrs != null) {
+            for (int i = 0; i < ncLogFileCount; i++) {
+                if (ncLogBrs[i] != null) {
+                    ncLogBrs[i].close();
+                }
+            }
+        }
+    }
+
+    protected boolean moveToExperimentBegin() throws IOException {
+        String line;
+        while ((line = br.readLine()) != null) {
+            if (line.contains("Running experiment: " + expName)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    protected void renewStringBuilder() {
+        dataGenSb.setLength(0);
+        queryGenSb.setLength(0);
+        rsb.setLength(0);
+    }
+
+    @Override
+    public String getInstantaneousInsertPS(int nodeId, boolean useTimeForX) throws Exception {
+        renewStringBuilder();
+        openRunLog();
+        try {
+
+            if (!moveToExperimentBegin()) {
+                //This experiment was not found in the run log file
+                return null;
+            }
+
+            int round = 0;
+            while (round < ROUND_COUNT) {
+                long IIPS = 0;
+                String line;
+                while ((line = ncLogBrs[nodeId].readLine()) != null) {
+                    if (line.contains("IPS")) {
+                        IIPS = ReportBuilderHelper.getLong(line, ", IIPS[", "]");
+                        break;
+                    }
+                }
+                round++;
+                dataGenSb.append(round * ROUND_INTERVAL).append(",").append(IIPS).append("\n");
+            }
+
+            return dataGenSb.toString();
+        } finally {
+            closeRunLog();
+        }
+    }
+
+    @Override
+    public void getAllNodesAccumulatedInsertPS(int targetRound, List<Long> ipsList) throws Exception {
+        renewStringBuilder();
+        openRunLog();
+        ipsList.clear();
+        try {
+
+            if (!moveToExperimentBegin()) {
+                //This experiment was not found in the run log file
+                return;
+            }
+
+            int round = 0;
+            while (round < targetRound) {
+                long IPSPerRound = 0;
+                for (int i = 0; i < ncLogFileCount; i++) {
+                    String line;
+                    while ((line = ncLogBrs[i].readLine()) != null) {
+                        if (line.contains("IPS")) {
+                            IPSPerRound += ReportBuilderHelper.getLong(line, ", IPS[", "]");
+                            break;
+                        }
+                    }
+                }
+                ipsList.add(IPSPerRound);
+                round++;
+            }
+            return;
+        } finally {
+            closeRunLog();
+        }
+    }
+
+    public String getInstantaneousDataGenPS(int genId, boolean useTimeForX) throws Exception {
+        renewStringBuilder();
+        openRunLog();
+        try {
+            if (!moveToExperimentBegin()) {
+                //This experiment was not found in the run log file
+                return null;
+            }
+
+            String line;
+            int dGenId;
+            int count = 0;
+            long timeToInsert = 0;
+            long totalTimeToInsert = 0;
+            while ((line = br.readLine()) != null) {
+                if (line.contains(INSTANTANEOUS_INSERT_STRING)) {
+                    dGenId = ReportBuilderHelper.getInt(line, "DataGen[", "]");
+                    if (dGenId == genId) {
+                        count++;
+                        timeToInsert = ReportBuilderHelper.getLong(line, INSTANTANEOUS_INSERT_STRING, "in");
+                        totalTimeToInsert += timeToInsert;
+                        if (useTimeForX) {
+                            dataGenSb.append(totalTimeToInsert / 1000).append(",")
+                                    .append(INSTANTAEOUS_INSERT_COUNT / ((double) (timeToInsert) / 1000)).append("\n");
+                        } else {
+                            dataGenSb.append(count).append(",")
+                                    .append(INSTANTAEOUS_INSERT_COUNT / ((double) (timeToInsert) / 1000)).append("\n");
+                        }
+                    }
+                }
+                if (line.contains("Running")) {
+                    break;
+                }
+            }
+            System.out.println("GenId[" + genId + "] " + totalTimeToInsert + ", " + (totalTimeToInsert / (1000 * 60)));
+            return dataGenSb.toString();
+        } finally {
+            closeRunLog();
+        }
+    }
+
+    public long getDataGenStartTimeStamp() throws Exception {
+        openRunLog();
+        try {
+            String line;
+            while ((line = br.readLine()) != null) {
+                if (line.contains("Running experiment: " + expName)) {
+                    while ((line = br.readLine()) != null) {
+                        //2015-10-27 17:18:28,242 INFO  [ParallelActionThread 6] transport.TransportImpl (TransportImpl.java:init(155)) - Client identity string: SSH-2.0-SSHJ_0_13_0
+                        if (line.contains("INFO  [ParallelActionThread")) {
+                            //format1 = new SimpleDateFormat("MMM dd, yyyy hh:mm:ss aa");
+                            //format2 = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
+                            SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
+                            return ReportBuilderHelper.getTimeStampAsLong(line, format);
+                        }
+                    }
+                }
+            }
+            return -1;
+        } finally {
+            closeRunLog();
+        }
+    }
+
+    public String getIndexSize(String indexDirPath) throws Exception {
+        /*
+         * example
+         * /mnt/data/sdb/youngsk2/asterix/storage/experiments/Tweets_idx_dhbtreeLocation/device_id_0:
+        total 211200
+        -rw-r--r-- 1 youngsk2 grad 191234048 Jun 29 00:11 2015-06-29-00-09-59-023_2015-06-28-23-51-56-984_b
+        -rw-r--r-- 1 youngsk2 grad   7864320 Jun 29 00:11 2015-06-29-00-09-59-023_2015-06-28-23-51-56-984_f
+        -rw-r--r-- 1 youngsk2 grad   4194304 Jun 29 00:10 2015-06-29-00-10-26-997_2015-06-29-00-10-26-997_b
+        -rw-r--r-- 1 youngsk2 grad    393216 Jun 29 00:10 2015-06-29-00-10-26-997_2015-06-29-00-10-26-997_f
+        -rw-r--r-- 1 youngsk2 grad   5898240 Jun 29 00:11 2015-06-29-00-10-59-791_2015-06-29-00-10-59-791_b
+        -rw-r--r-- 1 youngsk2 grad    393216 Jun 29 00:11 2015-06-29-00-10-59-791_2015-06-29-00-10-59-791_f
+        -rw-r--r-- 1 youngsk2 grad   5898240 Jun 29 00:11 2015-06-29-00-11-30-486_2015-06-29-00-11-30-486_b
+        -rw-r--r-- 1 youngsk2 grad    393216 Jun 29 00:11 2015-06-29-00-11-30-486_2015-06-29-00-11-30-486_f
+        
+         */
+        renewStringBuilder();
+        openRunLog();
+        try {
+            if (!moveToExperimentBegin()) {
+                //This experiment was not found in the run log file
+                return null;
+            }
+
+            String line;
+            String[] tokens;
+            long diskSize = 0;
+            while ((line = br.readLine()) != null) {
+                if (line.contains(indexDirPath)) {
+                    br.readLine();//discard "total XXXX" line
+                    //read and sum file size
+                    while (!(line = br.readLine()).isEmpty()) {
+                        tokens = line.split("\\s+");
+                        diskSize += Long.parseLong(tokens[4].trim());
+                    }
+                }
+                if (line.contains("Running")) {
+                    break;
+                }
+            }
+            rsb.append((double) diskSize / (1024 * 1024 * 1024));
+            return rsb.toString();
+        } finally {
+            closeRunLog();
+        }
+    }
+}
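
The builder above is abstract: the query-related methods declared by IDynamicDataEvalReportBuilder are left to concrete per-experiment subclasses. A minimal, hypothetical subclass might look like the sketch below; the class name, experiment name, and paths are placeholders, and the query metrics not needed for the sketch are stubbed out.

    import org.apache.asterix.experiment.report.AbstractDynamicDataEvalReportBuilder;

    public class MinimalDynamicReportBuilder extends AbstractDynamicDataEvalReportBuilder {

        public MinimalDynamicReportBuilder(String expHomePath, String expName, String runLogFileName) {
            super(expHomePath, expName, runLogFileName, false);
        }

        @Override
        public String getOverallInsertPS(int minutes) throws Exception {
            return null; // not needed for this sketch
        }

        @Override
        public String getInstantaneousQueryPS() throws Exception {
            return null; // not needed for this sketch
        }

        @Override
        public String getQueryPS(int minutes) throws Exception {
            return null; // not needed for this sketch
        }

        public static void main(String[] args) throws Exception {
            // Placeholder experiment home, experiment name ("1A" => one NC log), and run log name.
            MinimalDynamicReportBuilder builder = new MinimalDynamicReportBuilder(
                    "/tmp/experiment-home/", "SpatialIndexExperiment1ADhbtree", "run.log");
            // Emits one "<elapsed seconds>,<IIPS>" line per 5-second round for node 0.
            System.out.println(builder.getInstantaneousInsertPS(0, true));
        }
    }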

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/AbstractStaticDataEvalReportBuilder.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/AbstractStaticDataEvalReportBuilder.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/AbstractStaticDataEvalReportBuilder.java
new file mode 100644
index 0000000..fdd1d92
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/AbstractStaticDataEvalReportBuilder.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+public abstract class AbstractStaticDataEvalReportBuilder implements IStaticDataEvalReportBuilder {
+
+    protected final String expName;
+    protected final String runLogFilePath;
+    
+    protected AbstractStaticDataEvalReportBuilder(String expName, String runLogFilePath) {
+        this.expName = expName;
+        this.runLogFilePath = runLogFilePath;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/IDynamicDataEvalReportBuilder.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/IDynamicDataEvalReportBuilder.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/IDynamicDataEvalReportBuilder.java
new file mode 100644
index 0000000..4d5e511
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/IDynamicDataEvalReportBuilder.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+import java.util.List;
+
+public interface IDynamicDataEvalReportBuilder {
+    public String getInstantaneousInsertPS(int genId, boolean useTimeForX) throws Exception;
+
+    public String getOverallInsertPS(int minutes) throws Exception;
+
+    public String getInstantaneousQueryPS() throws Exception;
+
+    public String getQueryPS(int minutes) throws Exception;
+
+    void getAllNodesAccumulatedInsertPS(int targetRound, List<Long> ipsList) throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/IStaticDataEvalReportBuilder.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/IStaticDataEvalReportBuilder.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/IStaticDataEvalReportBuilder.java
new file mode 100644
index 0000000..6c879f4
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/IStaticDataEvalReportBuilder.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+public interface IStaticDataEvalReportBuilder {
+    public String getSelectQueryTime();
+    public String getJoinQueryTime();
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/NCLogReportBuilder.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/NCLogReportBuilder.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/NCLogReportBuilder.java
new file mode 100644
index 0000000..5da4463
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/NCLogReportBuilder.java
@@ -0,0 +1,212 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map.Entry;
+
+public class NCLogReportBuilder {
+
+    private String ncLogFilePath = "/Users/kisskys/workspace/asterix_experiment/run-log/measure-with-balloon/sie1-8dgen/log-1436511417368/SpatialIndexExperiment1ADhbtree/logs/a1_node1.log";
+    private BufferedReader br;
+    private String timeLine;
+    private String msgLine;
+
+    public NCLogReportBuilder(String filePath) {
+        if (filePath != null) {
+            this.ncLogFilePath = filePath;
+        }
+    }
+
+    public String getFlushMergeEventAsGanttChartFormat(long testBeginTimeStamp) throws Exception {
+        openNCLog();
+        StringBuilder sb = new StringBuilder();
+        long flushStartTimeStamp, flushFinishTimeStamp, mergeStartTimeStamp, mergeFinishTimeStamp;
+        String indexName;
+        SimpleDateFormat format = new SimpleDateFormat("MMM dd, yyyy hh:mm:ss aa");
+        HashMap<String, Long> flushMap = new HashMap<String, Long>();
+        HashMap<String, Long> mergeMap = new HashMap<String, Long>();
+        long sTime, fTime;
+        try {
+            while ((timeLine = br.readLine()) != null) {
+                if ((msgLine = br.readLine()) == null) {
+                    break;
+                }
+                while (!msgLine.contains("INFO:")) {
+                    timeLine = msgLine;
+                    msgLine = br.readLine();
+                    if (msgLine == null) {
+                        break;
+                    }
+                }
+                if (msgLine == null) {
+                    break;
+                }
+
+                //flush start
+                if (msgLine.contains("Started a flush operation for index")) {
+                    flushStartTimeStamp = ReportBuilderHelper.getTimeStampAsLong(timeLine, format);
+
+                    //ignore flush op which happened before the data gen started.
+                    if (flushStartTimeStamp < testBeginTimeStamp) {
+                        continue;
+                    }
+
+                    indexName = ReportBuilderHelper.getString(msgLine, "experiments/Tweets_idx_", "/]");
+                    flushMap.put(indexName, flushStartTimeStamp);
+                }
+
+                //flush finish
+                if (msgLine.contains("Finished the flush operation for index")) {
+                    flushFinishTimeStamp = ReportBuilderHelper.getTimeStampAsLong(timeLine, format);
+
+                    //ignore flush op which happened before the data gen started.
+                    if (flushFinishTimeStamp < testBeginTimeStamp) {
+                        continue;
+                    }
+
+                    indexName = ReportBuilderHelper.getString(msgLine, "experiments/Tweets_idx_", "/]");
+
+                    if (flushMap.containsKey(indexName)) {
+                        flushStartTimeStamp = flushMap.remove(indexName);
+                        sTime = (flushStartTimeStamp - testBeginTimeStamp) / 1000;
+                        fTime = (flushFinishTimeStamp - testBeginTimeStamp) / 1000;
+                        if (fTime == sTime) {
+                            ++fTime;
+                        }
+                        //only for sie1
+                        //                        if (fTime > 1200) {
+                        //                            fTime = 1200;
+                        //                        }
+                        sb.append("f-" + getPrintName(indexName)).append("\t").append(sTime).append("\t").append(fTime)
+                                .append("\t").append(indexName.contains("Tweets") ? "flushPidx" : "flushSidx")
+                                .append("\n");
+                    }
+                }
+
+                //merge start
+                if (msgLine.contains("Started a merge operation for index")) {
+                    mergeStartTimeStamp = ReportBuilderHelper.getTimeStampAsLong(timeLine, format);
+
+                    //ignore merge op which happened before the data gen started.
+                    if (mergeStartTimeStamp < testBeginTimeStamp) {
+                        continue;
+                    }
+
+                    indexName = ReportBuilderHelper.getString(msgLine, "experiments/Tweets_idx_", "/]");
+                    mergeMap.put(indexName, mergeStartTimeStamp);
+                }
+
+                //merge finish
+                if (msgLine.contains("Finished the merge operation for index")) {
+                    mergeFinishTimeStamp = ReportBuilderHelper.getTimeStampAsLong(timeLine, format);
+
+                    //ignore merge op which happened before the data gen started.
+                    if (mergeFinishTimeStamp < testBeginTimeStamp) {
+                        continue;
+                    }
+
+                    indexName = ReportBuilderHelper.getString(msgLine, "experiments/Tweets_idx_", "/]");
+
+                    if (mergeMap.containsKey(indexName)) {
+                        mergeStartTimeStamp = mergeMap.remove(indexName);
+                        sTime = (mergeStartTimeStamp - testBeginTimeStamp) / 1000;
+                        fTime = (mergeFinishTimeStamp - testBeginTimeStamp) / 1000;
+                        if (fTime == sTime) {
+                            ++fTime;
+                        }
+                        //only for sie1
+                        //                        if (fTime > 1200) {
+                        //                            fTime = 1200;
+                        //                        }
+                        sb.append("m-" + getPrintName(indexName)).append("\t").append(sTime).append("\t").append(fTime)
+                                .append("\t").append(indexName.contains("Tweets") ? "mergePidx" : "mergeSidx")
+                                .append("\n");
+                    }
+                }
+            }
+
+            Iterator<Entry<String, Long>> mergeMapIter = mergeMap.entrySet().iterator();
+            Entry<String, Long> entry = null;
+            while (mergeMapIter.hasNext()) {
+                entry = mergeMapIter.next();
+                sb.append("m-" + getPrintName(entry.getKey())).append("\t")
+                        .append((entry.getValue() - testBeginTimeStamp) / 1000).append("\t").append(60 * 20)
+                        .append("\t").append(entry.getKey().contains("Tweets") ? "mergePidx" : "mergeSidx")
+                        .append("\n");
+            }
+
+            Iterator<Entry<String, Long>> flushMapIter = flushMap.entrySet().iterator();
+            while (flushMapIter.hasNext()) {
+                entry = flushMapIter.next();
+                sb.append("f-" + getPrintName(entry.getKey())).append("\t")
+                        .append((entry.getValue() - testBeginTimeStamp) / 1000).append("\t").append(60 * 20)
+                        .append("\t").append(entry.getKey().contains("Tweets") ? "flushPidx" : "flushSidx")
+                        .append("\n");
+            }
+
+            return sb.toString();
+        } finally {
+            closeNCLog();
+        }
+    }
+
+    private String getPrintName(String indexName) {
+        String name = null;
+        if (indexName.contains("Tweets")) {
+            if (indexName.contains("0")) {
+                name = "pidx0";
+            } else if (indexName.contains("1")) {
+                name = "pidx1";
+            } else if (indexName.contains("2")) {
+                name = "pidx2";
+            } else if (indexName.contains("3")) {
+                name = "pidx3";
+            }
+        } else if (indexName.contains("Location")) {
+            if (indexName.contains("0")) {
+                name = "sidx0"; //ReportBuilderHelper.getString(indexName, "Location") + "0";
+            } else if (indexName.contains("1")) {
+                name = "sidx1"; //ReportBuilderHelper.getString(indexName, "Location") + "1";
+            } else if (indexName.contains("2")) {
+                name = "sidx2"; //ReportBuilderHelper.getString(indexName, "Location") + "2";
+            } else if (indexName.contains("3")) {
+                name = "sidx3"; //ReportBuilderHelper.getString(indexName, "Location") + "2";
+            }
+        }
+        return name;
+    }
+
+    protected void openNCLog() throws IOException {
+        br = new BufferedReader(new FileReader(ncLogFilePath));
+    }
+
+    protected void closeNCLog() throws IOException {
+        if (br != null) {
+            br.close();
+        }
+    }
+
+}
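
A short, hypothetical usage sketch for the Gantt-format extraction above; the NC log path is a placeholder, and the test begin timestamp would normally come from AbstractDynamicDataEvalReportBuilder.getDataGenStartTimeStamp().

    import org.apache.asterix.experiment.report.NCLogReportBuilder;

    public class NCLogGanttExample {
        public static void main(String[] args) throws Exception {
            // Placeholder NC log path; passing null falls back to the hard-coded default above.
            NCLogReportBuilder builder = new NCLogReportBuilder("/tmp/logs/a1_node1.log");
            // Placeholder epoch-millisecond timestamp marking the start of data generation.
            long testBeginTimeStamp = 1436511417368L;
            // Each emitted line: "<f|m>-<index>\t<startSec>\t<finishSec>\t<flush|merge><Pidx|Sidx>".
            System.out.print(builder.getFlushMergeEventAsGanttChartFormat(testBeginTimeStamp));
        }
    }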

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/OperatorProfilerReportBuilder.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/OperatorProfilerReportBuilder.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/OperatorProfilerReportBuilder.java
new file mode 100644
index 0000000..24617bb
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/OperatorProfilerReportBuilder.java
@@ -0,0 +1,322 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map.Entry;
+
+public class OperatorProfilerReportBuilder {
+
+    private static final int INDEX_BUILD_OP_COUNT = 1;
+    private static final int PIDX_SCAN_OP_COUNT = 1;
+    private static final int WARM_UP_SELECT_QUERY_COUNT = 500;
+    private static final int SELECT_QUERY_COUNT = 5000;
+    private static final int JOIN_QUERY_COUNT = 200;
+    private static final int JOIN_RADIUS_TYPE_COUNT = 4;
+    private static final int SELECT_RADIUS_TYPE_COUNT = 5;
+    private static final int IDX_JOIN_RADIUS_SKIP = JOIN_RADIUS_TYPE_COUNT - 1;
+    private static final int IDX_SELECT_RADIUS_SKIP = SELECT_RADIUS_TYPE_COUNT - 1;
+    private static final int IDX_INITIAL_JOIN_SKIP = INDEX_BUILD_OP_COUNT + PIDX_SCAN_OP_COUNT
+            + WARM_UP_SELECT_QUERY_COUNT + SELECT_QUERY_COUNT;
+    private static final int IDX_INITIAL_SELECT_SKIP = INDEX_BUILD_OP_COUNT + PIDX_SCAN_OP_COUNT
+            + WARM_UP_SELECT_QUERY_COUNT;
+
+    private static final int HYRACK_JOB_ELAPSED_TIME_FIELD = 2;
+    private static final int OP_ELAPSED_TIME_FIELD = 4;
+    private static final int OP_TASK_ID_FIELD = 2;
+    private static final int OP_NAME_FIELD = 1;
+
+    private String executionTimeFilePath = null;
+    private BufferedReader brExecutionTime;
+    private String line;
+    private int lineNum;
+
+    public OperatorProfilerReportBuilder(String executionTimeFilePath) {
+        this.executionTimeFilePath = executionTimeFilePath;
+    }
+
+    public String getIdxNumber(boolean isJoin, int radiusIdx) throws Exception {
+        openExecutionTimeFile();
+
+        StringBuilder sb = new StringBuilder();
+        int initialSkip = (isJoin ? IDX_INITIAL_JOIN_SKIP : IDX_INITIAL_SELECT_SKIP) + radiusIdx;
+        int radiusSkip = isJoin ? IDX_JOIN_RADIUS_SKIP : IDX_SELECT_RADIUS_SKIP;
+        BufferedReader br = brExecutionTime;
+        int queryCount = isJoin ? JOIN_QUERY_COUNT / JOIN_RADIUS_TYPE_COUNT : SELECT_QUERY_COUNT
+                / SELECT_RADIUS_TYPE_COUNT;
+        lineNum = 0;
+        JobStat jobStat = new JobStat();
+
+        try {
+
+            //initial skip
+            int jobCount = 0;
+            while ((line = br.readLine()) != null) {
+                lineNum++;
+                if (line.contains("TOTAL_HYRACKS_JOB")) {
+                    jobCount++;
+                    if (jobCount > initialSkip) {
+                        break;
+                    }
+                }
+            }
+
+            //Reaching Here, line variable contains the first job to be counted
+            for (int j = 0; j < queryCount; j++) {
+
+                analyzeOperatorExecutionTime(jobStat, br);
+
+                //radius skip
+                jobCount = 0;
+                while ((line = br.readLine()) != null) {
+                    lineNum++;
+                    if (line.contains("TOTAL_HYRACKS_JOB")) {
+                        jobCount++;
+                        if (jobCount > radiusSkip) {
+                            break;
+                        }
+                    }
+                }
+            }
+
+            //System.out.println("lineNum: " + lineNum);
+            sb.append("TOTAL_HYRACKS_JOB," + (((double) jobStat.getHyracksJobTimeSum()) / jobStat.getHyracksJobCount())
+                    + "," + jobStat.getHyracksJobTimeSum() + "," + jobStat.getHyracksJobCount() + "\n");
+            sb.append(jobStat.getOperatorsElapsedTimeAsString());
+            return sb.toString();
+        } finally {
+            closeExecutionTimeFile();
+        }
+    }
+
+    private void analyzeOperatorExecutionTime(JobStat jobStat, BufferedReader br) throws IOException {
+        //the line field contains a TOTAL_HYRACKS_JOB line, e.g.:
+        //2015-11-04 19:13:08,003   TOTAL_HYRACKS_JOB a1_node1_JID:3_26202768 TOTAL_HYRACKS_JOB1446660788003    1066    1.066   1066    1.066
+        String tokens[] = line.split("\t");
+
+        if (Long.parseLong(tokens[HYRACK_JOB_ELAPSED_TIME_FIELD]) > 10000) {
+            System.out.println("[" + lineNum + "] " + line);
+        }
+
+        jobStat.addHyracksJobTime(Long.parseLong(tokens[HYRACK_JOB_ELAPSED_TIME_FIELD]));
+
+        while ((line = br.readLine()) != null) {
+            lineNum++;
+
+            if (line.isEmpty()) {
+                break;
+            }
+
+            tokens = line.split("\t");
+            if (line.contains("DISTRIBUTE_RESULT")) {
+                jobStat.addDistributeResultTime(Long.parseLong(tokens[OP_ELAPSED_TIME_FIELD]));
+                continue;
+            }
+            if (line.contains("EMPTY_TUPLE_SOURCE")) {
+                continue;
+            }
+
+            if (line.contains("TXN_JOB_COMMIT")) {
+                continue;
+            }
+
+            jobStat.updateOperatorTime(tokens[OP_TASK_ID_FIELD], tokens[OP_NAME_FIELD],
+                    Long.parseLong(tokens[OP_ELAPSED_TIME_FIELD]));
+        }
+
+        jobStat.updateTaskForAvgWithSlowestTask();
+    }
+
+    protected void openExecutionTimeFile() throws IOException {
+        brExecutionTime = new BufferedReader(new FileReader(executionTimeFilePath));
+    }
+
+    protected void closeExecutionTimeFile() throws IOException {
+        if (brExecutionTime != null) {
+            brExecutionTime.close();
+        }
+    }
+
+    class JobStat {
+        private long hyracksJobElapsedTimeSum;
+        private int hyracksJobCount;
+        private long distributeResultTimeSum;
+        private Task taskForAvg;
+        private HashMap<String, Task> taskId2TaskMap;
+
+        public JobStat() {
+            hyracksJobElapsedTimeSum = 0;
+            hyracksJobCount = 0;
+            distributeResultTimeSum = 0;
+            taskForAvg = new Task("TaskForAvg");
+            taskId2TaskMap = new HashMap<String, Task>();
+        }
+
+        public void reset() {
+            hyracksJobElapsedTimeSum = 0;
+            hyracksJobCount = 0;
+            distributeResultTimeSum = 0;
+            taskForAvg.reset();
+            taskId2TaskMap.clear();
+        }
+
+        public void addHyracksJobTime(long elapsedTime) {
+            hyracksJobElapsedTimeSum += elapsedTime;
+            hyracksJobCount++;
+        }
+
+        public void addDistributeResultTime(long elapsedTime) {
+            distributeResultTimeSum += elapsedTime;
+        }
+
+        public long getDistributeResultTime() {
+            return distributeResultTimeSum;
+        }
+
+        public long getHyracksJobTimeSum() {
+            return hyracksJobElapsedTimeSum;
+        }
+
+        public int getHyracksJobCount() {
+            return hyracksJobCount;
+        }
+
+        public void updateOperatorTime(String taskId, String operatorName, long elapsedTime) {
+            Task task = taskId2TaskMap.get(taskId);
+            if (task == null) {
+                task = new Task(taskId);
+                taskId2TaskMap.put(new String(taskId), task);
+            }
+            task.updateOperatorTime(operatorName, elapsedTime);
+        }
+
+        public void updateTaskForAvgWithSlowestTask() {
+            Iterator<Entry<String, Task>> taskIter = taskId2TaskMap.entrySet().iterator();
+            Task slowestTask = null;
+            Task curTask;
+
+            //get the slowest task
+            while (taskIter.hasNext()) {
+                curTask = taskIter.next().getValue();
+                if (slowestTask == null) {
+                    slowestTask = curTask;
+                } else {
+                    if (slowestTask.getElapsedTime() < curTask.getElapsedTime()) {
+                        slowestTask = curTask;
+                    }
+                }
+            }
+
+            //update the TaskForAvg with the slowest one
+            HashMap<String, SumCount> operator2SumCountMap = slowestTask.getOperator2SumCountMap();
+            Iterator<Entry<String, SumCount>> operatorIter = operator2SumCountMap.entrySet().iterator();
+            while (operatorIter.hasNext()) {
+                Entry<String, SumCount> entry = operatorIter.next();
+                SumCount sc = entry.getValue();
+                taskForAvg.updateOperatorTime(entry.getKey(), sc.sum);
+            }
+            taskId2TaskMap.clear();
+        }
+
+        public String getOperatorsElapsedTimeAsString() {
+            return "SUM_OF_OPERATORS," + (((double) taskForAvg.getElapsedTime()) / hyracksJobCount) + ","
+                    + taskForAvg.getElapsedTime() + "," + hyracksJobCount + "\n"
+                    + taskForAvg.getOperatorsElapsedTimeAsString() + "DISTRIBUTE_RESULT,"
+                    + (((double) distributeResultTimeSum) / hyracksJobCount) + "," + distributeResultTimeSum + ","
+                    + hyracksJobCount + "\n";
+        }
+    }
+
+    class Task {
+        private String taskId;
+        private long elapsedTime;
+        private HashMap<String, SumCount> operator2SumCountMap;
+
+        public Task(String taskId) {
+            this.taskId = new String(taskId);
+            elapsedTime = 0;
+            operator2SumCountMap = new HashMap<String, SumCount>();
+        }
+
+        @Override
+        public int hashCode() {
+            return taskId.hashCode();
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (o == this) {
+                return true;
+            }
+            if (!(o instanceof Task)) {
+                return false;
+            }
+            return ((Task) o).taskId.equals(taskId);
+        }
+
+        public long getElapsedTime() {
+            return elapsedTime;
+        }
+
+        public void updateOperatorTime(String operatorName, long elapsedTime) {
+            SumCount sc = operator2SumCountMap.get(operatorName);
+            if (sc == null) {
+                sc = new SumCount();
+                sc.sum = 0;
+                sc.count = 0;
+                operator2SumCountMap.put(new String(operatorName), sc);
+            }
+            sc.sum += elapsedTime;
+            sc.count++;
+            this.elapsedTime += elapsedTime;
+        }
+
+        public void reset() {
+            elapsedTime = 0;
+            operator2SumCountMap.clear();
+        }
+
+        public String getOperatorsElapsedTimeAsString() {
+            StringBuilder sb = new StringBuilder();
+            Iterator<Entry<String, SumCount>> iter = operator2SumCountMap.entrySet().iterator();
+            while (iter.hasNext()) {
+                Entry<String, SumCount> entry = iter.next();
+                SumCount sc = entry.getValue();
+                sb.append(entry.getKey()).append(",").append(((double) sc.sum) / sc.count).append(",").append(sc.sum)
+                        .append(",").append(sc.count).append("\n");
+            }
+            return sb.toString();
+        }
+
+        public HashMap<String, SumCount> getOperator2SumCountMap() {
+            return operator2SumCountMap;
+        }
+    }
+
+    class SumCount {
+        public long sum;
+        public int count;
+    }
+
+}
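
A hypothetical driver for the operator profile report above; the execution-time log path is a placeholder.

    import org.apache.asterix.experiment.report.OperatorProfilerReportBuilder;

    public class OperatorProfileExample {
        public static void main(String[] args) throws Exception {
            // Placeholder path to an executionTime-*.txt file produced by a profiled run.
            OperatorProfilerReportBuilder builder =
                    new OperatorProfilerReportBuilder("/tmp/logs/executionTime-127.0.0.1.txt");
            // Per-operator averages for select queries of radius type 0 ...
            System.out.println(builder.getIdxNumber(false, 0));
            // ... and for join queries of radius type 0.
            System.out.println(builder.getIdxNumber(true, 0));
        }
    }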

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ProfilerReportBuilder.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ProfilerReportBuilder.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ProfilerReportBuilder.java
new file mode 100644
index 0000000..d5eb539
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ProfilerReportBuilder.java
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+
+public class ProfilerReportBuilder {
+
+    private static final int INDEX_BUILD_OP_COUNT = 1;
+    private static final int PIDX_SCAN_OP_COUNT = 1;
+    private static final int WARM_UP_SELECT_QUERY_COUNT = 500;
+    private static final int SELECT_QUERY_COUNT = 5000;
+    private static final int JOIN_QUERY_COUNT = 200;
+    private static final int PARTITION_COUNT = 4;
+
+    private static final int SELECT_QUERY_INVOLVED_INDEX_COUNT = 2;
+    private static final int JOIN_QUERY_INVOLVED_INDEX_COUNT = 3;
+    private static final int JOIN_RADIUS_TYPE_COUNT = 4;
+    private static final int SELECT_RADIUS_TYPE_COUNT = 5;
+    private static final int IDX_JOIN_RADIUS_SKIP = JOIN_RADIUS_TYPE_COUNT * JOIN_QUERY_INVOLVED_INDEX_COUNT
+            * PARTITION_COUNT - PARTITION_COUNT;
+    private static final int IDX_SELECT_RADIUS_SKIP = SELECT_RADIUS_TYPE_COUNT * SELECT_QUERY_INVOLVED_INDEX_COUNT
+            * PARTITION_COUNT - PARTITION_COUNT;
+    private static final int IDX_INITIAL_JOIN_SKIP = (INDEX_BUILD_OP_COUNT + PIDX_SCAN_OP_COUNT + ((WARM_UP_SELECT_QUERY_COUNT + SELECT_QUERY_COUNT) * SELECT_QUERY_INVOLVED_INDEX_COUNT))
+            * PARTITION_COUNT;
+    private static final int IDX_INITIAL_SELECT_SKIP = (INDEX_BUILD_OP_COUNT + PIDX_SCAN_OP_COUNT + (WARM_UP_SELECT_QUERY_COUNT * SELECT_QUERY_INVOLVED_INDEX_COUNT))
+            * PARTITION_COUNT;
+    private static final int FP_JOIN_RADIUS_SKIP = JOIN_RADIUS_TYPE_COUNT * PARTITION_COUNT - PARTITION_COUNT;
+    private static final int FP_SELECT_RADIUS_SKIP = SELECT_RADIUS_TYPE_COUNT * PARTITION_COUNT - PARTITION_COUNT;
+    private static final int FP_INITIAL_JOIN_SKIP = (PIDX_SCAN_OP_COUNT + WARM_UP_SELECT_QUERY_COUNT + SELECT_QUERY_COUNT)
+            * PARTITION_COUNT;
+    private static final int FP_INITIAL_SELECT_SKIP = (PIDX_SCAN_OP_COUNT + WARM_UP_SELECT_QUERY_COUNT)
+            * PARTITION_COUNT;
+
+    private String indexSearchTimeFilePath = null;
+    private String falsePositiveFilePath = null;
+    private String cacheMissFilePath = null;
+    private BufferedReader brIndexSearchTime;
+    private BufferedReader brFalsePositive;
+    private BufferedReader brCacheMiss;
+    private String line;
+
+    public ProfilerReportBuilder(String indexSearchTimeFilePath, String falsePositiveFilePath, String cacheMissFilePath) {
+        this.indexSearchTimeFilePath = indexSearchTimeFilePath;
+        this.falsePositiveFilePath = falsePositiveFilePath;
+        this.cacheMissFilePath = cacheMissFilePath;
+    }
+
+    public String getIdxNumber(boolean getSearchTime, boolean isJoin, int radiusIdx, int indexIdx) throws Exception {
+        if (getSearchTime) {
+            openIndexSearchTimeFile();
+        } else {
+            openCacheMissFile();
+        }
+
+        StringBuilder sb = new StringBuilder();
+        int involvedIndexCount = isJoin ? JOIN_QUERY_INVOLVED_INDEX_COUNT : SELECT_QUERY_INVOLVED_INDEX_COUNT;
+        int initialSkip = (isJoin ? IDX_INITIAL_JOIN_SKIP : IDX_INITIAL_SELECT_SKIP) + radiusIdx * involvedIndexCount
+                * PARTITION_COUNT + indexIdx * PARTITION_COUNT;
+        int radiusSkip = isJoin ? IDX_JOIN_RADIUS_SKIP : IDX_SELECT_RADIUS_SKIP;
+        long measuredValue = 0;
+        BufferedReader br = getSearchTime ? brIndexSearchTime : brCacheMiss;
+        int lineNum = 0;
+        int queryCount = isJoin ? JOIN_QUERY_COUNT / JOIN_RADIUS_TYPE_COUNT : SELECT_QUERY_COUNT
+                / SELECT_RADIUS_TYPE_COUNT;
+        try {
+
+            //initial skip
+            for (int i = 0; i < initialSkip; i++) {
+                br.readLine();
+                ++lineNum;
+            }
+
+            for (int j = 0; j < queryCount; j++) {
+                //get target index numbers
+                for (int i = 0; i < PARTITION_COUNT; i++) {
+                    line = br.readLine();
+                    measuredValue += Long.parseLong(line);
+                    ++lineNum;
+                }
+
+                //radius skip
+                for (int i = 0; i < radiusSkip; i++) {
+                    br.readLine();
+                    ++lineNum;
+                }
+            }
+
+            //System.out.println("lineNum: " + lineNum);
+            sb.append((double) measuredValue / (PARTITION_COUNT * queryCount));
+            return sb.toString();
+        } finally {
+            if (getSearchTime) {
+                closeIndexSearchTimeFile();
+            } else {
+                closeCacheMissFile();
+            }
+        }
+    }
+
+    public String getFalsePositives(boolean isJoin, int radiusIdx) throws Exception {
+        openFalsePositiveFile();
+
+        StringBuilder sb = new StringBuilder();
+        int initialSkip = (isJoin ? FP_INITIAL_JOIN_SKIP : FP_INITIAL_SELECT_SKIP) + radiusIdx * PARTITION_COUNT;
+        int radiusSkip = isJoin ? FP_JOIN_RADIUS_SKIP : FP_SELECT_RADIUS_SKIP;
+        long falsePositives = 0;
+        BufferedReader br = brFalsePositive;
+        int lineNum = 0;
+        int queryCount = isJoin ? JOIN_QUERY_COUNT / JOIN_RADIUS_TYPE_COUNT : SELECT_QUERY_COUNT
+                / SELECT_RADIUS_TYPE_COUNT;
+        try {
+
+            //initial skip
+            for (int i = 0; i < initialSkip; i++) {
+                br.readLine();
+                ++lineNum;
+            }
+
+            for (int j = 0; j < queryCount; j++) {
+                //get target index numbers
+                for (int i = 0; i < PARTITION_COUNT; i++) {
+                    line = br.readLine();
+                    falsePositives += Long.parseLong(line);
+                    ++lineNum;
+                }
+
+                //radius skip
+                for (int i = 0; i < radiusSkip; i++) {
+                    br.readLine();
+                    ++lineNum;
+                }
+            }
+
+            //System.out.println("lineNum: " + lineNum);
+            sb.append((double) falsePositives / (PARTITION_COUNT * queryCount));
+            return sb.toString();
+        } finally {
+            closeFalsePositiveFile();
+        }
+    }
+
+    protected void openIndexSearchTimeFile() throws IOException {
+        brIndexSearchTime = new BufferedReader(new FileReader(indexSearchTimeFilePath));
+    }
+
+    protected void closeIndexSearchTimeFile() throws IOException {
+        if (brIndexSearchTime != null) {
+            brIndexSearchTime.close();
+        }
+    }
+
+    protected void openFalsePositiveFile() throws IOException {
+        brFalsePositive = new BufferedReader(new FileReader(falsePositiveFilePath));
+    }
+
+    protected void closeFalsePositiveFile() throws IOException {
+        if (brFalsePositive != null) {
+            brFalsePositive.close();
+        }
+    }
+
+    protected void openCacheMissFile() throws IOException {
+        brCacheMiss = new BufferedReader(new FileReader(cacheMissFilePath));
+    }
+
+    protected void closeCacheMissFile() throws IOException {
+        if (brCacheMiss != null) {
+            brCacheMiss.close();
+        }
+    }
+
+}
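
A hypothetical driver for the profiler report above; all three input paths are placeholders.

    import org.apache.asterix.experiment.report.ProfilerReportBuilder;

    public class ProfilerReportExample {
        public static void main(String[] args) throws Exception {
            // Placeholder profiler output files (index search time, false positives, cache misses).
            ProfilerReportBuilder builder = new ProfilerReportBuilder(
                    "/tmp/logs/indexSearchTime.txt",
                    "/tmp/logs/falsePositive.txt",
                    "/tmp/logs/cacheMiss.txt");
            // Average index search time: select queries, radius type 0, first involved index.
            System.out.println(builder.getIdxNumber(true, false, 0, 0));
            // Average false positives per query: select queries, radius type 0.
            System.out.println(builder.getFalsePositives(false, 0));
        }
    }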

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ReportBuilderHelper.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ReportBuilderHelper.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ReportBuilderHelper.java
new file mode 100644
index 0000000..93ae819
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ReportBuilderHelper.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+public class ReportBuilderHelper {
+
+    public static void main(String[] args) throws Exception {
+        String line = "INFO: DataGen[13][During ingestion only][TimeToInsert100000] 10651 in milliseconds";
+        System.out.println(getEndIndexOf("DataGen[1][During ingestion only][TimeToInsert100000] 10651 in milliseconds",
+                "DataGen["));
+        System.out.println(getLong(line, "[TimeToInsert100000]", "in"));
+        System.out.println(getLong(line, "DataGen[", "]"));
+        //SIE1AReportBuilder rb = new SIE1AReportBuilder("/Users/kisskys/workspace/asterix_experiment/run-log/run-log-backup/log-1435560604069/run.log");
+        //System.out.println(rb.getInstantaneousInsertPS());
+    }
+
+    public static int getEndIndexOf(String target, String pattern) {
+        //get the end index of the pattern string in target string.
+        int index = target.indexOf(pattern);
+        if (index != -1) {
+            return target.indexOf(pattern) + pattern.length();
+        }
+        return -1;
+    }
+
+    public static long getLong(String line, String beginPattern, String endPattern) {
+        int idBeginIdx = getEndIndexOf(line, beginPattern);
+        int idEndIdx = line.indexOf(endPattern, idBeginIdx);
+        return Long.parseLong(line.substring(idBeginIdx, idEndIdx).trim());
+    }
+
+    public static int getInt(String line, String beginPattern, String endPattern) {
+        int idBeginIdx = getEndIndexOf(line, beginPattern);
+        int idEndIdx = line.indexOf(endPattern, idBeginIdx);
+        return Integer.parseInt(line.substring(idBeginIdx, idEndIdx).trim());
+    }
+
+    public static double getDouble(String line, String beginPattern, String endPattern) {
+        int idBeginIdx = getEndIndexOf(line, beginPattern);
+        int idEndIdx = line.indexOf(endPattern, idBeginIdx);
+        return Double.parseDouble(line.substring(idBeginIdx, idEndIdx).trim());
+    }
+
+    public static String getString(String line, String beginPattern, String endPattern) {
+        int idBeginIdx = getEndIndexOf(line, beginPattern);
+        int idEndIdx = line.indexOf(endPattern, idBeginIdx);
+        return line.substring(idBeginIdx, idEndIdx).trim();
+    }
+    
+    public static String getString(String line, String endPattern) {
+        int idEndIdx = line.indexOf(endPattern, 0);
+        return line.substring(0, idEndIdx).trim();
+    }
+
+    public static long getTimeStampAsLong(String line, SimpleDateFormat format) throws ParseException {
+        //Jul 09, 2015 11:58:08
+        //String line = "Jul 09, 2015 11:58:09 PM org.apache.hyracks.storage.am.lsm.common.impls.LSMHarness flush";
+        //DateFormat format;
+        //format = new SimpleDateFormat("MMM dd, yyyy hh:mm:ss");
+        Date parseDate = format.parse(line);
+        return parseDate.getTime();
+    }
+    
+    protected static FileOutputStream openOutputFile(String filepath) throws IOException {
+        File file = new File(filepath);
+        if (file.exists()) {
+            //throw new IOException(filepath + "already exists");
+            file.delete();
+        }
+        file.createNewFile();
+        return new FileOutputStream(file);
+    }
+
+    protected static void closeOutputFile(FileOutputStream fos) throws IOException {
+        fos.flush();
+        fos.close();
+        fos = null;
+    }
+
+}
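
The helpers above are simple pattern-bracket extractors; a small illustrative sketch follows, where the log lines are synthetic examples in the formats consumed by the report builders in this commit.

    import java.text.SimpleDateFormat;

    import org.apache.asterix.experiment.report.ReportBuilderHelper;

    public class HelperParseExample {
        public static void main(String[] args) throws Exception {
            // NC log message in the shape consumed by NCLogReportBuilder.
            String msg = "Started a flush operation for index: [.../experiments/Tweets_idx_dhbtreeLocation/]";
            // Prints "dhbtreeLocation": the text between the two bracketing patterns.
            System.out.println(ReportBuilderHelper.getString(msg, "experiments/Tweets_idx_", "/]"));

            // Data-gen statistics line, as in the main() method above.
            String stat = "INFO: DataGen[13][During ingestion only][TimeToInsert100000] 10651 in milliseconds";
            System.out.println(ReportBuilderHelper.getInt(stat, "DataGen[", "]"));               // 13
            System.out.println(ReportBuilderHelper.getLong(stat, "[TimeToInsert100000]", "in")); // 10651

            // Timestamp extraction with the same pattern used by getDataGenStartTimeStamp().
            SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
            System.out.println(ReportBuilderHelper.getTimeStampAsLong("2015-10-27 17:18:28,242 INFO", format));
        }
    }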

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ReportBuilderRunner.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ReportBuilderRunner.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ReportBuilderRunner.java
new file mode 100644
index 0000000..eba8f07
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/ReportBuilderRunner.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+public class ReportBuilderRunner {
+    public static final boolean REPORT_SIE1 = false;
+    public static final boolean REPORT_SIE2 = false;
+    public static final boolean REPORT_SIE3 = false;
+    public static final boolean REPORT_SIE4 = false;
+    public static final boolean REPORT_SIE5 = true;
+    public static final boolean REPORT_SIE3_PROFILE = false;
+
+    public static void main(String[] args) throws Exception {
+
+        if (REPORT_SIE1) {
+            SIE1ReportBuilderRunner sie1 = new SIE1ReportBuilderRunner();
+            sie1.generateSIE1IPS();
+            //        sie1.generateInstantaneousInsertPS();
+            sie1.generateIndexSize();
+            sie1.generateGanttInstantaneousInsertPS();
+            sie1.generateAccumulatedInsertPS();
+        }
+
+        if (REPORT_SIE2) {
+            SIE2ReportBuilderRunner sie2 = new SIE2ReportBuilderRunner();
+            sie2.generateOverallInsertPS();
+            sie2.generateAccumulatedInsertPS();
+            sie2.generateQueryPS();
+            sie2.generateAverageQueryResultCount();
+            sie2.generateAverageQueryResponseTime();
+            sie2.generateInstantaneousInsertPS();
+            sie2.generateGanttInstantaneousInsertPS();
+            sie2.generateSelectQueryResponseTime();
+            sie2.generateSelectQueryResultCount();
+        }
+
+        if (REPORT_SIE3) {
+            SIE3ReportBuilderRunner sie3 = new SIE3ReportBuilderRunner();
+            sie3.generateIndexCreationTime();
+            sie3.generateIndexSize();
+            sie3.generateSelectQueryResponseTime();
+            sie3.generateJoinQueryResponseTime();
+            sie3.generateSelectQueryResultCount();
+            sie3.generateJoinQueryResultCount();
+
+            //            sie3.generateSelectQueryProfiledSidxSearchTime();
+            //            sie3.generateSelectQueryProfiledPidxSearchTime();
+            //            sie3.generateJoinQueryProfiledSidxSearchTime();
+            //            sie3.generateJoinQueryProfiledPidxSearchTime();
+            //            sie3.generateJoinQueryProfiledSeedPidxSearchTime();
+            //            sie3.generateSelectQueryProfiledSidxCacheMiss();
+            //            sie3.generateSelectQueryProfiledPidxCacheMiss();
+            //            sie3.generateJoinQueryProfiledSidxCacheMiss();
+            //            sie3.generateJoinQueryProfiledPidxCacheMiss();
+            //            sie3.generateJoinQueryProfiledSeedPidxCacheMiss();
+            //            sie3.generateSelectQueryProfiledFalsePositive();
+            //            sie3.generateJoinQueryProfiledFalsePositive();
+        }
+
+        if (REPORT_SIE4) {
+            SIE4ReportBuilderRunner sie4 = new SIE4ReportBuilderRunner();
+            sie4.generateIndexCreationTime();
+            sie4.generateIndexSize();
+            sie4.generateSelectQueryResponseTime();
+            sie4.generateJoinQueryResponseTime();
+            sie4.generateSelectQueryResultCount();
+            sie4.generateJoinQueryResultCount();
+        }
+
+        if (REPORT_SIE5) {
+            SIE5ReportBuilderRunner sie5 = new SIE5ReportBuilderRunner();
+            sie5.generateOverallInsertPS();
+            sie5.generateAccumulatedInsertPS();
+            sie5.generateQueryPS();
+            sie5.generateAverageQueryResultCount();
+            sie5.generateAverageQueryResponseTime();
+            sie5.generateInstantaneousInsertPS();
+            sie5.generateGanttInstantaneousInsertPS();
+            sie5.generateSelectQueryResponseTime();
+            sie5.generateSelectQueryResultCount();
+        }
+
+        if (REPORT_SIE3_PROFILE) {
+            String executionTimeFilePath[] = new String[5];
+            executionTimeFilePath[0] = "/Users/kisskys/workspace/asterix_master/resultLog/Mem3g-Disk4g-part4-Lsev-Jvm5g-Lock6g/profile-exp3/SpatialIndexExperiment3Dhbtree/logs/executionTime-130.149.249.52.txt";
+            executionTimeFilePath[1] = "/Users/kisskys/workspace/asterix_master/resultLog/Mem3g-Disk4g-part4-Lsev-Jvm5g-Lock6g/profile-exp3/SpatialIndexExperiment3Dhvbtree/logs/executionTime-130.149.249.52.txt";
+            executionTimeFilePath[2] = "/Users/kisskys/workspace/asterix_master/resultLog/Mem3g-Disk4g-part4-Lsev-Jvm5g-Lock6g/profile-exp3/SpatialIndexExperiment3Rtree/logs/executionTime-130.149.249.52.txt";
+            executionTimeFilePath[3] = "/Users/kisskys/workspace/asterix_master/resultLog/Mem3g-Disk4g-part4-Lsev-Jvm5g-Lock6g/profile-exp3/SpatialIndexExperiment3Shbtree/logs/executionTime-130.149.249.52.txt";
+            executionTimeFilePath[4] = "/Users/kisskys/workspace/asterix_master/resultLog/Mem3g-Disk4g-part4-Lsev-Jvm5g-Lock6g/profile-exp3/SpatialIndexExperiment3Sif/logs/executionTime-130.149.249.52.txt";
+
+            for (int i = 0; i < 5; i++) {
+                String filePath = executionTimeFilePath[i];
+                OperatorProfilerReportBuilder oprb = new OperatorProfilerReportBuilder(filePath);
+                System.out.println("--------  " + i + " ----------\n");
+                System.out.println(oprb.getIdxNumber(false, 0));
+                System.out.println(oprb.getIdxNumber(false, 1));
+                System.out.println(oprb.getIdxNumber(false, 2));
+                System.out.println(oprb.getIdxNumber(false, 3));
+                System.out.println(oprb.getIdxNumber(false, 4));
+                System.out.println(oprb.getIdxNumber(true, 0));
+                System.out.println(oprb.getIdxNumber(true, 1));
+                System.out.println(oprb.getIdxNumber(true, 2));
+                System.out.println(oprb.getIdxNumber(true, 3));
+            }
+
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/SIE1ReportBuilder.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/SIE1ReportBuilder.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/SIE1ReportBuilder.java
new file mode 100644
index 0000000..2e04615
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/SIE1ReportBuilder.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+public class SIE1ReportBuilder extends AbstractDynamicDataEvalReportBuilder {
+    public SIE1ReportBuilder(String expHomePath, String expName, String runLogFileName) {
+        super(expHomePath, expName, runLogFileName, false);
+    }
+
+    @Override
+    public String getOverallInsertPS(int minutes) throws Exception {
+        renewStringBuilder();
+        openRunLog();
+        try {
+            if (!moveToExperimentBegin()) {
+                //The experiment run log doesn't exist in this run log file
+                return null;
+            }
+
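+            //The line following the "int64" marker holds the total record count for the run;
+            //dividing it by the elapsed seconds yields the overall inserts-per-second figure.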
+            String line;
+            while ((line = br.readLine()) != null) {
+                if (line.contains("int64")) {
+                    line = br.readLine();
+                    rsb.append(Long.parseLong(line) / (minutes * 60));
+                    break;
+                }
+            }
+
+            return rsb.toString();
+        } finally {
+            closeRunLog();
+        }
+    }
+
+    @Override
+    public String getInstantaneousQueryPS() throws Exception {
+        return null;
+    }
+
+    @Override
+    public String getQueryPS(int minutes) throws Exception {
+        return null;
+        //        renewStringBuilder();
+        //        openRunLog();
+        //        try {
+        //
+        //            return getResult();
+        //        } finally {
+        //            closeRunLog();
+        //        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a70fba5c/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/SIE1ReportBuilderRunner.java
----------------------------------------------------------------------
diff --git a/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/SIE1ReportBuilderRunner.java b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/SIE1ReportBuilderRunner.java
new file mode 100644
index 0000000..bab69a0
--- /dev/null
+++ b/asterix-experiments/src/main/java/org/apache/asterix/experiment/report/SIE1ReportBuilderRunner.java
@@ -0,0 +1,329 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.experiment.report;
+
+import java.io.FileOutputStream;
+import java.util.ArrayList;
+
+public class SIE1ReportBuilderRunner {
+    String expHomePath = "/Users/kisskys/workspace/asterix_master/resultLog/MemBuf3g-DiskBuf3g-Lsev-Jvm7g-Lock0g/exp1/";
+    String runLogFileName = "run-exp1.log";
+    String outputFilePath = "/Users/kisskys/workspace/asterix_master/resultLog/MemBuf3g-DiskBuf3g-Lsev-Jvm7g-Lock0g/result-report/";
+
+    SIE1ReportBuilder sie1ADhbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1ADhbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1ADhvbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1ADhvbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1ARtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1ARtree", runLogFileName);
+    SIE1ReportBuilder sie1AShbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1AShbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1ASif = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1ASif", runLogFileName);
+
+    SIE1ReportBuilder sie1BDhbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1BDhbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1BDhvbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1BDhvbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1BRtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1BRtree", runLogFileName);
+    SIE1ReportBuilder sie1BShbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1BShbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1BSif = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1BSif", runLogFileName);
+
+    SIE1ReportBuilder sie1CDhbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1CDhbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1CDhvbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1CDhvbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1CRtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1CRtree", runLogFileName);
+    SIE1ReportBuilder sie1CShbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1CShbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1CSif = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1CSif", runLogFileName);
+
+    SIE1ReportBuilder sie1DDhbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1DDhbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1DDhvbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1DDhvbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1DRtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1DRtree", runLogFileName);
+    SIE1ReportBuilder sie1DShbtree = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1DShbtree",
+            runLogFileName);
+    SIE1ReportBuilder sie1DSif = new SIE1ReportBuilder(expHomePath, "SpatialIndexExperiment1DSif", runLogFileName);
+
+    StringBuilder sb = new StringBuilder();
+
+    /**
+     * generate sie1_ips.txt
+     */
+    public void generateSIE1IPS() throws Exception {
+        int minutes = 60;
+        sb.setLength(0);
+        sb.append("# sie1 ips(inserts per second) report\n");
+        sb.append("# number of nodes, dhbtree, dhvbtree, rtree, shbtree, sif\n");
+        sb.append("1,").append(sie1ADhbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1ADhvbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1ARtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1AShbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1ASif.getOverallInsertPS(minutes)).append("\n");
+
+        sb.append("2,").append(sie1BDhbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1BDhvbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1BRtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1BShbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1BSif.getOverallInsertPS(minutes)).append("\n");
+
+        sb.append("4,").append(sie1CDhbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1CDhvbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1CRtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1CShbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1CSif.getOverallInsertPS(minutes)).append("\n");
+
+        sb.append("8,").append(sie1DDhbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1DDhvbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1DRtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1DShbtree.getOverallInsertPS(minutes)).append(",")
+                .append(sie1DSif.getOverallInsertPS(minutes)).append("\n");
+
+        FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath + "sie1_ips.txt");
+        fos.write(sb.toString().getBytes());
+        ReportBuilderHelper.closeOutputFile(fos);
+    }
+
+    /**
+     * generate sie1_accumulated_insert_ps.txt
+     */
+    public void generateAccumulatedInsertPS() throws Exception {
+        int targetRound = 721; //(3600 seconds / 5 seconds) + 1
+        int roundInterval = 5;
+
+        ArrayList<Long> ipsListDhbtree = new ArrayList<Long>();
+        ArrayList<Long> ipsListDhvbtree = new ArrayList<Long>();
+        ArrayList<Long> ipsListRtree = new ArrayList<Long>();
+        ArrayList<Long> ipsListShbtree = new ArrayList<Long>();
+        ArrayList<Long> ipsListSif = new ArrayList<Long>();
+        sie1DDhbtree.getAllNodesAccumulatedInsertPS(targetRound, ipsListDhbtree);
+        sie1DDhvbtree.getAllNodesAccumulatedInsertPS(targetRound, ipsListDhvbtree);
+        sie1DRtree.getAllNodesAccumulatedInsertPS(targetRound, ipsListRtree);
+        sie1DShbtree.getAllNodesAccumulatedInsertPS(targetRound, ipsListShbtree);
+        sie1DSif.getAllNodesAccumulatedInsertPS(targetRound, ipsListSif);
+
+        sb.setLength(0);
+        sb.append("# sie1 accumulated inserts per second report\n");
+        sb.append("# time, dhbtree, dhvbtree, rtree, shbtree, sif\n");
+
+        for (int i = 0; i < targetRound; i++) {
+            sb.append("" + (i * roundInterval) + "," + ipsListDhbtree.get(i) + "," + ipsListDhvbtree.get(i) + ","
+                    + ipsListRtree.get(i) + "," + ipsListShbtree.get(i) + "," + ipsListSif.get(i) + "\n");
+        }
+        FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath + "sie1_accumulated_insert_ps.txt");
+        fos.write(sb.toString().getBytes());
+        ReportBuilderHelper.closeOutputFile(fos);
+
+        ipsListDhbtree.clear();
+        ipsListDhvbtree.clear();
+        ipsListRtree.clear();
+        ipsListShbtree.clear();
+        ipsListSif.clear();
+    }
+
+    public void generateInstantaneousInsertPS() throws Exception {
+        int nodeCount = 8;
+        for (int i = 0; i < nodeCount; i++) {
+            sb.setLength(0);
+            sb.append("# sie1 8nodes(8 dataGen) instantaneous inserts per second report\n");
+            sb.append(sie1DDhbtree.getInstantaneousInsertPS(i, false));
+            FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                    + "sie1_8nodes_instantaneous_insert_ps_dhbtree_gen" + i + ".txt");
+            fos.write(sb.toString().getBytes());
+            ReportBuilderHelper.closeOutputFile(fos);
+        }
+        for (int i = 0; i < nodeCount; i++) {
+            sb.setLength(0);
+            sb.append("# sie1 8nodes(8 dataGen) instantaneous inserts per second report\n");
+            sb.append(sie1DDhvbtree.getInstantaneousInsertPS(i, false));
+            FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                    + "sie1_8nodes_instantaneous_insert_ps_dhvbtree_gen" + i + ".txt");
+            fos.write(sb.toString().getBytes());
+            ReportBuilderHelper.closeOutputFile(fos);
+        }
+        for (int i = 0; i < nodeCount; i++) {
+            sb.setLength(0);
+            sb.append("# sie1 8nodes(8 dataGen) instantaneous inserts per second report\n");
+            sb.append(sie1DRtree.getInstantaneousInsertPS(i, false));
+            FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                    + "sie1_8nodes_instantaneous_insert_ps_rtree_gen" + i + ".txt");
+            fos.write(sb.toString().getBytes());
+            ReportBuilderHelper.closeOutputFile(fos);
+        }
+        for (int i = 0; i < nodeCount; i++) {
+            sb.setLength(0);
+            sb.append("# sie1 8nodes(8 dataGen) instantaneous inserts per second report\n");
+            sb.append(sie1DShbtree.getInstantaneousInsertPS(i, false));
+            FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                    + "sie1_8nodes_instantaneous_insert_ps_shbtree_gen" + i + ".txt");
+            fos.write(sb.toString().getBytes());
+            ReportBuilderHelper.closeOutputFile(fos);
+        }
+        for (int i = 0; i < nodeCount; i++) {
+            sb.setLength(0);
+            sb.append("# sie1 8nodes(8 dataGen) instantaneous inserts per second report\n");
+            sb.append(sie1DSif.getInstantaneousInsertPS(i, false));
+            FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                    + "sie1_8nodes_instantaneous_insert_ps_sif_gen" + i + ".txt");
+            fos.write(sb.toString().getBytes());
+            ReportBuilderHelper.closeOutputFile(fos);
+        }
+    }
+
+    public void generateIndexSize() throws Exception {
+        sb.setLength(0);
+        sb.append("# sie1 index size report\n");
+
+        sb.append("# number of nodes, dhbtree, dhvbtree, rtree, shbtree, sif, primary\n");
+        sb.append("1,").append(sie1ADhbtree.getIndexSize("Tweets_idx_dhbtreeLocation/device_id")).append(",")
+                .append(sie1ADhvbtree.getIndexSize("Tweets_idx_dhvbtreeLocation/device_id")).append(",")
+                .append(sie1ARtree.getIndexSize("Tweets_idx_rtreeLocation/device_id")).append(",")
+                .append(sie1AShbtree.getIndexSize("Tweets_idx_shbtreeLocation/device_id")).append(",")
+                .append(sie1ASif.getIndexSize("Tweets_idx_sifLocation/device_id")).append(",")
+                .append(sie1ASif.getIndexSize("Tweets_idx_Tweets/device_id")).append("\n");
+        sb.append("2,").append(sie1BDhbtree.getIndexSize("Tweets_idx_dhbtreeLocation/device_id")).append(",")
+                .append(sie1BDhvbtree.getIndexSize("Tweets_idx_dhvbtreeLocation/device_id")).append(",")
+                .append(sie1BRtree.getIndexSize("Tweets_idx_rtreeLocation/device_id")).append(",")
+                .append(sie1BShbtree.getIndexSize("Tweets_idx_shbtreeLocation/device_id")).append(",")
+                .append(sie1BSif.getIndexSize("Tweets_idx_sifLocation/device_id")).append(",")
+                .append(sie1BSif.getIndexSize("Tweets_idx_Tweets/device_id")).append("\n");
+        sb.append("4,").append(sie1CDhbtree.getIndexSize("Tweets_idx_dhbtreeLocation/device_id")).append(",")
+                .append(sie1CDhvbtree.getIndexSize("Tweets_idx_dhvbtreeLocation/device_id")).append(",")
+                .append(sie1CRtree.getIndexSize("Tweets_idx_rtreeLocation/device_id")).append(",")
+                .append(sie1CShbtree.getIndexSize("Tweets_idx_shbtreeLocation/device_id")).append(",")
+                .append(sie1CSif.getIndexSize("Tweets_idx_sifLocation/device_id")).append(",")
+                .append(sie1CSif.getIndexSize("Tweets_idx_Tweets/device_id")).append("\n");
+        sb.append("8,").append(sie1DDhbtree.getIndexSize("Tweets_idx_dhbtreeLocation/device_id")).append(",")
+                .append(sie1DDhvbtree.getIndexSize("Tweets_idx_dhvbtreeLocation/device_id")).append(",")
+                .append(sie1DRtree.getIndexSize("Tweets_idx_rtreeLocation/device_id")).append(",")
+                .append(sie1DShbtree.getIndexSize("Tweets_idx_shbtreeLocation/device_id")).append(",")
+                .append(sie1DSif.getIndexSize("Tweets_idx_sifLocation/device_id")).append(",")
+                .append(sie1DSif.getIndexSize("Tweets_idx_Tweets/device_id")).append("\n");
+
+        FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath + "sie1_index_size.txt");
+        fos.write(sb.toString().getBytes());
+        ReportBuilderHelper.closeOutputFile(fos);
+    }
+
+    public void generateGanttInstantaneousInsertPS() throws Exception {
+
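+        // The Gantt inputs come from the 8-node ("D") experiments: instantaneous IPS of the first
+        // data generator plus flush/merge events parsed from each run's a1_node1 NC log.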
+        SIE1ReportBuilder dhbtree = sie1DDhbtree;
+        SIE1ReportBuilder dhvbtree = sie1DDhvbtree;
+        SIE1ReportBuilder rtree = sie1DRtree;
+        SIE1ReportBuilder shbtree = sie1DShbtree;
+        SIE1ReportBuilder sif = sie1DSif;
+        String sie1Type = "D";
+        String logDirPrefix = "";
+
+        for (int i = 0; i < 1; i++) {
+            sb.setLength(0);
+            sb.append("# sie1 1node(1 dataGen) instantaneous inserts per second report\n");
+            sb.append(dhbtree.getInstantaneousInsertPS(i, true));
+            FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                    + "sie1_gantt_1node_instantaneous_insert_ps_dhbtree_gen" + i + ".txt");
+            fos.write(sb.toString().getBytes());
+            ReportBuilderHelper.closeOutputFile(fos);
+        }
+        for (int i = 0; i < 1; i++) {
+            sb.setLength(0);
+            sb.append("# sie1 1node(1 dataGen) instantaneous inserts per second report\n");
+            sb.append(dhvbtree.getInstantaneousInsertPS(i, true));
+            FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                    + "sie1_gantt_1node_instantaneous_insert_ps_dhvbtree_gen" + i + ".txt");
+            fos.write(sb.toString().getBytes());
+            ReportBuilderHelper.closeOutputFile(fos);
+        }
+        for (int i = 0; i < 1; i++) {
+            sb.setLength(0);
+            sb.append("# sie1 1node(1 dataGen) instantaneous inserts per second report\n");
+            sb.append(rtree.getInstantaneousInsertPS(i, true));
+            FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                    + "sie1_gantt_1node_instantaneous_insert_ps_rtree_gen" + i + ".txt");
+            fos.write(sb.toString().getBytes());
+            ReportBuilderHelper.closeOutputFile(fos);
+        }
+        for (int i = 0; i < 1; i++) {
+            sb.setLength(0);
+            sb.append("# sie1 1node(1 dataGen) instantaneous inserts per second report\n");
+            sb.append(shbtree.getInstantaneousInsertPS(i, true));
+            FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                    + "sie1_gantt_1node_instantaneous_insert_ps_shbtree_gen" + i + ".txt");
+            fos.write(sb.toString().getBytes());
+            ReportBuilderHelper.closeOutputFile(fos);
+        }
+        for (int i = 0; i < 1; i++) {
+            sb.setLength(0);
+            sb.append("# sie1 1node(1 dataGen) instantaneous inserts per second report\n");
+            sb.append(sif.getInstantaneousInsertPS(i, true));
+            FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                    + "sie1_gantt_1node_instantaneous_insert_ps_sif_gen" + i + ".txt");
+            fos.write(sb.toString().getBytes());
+            ReportBuilderHelper.closeOutputFile(fos);
+        }
+
+        long dataGenStartTime = dhbtree.getDataGenStartTimeStamp();
+        NCLogReportBuilder ncLogReportBuilder = new NCLogReportBuilder(expHomePath + "SpatialIndexExperiment1"
+                + sie1Type + "Dhbtree/" + logDirPrefix + "logs/a1_node1.log");
+        sb.setLength(0);
+        sb.append(ncLogReportBuilder.getFlushMergeEventAsGanttChartFormat(dataGenStartTime));
+        FileOutputStream fos = ReportBuilderHelper.openOutputFile(outputFilePath
+                + "sie1_gantt_1node_flush_merge_dhbtree.txt");
+        fos.write(sb.toString().getBytes());
+        ReportBuilderHelper.closeOutputFile(fos);
+
+        dataGenStartTime = dhvbtree.getDataGenStartTimeStamp();
+        ncLogReportBuilder = new NCLogReportBuilder(expHomePath + "SpatialIndexExperiment1" + sie1Type + "Dhvbtree/"
+                + logDirPrefix + "logs/a1_node1.log");
+        sb.setLength(0);
+        sb.append(ncLogReportBuilder.getFlushMergeEventAsGanttChartFormat(dataGenStartTime));
+        fos = ReportBuilderHelper.openOutputFile(outputFilePath + "sie1_gantt_1node_flush_merge_dhvbtree.txt");
+        fos.write(sb.toString().getBytes());
+        ReportBuilderHelper.closeOutputFile(fos);
+
+        dataGenStartTime = rtree.getDataGenStartTimeStamp();
+        ncLogReportBuilder = new NCLogReportBuilder(expHomePath + "SpatialIndexExperiment1" + sie1Type + "Rtree/"
+                + logDirPrefix + "logs/a1_node1.log");
+        sb.setLength(0);
+        sb.append(ncLogReportBuilder.getFlushMergeEventAsGanttChartFormat(dataGenStartTime));
+        fos = ReportBuilderHelper.openOutputFile(outputFilePath + "sie1_gantt_1node_flush_merge_rtree.txt");
+        fos.write(sb.toString().getBytes());
+        ReportBuilderHelper.closeOutputFile(fos);
+
+        dataGenStartTime = shbtree.getDataGenStartTimeStamp();
+        ncLogReportBuilder = new NCLogReportBuilder(expHomePath + "SpatialIndexExperiment1" + sie1Type + "Shbtree/"
+                + logDirPrefix + "logs/a1_node1.log");
+        sb.setLength(0);
+        sb.append(ncLogReportBuilder.getFlushMergeEventAsGanttChartFormat(dataGenStartTime));
+        fos = ReportBuilderHelper.openOutputFile(outputFilePath + "sie1_gantt_1node_flush_merge_shbtree.txt");
+        fos.write(sb.toString().getBytes());
+        ReportBuilderHelper.closeOutputFile(fos);
+
+        dataGenStartTime = sif.getDataGenStartTimeStamp();
+        ncLogReportBuilder = new NCLogReportBuilder(expHomePath + "SpatialIndexExperiment1" + sie1Type + "Sif/"
+                + logDirPrefix + "logs/a1_node1.log");
+        sb.setLength(0);
+        sb.append(ncLogReportBuilder.getFlushMergeEventAsGanttChartFormat(dataGenStartTime));
+        fos = ReportBuilderHelper.openOutputFile(outputFilePath + "sie1_gantt_1node_flush_merge_sif.txt");
+        fos.write(sb.toString().getBytes());
+        ReportBuilderHelper.closeOutputFile(fos);
+    }
+
+}

