asterixdb-commits mailing list archives

From il...@apache.org
Subject [2/4] incubator-asterixdb git commit: First version of LSM-based statistics collection
Date Sat, 05 Dec 2015 00:50:05 GMT
First version of LSM-based statistics collection


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/d0abc98c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/d0abc98c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/d0abc98c

Branch: refs/heads/statistics
Commit: d0abc98c6f344741c1fe0fe98ff3765d09258560
Parents: 4bb94a8
Author: Ildar Absalyamov <ildar.absalyamov@gmail.com>
Authored: Sun Oct 25 21:15:59 2015 +0500
Committer: Ildar Absalyamov <ildar.absalyamov@gmail.com>
Committed: Sun Oct 25 21:15:59 2015 +0500

----------------------------------------------------------------------
 .../common/AsterixHyracksIntegrationUtil.java   |   5 +-
 .../apache/asterix/file/DatasetOperations.java  |  50 +--
 .../file/ExternalIndexingOperations.java        | 115 ++++---
 .../apache/asterix/file/IndexOperations.java    |  18 +-
 .../file/SecondaryBTreeOperationsHelper.java    |  66 ++--
 .../file/SecondaryIndexOperationsHelper.java    |  34 +-
 .../resources/asterix-build-configuration.xml   |  23 +-
 .../common/config/AsterixStorageProperties.java |  17 +-
 .../dataflow/HDFSLookupAdapterFactory.java      |  23 +-
 .../metadata/bootstrap/MetadataBootstrap.java   |  66 ++--
 .../metadata/declared/AqlMetadataProvider.java  | 341 ++++++++++---------
 ...AqlPrimitiveIntegerValueProviderFactory.java |  94 +++++
 .../ExternalBTreeLocalResourceMetadata.java     |  27 +-
 .../resource/LSMBTreeLocalResourceMetadata.java |  27 +-
 14 files changed, 540 insertions(+), 366 deletions(-)
----------------------------------------------------------------------

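Across the operations and helper classes below the change is essentially uniform: every LSMBTreeDataflowHelperFactory (and the External*DataflowHelperFactory variants) now takes two extra trailing arguments, a boolean that turns statistics collection on whenever the configured synopsis type is not SynopsisType.None, and the new AqlPrimitiveIntegerValueProviderFactory.INSTANCE; a couple of drop job specs (in DatasetOperations and IndexOperations) pass false, null instead. A minimal, self-contained sketch of that toggle, using a stand-in enum rather than the real org.apache.hyracks.storage.am.common.statistics.SynopsisType:

// Stand-in enum; only None and Wavelet are known from this commit, other constants are not assumed.
public class StatisticsToggleSketch {

    enum SynopsisType { None, Wavelet }

    // Mirrors storageProperties.getStatisticsSynopsisType() != SynopsisType.None in the hunks below.
    static boolean collectStatistics(SynopsisType configuredType) {
        return configuredType != SynopsisType.None;
    }

    public static void main(String[] args) {
        System.out.println(collectStatistics(SynopsisType.Wavelet)); // true -> statistics are collected
        System.out.println(collectStatistics(SynopsisType.None));    // false -> behavior as before this commit
    }
}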

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d0abc98c/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
index 158bf0c..9016e04 100644
--- a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -36,8 +36,8 @@ import org.apache.hyracks.control.nc.NodeControllerService;
 
 public class AsterixHyracksIntegrationUtil {
 
-    public static final int NODES = 2;
-    public static final int PARTITONS = 2;
+    public static final int NODES = 1;
+    public static final int PARTITONS = 1;
 
     public static final int DEFAULT_HYRACKS_CC_CLIENT_PORT = 1098;
 
@@ -136,6 +136,7 @@ public class AsterixHyracksIntegrationUtil {
      */
     public static void main(String[] args) {
         Runtime.getRuntime().addShutdownHook(new Thread() {
+            @Override
             public void run() {
                 try {
                     deinit();

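Aside from shrinking the embedded test cluster to a single NC and partition (matching the nc2 entries commented out in asterix-build-configuration.xml further down), the only code change here is the missing @Override on the shutdown hook's run(). A self-contained sketch of that hook shape, with deinit() standing in for AsterixHyracksIntegrationUtil.deinit():

// Placeholder class; only the shutdown-hook structure matters, not the real teardown logic.
public class ShutdownHookSketch {

    static void deinit() throws Exception {
        System.out.println("tearing down the embedded cluster");
    }

    public static void main(String[] args) {
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override // the annotation this hunk adds; run() overrides Thread.run()
            public void run() {
                try {
                    deinit();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
        System.out.println("main exiting; the hook runs when the JVM shuts down");
    }
}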
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d0abc98c/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
index b7c210c..dbf21d9 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
@@ -31,6 +31,7 @@ import org.apache.asterix.common.context.AsterixVirtualBufferCacheProvider;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
+import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveIntegerValueProviderFactory;
 import org.apache.asterix.formats.base.IDataFormat;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
@@ -57,6 +58,7 @@ import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.statistics.SynopsisType;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
@@ -68,8 +70,8 @@ public class DatasetOperations {
     private static Logger LOGGER = Logger.getLogger(DatasetOperations.class.getName());
 
     public static JobSpecification createDropDatasetJobSpec(CompiledDatasetDropStatement datasetDropStmt,
-            AqlMetadataProvider metadataProvider) throws AlgebricksException, HyracksDataException, RemoteException,
-            ACIDException, AsterixException {
+            AqlMetadataProvider metadataProvider)
+                    throws AlgebricksException, HyracksDataException, RemoteException, ACIDException, AsterixException {
 
         String dataverseName = null;
         if (datasetDropStmt.getDataverseName() != null) {
@@ -120,12 +122,13 @@ public class DatasetOperations {
         // The index drop operation should be persistent regardless of temp datasets or permanent dataset
         IndexDropOperatorDescriptor primaryBtreeDrop = new IndexDropOperatorDescriptor(specPrimary,
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                        dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
+                splitsAndConstraint.first,
+                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                        compactionInfo.first, compactionInfo.second,
                         new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                         AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
-                        filterCmpFactories, btreeFields, filterFields, true));
+                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
+                        btreeFields, filterFields, true, false, null));
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(specPrimary, primaryBtreeDrop,
                 splitsAndConstraint.second);
 
@@ -185,12 +188,14 @@ public class DatasetOperations {
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                 splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
                 new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                        compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(dataset
-                                .getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
-                                .getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
-                        btreeFields, filterFields, true), localResourceFactoryProvider,
-                NoOpOperationCallbackFactory.INSTANCE);
+                        compactionInfo.first, compactionInfo.second,
+                        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
+                        btreeFields, filterFields, true,
+                        storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                        AqlPrimitiveIntegerValueProviderFactory.INSTANCE),
+                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexCreateOp,
                 splitsAndConstraint.second);
         spec.addRoot(indexCreateOp);
@@ -240,16 +245,19 @@ public class DatasetOperations {
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                 splitsAndConstraint.first, typeTraits, comparatorFactories, blooFilterKeyFields,
                 new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                        compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(
-                                dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
-                        filterCmpFactories, btreeFields, filterFields, !temp), NoOpOperationCallbackFactory.INSTANCE);
-        AlgebricksPartitionConstraintHelper
-                .setPartitionConstraintInJobSpec(spec, compactOp, splitsAndConstraint.second);
+                        compactionInfo.first, compactionInfo.second,
+                        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
+                        btreeFields, filterFields, !temp,
+                        storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                        AqlPrimitiveIntegerValueProviderFactory.INSTANCE),
+                NoOpOperationCallbackFactory.INSTANCE);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
+                splitsAndConstraint.second);
 
-        AlgebricksPartitionConstraintHelper
-                .setPartitionConstraintInJobSpec(spec, compactOp, splitsAndConstraint.second);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
+                splitsAndConstraint.second);
         spec.addRoot(compactOp);
         return spec;
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d0abc98c/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
index b9b5527..bb49ed3 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
@@ -27,11 +27,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
 import org.apache.asterix.common.api.ILocalResourceMetadata;
 import org.apache.asterix.common.config.AsterixStorageProperties;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
@@ -44,6 +39,7 @@ import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
 import org.apache.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
+import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveIntegerValueProviderFactory;
 import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
 import org.apache.asterix.external.adapter.factory.HDFSAdapterFactory;
 import org.apache.asterix.external.adapter.factory.HDFSIndexingAdapterFactory;
@@ -80,6 +76,10 @@ import org.apache.asterix.transaction.management.resource.PersistentLocalResourc
 import org.apache.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
 import org.apache.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
 import org.apache.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -94,6 +94,7 @@ import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.statistics.SynopsisType;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeDataflowHelperFactory;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeWithBuddyDataflowHelperFactory;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
@@ -107,6 +108,7 @@ public class ExternalIndexingOperations {
 
     public static final List<List<String>> FILE_INDEX_FIELD_NAMES = new ArrayList<List<String>>();
     public static final ArrayList<IAType> FILE_INDEX_FIELD_TYPES = new ArrayList<IAType>();
+
     static {
         FILE_INDEX_FIELD_NAMES.add(new ArrayList<String>(Arrays.asList("")));
         FILE_INDEX_FIELD_TYPES.add(BuiltinType.ASTRING);
@@ -128,8 +130,8 @@ public class ExternalIndexingOperations {
 
     public static boolean datasetUsesHiveAdapter(ExternalDatasetDetails ds) {
         String adapter = ds.getAdapter();
-        return (adapter.equalsIgnoreCase("hive") || adapter
-                .equalsIgnoreCase("org.apache.asterix.external.dataset.adapter.HIVEAdapter"));
+        return (adapter.equalsIgnoreCase("hive")
+                || adapter.equalsIgnoreCase("org.apache.asterix.external.dataset.adapter.HIVEAdapter"));
     }
 
     public static boolean isValidIndexName(String datasetName, String indexName) {
@@ -154,7 +156,8 @@ public class ExternalIndexingOperations {
         return IndexingConstants.getBuddyBtreeComparatorFactories();
     }
 
-    public static ArrayList<ExternalFile> getSnapshotFromExternalFileSystem(Dataset dataset) throws AlgebricksException {
+    public static ArrayList<ExternalFile> getSnapshotFromExternalFileSystem(Dataset dataset)
+            throws AlgebricksException {
         ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
         ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
         try {
@@ -176,9 +179,9 @@ public class ExternalIndexingOperations {
                     if (fileStatuses[i].isDirectory()) {
                         listSubFiles(dataset, fs, fileStatuses[i], files);
                     } else {
-                        files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(),
-                                nextFileNumber, fileStatuses[i].getPath().toUri().getPath(), new Date(fileStatuses[i]
-                                        .getModificationTime()), fileStatuses[i].getLen(),
+                        files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(), nextFileNumber,
+                                fileStatuses[i].getPath().toUri().getPath(),
+                                new Date(fileStatuses[i].getModificationTime()), fileStatuses[i].getLen(),
                                 ExternalFilePendingOp.PENDING_NO_OP));
                     }
                 }
@@ -223,7 +226,7 @@ public class ExternalIndexingOperations {
 
     public static JobSpecification buildFilesIndexReplicationJobSpec(Dataset dataset,
             ArrayList<ExternalFile> externalFilesSnapshot, AqlMetadataProvider metadataProvider, boolean createIndex)
-            throws MetadataException, AlgebricksException {
+                    throws MetadataException, AlgebricksException {
         JobSpecification spec = JobSpecificationUtils.createJobSpecification();
         IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
         AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
@@ -232,20 +235,22 @@ public class ExternalIndexingOperations {
         ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
         Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
-                        dataset.getDatasetName(), getFilesIndexName(dataset.getDatasetName()), true);
+                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(), dataset.getDatasetName(),
+                        getFilesIndexName(dataset.getDatasetName()), true);
         IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
         ILocalResourceMetadata localResourceMetadata = new ExternalBTreeLocalResourceMetadata(
-                FilesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS,
-                FilesIndexDescription.FILES_INDEX_COMP_FACTORIES, new int[] { 0 }, false, dataset.getDatasetId(),
-                mergePolicyFactory, mergePolicyFactoryProperties);
+                FilesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS, FilesIndexDescription.FILES_INDEX_COMP_FACTORIES,
+                new int[] { 0 }, false, dataset.getDatasetId(), mergePolicyFactory, mergePolicyFactoryProperties);
         PersistentLocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
                 localResourceMetadata, LocalResource.ExternalBTreeResource);
         ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
-                mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
-                        dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties.getBloomFilterFalsePositiveRate(),
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+                mergePolicyFactory, mergePolicyFactoryProperties,
+                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                storageProperties.getBloomFilterFalsePositiveRate(),
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true,
+                storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                AqlPrimitiveIntegerValueProviderFactory.INSTANCE);
         ExternalFilesIndexOperatorDescriptor externalFilesOp = new ExternalFilesIndexOperatorDescriptor(spec,
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                 secondaryFileSplitProvider, indexDataflowHelperFactory, localResourceFactoryProvider,
@@ -259,7 +264,7 @@ public class ExternalIndexingOperations {
 
     /**
      * This method create an indexing operator that index records in HDFS
-     * 
+     *
      * @param jobSpec
      * @param itemType
      * @param dataset
@@ -297,7 +302,7 @@ public class ExternalIndexingOperations {
      * deleteedFiles should contain files that are no longer there in the file system
      * appendedFiles should have the new file information of existing files
      * The method should return false in case of zero delta
-     * 
+     *
      * @param dataset
      * @param metadataFiles
      * @param addedFiles
@@ -309,7 +314,7 @@ public class ExternalIndexingOperations {
      */
     public static boolean isDatasetUptodate(Dataset dataset, List<ExternalFile> metadataFiles,
             List<ExternalFile> addedFiles, List<ExternalFile> deletedFiles, List<ExternalFile> appendedFiles)
-            throws MetadataException, AlgebricksException {
+                    throws MetadataException, AlgebricksException {
         boolean uptodate = true;
         int newFileNumber = metadataFiles.get(metadataFiles.size() - 1).getFileNumber() + 1;
 
@@ -340,9 +345,10 @@ public class ExternalIndexingOperations {
                     } else {
                         // Same file name, Different file mod date -> delete and add
                         metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_DROP_OP);
-                        deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile
-                                .getDatasetName(), 0, metadataFile.getFileName(), metadataFile.getLastModefiedTime(),
-                                metadataFile.getSize(), ExternalFilePendingOp.PENDING_DROP_OP));
+                        deletedFiles
+                                .add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(), 0,
+                                        metadataFile.getFileName(), metadataFile.getLastModefiedTime(),
+                                        metadataFile.getSize(), ExternalFilePendingOp.PENDING_DROP_OP));
                         fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
                         fileSystemFile.setFileNumber(newFileNumber);
                         addedFiles.add(fileSystemFile);
@@ -382,8 +388,8 @@ public class ExternalIndexingOperations {
             if (metadataFile.getPendingOp() == ExternalFilePendingOp.PENDING_NO_OP) {
                 metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_DROP_OP);
                 deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(),
-                        newFileNumber, metadataFile.getFileName(), metadataFile.getLastModefiedTime(), metadataFile
-                                .getSize(), metadataFile.getPendingOp()));
+                        newFileNumber, metadataFile.getFileName(), metadataFile.getLastModefiedTime(),
+                        metadataFile.getSize(), metadataFile.getPendingOp()));
                 newFileNumber++;
                 uptodate = false;
             }
@@ -421,13 +427,16 @@ public class ExternalIndexingOperations {
                 metadataProvider.getMetadataTxnContext());
         IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec,
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                        dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
+                splitsAndConstraint.first,
+                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                        compactionInfo.first, compactionInfo.second,
                         new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                         AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), false, null, null, null, null, !temp));
-        AlgebricksPartitionConstraintHelper
-                .setPartitionConstraintInJobSpec(spec, btreeDrop, splitsAndConstraint.second);
+                        storageProperties.getBloomFilterFalsePositiveRate(), false, null, null, null, null, !temp,
+                        storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                        AqlPrimitiveIntegerValueProviderFactory.INSTANCE));
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeDrop,
+                splitsAndConstraint.second);
         spec.addRoot(btreeDrop);
 
         return spec;
@@ -443,9 +452,9 @@ public class ExternalIndexingOperations {
             else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_APPEND_OP) {
                 for (ExternalFile appendedFile : appendedFiles) {
                     if (appendedFile.getFileName().equals(file.getFileName())) {
-                        files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(),
-                                file.getFileNumber(), file.getFileName(), file.getLastModefiedTime(), appendedFile
-                                        .getSize(), ExternalFilePendingOp.PENDING_NO_OP));
+                        files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(), file.getFileNumber(),
+                                file.getFileName(), file.getLastModefiedTime(), appendedFile.getSize(),
+                                ExternalFilePendingOp.PENDING_NO_OP));
                     }
                 }
             }
@@ -549,7 +558,9 @@ public class ExternalIndexingOperations {
                 new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
                 storageProperties.getBloomFilterFalsePositiveRate(),
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true,
+                storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                AqlPrimitiveIntegerValueProviderFactory.INSTANCE);
     }
 
     private static ExternalBTreeWithBuddyDataflowHelperFactory getBTreeDataflowHelperFactory(Dataset ds, Index index,
@@ -557,17 +568,18 @@ public class ExternalIndexingOperations {
             AsterixStorageProperties storageProperties, JobSpecification spec) {
         return new ExternalBTreeWithBuddyDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
                 new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
                 storageProperties.getBloomFilterFalsePositiveRate(), new int[] { index.getKeyFieldNames().size() },
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true,
+                storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                AqlPrimitiveIntegerValueProviderFactory.INSTANCE);
     }
 
     @SuppressWarnings("rawtypes")
     private static ExternalRTreeDataflowHelperFactory getRTreeDataflowHelperFactory(Dataset ds, Index index,
             ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
             AsterixStorageProperties storageProperties, AqlMetadataProvider metadataProvider, JobSpecification spec)
-            throws AlgebricksException, AsterixException {
+                    throws AlgebricksException, AsterixException {
         int numPrimaryKeys = getRIDSize(ds);
         List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
         secondaryKeyFields.size();
@@ -594,8 +606,8 @@ public class ExternalIndexingOperations {
                     .getSerializerDeserializer(nestedKeyType);
             secondaryRecFields[i] = keySerde;
 
-            secondaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
-                    nestedKeyType, true);
+            secondaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE
+                    .getBinaryComparatorFactory(nestedKeyType, true);
             secondaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
             valueProviderFactories[i] = AqlPrimitiveValueProviderFactory.INSTANCE;
         }
@@ -743,14 +755,17 @@ public class ExternalIndexingOperations {
         ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
         Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
-                        dataset.getDatasetName(), getFilesIndexName(dataset.getDatasetName()), true);
+                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(), dataset.getDatasetName(),
+                        getFilesIndexName(dataset.getDatasetName()), true);
         IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
         ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
-                mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
-                        dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties.getBloomFilterFalsePositiveRate(),
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+                mergePolicyFactory, mergePolicyFactoryProperties,
+                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                storageProperties.getBloomFilterFalsePositiveRate(),
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true,
+                storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                AqlPrimitiveIntegerValueProviderFactory.INSTANCE);
         LSMTreeIndexCompactOperatorDescriptor compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                 secondaryFileSplitProvider, FilesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS,

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d0abc98c/asterix-app/src/main/java/org/apache/asterix/file/IndexOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/IndexOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/IndexOperations.java
index ebac99f..6a817e1 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/IndexOperations.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/IndexOperations.java
@@ -56,7 +56,7 @@ public class IndexOperations {
 
     public static JobSpecification buildSecondaryIndexCreationJobSpec(CompiledCreateIndexStatement createIndexStmt,
             ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider)
-            throws AsterixException, AlgebricksException {
+                    throws AsterixException, AlgebricksException {
         SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
                 .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
                         createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
@@ -68,7 +68,7 @@ public class IndexOperations {
 
     public static JobSpecification buildSecondaryIndexLoadingJobSpec(CompiledCreateIndexStatement createIndexStmt,
             ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider)
-            throws AsterixException, AlgebricksException {
+                    throws AsterixException, AlgebricksException {
         SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
                 .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
                         createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
@@ -109,13 +109,15 @@ public class IndexOperations {
         // The index drop operation should be persistent regardless of temp datasets or permanent dataset.
         IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec,
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                        dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
+                splitsAndConstraint.first,
+                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                        compactionInfo.first, compactionInfo.second,
                         new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                         AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), false, null, null, null, null, true));
-        AlgebricksPartitionConstraintHelper
-                .setPartitionConstraintInJobSpec(spec, btreeDrop, splitsAndConstraint.second);
+                        storageProperties.getBloomFilterFalsePositiveRate(), false, null, null, null, null, true, false,
+                        null));
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeDrop,
+                splitsAndConstraint.second);
         spec.addRoot(btreeDrop);
 
         return spec;
@@ -123,7 +125,7 @@ public class IndexOperations {
 
     public static JobSpecification buildSecondaryIndexCompactJobSpec(CompiledIndexCompactStatement indexCompactStmt,
             ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider, Dataset dataset)
-            throws AsterixException, AlgebricksException {
+                    throws AsterixException, AlgebricksException {
         SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
                 .createIndexOperationsHelper(indexCompactStmt.getIndexType(), indexCompactStmt.getDataverseName(),
                         indexCompactStmt.getDatasetName(), indexCompactStmt.getIndexName(),

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d0abc98c/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
index 34517ca..10d9c8e 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
@@ -30,6 +30,7 @@ import org.apache.asterix.common.context.AsterixVirtualBufferCacheProvider;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
+import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveIntegerValueProviderFactory;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.external.IndexingConstants;
@@ -68,6 +69,7 @@ import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.statistics.SynopsisType;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeWithBuddyDataflowHelperFactory;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
 import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
@@ -97,12 +99,14 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
             localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(localResourceMetadata,
                     LocalResource.LSMBTreeResource);
             // The index create operation should be persistent regardless of temp datasets or permanent dataset.
-            indexDataflowHelperFactory = new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                    dataset.getDatasetId()), mergePolicyFactory, mergePolicyFactoryProperties,
-                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+            indexDataflowHelperFactory = new LSMBTreeDataflowHelperFactory(
+                    new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), mergePolicyFactory,
+                    mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                     AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
                     storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits, filterCmpFactories,
-                    secondaryBTreeFields, secondaryFilterFields, true);
+                    secondaryBTreeFields, secondaryFilterFields, true,
+                    storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                    AqlPrimitiveIntegerValueProviderFactory.INSTANCE);
         } else {
             // External dataset local resource and dataflow helper
             int[] buddyBreeFields = new int[] { numSecondaryKeys };
@@ -116,7 +120,9 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
                     AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                     LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
                     storageProperties.getBloomFilterFalsePositiveRate(), buddyBreeFields,
-                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true,
+                    storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                    AqlPrimitiveIntegerValueProviderFactory.INSTANCE);
         }
         TreeIndexCreateOperatorDescriptor secondaryIndexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
@@ -164,11 +170,14 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
             // Create secondary BTree bulk load op.
             AbstractTreeIndexOperatorDescriptor secondaryBulkLoadOp;
             ExternalBTreeWithBuddyDataflowHelperFactory dataflowHelperFactory = new ExternalBTreeWithBuddyDataflowHelperFactory(
-                    mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
-                            dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    mergePolicyFactory, mergePolicyFactoryProperties,
+                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                     LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
                     storageProperties.getBloomFilterFalsePositiveRate(), new int[] { numSecondaryKeys },
-                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true,
+                    storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                    AqlPrimitiveIntegerValueProviderFactory.INSTANCE);
             IOperatorDescriptor root;
             if (externalFiles != null) {
                 // Transaction load
@@ -197,7 +206,7 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
             spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
             return spec;
         } else {
-            // Create dummy key provider for feeding the primary index scan. 
+            // Create dummy key provider for feeding the primary index scan.
             AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
 
             // Create primary index scan op.
@@ -223,19 +232,21 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
             AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
             boolean temp = dataset.getDatasetDetails().isTemp();
             // Create secondary BTree bulk load op.
-            TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = createTreeIndexBulkLoadOp(
-                    spec,
-                    numSecondaryKeys,
+            TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = createTreeIndexBulkLoadOp(spec, numSecondaryKeys,
                     new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
                             mergePolicyFactory, mergePolicyFactoryProperties,
                             new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                             AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
-                                    .getBloomFilterFalsePositiveRate(), false, filterTypeTraits, filterCmpFactories,
-                            secondaryBTreeFields, secondaryFilterFields, !temp), GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
+                            LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                            storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits,
+                            filterCmpFactories, secondaryBTreeFields, secondaryFilterFields, !temp,
+                            storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                            AqlPrimitiveIntegerValueProviderFactory.INSTANCE),
+                    GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
 
             AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
-                    new IPushRuntimeFactory[] { new SinkRuntimeFactory() }, new RecordDescriptor[] { secondaryRecDesc });
+                    new IPushRuntimeFactory[] { new SinkRuntimeFactory() },
+                    new RecordDescriptor[] { secondaryRecDesc });
             // Connect the operators.
             spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
             spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
@@ -269,13 +280,16 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
             compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
                     AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                     AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
-                    secondaryComparatorFactories, secondaryBloomFilterKeyFields, new LSMBTreeDataflowHelperFactory(
-                            new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), mergePolicyFactory,
-                            mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
-                                    dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    secondaryComparatorFactories, secondaryBloomFilterKeyFields,
+                    new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                            mergePolicyFactory, mergePolicyFactoryProperties,
+                            new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                             LSMBTreeIOOperationCallbackFactory.INSTANCE,
                             storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits,
-                            filterCmpFactories, secondaryBTreeFields, secondaryFilterFields, !temp),
+                            filterCmpFactories, secondaryBTreeFields, secondaryFilterFields, !temp,
+                            storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                            AqlPrimitiveIntegerValueProviderFactory.INSTANCE),
                     NoOpOperationCallbackFactory.INSTANCE);
         } else {
             // External dataset
@@ -286,9 +300,11 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
                     new ExternalBTreeWithBuddyDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
                             new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                             AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE, storageProperties
-                                    .getBloomFilterFalsePositiveRate(), new int[] { numSecondaryKeys },
-                            ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true),
+                            LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
+                            storageProperties.getBloomFilterFalsePositiveRate(), new int[] { numSecondaryKeys },
+                            ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true,
+                            storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                            AqlPrimitiveIntegerValueProviderFactory.INSTANCE),
                     NoOpOperationCallbackFactory.INSTANCE);
         }
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
@@ -302,7 +318,7 @@ public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelp
     @SuppressWarnings("rawtypes")
     protected void setSecondaryRecDescAndComparators(IndexType indexType, List<List<String>> secondaryKeyFields,
             List<IAType> secondaryKeyTypes, int gramLength, AqlMetadataProvider metadataProvider)
-            throws AlgebricksException, AsterixException {
+                    throws AlgebricksException, AsterixException {
         secondaryFieldAccessEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeys + numFilterFields];
         secondaryComparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys + numPrimaryKeys];
         secondaryBloomFilterKeyFields = new int[numSecondaryKeys];

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d0abc98c/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
index 0cc137b..c2e97fe 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
@@ -36,6 +36,7 @@ import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
 import org.apache.asterix.common.transactions.IRecoveryManager.ResourceType;
 import org.apache.asterix.common.transactions.JobId;
+import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveIntegerValueProviderFactory;
 import org.apache.asterix.external.indexing.operators.ExternalIndexBulkModifyOperatorDescriptor;
 import org.apache.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
 import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
@@ -92,6 +93,7 @@ import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.statistics.SynopsisType;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 
@@ -152,10 +154,10 @@ public abstract class SecondaryIndexOperationsHelper {
     }
 
     public static SecondaryIndexOperationsHelper createIndexOperationsHelper(IndexType indexType, String dataverseName,
-            String datasetName, String indexName, List<List<String>> secondaryKeyFields,
-            List<IAType> secondaryKeyTypes, boolean isEnforced, int gramLength, AqlMetadataProvider metadataProvider,
+            String datasetName, String indexName, List<List<String>> secondaryKeyFields, List<IAType> secondaryKeyTypes,
+            boolean isEnforced, int gramLength, AqlMetadataProvider metadataProvider,
             PhysicalOptimizationConfig physOptConf, ARecordType recType, ARecordType enforcedType)
-            throws AsterixException, AlgebricksException {
+                    throws AsterixException, AlgebricksException {
         IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
         SecondaryIndexOperationsHelper indexOperationsHelper = null;
         switch (indexType) {
@@ -286,8 +288,8 @@ public abstract class SecondaryIndexOperationsHelper {
                 throw new AlgebricksException(e);
             }
             primaryRecFields[i] = serdeProvider.getSerializerDeserializer(keyType);
-            primaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
-                    keyType, true);
+            primaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE
+                    .getBinaryComparatorFactory(keyType, true);
             primaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
             primaryBloomFilterKeyFields[i] = i;
         }
@@ -300,8 +302,8 @@ public abstract class SecondaryIndexOperationsHelper {
             List<List<String>> secondaryKeyFields, List<IAType> secondaryKeyTypes, int gramLength,
             AqlMetadataProvider metadataProvider) throws AlgebricksException, AsterixException;
 
-    protected AbstractOperatorDescriptor createDummyKeyProviderOp(JobSpecification spec) throws AsterixException,
-            AlgebricksException {
+    protected AbstractOperatorDescriptor createDummyKeyProviderOp(JobSpecification spec)
+            throws AsterixException, AlgebricksException {
         // Build dummy tuple containing one field with a dummy value inside.
         ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
         DataOutput dos = tb.getDataOutput();
@@ -345,12 +347,14 @@ public abstract class SecondaryIndexOperationsHelper {
                 primaryFileSplitProvider, primaryRecDesc.getTypeTraits(), primaryComparatorFactories,
                 primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, true, true,
                 new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                        mergePolicyFactory, mergePolicyFactoryProperties, new PrimaryIndexOperationTrackerProvider(
-                                dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
-                                .getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
-                        primaryBTreeFields, primaryFilterFields, !temp), false, false, null,
-                searchCallbackFactory, null, null);
+                        mergePolicyFactory, mergePolicyFactoryProperties,
+                        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
+                        primaryBTreeFields, primaryFilterFields, !temp,
+                        storageProperties.getStatisticsSynopsisType() != SynopsisType.None,
+                        AqlPrimitiveIntegerValueProviderFactory.INSTANCE),
+                false, false, null, searchCallbackFactory, null, null);
 
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, primarySearchOp,
                 primaryPartitionConstraint);
@@ -433,7 +437,7 @@ public abstract class SecondaryIndexOperationsHelper {
 
     protected TreeIndexBulkLoadOperatorDescriptor createTreeIndexBulkLoadOp(JobSpecification spec,
             int numSecondaryKeyFields, IIndexDataflowHelperFactory dataflowHelperFactory, float fillFactor)
-            throws MetadataException, AlgebricksException {
+                    throws MetadataException, AlgebricksException {
         int[] fieldPermutation = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
         for (int i = 0; i < fieldPermutation.length; i++) {
             fieldPermutation[i] = i;
@@ -541,7 +545,7 @@ public abstract class SecondaryIndexOperationsHelper {
 
     protected ExternalIndexBulkModifyOperatorDescriptor createExternalIndexBulkModifyOp(JobSpecification spec,
             int numSecondaryKeyFields, IIndexDataflowHelperFactory dataflowHelperFactory, float fillFactor)
-            throws MetadataException, AlgebricksException {
+                    throws MetadataException, AlgebricksException {
         int[] fieldPermutation = new int[numSecondaryKeyFields + numPrimaryKeys];
         for (int i = 0; i < numSecondaryKeyFields + numPrimaryKeys; i++) {
             fieldPermutation[i] = i;
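
Note on the LSMBTreeDataflowHelperFactory hunk above: the constructor picks up two trailing arguments, a statistics switch derived from the configured synopsis type and AqlPrimitiveIntegerValueProviderFactory.INSTANCE; the same pair is threaded into the ExternalBTreeDataflowHelperFactory in HDFSLookupAdapterFactory further down. A minimal sketch of the same construction with the switch hoisted into a local, using only what the diff shows (the local names statisticsEnabled and primaryHelperFactory are hypothetical, and the snippet relies on the surrounding helper's fields rather than being standalone):

// Sketch: identical arguments to the hunk above; only the local names are invented.
boolean statisticsEnabled =
        storageProperties.getStatisticsSynopsisType() != SynopsisType.None;
IIndexDataflowHelperFactory primaryHelperFactory = new LSMBTreeDataflowHelperFactory(
        new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
        mergePolicyFactory, mergePolicyFactoryProperties,
        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
        primaryBTreeFields, primaryFilterFields, !temp,
        statisticsEnabled, AqlPrimitiveIntegerValueProviderFactory.INSTANCE);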

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d0abc98c/asterix-app/src/main/resources/asterix-build-configuration.xml
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/resources/asterix-build-configuration.xml b/asterix-app/src/main/resources/asterix-build-configuration.xml
index f8f33d6..490050d 100644
--- a/asterix-app/src/main/resources/asterix-build-configuration.xml
+++ b/asterix-app/src/main/resources/asterix-build-configuration.xml
@@ -22,18 +22,18 @@
 		<ncId>nc1</ncId>
 		<storeDirs>nc1data</storeDirs>
 	</store>
-	<store>
-		<ncId>nc2</ncId>
-		<storeDirs>nc2data</storeDirs>
-	</store>
+<!-- 	<store> -->
+<!-- 		<ncId>nc2</ncId> -->
+<!-- 		<storeDirs>nc2data</storeDirs> -->
+<!-- 	</store> -->
 	<transactionLogDir>
 		<ncId>nc1</ncId>
 		<txnLogDirPath>target/txnLogDir/nc1</txnLogDirPath>
 	</transactionLogDir>
-	<transactionLogDir>
-		<ncId>nc2</ncId>
-		<txnLogDirPath>target/txnLogDir/nc2</txnLogDirPath>
-	</transactionLogDir>
+<!-- 	<transactionLogDir> -->
+<!-- 		<ncId>nc2</ncId> -->
+<!-- 		<txnLogDirPath>target/txnLogDir/nc2</txnLogDirPath> -->
+<!-- 	</transactionLogDir> -->
 
 	<property>
 		<name>max.wait.active.cluster</name>
@@ -88,6 +88,13 @@
 			(Default = 8)
 		</description>
 	</property>
+    <property>
+        <name>storage.statistics.synopsis.type</name>
+        <value>Wavelet</value>
+        <description>The type of synopsis used for collecting dataset statistics.
+            (Default = None)
+        </description>
+    </property>
 	<property>
 		<name>plot.activate</name>
 		<value>false</value>

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d0abc98c/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixStorageProperties.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixStorageProperties.java b/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixStorageProperties.java
index 7511145..98de109 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixStorageProperties.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixStorageProperties.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.common.config;
 
+import org.apache.hyracks.storage.am.common.statistics.SynopsisType;
+
 public class AsterixStorageProperties extends AbstractAsterixProperties {
 
     private static final String STORAGE_BUFFERCACHE_PAGESIZE_KEY = "storage.buffercache.pagesize";
@@ -47,6 +49,9 @@ public class AsterixStorageProperties extends AbstractAsterixProperties {
     private static final String STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_KEY = "storage.lsm.bloomfilter.falsepositiverate";
     private static double STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_DEFAULT = 0.01;
 
+    private static final String STORAGE_STATISTICS_SYNOPSIS_TYPE_KEY = "storage.statistics.synopsis.type";
+    private static String STORAGE_STATISTICS_SYNOPSIS_TYPE_DEFAULT = SynopsisType.None.name();
+
     public AsterixStorageProperties(AsterixPropertiesAccessor accessor) {
         super(accessor);
     }
@@ -81,10 +86,9 @@ public class AsterixStorageProperties extends AbstractAsterixProperties {
     }
 
     public int getMetadataMemoryComponentNumPages() {
-        return accessor
-                .getProperty(STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES_KEY,
-                        STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES_DEFAULT,
-                        PropertyInterpreters.getIntegerPropertyInterpreter());
+        return accessor.getProperty(STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES_KEY,
+                STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
     }
 
     public int getMemoryComponentsNum() {
@@ -101,4 +105,9 @@ public class AsterixStorageProperties extends AbstractAsterixProperties {
         return accessor.getProperty(STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_KEY,
                 STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_DEFAULT, PropertyInterpreters.getDoublePropertyInterpreter());
     }
+
+    public SynopsisType getStatisticsSynopsisType() {
+        return SynopsisType.valueOf(accessor.getProperty(STORAGE_STATISTICS_SYNOPSIS_TYPE_KEY,
+                STORAGE_STATISTICS_SYNOPSIS_TYPE_DEFAULT, PropertyInterpreters.getStringPropertyInterpreter()));
+    }
 }
\ No newline at end of file
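
Note on the two hunks above: asterix-build-configuration.xml sets storage.statistics.synopsis.type to Wavelet, and the new AsterixStorageProperties.getStatisticsSynopsisType() resolves that string to a SynopsisType constant via Enum.valueOf, with SynopsisType.None.name() as the default when the property is absent. A self-contained sketch of that lookup behaviour, using java.util.Properties and a stand-in enum (only the None and Wavelet constants are visible in this patch; this is an illustration, not the real AsterixPropertiesAccessor):

import java.util.Properties;

public class SynopsisTypeLookupSketch {
    // Stand-in for org.apache.hyracks.storage.am.common.statistics.SynopsisType;
    // only None and Wavelet appear in this patch.
    enum SynopsisType { None, Wavelet }

    // Mirrors the valueOf-with-default pattern of getStatisticsSynopsisType():
    // Enum.valueOf is case-sensitive and throws IllegalArgumentException for
    // anything that is not an exact constant name.
    static SynopsisType synopsisType(Properties conf) {
        return SynopsisType.valueOf(
                conf.getProperty("storage.statistics.synopsis.type", SynopsisType.None.name()));
    }

    public static void main(String[] args) {
        Properties conf = new Properties();
        System.out.println(synopsisType(conf));  // None (default when the property is unset)
        conf.setProperty("storage.statistics.synopsis.type", "Wavelet");
        System.out.println(synopsisType(conf));  // Wavelet, as configured in the build XML above
    }
}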

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d0abc98c/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSLookupAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSLookupAdapterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSLookupAdapterFactory.java
index 505e1a9..86a6878 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSLookupAdapterFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/dataflow/HDFSLookupAdapterFactory.java
@@ -23,6 +23,7 @@ import java.util.Map;
 
 import org.apache.asterix.common.dataflow.IAsterixApplicationContextInfo;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
+import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveIntegerValueProviderFactory;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
@@ -56,6 +57,7 @@ import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.statistics.SynopsisType;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeDataflowHelperFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 
@@ -119,7 +121,8 @@ public class HDFSLookupAdapterFactory implements IControlledAdapterFactory {
     public static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildExternalDataLookupRuntime(
             JobSpecification jobSpec, Dataset dataset, Index secondaryIndex, int[] ridIndexes, boolean retainInput,
             IVariableTypeEnvironment typeEnv, List<LogicalVariable> outputVars, IOperatorSchema opSchema,
-            JobGenContext context, AqlMetadataProvider metadataProvider, boolean retainNull) throws AlgebricksException {
+            JobGenContext context, AqlMetadataProvider metadataProvider, boolean retainNull)
+                    throws AlgebricksException {
 
         // Get data type
         IAType itemType = null;
@@ -149,11 +152,13 @@ public class HDFSLookupAdapterFactory implements IControlledAdapterFactory {
         boolean temp = dataset.getDatasetDetails().isTemp();
         // Create the file index data flow helper
         ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
-                compactionInfo.first, compactionInfo.second, new SecondaryIndexOperationTrackerProvider(
-                        dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                LSMBTreeIOOperationCallbackFactory.INSTANCE, metadataProvider.getStorageProperties()
-                        .getBloomFilterFalsePositiveRate(), ExternalDatasetsRegistry.INSTANCE.getAndLockDatasetVersion(
-                        dataset, metadataProvider), !temp);
+                compactionInfo.first, compactionInfo.second,
+                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                metadataProvider.getStorageProperties().getBloomFilterFalsePositiveRate(),
+                ExternalDatasetsRegistry.INSTANCE.getAndLockDatasetVersion(dataset, metadataProvider), !temp,
+                metadataProvider.getStorageProperties().getStatisticsSynopsisType() != SynopsisType.None,
+                AqlPrimitiveIntegerValueProviderFactory.INSTANCE);
 
         // Create the out record descriptor, appContext and fileSplitProvider for the files index
         RecordDescriptor outRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
@@ -172,9 +177,9 @@ public class HDFSLookupAdapterFactory implements IControlledAdapterFactory {
         // Create the operator
         ExternalLoopkupOperatorDiscriptor op = new ExternalLoopkupOperatorDiscriptor(jobSpec, adapterFactory,
                 outRecDesc, indexDataflowHelperFactory, retainInput, appContext.getIndexLifecycleManagerProvider(),
-                appContext.getStorageManagerInterface(), spPc.first, dataset.getDatasetId(), metadataProvider
-                        .getStorageProperties().getBloomFilterFalsePositiveRate(), searchOpCallbackFactory, retainNull,
-                context.getNullWriterFactory());
+                appContext.getStorageManagerInterface(), spPc.first, dataset.getDatasetId(),
+                metadataProvider.getStorageProperties().getBloomFilterFalsePositiveRate(), searchOpCallbackFactory,
+                retainNull, context.getNullWriterFactory());
 
         // Return value
         return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, spPc.second);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d0abc98c/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
index c94133a..15395a3 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -232,8 +232,8 @@ public class MetadataBootstrap {
     public static void insertInitialDataverses(MetadataTransactionContext mdTxnCtx) throws Exception {
         String dataverseName = MetadataPrimaryIndexes.DATAVERSE_DATASET.getDataverseName();
         String dataFormat = NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT;
-        MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dataFormat,
-                IMetadataEntity.PENDING_NO_OP));
+        MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
+                new Dataverse(dataverseName, dataFormat, IMetadataEntity.PENDING_NO_OP));
     }
 
     public static void insertInitialDatasets(MetadataTransactionContext mdTxnCtx) throws Exception {
@@ -270,8 +270,8 @@ public class MetadataBootstrap {
         getBuiltinTypes(types);
         getMetadataTypes(types);
         for (int i = 0; i < types.size(); i++) {
-            MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, types.get(i).getTypeName(),
-                    types.get(i), false));
+            MetadataManager.INSTANCE.addDatatype(mdTxnCtx,
+                    new Datatype(dataverseName, types.get(i).getTypeName(), types.get(i), false));
         }
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Finished inserting initial datatypes.");
@@ -280,10 +280,11 @@ public class MetadataBootstrap {
 
     public static void insertInitialIndexes(MetadataTransactionContext mdTxnCtx) throws Exception {
         for (int i = 0; i < secondaryIndexes.length; i++) {
-            MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(secondaryIndexes[i].getDataverseName(),
-                    secondaryIndexes[i].getIndexedDatasetName(), secondaryIndexes[i].getIndexName(), IndexType.BTREE,
-                    secondaryIndexes[i].getPartitioningExpr(), secondaryIndexes[i].getPartitioningExprType(), false,
-                    false, IMetadataEntity.PENDING_NO_OP));
+            MetadataManager.INSTANCE.addIndex(mdTxnCtx,
+                    new Index(secondaryIndexes[i].getDataverseName(), secondaryIndexes[i].getIndexedDatasetName(),
+                            secondaryIndexes[i].getIndexName(), IndexType.BTREE,
+                            secondaryIndexes[i].getPartitioningExpr(), secondaryIndexes[i].getPartitioningExprType(),
+                            false, false, IMetadataEntity.PENDING_NO_OP));
         }
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Finished inserting initial indexes.");
@@ -383,33 +384,27 @@ public class MetadataBootstrap {
                 + IndexFileNameUtil.prepareFileName(metadataStore + File.separator + index.getFileNameRelativePath(),
                         runtimeContext.getMetaDataIODeviceId());
         FileReference file = new FileReference(new File(filePath));
-        List<IVirtualBufferCache> virtualBufferCaches = runtimeContext.getVirtualBufferCaches(index.getDatasetId()
-                .getId());
+        List<IVirtualBufferCache> virtualBufferCaches = runtimeContext
+                .getVirtualBufferCaches(index.getDatasetId().getId());
         ITypeTraits[] typeTraits = index.getTypeTraits();
         IBinaryComparatorFactory[] comparatorFactories = index.getKeyBinaryComparatorFactory();
         int[] bloomFilterKeyFields = index.getBloomFilterKeyFields();
         LSMBTree lsmBtree = null;
         long resourceID = -1;
-        ILSMOperationTracker opTracker = index.isPrimaryIndex() ? runtimeContext.getLSMBTreeOperationTracker(index
-                .getDatasetId().getId()) : new BaseOperationTracker((DatasetLifecycleManager) indexLifecycleManager,
-                index.getDatasetId().getId(), ((DatasetLifecycleManager) indexLifecycleManager).getDatasetInfo(index
-                        .getDatasetId().getId()));
+        ILSMOperationTracker opTracker = index.isPrimaryIndex()
+                ? runtimeContext.getLSMBTreeOperationTracker(index.getDatasetId().getId())
+                : new BaseOperationTracker((DatasetLifecycleManager) indexLifecycleManager,
+                        index.getDatasetId().getId(),
+                        ((DatasetLifecycleManager) indexLifecycleManager).getDatasetInfo(index.getDatasetId().getId()));
         final String path = file.getFile().getPath();
         if (create) {
-            lsmBtree = LSMBTreeUtils.createLSMTree(
-                    virtualBufferCaches,
-                    file,
-                    bufferCache,
-                    fileMapProvider,
-                    typeTraits,
-                    comparatorFactories,
-                    bloomFilterKeyFields,
-                    runtimeContext.getBloomFilterFalsePositiveRate(),
+            lsmBtree = LSMBTreeUtils.createLSMTree(virtualBufferCaches, file, bufferCache, fileMapProvider, typeTraits,
+                    comparatorFactories, bloomFilterKeyFields, runtimeContext.getBloomFilterFalsePositiveRate(),
                     runtimeContext.getMetadataMergePolicyFactory().createMergePolicy(
-                            GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES, indexLifecycleManager), opTracker,
-                    runtimeContext.getLSMIOScheduler(),
+                            GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES, indexLifecycleManager),
+                    opTracker, runtimeContext.getLSMIOScheduler(),
                     LSMBTreeIOOperationCallbackFactory.INSTANCE.createIOOperationCallback(), index.isPrimaryIndex(),
-                    null, null, null, null, true);
+                    null, null, null, null, true, false, null);
             lsmBtree.create();
             resourceID = runtimeContext.getResourceIdFactory().createId();
             ILocalResourceMetadata localResourceMetadata = new LSMBTreeLocalResourceMetadata(typeTraits,
@@ -426,19 +421,14 @@ public class MetadataBootstrap {
             resourceID = resource.getResourceId();
             lsmBtree = (LSMBTree) indexLifecycleManager.getIndex(resource.getResourceName());
             if (lsmBtree == null) {
-                lsmBtree = LSMBTreeUtils.createLSMTree(
-                        virtualBufferCaches,
-                        file,
-                        bufferCache,
-                        fileMapProvider,
-                        typeTraits,
-                        comparatorFactories,
-                        bloomFilterKeyFields,
+                lsmBtree = LSMBTreeUtils.createLSMTree(virtualBufferCaches, file, bufferCache, fileMapProvider,
+                        typeTraits, comparatorFactories, bloomFilterKeyFields,
                         runtimeContext.getBloomFilterFalsePositiveRate(),
                         runtimeContext.getMetadataMergePolicyFactory().createMergePolicy(
-                                GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES, indexLifecycleManager), opTracker,
-                        runtimeContext.getLSMIOScheduler(), LSMBTreeIOOperationCallbackFactory.INSTANCE
-                                .createIOOperationCallback(), index.isPrimaryIndex(), null, null, null, null, true);
+                                GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES, indexLifecycleManager),
+                        opTracker, runtimeContext.getLSMIOScheduler(),
+                        LSMBTreeIOOperationCallbackFactory.INSTANCE.createIOOperationCallback(), index.isPrimaryIndex(),
+                        null, null, null, null, true, false, null);
                 indexLifecycleManager.register(path, lsmBtree);
             }
         }
@@ -456,7 +446,7 @@ public class MetadataBootstrap {
     }
 
     public static void startDDLRecovery() throws RemoteException, ACIDException, MetadataException {
-        //#. clean up any record which has pendingAdd/DelOp flag 
+        //#. clean up any record which has pendingAdd/DelOp flag
         //   as traversing all records from DATAVERSE_DATASET to DATASET_DATASET, and then to INDEX_DATASET.
         String dataverseName = null;
         String datasetName = null;
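
Note on the reformatted LSMBTreeUtils.createLSMTree calls above: compared with the old calls, each gains two trailing arguments, false and null, in the positions where the data-flow helper factories elsewhere in this patch pass the statistics switch and a value provider factory, so the metadata B-trees appear to be created with statistics collection disabled. The same call is repeated below with that (assumed) reading marked inline; the comments are interpretation, since the actual parameter names of createLSMTree are not visible in this patch:

lsmBtree = LSMBTreeUtils.createLSMTree(virtualBufferCaches, file, bufferCache, fileMapProvider, typeTraits,
        comparatorFactories, bloomFilterKeyFields, runtimeContext.getBloomFilterFalsePositiveRate(),
        runtimeContext.getMetadataMergePolicyFactory().createMergePolicy(
                GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES, indexLifecycleManager),
        opTracker, runtimeContext.getLSMIOScheduler(),
        LSMBTreeIOOperationCallbackFactory.INSTANCE.createIOOperationCallback(), index.isPrimaryIndex(),
        null, null, null, null, true,
        /* assumed: do not collect statistics for metadata indexes */ false,
        /* assumed: no statistics value provider needed when disabled */ null);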

