asterixdb-commits mailing list archives

From buyin...@apache.org
Subject [1/3] asterixdb git commit: Add a dataset rebalance REST API.
Date Sat, 27 May 2017 19:23:52 GMT
Repository: asterixdb
Updated Branches:
  refs/heads/master 75d32e50d -> 878051979


http://git-wip-us.apache.org/repos/asf/asterixdb/blob/87805197/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
index 1bb1377..a38d9b9 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
@@ -22,6 +22,7 @@ import org.apache.asterix.common.config.DatasetConfig.IndexType;
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Index;
@@ -42,6 +43,7 @@ import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
 import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
 import org.apache.hyracks.algebricks.runtime.operators.base.SinkRuntimeFactory;
 import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
@@ -54,7 +56,6 @@ import org.apache.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import org.apache.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.lsm.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
@@ -207,14 +208,16 @@ public class SecondaryInvertedIndexOperationsHelper extends SecondaryTreeIndexOp
     @Override
     public JobSpecification buildLoadingJobSpec() throws AlgebricksException {
         JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
+        JobId jobId = IndexUtil.bindJobEventListener(spec, metadataProvider);
 
         // Create dummy key provider for feeding the primary index scan.
-        AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
+        IOperatorDescriptor keyProviderOp = DatasetUtil.createDummyKeyProviderOp(spec, dataset, metadataProvider);
 
         // Create primary index scan op.
-        BTreeSearchOperatorDescriptor primaryScanOp = createPrimaryIndexScanOp(spec);
+        IOperatorDescriptor primaryScanOp = DatasetUtil.createPrimaryIndexScanOp(spec, metadataProvider, dataset,
+                jobId);
 
-        AbstractOperatorDescriptor sourceOp = primaryScanOp;
+        IOperatorDescriptor sourceOp = primaryScanOp;
         boolean isEnforcingKeyTypes = index.isEnforcingKeyFileds();
         int numSecondaryKeys = index.getKeyFieldNames().size();
         if (isEnforcingKeyTypes && !enforcedItemType.equals(itemType)) {
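For reference, the refactored wiring that both secondary-index helpers now share amounts to the sketch below. It assumes only the signatures visible in this patch (IndexUtil.bindJobEventListener, DatasetUtil.createDummyKeyProviderOp, DatasetUtil.createPrimaryIndexScanOp); the final connect call mirrors the OneToOneConnectorDescriptor wiring the helpers already use and is illustrative, not the full helper logic.

import org.apache.asterix.common.transactions.JobId;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.utils.DatasetUtil;
import org.apache.asterix.metadata.utils.IndexUtil;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;

public class SecondaryLoadingSketch {
    // Binds the job event listener, builds the key provider and primary scan via DatasetUtil,
    // and connects them one-to-one, returning the scan as the source operator for the rest of the job.
    public static IOperatorDescriptor wirePrimaryScan(JobSpecification spec, MetadataProvider metadataProvider,
            Dataset dataset) throws AlgebricksException {
        JobId jobId = IndexUtil.bindJobEventListener(spec, metadataProvider);
        IOperatorDescriptor keyProviderOp = DatasetUtil.createDummyKeyProviderOp(spec, dataset, metadataProvider);
        IOperatorDescriptor primaryScanOp = DatasetUtil.createPrimaryIndexScanOp(spec, metadataProvider, dataset, jobId);
        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
        return primaryScanOp;
    }
}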

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/87805197/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java
index 21fa754..d9fba59 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java
@@ -23,6 +23,7 @@ import java.util.List;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.external.indexing.IndexingConstants;
 import org.apache.asterix.external.operators.ExternalScanOperatorDescriptor;
 import org.apache.asterix.formats.nontagged.BinaryComparatorFactoryProvider;
@@ -53,7 +54,6 @@ import org.apache.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import org.apache.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory;
@@ -202,13 +202,15 @@ public class SecondaryRTreeOperationsHelper extends SecondaryTreeIndexOperations
                 metadataProvider.getStorageComponentProvider().getStorageManager(), secondaryFileSplitProvider);
         if (dataset.getDatasetType() == DatasetType.INTERNAL) {
             // Create dummy key provider for feeding the primary index scan.
-            AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
+            IOperatorDescriptor keyProviderOp = DatasetUtil.createDummyKeyProviderOp(spec, dataset, metadataProvider);
+            JobId jobId = IndexUtil.bindJobEventListener(spec, metadataProvider);
 
             // Create primary index scan op.
-            BTreeSearchOperatorDescriptor primaryScanOp = createPrimaryIndexScanOp(spec);
+            IOperatorDescriptor primaryScanOp = DatasetUtil.createPrimaryIndexScanOp(spec, metadataProvider, dataset,
+                    jobId);
 
             // Assign op.
-            AbstractOperatorDescriptor sourceOp = primaryScanOp;
+            IOperatorDescriptor sourceOp = primaryScanOp;
             if (isEnforcingKeyTypes && !enforcedItemType.equals(itemType)) {
                 sourceOp = createCastOp(spec, dataset.getDatasetType());
                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/87805197/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
index db33dd5..e634d4e 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
@@ -56,24 +56,23 @@ public class SplitsAndConstraintsUtil {
         return splits.toArray(new FileSplit[] {});
     }
 
-    public static FileSplit[] getDatasetSplits(Dataset dataset, MetadataTransactionContext mdTxnCtx,
-            String targetIdxName, boolean temp) throws AlgebricksException {
+    public static FileSplit[] getIndexSplits(Dataset dataset, String indexName, MetadataTransactionContext mdTxnCtx)
+            throws AlgebricksException {
         try {
             List<String> nodeGroup =
                     MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName()).getNodeNames();
             if (nodeGroup == null) {
                 throw new AlgebricksException("Couldn't find node group " + dataset.getNodeGroupName());
             }
-            return getDatasetSplits(dataset, nodeGroup, targetIdxName, temp);
+            return getIndexSplits(dataset, indexName, nodeGroup);
         } catch (MetadataException me) {
             throw new AlgebricksException(me);
         }
     }
 
-    public static FileSplit[] getDatasetSplits(Dataset dataset, List<String> nodes, String targetIdxName,
-            boolean temp) {
+    public static FileSplit[] getIndexSplits(Dataset dataset, String indexName, List<String> nodes) {
         File relPathFile = new File(StoragePathUtil.prepareDataverseIndexName(dataset.getDataverseName(),
-                dataset.getDatasetName(), targetIdxName));
+                dataset.getDatasetName(), indexName, dataset.getRebalanceCount()));
         String storageDirName = ClusterProperties.INSTANCE.getStorageDirectoryName();
         List<FileSplit> splits = new ArrayList<>();
         for (String nd : nodes) {
@@ -88,7 +87,8 @@ public class SplitsAndConstraintsUtil {
                 // format: 'storage dir name'/partition_#/dataverse/dataset_idx_index
                 File f = new File(
                         StoragePathUtil.prepareStoragePartitionPath(storageDirName, nodePartitions[k].getPartitionId())
-                                + (temp ? (File.separator + StoragePathUtil.TEMP_DATASETS_STORAGE_FOLDER) : "")
+                                + (dataset.isTemp() ? (File.separator + StoragePathUtil.TEMP_DATASETS_STORAGE_FOLDER)
+                                        : "")
                                 + File.separator + relPathFile);
                splits.add(StoragePathUtil.getFileSplitForClusterPartition(nodePartitions[k], f.getPath()));
             }
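The net effect of threading dataset.getRebalanceCount() into prepareDataverseIndexName is that each rebalanced copy of a dataset resolves to its own on-disk directory. Below is a plain-Java approximation of the layout noted in the comment above ('storage dir name'/partition_#/dataverse/dataset_idx_index); the helper and the exact form of the rebalance suffix are hypothetical, not the real StoragePathUtil API.

import java.io.File;

public class IndexPathSketch {
    // Hypothetical illustration: fold a rebalance count into the dataset directory name so that
    // a rebalanced copy never collides with the files of the original copy.
    static String indexPath(String storageDir, int partition, String dataverse, String dataset, String index,
            int rebalanceCount) {
        String datasetDir = rebalanceCount == 0 ? dataset : dataset + "_" + rebalanceCount;
        return storageDir + File.separator + "partition_" + partition + File.separator + dataverse
                + File.separator + datasetDir + "_idx_" + index;
    }

    public static void main(String[] args) {
        // Original copy vs. first rebalanced copy of the same index.
        System.out.println(indexPath("storage", 0, "Default", "Customers", "Customers", 0));
        System.out.println(indexPath("storage", 0, "Default", "Customers", "Customers", 1));
    }
}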

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/87805197/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFactoryProvider.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFactoryProvider.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFactoryProvider.java
index cea0369..6365860 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFactoryProvider.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryHashFunctionFactoryProvider.java
@@ -25,28 +25,14 @@ import org.apache.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
 import org.apache.hyracks.api.dataflow.value.IBinaryHashFunction;
 import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
-import org.apache.hyracks.data.std.primitive.DoublePointable;
-import org.apache.hyracks.data.std.primitive.FloatPointable;
-import org.apache.hyracks.data.std.primitive.IntegerPointable;
-import org.apache.hyracks.data.std.primitive.UTF8StringLowercasePointable;
 import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
 
 public class BinaryHashFunctionFactoryProvider implements IBinaryHashFunctionFactoryProvider, Serializable {
 
     private static final long serialVersionUID = 1L;
     public static final BinaryHashFunctionFactoryProvider INSTANCE = new BinaryHashFunctionFactoryProvider();
-    public static final PointableBinaryHashFunctionFactory INTEGER_POINTABLE_INSTANCE =
-            new PointableBinaryHashFunctionFactory(IntegerPointable.FACTORY);
-    public static final PointableBinaryHashFunctionFactory FLOAT_POINTABLE_INSTANCE =
-            new PointableBinaryHashFunctionFactory(FloatPointable.FACTORY);
-    public static final PointableBinaryHashFunctionFactory DOUBLE_POINTABLE_INSTANCE =
-            new PointableBinaryHashFunctionFactory(DoublePointable.FACTORY);
     public static final PointableBinaryHashFunctionFactory UTF8STRING_POINTABLE_INSTANCE =
             new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY);
-    // Equivalent to UTF8STRING_POINTABLE_INSTANCE but all characters are considered lower case to implement
-    // case-insensitive hashing.
-    public static final PointableBinaryHashFunctionFactory UTF8STRING_LOWERCASE_POINTABLE_INSTANCE =
-            new PointableBinaryHashFunctionFactory(UTF8StringLowercasePointable.FACTORY);
 
     private BinaryHashFunctionFactoryProvider() {
     }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/87805197/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
index 44aaef5..cb03ae4 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
@@ -222,7 +222,8 @@ public class PersistentLocalResourceRepository implements ILocalResourceReposito
                 createReplicationJob(ReplicationOperation.DELETE, resourceFile);
             }
         } else {
-            throw new HyracksDataException("Resource doesn't exist");
+            throw HyracksDataException.create(org.apache.hyracks.api.exceptions.ErrorCode.RESOURCE_DOES_NOT_EXIST,
+                    relativePath);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/87805197/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/DatasetIdFactory.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/DatasetIdFactory.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/DatasetIdFactory.java
index 324b7fd..7acefe9 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/DatasetIdFactory.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/DatasetIdFactory.java
@@ -18,26 +18,34 @@
  */
 package org.apache.asterix.transaction.management.service.transaction;
 
-import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.exceptions.ErrorCode;
 
 public class DatasetIdFactory {
-    private static AtomicInteger id = new AtomicInteger();
+    private static int id = 0;
     private static boolean isInitialized = false;
 
-    public static boolean isInitialized() {
+    public static synchronized boolean isInitialized() {
         return isInitialized;
     }
 
-    public static void initialize(int initialId) {
-        id.set(initialId);
+    public static synchronized void initialize(int initialId) {
+        id = initialId;
         isInitialized = true;
     }
 
-    public static int generateDatasetId() {
-        return id.incrementAndGet();
+    public static synchronized int generateDatasetId() throws AsterixException {
+        if (id == Integer.MAX_VALUE) {
+            throw new AsterixException(ErrorCode.DATASET_ID_EXHAUSTED);
+        }
+        return ++id;
     }
 
-    public static int getMostRecentDatasetId() {
-        return id.get();
+    public static int generateAlternatingDatasetId(int originalId) {
+        return originalId ^ 0x80000000;
+    }
+
+    public static synchronized int getMostRecentDatasetId() {
+        return id;
     }
 }
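The counter is now guarded by synchronized methods with an explicit exhaustion check, and generateAlternatingDatasetId simply flips the sign bit of an existing ID. A standalone illustration of that bit trick (the operation is its own inverse, so applying it twice restores the original ID):

public class AlternatingDatasetIdSketch {
    public static void main(String[] args) {
        int originalId = 42;
        // Same operation as generateAlternatingDatasetId: toggle the sign bit.
        int alternating = originalId ^ 0x80000000;
        System.out.println(alternating);              // -2147483606, disjoint from normally generated positive IDs
        System.out.println(alternating ^ 0x80000000); // 42, flipping the bit again restores the original ID
    }
}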

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/87805197/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java
index a057f40..95f5e1a 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java
@@ -30,15 +30,8 @@ public class PartitionConstraintHelper {
                 count)));
     }
 
-    public static void addLocationChoiceConstraint(JobSpecification spec, IOperatorDescriptor op, String[][] choices) {
-        addPartitionCountConstraint(spec, op, choices.length);
-        for (int i = 0; i < choices.length; ++i) {
-            spec.addUserConstraint(new Constraint(new PartitionLocationExpression(op.getOperatorId(), i),
-                    new ConstantExpression(choices[i])));
-        }
-    }
-
-    public static void addAbsoluteLocationConstraint(JobSpecification spec, IOperatorDescriptor op, String... locations) {
+    public static void addAbsoluteLocationConstraint(JobSpecification spec, IOperatorDescriptor op,
+            String... locations) {
         addPartitionCountConstraint(spec, op, locations.length);
         for (int i = 0; i < locations.length; ++i) {
             spec.addUserConstraint(new Constraint(new PartitionLocationExpression(op.getOperatorId(), i),

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/87805197/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
index 7eaeafb..23031bc 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
@@ -88,6 +88,7 @@ public class ErrorCode {
     public static final int CANNOT_DESTROY_ACTIVE_INDEX = 52;
     public static final int CANNOT_CLEAR_INACTIVE_INDEX = 53;
     public static final int CANNOT_ALLOCATE_MEMORY_FOR_INACTIVE_INDEX = 54;
+    public static final int RESOURCE_DOES_NOT_EXIST = 55;
 
     // Compilation error codes.
     public static final int RULECOLLECTION_NOT_INSTANCE_OF_LIST = 10000;

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/87805197/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
index e95e7f4..88e4204 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
@@ -73,5 +73,6 @@
 52 = Failed to destroy the index since it is active
 53 = Failed to clear the index since it is inactive
 54 = Failed to allocate memory components for the index since it is inactive
+55 = Resource does not exist for %1$s
 
 10000 = The given rule collection %1$s is not an instance of the List class.
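For context, the new code 55 pairs with the message template added above, and PersistentLocalResourceRepository now raises it through HyracksDataException.create with the missing resource's relative path as the sole parameter. A minimal, self-contained illustration of how that template renders (plain String.format stands in for the Hyracks message plumbing; the path is a made-up example):

public class ResourceErrorMessageSketch {
    public static void main(String[] args) {
        final int RESOURCE_DOES_NOT_EXIST = 55;                        // mirrors ErrorCode.RESOURCE_DOES_NOT_EXIST
        String template = "Resource does not exist for %1$s";          // value of key 55 in en.properties
        String relativePath = "storage/partition_0/Default/ds_idx_ds"; // hypothetical resource path
        System.out.println(RESOURCE_DOES_NOT_EXIST + ": " + String.format(template, relativePath));
    }
}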

