asterixdb-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From amo...@apache.org
Subject [3/3] asterixdb git commit: Ensure Metadata locks are acquired for SQL++ queries
Date Sun, 02 Apr 2017 21:49:26 GMT
Ensure Metadata locks are acquired for SQL++ queries

Change-Id: I5f468599897a37cbcb12d8577d072f340f0d949c
Reviewed-on: https://asterix-gerrit.ics.uci.edu/1642
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
BAD: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Yingyi Bu <buyingyi@gmail.com>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>


Project: http://git-wip-us.apache.org/repos/asf/asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/asterixdb/commit/6eb0175f
Tree: http://git-wip-us.apache.org/repos/asf/asterixdb/tree/6eb0175f
Diff: http://git-wip-us.apache.org/repos/asf/asterixdb/diff/6eb0175f

Branch: refs/heads/master
Commit: 6eb0175f99a27117d6f4d3d828f807d653a87bfd
Parents: ff915a9
Author: Abdullah Alamoudi <bamousaa@gmail.com>
Authored: Sat Apr 1 21:51:19 2017 -0700
Committer: abdullah alamoudi <bamousaa@gmail.com>
Committed: Sun Apr 2 14:48:56 2017 -0700

----------------------------------------------------------------------
 .../physical/InvertedIndexPOperator.java        |   6 +-
 .../api/http/server/ConnectorApiServlet.java    |  68 +--
 .../asterix/app/translator/QueryTranslator.java | 239 ++++----
 .../bootstrap/GlobalRecoveryManager.java        | 161 ++---
 .../http/servlet/ConnectorApiServletTest.java   |  18 +-
 .../app/bootstrap/TestNodeController.java       |  18 +-
 .../aql/statement/SubscribeFeedStatement.java   |   8 +-
 .../aql/visitor/AqlDeleteRewriteVisitor.java    |   8 +-
 .../asterix-lang-aql/src/main/javacc/AQL.jj     |  23 +-
 .../lang/common/parser/ScopeChecker.java        |  36 +-
 .../lang/common/statement/DeleteStatement.java  |  29 +-
 .../asterix/lang/common/statement/Query.java    |  30 +-
 .../CloneAndSubstituteVariablesVisitor.java     |   2 +-
 .../visitor/SqlppGroupBySugarVisitor.java       |   9 +-
 .../lang/sqlpp/util/SqlppRewriteUtil.java       |   4 +-
 .../lang/sqlpp/visitor/DeepCopyVisitor.java     |   6 +-
 ...SqlppCloneAndSubstituteVariablesVisitor.java |  56 +-
 .../visitor/SqlppDeleteRewriteVisitor.java      |   6 +-
 .../asterix-lang-sqlpp/src/main/javacc/SQLPP.jj |  19 +-
 .../asterix/metadata/MetadataManager.java       |  15 +-
 .../asterix/metadata/api/IMetadataManager.java  |   7 +-
 .../BTreeDataflowHelperFactoryProvider.java     |   2 +-
 .../metadata/declared/MetadataProvider.java     |  34 +-
 .../asterix/metadata/entities/Dataset.java      |   6 +-
 .../metadata/feeds/FeedMetadataUtil.java        |  17 +-
 .../asterix/metadata/lock/DatasetLock.java      | 157 +++++
 .../metadata/lock/ExternalDatasetsRegistry.java | 142 +++++
 .../asterix/metadata/lock/IMetadataLock.java    |  50 ++
 .../apache/asterix/metadata/lock/LockList.java  |  45 ++
 .../asterix/metadata/lock/MetadataLock.java     |  51 ++
 .../metadata/lock/MetadataLockManager.java      | 295 ++++++++++
 .../asterix/metadata/utils/DatasetLock.java     | 106 ----
 .../asterix/metadata/utils/DatasetUtil.java     |  31 +-
 .../utils/ExternalDatasetsRegistry.java         | 141 -----
 .../metadata/utils/MetadataLockManager.java     | 587 -------------------
 .../RTreeDataflowHelperFactoryProvider.java     |   1 +
 .../utils/SplitsAndConstraintsUtil.java         |  39 +-
 37 files changed, 1135 insertions(+), 1337 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
index c6ea045..5eae36b 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
@@ -117,8 +117,7 @@ public class InvertedIndexPOperator extends IndexSearchPOperator {
         MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
         Dataset dataset;
         try {
-            dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(),
-                    jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
+            dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
         } catch (MetadataException e) {
             throw new AlgebricksException(e);
         }
@@ -257,8 +256,7 @@ public class InvertedIndexPOperator extends IndexSearchPOperator {
                     jobSpec, queryField, appContext.getStorageManager(), secondarySplitsAndConstraint.first,
                     appContext.getIndexLifecycleManagerProvider(), tokenTypeTraits, tokenComparatorFactories,
                     invListsTypeTraits, invListsComparatorFactories, dataflowHelperFactory, queryTokenizerFactory,
-                    searchModifierFactory, outputRecDesc, retainInput, retainMissing,
-                    context.getMissingWriterFactory(),
+                    searchModifierFactory, outputRecDesc, retainInput, retainMissing, context.getMissingWriterFactory(),
                     dataset.getSearchCallbackFactory(metadataProvider.getStorageComponentProvider(), secondaryIndex,
                             ((JobEventListenerFactory) jobSpec.getJobletEventListenerFactory()).getJobId(),
                             IndexOperation.SEARCH, null),

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
index ebd1bdb..869289a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
@@ -90,44 +90,46 @@ public class ConnectorApiServlet extends AbstractServlet {
             // Metadata transaction begins.
             MetadataManager.INSTANCE.init();
             MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-
             // Retrieves file splits of the dataset.
             MetadataProvider metadataProvider = new MetadataProvider(null, new StorageComponentProvider());
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
-            if (dataset == null) {
-                jsonResponse.put("error",
-                        "Dataset " + datasetName + " does not exist in " + "dataverse " + dataverseName);
-                out.write(jsonResponse.toString());
-                out.flush();
-                return;
-            }
-            boolean temp = dataset.getDatasetDetails().isTemp();
-            FileSplit[] fileSplits =
-                    metadataProvider.splitsForDataset(mdTxnCtx, dataverseName, datasetName, datasetName, temp);
-            ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
-                    dataset.getItemTypeName());
-            List<List<String>> primaryKeys = DatasetUtil.getPartitioningKeys(dataset);
-            StringBuilder pkStrBuf = new StringBuilder();
-            for (List<String> keys : primaryKeys) {
-                for (String key : keys) {
-                    pkStrBuf.append(key).append(",");
+            try {
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+                if (dataset == null) {
+                    jsonResponse.put("error",
+                            "Dataset " + datasetName + " does not exist in " + "dataverse " + dataverseName);
+                    out.write(jsonResponse.toString());
+                    out.flush();
+                    return;
                 }
-            }
-            pkStrBuf.delete(pkStrBuf.length() - 1, pkStrBuf.length());
-
-            // Constructs the returned json object.
-            formResponseObject(jsonResponse, fileSplits, recordType, pkStrBuf.toString(), temp,
-                    hcc.getNodeControllerInfos());
+                boolean temp = dataset.getDatasetDetails().isTemp();
+                FileSplit[] fileSplits =
+                        metadataProvider.splitsForDataset(mdTxnCtx, dataverseName, datasetName, datasetName, temp);
+                ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
+                        dataset.getItemTypeName());
+                List<List<String>> primaryKeys = DatasetUtil.getPartitioningKeys(dataset);
+                StringBuilder pkStrBuf = new StringBuilder();
+                for (List<String> keys : primaryKeys) {
+                    for (String key : keys) {
+                        pkStrBuf.append(key).append(",");
+                    }
+                }
+                pkStrBuf.delete(pkStrBuf.length() - 1, pkStrBuf.length());
+                // Constructs the returned json object.
+                formResponseObject(jsonResponse, fileSplits, recordType, pkStrBuf.toString(), temp,
+                        hcc.getNodeControllerInfos());
 
-            // Flush the cached contents of the dataset to file system.
-            FlushDatasetUtil.flushDataset(hcc, metadataProvider, dataverseName, datasetName, datasetName);
+                // Flush the cached contents of the dataset to file system.
+                FlushDatasetUtil.flushDataset(hcc, metadataProvider, dataverseName, datasetName, datasetName);
 
-            // Metadata transaction commits.
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            // Writes file splits.
-            out.write(jsonResponse.toString());
-            out.flush();
+                // Metadata transaction commits.
+                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                // Writes file splits.
+                out.write(jsonResponse.toString());
+                out.flush();
+            } finally {
+                metadataProvider.getLocks().unlock();
+            }
         } catch (Exception e) {
             LOGGER.log(Level.WARNING, "Failure handling a request", e);
             out.println(e.getMessage());

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 4ed8510..59b3e88 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -135,13 +135,13 @@ import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 import org.apache.asterix.metadata.entities.NodeGroup;
 import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
+import org.apache.asterix.metadata.lock.ExternalDatasetsRegistry;
+import org.apache.asterix.metadata.lock.MetadataLockManager;
 import org.apache.asterix.metadata.utils.DatasetUtil;
-import org.apache.asterix.metadata.utils.ExternalDatasetsRegistry;
 import org.apache.asterix.metadata.utils.ExternalIndexingOperations;
 import org.apache.asterix.metadata.utils.IndexUtil;
 import org.apache.asterix.metadata.utils.KeyFieldTypeUtil;
 import org.apache.asterix.metadata.utils.MetadataConstants;
-import org.apache.asterix.metadata.utils.MetadataLockManager;
 import org.apache.asterix.metadata.utils.MetadataUtil;
 import org.apache.asterix.metadata.utils.TypeUtil;
 import org.apache.asterix.om.types.ARecordType;
@@ -411,7 +411,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         String dvName = dvd.getDataverseName().getValue();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.acquireDataverseReadLock(dvName);
+        MetadataLockManager.INSTANCE.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
         try {
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
             if (dv == null) {
@@ -423,7 +423,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw new MetadataException(e);
         } finally {
-            MetadataLockManager.INSTANCE.releaseDataverseReadLock(dvName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -434,7 +434,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
 
-        MetadataLockManager.INSTANCE.acquireDataverseReadLock(dvName);
+        MetadataLockManager.INSTANCE.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
         try {
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
             if (dv != null) {
@@ -452,7 +452,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.releaseDataverseReadLock(dvName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -509,18 +509,15 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-        MetadataLockManager.INSTANCE.createDatasetBegin(dataverseName, itemTypeDataverseName,
-                itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName,
+        MetadataLockManager.INSTANCE.createDatasetBegin(metadataProvider.getLocks(), dataverseName,
+                itemTypeDataverseName, itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName,
                 metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy,
                 dataverseName + "." + datasetName, defaultCompactionPolicy);
         Dataset dataset = null;
         Index primaryIndex = null;
         try {
-
             IDatasetDetails datasetDetails = null;
-            Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    datasetName);
+            Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
             if (ds != null) {
                 if (dd.getIfNotExists()) {
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -671,13 +668,9 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                             + "." + datasetName + ") couldn't be removed from the metadata", e);
                 }
             }
-
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.createDatasetEnd(dataverseName, itemTypeDataverseName,
-                    itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName,
-                    metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy,
-                    dataverseName + "." + datasetName, defaultCompactionPolicy);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -770,9 +763,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-        MetadataLockManager.INSTANCE.createIndexBegin(dataverseName, dataverseName + "." + datasetName);
-
+        MetadataLockManager.INSTANCE.createIndexBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + datasetName);
         String indexName = null;
         JobSpecification spec = null;
         Dataset ds = null;
@@ -784,8 +776,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         boolean datasetLocked = false;
         Index index = null;
         try {
-            ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    datasetName);
+            ds = metadataProvider.findDataset(dataverseName, datasetName);
             if (ds == null) {
                 throw new AlgebricksException(
                         "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
@@ -1105,7 +1096,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             }
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.createIndexEnd(dataverseName, dataverseName + "." + datasetName);
+            metadataProvider.getLocks().unlock();
             if (datasetLocked) {
                 ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
             }
@@ -1118,7 +1109,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         String typeName = stmtCreateType.getIdent().getValue();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.createTypeBegin(dataverseName, dataverseName + "." + typeName);
+        MetadataLockManager.INSTANCE.createTypeBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + typeName);
         try {
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
             if (dv == null) {
@@ -1145,7 +1137,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.createTypeEnd(dataverseName, dataverseName + "." + typeName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -1162,8 +1154,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.acquireDataverseWriteLock(dataverseName);
         List<JobSpecification> jobsToExecute = new ArrayList<>();
+        MetadataLockManager.INSTANCE.acquireDataverseWriteLock(metadataProvider.getLocks(), dataverseName);
         try {
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
             if (dv == null) {
@@ -1177,12 +1169,16 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             // # disconnect all feeds from any datasets in the dataverse.
             IActiveEntityEventsListener[] activeListeners = ActiveJobNotificationHandler.INSTANCE.getEventListeners();
             Identifier dvId = new Identifier(dataverseName);
+            MetadataProvider tempMdProvider = new MetadataProvider(metadataProvider.getDefaultDataverse(),
+                    metadataProvider.getStorageComponentProvider());
+            tempMdProvider.setConfig(metadataProvider.getConfig());
             for (IActiveEntityEventsListener listener : activeListeners) {
                 EntityId activeEntityId = listener.getEntityId();
                 if (activeEntityId.getExtensionName().equals(Feed.EXTENSION_NAME)
                         && activeEntityId.getDataverse().equals(dataverseName)) {
+                    tempMdProvider.getLocks().reset();
                     stopFeedBeforeDelete(new Pair<>(dvId, new Identifier(activeEntityId.getEntityName())),
-                            metadataProvider);
+                            tempMdProvider);
                     // prepare job to remove feed log storage
                     jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(
                             MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
@@ -1199,8 +1195,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                             MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                     for (int k = 0; k < indexes.size(); k++) {
                         if (indexes.get(k).isSecondaryIndex()) {
-                            jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider,
-                                    datasets.get(j)));
+                            jobsToExecute.add(
+                                    IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider, datasets.get(j)));
                         }
                     }
                     Index primaryIndex =
@@ -1215,8 +1211,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                             jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider,
                                     datasets.get(j)));
                         } else {
-                            jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider,
-                                    datasets.get(j)));
+                            jobsToExecute.add(
+                                    IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider, datasets.get(j)));
                         }
                     }
                     ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(datasets.get(j));
@@ -1285,7 +1281,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
 
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.releaseDataverseWriteLock(dataverseName);
+            metadataProvider.getLocks().unlock();
             ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
         }
     }
@@ -1309,7 +1305,13 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         DropDatasetStatement stmtDelete = (DropDatasetStatement) stmt;
         String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
         String datasetName = stmtDelete.getDatasetName().getValue();
-        doDropDataset(dataverseName, datasetName, metadataProvider, stmtDelete.getIfExists(), hcc);
+        MetadataLockManager.INSTANCE.dropDatasetBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + datasetName);
+        try {
+            doDropDataset(dataverseName, datasetName, metadataProvider, stmtDelete.getIfExists(), hcc);
+        } finally {
+            metadataProvider.getLocks().unlock();
+        }
     }
 
     public static void doDropDataset(String dataverseName, String datasetName, MetadataProvider metadataProvider,
@@ -1319,10 +1321,9 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                 new MutableObject<>(MetadataManager.INSTANCE.beginTransaction());
         MutableBoolean bActiveTxn = new MutableBoolean(true);
         metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
-        MetadataLockManager.INSTANCE.dropDatasetBegin(dataverseName, dataverseName + "." + datasetName);
         List<JobSpecification> jobsToExecute = new ArrayList<>();
         try {
-            Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx.getValue(), dataverseName, datasetName);
+            Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
             if (ds == null) {
                 if (ifExists) {
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
@@ -1367,7 +1368,6 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             }
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.dropDatasetEnd(dataverseName, dataverseName + "." + datasetName);
             ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
         }
     }
@@ -1382,16 +1382,14 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-        MetadataLockManager.INSTANCE.dropIndexBegin(dataverseName, dataverseName + "." + datasetName);
-
+        List<JobSpecification> jobsToExecute = new ArrayList<>();
+        MetadataLockManager.INSTANCE.dropIndexBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + datasetName);
         String indexName = null;
         // For external index
         boolean dropFilesIndex = false;
-        List<JobSpecification> jobsToExecute = new ArrayList<>();
         try {
-
-            Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
+            Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
             if (ds == null) {
                 throw new AlgebricksException(
                         "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
@@ -1558,7 +1556,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             throw e;
 
         } finally {
-            MetadataLockManager.INSTANCE.dropIndexEnd(dataverseName, dataverseName + "." + datasetName);
+            metadataProvider.getLocks().unlock();
             ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
         }
     }
@@ -1571,8 +1569,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
 
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.dropTypeBegin(dataverseName, dataverseName + "." + typeName);
-
+        MetadataLockManager.INSTANCE.dropTypeBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + typeName);
         try {
             Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
             if (dt == null) {
@@ -1587,7 +1585,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.dropTypeEnd(dataverseName, dataverseName + "." + typeName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -1596,7 +1594,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         String nodegroupName = stmtDelete.getNodeGroupName().getValue();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(nodegroupName);
+        MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodegroupName);
         try {
             NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodegroupName);
             if (ng == null) {
@@ -1612,7 +1610,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.releaseNodeGroupWriteLock(nodegroupName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -1624,9 +1622,9 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
 
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.functionStatementBegin(dataverse, dataverse + "." + functionName);
+        MetadataLockManager.INSTANCE.functionStatementBegin(metadataProvider.getLocks(), dataverse,
+                dataverse + "." + functionName);
         try {
-
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
             if (dv == null) {
                 throw new AlgebricksException("There is no dataverse with this name " + dataverse + ".");
@@ -1641,7 +1639,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.functionStatementEnd(dataverse, dataverse + "." + functionName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -1651,7 +1649,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         signature.setNamespace(getActiveDataverseName(signature.getNamespace()));
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.functionStatementBegin(signature.getNamespace(),
+        MetadataLockManager.INSTANCE.functionStatementBegin(metadataProvider.getLocks(), signature.getNamespace(),
                 signature.getNamespace() + "." + signature.getName());
         try {
             Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
@@ -1667,8 +1665,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.functionStatementEnd(signature.getNamespace(),
-                    signature.getNamespace() + "." + signature.getName());
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -1680,7 +1677,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.modifyDatasetBegin(dataverseName, dataverseName + "." + datasetName);
+        MetadataLockManager.INSTANCE.modifyDatasetBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + datasetName);
         try {
             CompiledLoadFromFileStatement cls =
                     new CompiledLoadFromFileStatement(dataverseName, loadStmt.getDatasetName().getValue(),
@@ -1697,7 +1695,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             }
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.modifyDatasetEnd(dataverseName, dataverseName + "." + datasetName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -1707,21 +1705,16 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             throws Exception {
         InsertStatement stmtInsertUpsert = (InsertStatement) stmt;
         String dataverseName = getActiveDataverse(stmtInsertUpsert.getDataverseName());
-        Query query = stmtInsertUpsert.getQuery();
-
         final IMetadataLocker locker = new IMetadataLocker() {
             @Override
             public void lock() {
-                MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseName,
-                        dataverseName + "." + stmtInsertUpsert.getDatasetName(), query.getDataverses(),
-                        query.getDatasets());
+                MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(metadataProvider.getLocks(),
+                        dataverseName + "." + stmtInsertUpsert.getDatasetName());
             }
 
             @Override
             public void unlock() {
-                MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseName,
-                        dataverseName + "." + stmtInsertUpsert.getDatasetName(), query.getDataverses(),
-                        query.getDatasets());
+                metadataProvider.getLocks().unlock();
             }
         };
         final IStatementCompiler compiler = () -> {
@@ -1764,16 +1757,13 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
 
     public JobSpecification handleDeleteStatement(MetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc, boolean compileOnly) throws Exception {
-
         DeleteStatement stmtDelete = (DeleteStatement) stmt;
         String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseName,
-                dataverseName + "." + stmtDelete.getDatasetName(), stmtDelete.getDataverses(),
-                stmtDelete.getDatasets());
-
+        MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(metadataProvider.getLocks(),
+                dataverseName + "." + stmtDelete.getDatasetName());
         try {
             metadataProvider.setWriteTransaction(true);
             CompiledDeleteStatement clfrqs = new CompiledDeleteStatement(stmtDelete.getVariableExpr(), dataverseName,
@@ -1788,16 +1778,13 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                 JobUtils.runJob(hcc, jobSpec, true);
             }
             return jobSpec;
-
         } catch (Exception e) {
             if (bActiveTxn) {
                 abort(e, e, mdTxnCtx);
             }
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseName,
-                    dataverseName + "." + stmtDelete.getDatasetName(), stmtDelete.getDataverses(),
-                    stmtDelete.getDatasets());
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -1852,7 +1839,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         String feedName = cfs.getFeedName().getValue();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.createFeedBegin(dataverseName, dataverseName + "." + feedName);
+        MetadataLockManager.INSTANCE.createFeedBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + feedName);
         Feed feed = null;
         try {
             feed = MetadataManager.INSTANCE.getFeed(metadataProvider.getMetadataTxnContext(), dataverseName, feedName);
@@ -1873,7 +1861,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.createFeedEnd(dataverseName, dataverseName + "." + feedName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -1886,7 +1874,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         CreateFeedPolicyStatement cfps = (CreateFeedPolicyStatement) stmt;
         dataverse = getActiveDataverse(null);
         policy = cfps.getPolicyName();
-        MetadataLockManager.INSTANCE.createFeedPolicyBegin(dataverse, dataverse + "." + policy);
+        MetadataLockManager.INSTANCE.createFeedPolicyBegin(metadataProvider.getLocks(), dataverse,
+                dataverse + "." + policy);
         try {
             mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
             metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -1935,7 +1924,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw new HyracksDataException(e);
         } finally {
-            MetadataLockManager.INSTANCE.createFeedPolicyEnd(dataverse, dataverse + "." + policy);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -1946,8 +1935,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         String feedName = stmtFeedDrop.getFeedName().getValue();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.dropFeedBegin(dataverseName, dataverseName + "." + feedName);
-
+        MetadataLockManager.INSTANCE.dropFeedBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + feedName);
         try {
             Feed feed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, feedName);
             if (feed == null) {
@@ -1980,7 +1969,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.dropFeedEnd(dataverseName, dataverseName + "." + feedName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -1990,8 +1979,8 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         FeedPolicyDropStatement stmtFeedPolicyDrop = (FeedPolicyDropStatement) stmt;
         String dataverseName = getActiveDataverse(stmtFeedPolicyDrop.getDataverseName());
         String policyName = stmtFeedPolicyDrop.getPolicyName().getValue();
-        MetadataLockManager.INSTANCE.dropFeedPolicyBegin(dataverseName, dataverseName + "." + policyName);
-
+        MetadataLockManager.INSTANCE.dropFeedPolicyBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + policyName);
         try {
             FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverseName, policyName);
             if (feedPolicy == null) {
@@ -2007,7 +1996,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.dropFeedPolicyEnd(dataverseName, dataverseName + "." + policyName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -2035,15 +2024,15 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             throw new AlgebricksException("Feed " + feedName + " is started already.");
         }
         // Start
+        MetadataLockManager.INSTANCE.startFeedBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + feedName, feedConnections);
         try {
-            MetadataLockManager.INSTANCE.startFeedBegin(dataverseName, dataverseName + "." + feedName, feedConnections);
             // Prepare policy
             List<IDataset> datasets = new ArrayList<>();
             for (FeedConnection connection : feedConnections) {
-                datasets.add(MetadataManager.INSTANCE.getDataset(mdTxnCtx, connection.getDataverseName(),
-                        connection.getDatasetName()));
+                Dataset ds = metadataProvider.findDataset(connection.getDataverseName(), connection.getDatasetName());
+                datasets.add(ds);
             }
-
             org.apache.commons.lang3.tuple.Pair<JobSpecification, AlgebricksAbsolutePartitionConstraint> jobInfo =
                     FeedOperations.buildStartFeedJob(sessionConfig, metadataProvider, feed, feedConnections,
                             compilationProvider, storageComponentProvider, qtFactory, hcc);
@@ -2064,7 +2053,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             }
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.startFeedEnd(dataverseName, dataverseName + "." + feedName, feedConnections);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -2083,7 +2072,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         // Transaction
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.StopFeedBegin(dataverseName, feedName);
+        MetadataLockManager.INSTANCE.stopFeedBegin(metadataProvider.getLocks(), dataverseName, feedName);
         try {
             // validate
             FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName, mdTxnCtx);
@@ -2097,7 +2086,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.StopFeedEnd(dataverseName, feedName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -2110,17 +2099,17 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         String policyName = cfs.getPolicy();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        // validation
-        FeedMetadataUtil.validateIfDatasetExists(dataverseName, datasetName, mdTxnCtx);
-        Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName,
-                metadataProvider.getMetadataTxnContext());
-        ARecordType outputType = FeedMetadataUtil.getOutputType(feed, feed.getAdapterConfiguration(),
-                ExternalDataConstants.KEY_TYPE_NAME);
-        List<FunctionSignature> appliedFunctions = cfs.getAppliedFunctions();
         // Transaction handling
-        MetadataLockManager.INSTANCE.connectFeedBegin(dataverseName, dataverseName + "." + datasetName,
-                dataverseName + "." + feedName);
+        MetadataLockManager.INSTANCE.connectFeedBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + datasetName, dataverseName + "." + feedName);
         try {
+            // validation
+            FeedMetadataUtil.validateIfDatasetExists(metadataProvider, dataverseName, datasetName, mdTxnCtx);
+            Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName,
+                    metadataProvider.getMetadataTxnContext());
+            ARecordType outputType = FeedMetadataUtil.getOutputType(feed, feed.getAdapterConfiguration(),
+                    ExternalDataConstants.KEY_TYPE_NAME);
+            List<FunctionSignature> appliedFunctions = cfs.getAppliedFunctions();
             fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(), dataverseName,
                     feedName, datasetName);
             if (fc != null) {
@@ -2134,8 +2123,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.connectFeedEnd(dataverseName, dataverseName + "." + datasetName,
-                    dataverseName + "." + feedName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -2146,10 +2134,11 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         String feedName = cfs.getFeedName().getValue();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.disconnectFeedBegin(dataverseName, dataverseName + "." + datasetName,
-                dataverseName + "." + cfs.getFeedName());
+        MetadataLockManager.INSTANCE.disconnectFeedBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + datasetName, dataverseName + "." + cfs.getFeedName());
         try {
-            FeedMetadataUtil.validateIfDatasetExists(dataverseName, cfs.getDatasetName().getValue(), mdTxnCtx);
+            FeedMetadataUtil.validateIfDatasetExists(metadataProvider, dataverseName, cfs.getDatasetName().getValue(),
+                    mdTxnCtx);
             FeedMetadataUtil.validateIfFeedExists(dataverseName, cfs.getFeedName().getValue(), mdTxnCtx);
             FeedConnection fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(),
                     dataverseName, feedName, datasetName);
@@ -2162,8 +2151,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.disconnectFeedEnd(dataverseName, dataverseName + "." + datasetName,
-                    dataverseName + "." + cfs.getFeedName());
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -2175,10 +2163,11 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.compactBegin(dataverseName, dataverseName + "." + datasetName);
         List<JobSpecification> jobsToExecute = new ArrayList<>();
+        MetadataLockManager.INSTANCE.compactBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + datasetName);
         try {
-            Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
+            Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
             if (ds == null) {
                 throw new AlgebricksException(
                         "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName + ".");
@@ -2230,7 +2219,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             }
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.compactEnd(dataverseName, dataverseName + "." + datasetName);
+            metadataProvider.getLocks().unlock();
             ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
         }
     }
@@ -2270,12 +2259,11 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         final IMetadataLocker locker = new IMetadataLocker() {
             @Override
             public void lock() {
-                MetadataLockManager.INSTANCE.queryBegin(activeDataverse, query.getDataverses(), query.getDatasets());
             }
 
             @Override
             public void unlock() {
-                MetadataLockManager.INSTANCE.queryEnd(query.getDataverses(), query.getDatasets());
+                metadataProvider.getLocks().unlock();
                 // release external datasets' locks acquired during compilation of the query
                 ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
             }
@@ -2389,11 +2377,11 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
                 printer.print(jobId);
             }
         } finally {
+            locker.unlock();
             // No matter the job succeeds or fails, removes it into the context.
             if (ctx != null && clientContextId != null) {
                 ctx.removeJobIdFromClientContextId(clientContextId);
             }
-            locker.unlock();
         }
     }
 
@@ -2404,8 +2392,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
 
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(ngName);
-
+        MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), ngName);
         try {
             NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, ngName);
             if (ng != null) {
@@ -2425,7 +2412,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             abort(e, e, mdTxnCtx);
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.releaseNodeGroupWriteLock(ngName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -2436,7 +2423,6 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         String datasetName = stmtRefresh.getDatasetName().getValue();
         TransactionState transactionState = TransactionState.COMMIT;
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-        MetadataLockManager.INSTANCE.refreshDatasetBegin(dataverseName, dataverseName + "." + datasetName);
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         JobSpecification spec = null;
@@ -2449,10 +2435,10 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         Dataset transactionDataset = null;
         boolean lockAquired = false;
         boolean success = false;
+        MetadataLockManager.INSTANCE.refreshDatasetBegin(metadataProvider.getLocks(), dataverseName,
+                dataverseName + "." + datasetName);
         try {
-            ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    datasetName);
-
+            ds = metadataProvider.findDataset(dataverseName, datasetName);
             // Dataset exists ?
             if (ds == null) {
                 throw new AlgebricksException(
@@ -2649,7 +2635,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             if (lockAquired) {
                 ExternalDatasetsRegistry.INSTANCE.refreshEnd(ds, success);
             }
-            MetadataLockManager.INSTANCE.refreshDatasetEnd(dataverseName, dataverseName + "." + datasetName);
+            metadataProvider.getLocks().unlock();
         }
     }
 
@@ -2676,15 +2662,11 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
         String dataverseNameTo = getActiveDataverse(pregelixStmt.getDataverseNameTo());
         String datasetNameFrom = pregelixStmt.getDatasetNameFrom().getValue();
         String datasetNameTo = pregelixStmt.getDatasetNameTo().getValue();
-
+        String fullyQualifiedDatasetNameTo =
+                DatasetUtil.isFullyQualifiedName(datasetNameTo) ? datasetNameTo : dataverseNameTo + '.' + datasetNameTo;
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        List<String> readDataverses = new ArrayList<>();
-        readDataverses.add(dataverseNameFrom);
-        List<String> readDatasets = new ArrayList<>();
-        readDatasets.add(datasetNameFrom);
-        MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseNameTo, datasetNameTo, readDataverses,
-                readDatasets);
+        MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(metadataProvider.getLocks(), fullyQualifiedDatasetNameTo);
         try {
             prepareRunExternalRuntime(metadataProvider, hcc, pregelixStmt, dataverseNameFrom, dataverseNameTo,
                     datasetNameFrom, datasetNameTo, mdTxnCtx);
@@ -2729,8 +2711,7 @@ public class QueryTranslator extends AbstractLangTranslator implements IStatemen
             }
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseNameTo, datasetNameTo, readDataverses,
-                    readDatasets);
+            metadataProvider.getLocks().unlock();
         }
     }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
index 002e270..7bd1e62 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
@@ -111,93 +111,98 @@ public class GlobalRecoveryManager implements IGlobalRecoveryManager {
                         for (Dataverse dataverse : dataverses) {
                             if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
                                 MetadataProvider metadataProvider = new MetadataProvider(dataverse, componentProvider);
-                                List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx,
-                                        dataverse.getDataverseName());
-                                for (Dataset dataset : datasets) {
-                                    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-                                        // External dataset
-                                        // Get indexes
-                                        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx,
-                                                dataset.getDataverseName(), dataset.getDatasetName());
-                                        if (!indexes.isEmpty()) {
-                                            // Get the state of the dataset
-                                            ExternalDatasetDetails dsd =
-                                                    (ExternalDatasetDetails) dataset.getDatasetDetails();
-                                            TransactionState datasetState = dsd.getState();
-                                            if (datasetState == TransactionState.BEGIN) {
-                                                List<ExternalFile> files = MetadataManager.INSTANCE
-                                                        .getDatasetExternalFiles(mdTxnCtx, dataset);
-                                                // if persumed abort, roll backward
-                                                // 1. delete all pending files
-                                                for (ExternalFile file : files) {
-                                                    if (file.getPendingOp() != ExternalFilePendingOp.NO_OP) {
-                                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+                                try {
+                                    List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx,
+                                            dataverse.getDataverseName());
+                                    for (Dataset dataset : datasets) {
+                                        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+                                            // External dataset
+                                            // Get indexes
+                                            List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx,
+                                                    dataset.getDataverseName(), dataset.getDatasetName());
+                                            if (!indexes.isEmpty()) {
+                                                // Get the state of the dataset
+                                                ExternalDatasetDetails dsd =
+                                                        (ExternalDatasetDetails) dataset.getDatasetDetails();
+                                                TransactionState datasetState = dsd.getState();
+                                                if (datasetState == TransactionState.BEGIN) {
+                                                    List<ExternalFile> files = MetadataManager.INSTANCE
+                                                            .getDatasetExternalFiles(mdTxnCtx, dataset);
+                                                    // if presumed abort, roll backward
+                                                    // 1. delete all pending files
+                                                    for (ExternalFile file : files) {
+                                                        if (file.getPendingOp() != ExternalFilePendingOp.NO_OP) {
+                                                            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+                                                        }
                                                     }
-                                                }
-                                                // 2. clean artifacts in NCs
-                                                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                                                JobSpecification jobSpec = ExternalIndexingOperations
-                                                        .buildAbortOp(dataset, indexes, metadataProvider);
-                                                executeHyracksJob(jobSpec);
-                                                // 3. correct the dataset state
-                                                ((ExternalDatasetDetails) dataset.getDatasetDetails())
-                                                        .setState(TransactionState.COMMIT);
-                                                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
-                                                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                                                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                                            } else if (datasetState == TransactionState.READY_TO_COMMIT) {
-                                                List<ExternalFile> files = MetadataManager.INSTANCE
-                                                        .getDatasetExternalFiles(mdTxnCtx, dataset);
-                                                // if ready to commit, roll forward
-                                                // 1. commit indexes in NCs
-                                                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                                                JobSpecification jobSpec = ExternalIndexingOperations
-                                                        .buildRecoverOp(dataset, indexes, metadataProvider);
-                                                executeHyracksJob(jobSpec);
-                                                // 2. add pending files in metadata
-                                                for (ExternalFile file : files) {
-                                                    if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
-                                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
-                                                        file.setPendingOp(ExternalFilePendingOp.NO_OP);
-                                                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
-                                                    } else if (file.getPendingOp() == ExternalFilePendingOp.DROP_OP) {
-                                                        // find original file
-                                                        for (ExternalFile originalFile : files) {
-                                                            if (originalFile.getFileName()
-                                                                    .equals(file.getFileName())) {
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        file);
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        originalFile);
-                                                                break;
+                                                    // 2. clean artifacts in NCs
+                                                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                                                    JobSpecification jobSpec = ExternalIndexingOperations
+                                                            .buildAbortOp(dataset, indexes, metadataProvider);
+                                                    executeHyracksJob(jobSpec);
+                                                    // 3. correct the dataset state
+                                                    ((ExternalDatasetDetails) dataset.getDatasetDetails())
+                                                            .setState(TransactionState.COMMIT);
+                                                    MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
+                                                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                                                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                                                } else if (datasetState == TransactionState.READY_TO_COMMIT) {
+                                                    List<ExternalFile> files = MetadataManager.INSTANCE
+                                                            .getDatasetExternalFiles(mdTxnCtx, dataset);
+                                                    // if ready to commit, roll forward
+                                                    // 1. commit indexes in NCs
+                                                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                                                    JobSpecification jobSpec = ExternalIndexingOperations
+                                                            .buildRecoverOp(dataset, indexes, metadataProvider);
+                                                    executeHyracksJob(jobSpec);
+                                                    // 2. add pending files in metadata
+                                                    for (ExternalFile file : files) {
+                                                        if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
+                                                            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+                                                            file.setPendingOp(ExternalFilePendingOp.NO_OP);
+                                                            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
+                                                        } else if (file
+                                                                .getPendingOp() == ExternalFilePendingOp.DROP_OP) {
+                                                            // find original file
+                                                            for (ExternalFile originalFile : files) {
+                                                                if (originalFile.getFileName()
+                                                                        .equals(file.getFileName())) {
+                                                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                            file);
+                                                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                            originalFile);
+                                                                    break;
+                                                                }
                                                             }
-                                                        }
-                                                    } else if (file
-                                                            .getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
-                                                        // find original file
-                                                        for (ExternalFile originalFile : files) {
-                                                            if (originalFile.getFileName()
-                                                                    .equals(file.getFileName())) {
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        file);
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        originalFile);
-                                                                originalFile.setSize(file.getSize());
-                                                                MetadataManager.INSTANCE.addExternalFile(mdTxnCtx,
-                                                                        originalFile);
+                                                        } else if (file
+                                                                .getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
+                                                            // find original file
+                                                            for (ExternalFile originalFile : files) {
+                                                                if (originalFile.getFileName()
+                                                                        .equals(file.getFileName())) {
+                                                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                            file);
+                                                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                            originalFile);
+                                                                    originalFile.setSize(file.getSize());
+                                                                    MetadataManager.INSTANCE.addExternalFile(mdTxnCtx,
+                                                                            originalFile);
+                                                                }
                                                             }
                                                         }
                                                     }
+                                                    // 3. correct the dataset state
+                                                    ((ExternalDatasetDetails) dataset.getDatasetDetails())
+                                                            .setState(TransactionState.COMMIT);
+                                                    MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
+                                                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                                                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                                                 }
-                                                // 3. correct the dataset state
-                                                ((ExternalDatasetDetails) dataset.getDatasetDetails())
-                                                        .setState(TransactionState.COMMIT);
-                                                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
-                                                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                                                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                                             }
                                         }
                                     }
+                                } finally {
+                                    metadataProvider.getLocks().unlock();
                                 }
                             }
                         }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
index 7da3b32..fe08b8c 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
@@ -169,12 +169,16 @@ public class ConnectorApiServletTest {
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         // Retrieves file splits of the dataset.
         MetadataProvider metadataProvider = new MetadataProvider(null, new StorageComponentProvider());
-        metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
-        ARecordType recordType =
-                (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
-        // Metadata transaction commits.
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        return recordType;
+        try {
+            metadataProvider.setMetadataTxnContext(mdTxnCtx);
+            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+            ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
+                    dataset.getItemTypeName());
+            // Metadata transaction commits.
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+            return recordType;
+        } finally {
+            metadataProvider.getLocks().unlock();
+        }
     }
 }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
index 53f5f62..4b2af80 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
@@ -162,11 +162,10 @@ public class TestNodeController {
     }
 
     public Pair<LSMInsertDeleteOperatorNodePushable, CommitRuntime> getInsertPipeline(IHyracksTaskContext ctx,
-            Dataset dataset, IAType[] primaryKeyTypes,
-            ARecordType recordType, ARecordType metaType, ILSMMergePolicyFactory mergePolicyFactory,
-            Map<String, String> mergePolicyProperties, int[] filterFields, int[] primaryKeyIndexes,
-            List<Integer> primaryKeyIndicators, StorageComponentProvider storageComponentProvider)
-            throws AlgebricksException, HyracksDataException {
+            Dataset dataset, IAType[] primaryKeyTypes, ARecordType recordType, ARecordType metaType,
+            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyProperties, int[] filterFields,
+            int[] primaryKeyIndexes, List<Integer> primaryKeyIndicators,
+            StorageComponentProvider storageComponentProvider) throws AlgebricksException, HyracksDataException {
         PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
                 mergePolicyFactory, mergePolicyProperties, filterFields, primaryKeyIndexes, primaryKeyIndicators,
                 storageComponentProvider);
@@ -301,8 +300,13 @@ public class TestNodeController {
                 MetadataUtil.PENDING_NO_OP);
         Index index = primaryIndexInfo.getIndex();
         MetadataProvider mdProvider = new MetadataProvider(dataverse, storageComponentProvider);
-        return dataset.getIndexDataflowHelperFactory(mdProvider, index, primaryIndexInfo.recordType,
-                primaryIndexInfo.metaType, primaryIndexInfo.mergePolicyFactory, primaryIndexInfo.mergePolicyProperties);
+        try {
+            return dataset.getIndexDataflowHelperFactory(mdProvider, index, primaryIndexInfo.recordType,
+                    primaryIndexInfo.metaType, primaryIndexInfo.mergePolicyFactory,
+                    primaryIndexInfo.mergePolicyProperties);
+        } finally {
+            mdProvider.getLocks().unlock();
+        }
     }
 
     public IIndexDataflowHelper getPrimaryIndexDataflowHelper(Dataset dataset, IAType[] primaryKeyTypes,

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
index f3bebbe..1d8ae5e 100644
--- a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
+++ b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
@@ -41,10 +41,8 @@ import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.FeedConnection;
 import org.apache.asterix.metadata.entities.Function;
 import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 
 /**
  * Represents the AQL statement for subscribing to a feed.
@@ -70,9 +68,9 @@ public class SubscribeFeedStatement implements Statement {
     public void initialize(MetadataTransactionContext mdTxnCtx) throws MetadataException {
         this.query = new Query(false);
         EntityId sourceFeedId = connectionRequest.getReceivingFeedId();
-        Feed subscriberFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx,
-                connectionRequest.getReceivingFeedId().getDataverse(),
-                connectionRequest.getReceivingFeedId().getEntityName());
+        Feed subscriberFeed =
+                MetadataManager.INSTANCE.getFeed(mdTxnCtx, connectionRequest.getReceivingFeedId().getDataverse(),
+                        connectionRequest.getReceivingFeedId().getEntityName());
         if (subscriberFeed == null) {
             throw new IllegalStateException(" Subscriber feed " + subscriberFeed + " not found.");
         }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AqlDeleteRewriteVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AqlDeleteRewriteVisitor.java b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AqlDeleteRewriteVisitor.java
index 9268422..57370c5 100644
--- a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AqlDeleteRewriteVisitor.java
+++ b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AqlDeleteRewriteVisitor.java
@@ -41,17 +41,17 @@ public class AqlDeleteRewriteVisitor extends AbstractAqlAstVisitor<Void, Void> {
 
     @Override
     public Void visit(DeleteStatement deleteStmt, Void visitArg) {
-        List<Expression> arguments = new ArrayList<Expression>();
+        List<Expression> arguments = new ArrayList<>();
         Identifier dataverseName = deleteStmt.getDataverseName();
         Identifier datasetName = deleteStmt.getDatasetName();
         String arg = dataverseName == null ? datasetName.getValue()
                 : dataverseName.getValue() + "." + datasetName.getValue();
         LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
         arguments.add(argumentLiteral);
-        CallExpr callExpression = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1),
-                arguments);
+        CallExpr callExpression =
+                new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1), arguments);
 
-        List<Clause> clauseList = new ArrayList<Clause>();
+        List<Clause> clauseList = new ArrayList<>();
         VariableExpr var = deleteStmt.getVariableExpr();
         Clause forClause = new ForClause(var, callExpression);
         clauseList.add(forClause);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj b/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
index ba699f0..b2909c4 100644
--- a/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
+++ b/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
@@ -1015,10 +1015,6 @@ DeleteStatement DeleteStatement() throws ParseException:
   VariableExpr var = null;
   Expression condition = null;
   Pair<Identifier, Identifier> nameComponents;
-  // This is related to the new metadata lock management
-  setDataverses(new ArrayList<String>());
-  setDatasets(new ArrayList<String>());
-
 }
 {
   <DELETE> var = Variable()
@@ -1029,13 +1025,8 @@ DeleteStatement DeleteStatement() throws ParseException:
   (<WHERE> condition = Expression())?
     {
       // First we get the dataverses and datasets that we want to lock
-      List<String> dataverses = getDataverses();
-      List<String> datasets = getDatasets();
-      // we remove the pointer to the dataverses and datasets
-      setDataverses(null);
-      setDatasets(null);
       return new DeleteStatement(var, nameComponents.first, nameComponents.second,
-          condition, getVarCounter(), dataverses, datasets);
+          condition, getVarCounter());
     }
 }
 
@@ -1621,9 +1612,6 @@ FunctionDecl FunctionDeclaration() throws ParseException:
 Query Query() throws ParseException:
 {
   Query query = new Query(false);
-  // we set the pointers to the dataverses and datasets lists to fill them with entities to be locked
-  setDataverses(query.getDataverses());
-  setDatasets(query.getDatasets());
   Expression expr;
 }
 {
@@ -1631,9 +1619,6 @@ Query Query() throws ParseException:
     {
       query.setBody(expr);
       query.setVarCounter(getVarCounter());
-      // we remove the pointers to the locked entities before we return the query object
-      setDataverses(null);
-      setDatasets(null);
       return query;
     }
 }
@@ -2282,12 +2267,6 @@ Expression DatasetAccessExpression() throws ParseException:
       LiteralExpr ds = new LiteralExpr();
       ds.setValue( new StringLiteral(name) );
       nameArg = ds;
-      if(arg2 != null){
-          addDataverse(arg1.toString());
-          addDataset(name);
-      } else {
-          addDataset(defaultDataverse + "." + name);
-      }
     }
   | ( <LEFTPAREN> nameArg = Expression() <RIGHTPAREN> ) )
     {

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
index 1195d37..771ae16 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.lang.common.parser;
 
-import java.util.List;
 import java.util.Stack;
 
 import org.apache.asterix.common.functions.FunctionSignature;
@@ -35,17 +34,14 @@ public class ScopeChecker {
 
     protected Counter varCounter = new Counter(-1);
 
-    protected Stack<Scope> scopeStack = new Stack<Scope>();
+    protected Stack<Scope> scopeStack = new Stack<>();
 
-    protected Stack<Scope> forbiddenScopeStack = new Stack<Scope>();
+    protected Stack<Scope> forbiddenScopeStack = new Stack<>();
 
     protected String[] inputLines;
 
     protected String defaultDataverse;
 
-    private List<String> dataverses;
-    private List<String> datasets;
-
     public ScopeChecker() {
         scopeStack.push(RootScopeFactory.createRootScope(this));
     }
@@ -323,32 +319,4 @@ public class ScopeChecker {
         extract.append(inputLines[endLine - 1].substring(0, endColumn - 1));
         return extract.toString().trim();
     }
-
-    public void addDataverse(String dataverseName) {
-        if (dataverses != null) {
-            dataverses.add(dataverseName);
-        }
-    }
-
-    public void addDataset(String datasetName) {
-        if (datasets != null) {
-            datasets.add(datasetName);
-        }
-    }
-
-    public void setDataverses(List<String> dataverses) {
-        this.dataverses = dataverses;
-    }
-
-    public void setDatasets(List<String> datasets) {
-        this.datasets = datasets;
-    }
-
-    public List<String> getDataverses() {
-        return dataverses;
-    }
-
-    public List<String> getDatasets() {
-        return datasets;
-    }
 }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/6eb0175f/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DeleteStatement.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DeleteStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DeleteStatement.java
index 3bd309a..7bf0db2 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DeleteStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DeleteStatement.java
@@ -18,8 +18,6 @@
  */
 package org.apache.asterix.lang.common.statement;
 
-import java.util.List;
-
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.lang.common.base.Expression;
 import org.apache.asterix.lang.common.base.Statement;
@@ -35,19 +33,15 @@ public class DeleteStatement implements Statement {
     private Identifier datasetName;
     private Expression condition;
     private int varCounter;
-    private List<String> dataverses;
-    private List<String> datasets;
     private Query rewrittenQuery;
 
     public DeleteStatement(VariableExpr vars, Identifier dataverseName, Identifier datasetName, Expression condition,
-            int varCounter, List<String> dataverses, List<String> datasets) {
+            int varCounter) {
         this.vars = vars;
         this.dataverseName = dataverseName;
         this.datasetName = datasetName;
         this.condition = condition;
         this.varCounter = varCounter;
-        this.dataverses = dataverses;
-        this.datasets = datasets;
     }
 
     @Override
@@ -88,18 +82,9 @@ public class DeleteStatement implements Statement {
         return visitor.visit(this, arg);
     }
 
-    public List<String> getDataverses() {
-        return dataverses;
-    }
-
-    public List<String> getDatasets() {
-        return datasets;
-    }
-
     @Override
     public int hashCode() {
-        return ObjectUtils.hashCodeMulti(condition, datasetName, datasets, dataverseName, dataverses, rewrittenQuery,
-                vars);
+        return ObjectUtils.hashCodeMulti(condition, datasetName, dataverseName, rewrittenQuery, vars);
     }
 
     @Override
@@ -111,11 +96,11 @@ public class DeleteStatement implements Statement {
             return false;
         }
         DeleteStatement target = (DeleteStatement) object;
-        boolean equals = ObjectUtils.equals(condition, target.condition)
-                && ObjectUtils.equals(datasetName, target.datasetName) && ObjectUtils.equals(datasets, target.datasets)
-                && ObjectUtils.equals(dataverseName, target.dataverseName);
-        return equals && ObjectUtils.equals(dataverses, target.dataverses)
-                && ObjectUtils.equals(rewrittenQuery, target.rewrittenQuery) && ObjectUtils.equals(vars, target.vars);
+        boolean equals =
+                ObjectUtils.equals(condition, target.condition) && ObjectUtils.equals(datasetName, target.datasetName)
+                        && ObjectUtils.equals(dataverseName, target.dataverseName);
+        return equals && ObjectUtils.equals(rewrittenQuery, target.rewrittenQuery)
+                && ObjectUtils.equals(vars, target.vars);
     }
 
     @Override


Mime
View raw message