jackrabbit-oak-commits mailing list archives

From: mdue...@apache.org
Subject: svn commit: r1409798 - in /jackrabbit/oak/trunk: oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/ oak-mongomk-test/src/test/resources/META-INF/services/ oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/ oak-mongo...
Date: Thu, 15 Nov 2012 14:18:45 GMT
Author: mduerig
Date: Thu Nov 15 14:18:41 2012
New Revision: 1409798

URL: http://svn.apache.org/viewvc?rev=1409798&view=rev
Log:
OAK-440: Concurrency issue when 3 microkernels are writing in the same db
Merge https://github.com/meteatamel/jackrabbit-oak/commit/0aba87afae59b36f8352a41f02aee8d11fe89697 and earlier

Added:
    jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/BaseMongoMicroKernelFixture.java
      - copied, changed from r1409781, jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java
    jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoGridFSMicroKernelFixture.java   (with props)
    jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/multitenancy/
    jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/multitenancy/MultiTenancyTest.java   (with props)
Modified:
    jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java
    jackrabbit/oak/trunk/oak-mongomk-test/src/test/resources/META-INF/services/org.apache.jackrabbit.mk.test.MicroKernelFixture
    jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/FetchCommitsAction.java
    jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/ReadAndIncHeadRevisionAction.java
    jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/SaveAndSetHeadRevisionAction.java
    jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/blob/MongoBlobStore.java
    jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/blob/MongoGridFSBlobStore.java
    jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/CommitCommand.java
    jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/DiffCommand.java
    jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/GetJournalCommand.java
    jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/blob/ReadBlobCommandGridFS.java
    jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/instruction/CommitCommandInstructionVisitor.java
    jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mk/blobs/MongoBlobStoreTest.java
    jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKCommitAddTest.java
    jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKReadTest.java
    jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKWriteTest.java
    jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/command/ConcurrentConflictingCommitCommandTest.java
    jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/command/ConcurrentWriteMultipleMkMongoTest.java
    jackrabbit/oak/trunk/oak-mongomk/src/test/resources/config.cfg

Copied: jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/BaseMongoMicroKernelFixture.java (from r1409781, jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java)
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/BaseMongoMicroKernelFixture.java?p2=jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/BaseMongoMicroKernelFixture.java&p1=jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java&r1=1409781&r2=1409798&rev=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/BaseMongoMicroKernelFixture.java Thu Nov 15 14:18:41 2012
@@ -26,18 +26,17 @@ import org.apache.jackrabbit.mongomk.imp
 import org.apache.jackrabbit.mongomk.impl.MongoMicroKernel;
 import org.apache.jackrabbit.mongomk.impl.MongoNodeStore;
 import org.apache.jackrabbit.mongomk.impl.blob.MongoBlobStore;
-import org.apache.jackrabbit.mongomk.impl.blob.MongoGridFSBlobStore;
 import org.junit.Assert;
 
 import com.mongodb.DB;
 
-public class MongoMicroKernelFixture implements MicroKernelFixture {
+public abstract class BaseMongoMicroKernelFixture implements MicroKernelFixture {
 
-    private static MongoConnection mongoConnection;
+    protected static MongoConnection mongoConnection;
 
     private static MongoConnection createMongoConnection() {
         try {
-            InputStream is = MongoMicroKernelFixture.class.getResourceAsStream("/config.cfg");
+            InputStream is = BaseMongoMicroKernelFixture.class.getResourceAsStream("/config.cfg");
             Properties properties = new Properties();
             properties.load(is);
 
@@ -59,7 +58,7 @@ public class MongoMicroKernelFixture imp
             dropCollections(db);
 
             MongoNodeStore nodeStore = new MongoNodeStore(db);
-            BlobStore blobStore = new MongoGridFSBlobStore(db);
+            BlobStore blobStore = getBlobStore(db);
             MicroKernel mk = new MongoMicroKernel(mongoConnection, nodeStore, blobStore);
 
             for (int i = 0; i < cluster.length; i++) {
@@ -84,6 +83,8 @@ public class MongoMicroKernelFixture imp
         }
     }
 
+    protected abstract BlobStore getBlobStore(DB db);
+
     private void dropCollections(DB db) {
         db.getCollection(MongoBlobStore.COLLECTION_BLOBS).drop();
         db.getCollection(MongoNodeStore.COLLECTION_COMMITS).drop();

Added: jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoGridFSMicroKernelFixture.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoGridFSMicroKernelFixture.java?rev=1409798&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoGridFSMicroKernelFixture.java (added)
+++ jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoGridFSMicroKernelFixture.java Thu Nov 15 14:18:41 2012
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.mongomk.test.it;
+
+import org.apache.jackrabbit.mk.blobs.BlobStore;
+import org.apache.jackrabbit.mongomk.impl.blob.MongoGridFSBlobStore;
+
+import com.mongodb.DB;
+
+public class MongoGridFSMicroKernelFixture extends BaseMongoMicroKernelFixture {
+
+    @Override
+    protected BlobStore getBlobStore(DB db) {
+        return new MongoGridFSBlobStore(db);
+    }
+}

Propchange: jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoGridFSMicroKernelFixture.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoGridFSMicroKernelFixture.java
------------------------------------------------------------------------------
    svn:keywords = Author Date Id Revision Rev URL

Modified: jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java Thu Nov 15 14:18:41 2012
@@ -16,78 +16,15 @@
  */
 package org.apache.jackrabbit.mongomk.test.it;
 
-import java.io.InputStream;
-import java.util.Properties;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
 import org.apache.jackrabbit.mk.blobs.BlobStore;
-import org.apache.jackrabbit.mk.test.MicroKernelFixture;
-import org.apache.jackrabbit.mongomk.impl.MongoConnection;
-import org.apache.jackrabbit.mongomk.impl.MongoMicroKernel;
-import org.apache.jackrabbit.mongomk.impl.MongoNodeStore;
 import org.apache.jackrabbit.mongomk.impl.blob.MongoBlobStore;
-import org.apache.jackrabbit.mongomk.impl.blob.MongoGridFSBlobStore;
-import org.junit.Assert;
 
 import com.mongodb.DB;
 
-public class MongoMicroKernelFixture implements MicroKernelFixture {
-
-    private static MongoConnection mongoConnection;
-
-    private static MongoConnection createMongoConnection() {
-        try {
-            InputStream is = MongoMicroKernelFixture.class.getResourceAsStream("/config.cfg");
-            Properties properties = new Properties();
-            properties.load(is);
-
-            String host = properties.getProperty("mongo.host");
-            int port = Integer.parseInt(properties.getProperty("mongo.port"));
-            String database = properties.getProperty("mongo.db");
-
-            return new MongoConnection(host, port, database);
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Override
-    public void setUpCluster(MicroKernel[] cluster) {
-        try {
-            mongoConnection = createMongoConnection();
-            DB db = mongoConnection.getDB();
-            dropCollections(db);
-
-            MongoNodeStore nodeStore = new MongoNodeStore(db);
-            BlobStore blobStore = new MongoGridFSBlobStore(db);
-            MicroKernel mk = new MongoMicroKernel(mongoConnection, nodeStore, blobStore);
-
-            for (int i = 0; i < cluster.length; i++) {
-                cluster[i] = mk;
-            }
-        } catch (Exception e) {
-            Assert.fail(e.getMessage());
-        }
-    }
-
-    @Override
-    public void syncMicroKernelCluster(MicroKernel... nodes) {
-    }
+public class MongoMicroKernelFixture extends BaseMongoMicroKernelFixture {
 
     @Override
-    public void tearDownCluster(MicroKernel[] cluster) {
-        try {
-            DB db = mongoConnection.getDB();
-            dropCollections(db);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private void dropCollections(DB db) {
-        db.getCollection(MongoBlobStore.COLLECTION_BLOBS).drop();
-        db.getCollection(MongoNodeStore.COLLECTION_COMMITS).drop();
-        db.getCollection(MongoNodeStore.COLLECTION_NODES).drop();
-        db.getCollection(MongoNodeStore.COLLECTION_SYNC).drop();
+    protected BlobStore getBlobStore(DB db) {
+        return new MongoBlobStore(db);
     }
-}
+}
\ No newline at end of file

Modified: jackrabbit/oak/trunk/oak-mongomk-test/src/test/resources/META-INF/services/org.apache.jackrabbit.mk.test.MicroKernelFixture
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk-test/src/test/resources/META-INF/services/org.apache.jackrabbit.mk.test.MicroKernelFixture?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk-test/src/test/resources/META-INF/services/org.apache.jackrabbit.mk.test.MicroKernelFixture (original)
+++ jackrabbit/oak/trunk/oak-mongomk-test/src/test/resources/META-INF/services/org.apache.jackrabbit.mk.test.MicroKernelFixture Thu Nov 15 14:18:41 2012
@@ -14,3 +14,4 @@
 # limitations under the License.
 
 org.apache.jackrabbit.mongomk.test.it.MongoMicroKernelFixture
+org.apache.jackrabbit.mongomk.test.it.MongoGridFSMicroKernelFixture
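
The service file above follows the java.util.ServiceLoader naming convention. Assuming the test harness resolves fixtures that way (an assumption; it may well parse the file itself), both fixtures registered by this commit can be discovered with a loop like this sketch:

    import java.util.ServiceLoader;

    import org.apache.jackrabbit.mk.test.MicroKernelFixture;

    public final class FixtureDiscoverySketch {

        public static void main(String[] args) {
            // Iterates over every implementation listed in
            // META-INF/services/org.apache.jackrabbit.mk.test.MicroKernelFixture,
            // which now includes both the plain and the GridFS fixture.
            for (MicroKernelFixture fixture : ServiceLoader.load(MicroKernelFixture.class)) {
                System.out.println("Found fixture: " + fixture.getClass().getName());
            }
        }
    }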

Modified: jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/FetchCommitsAction.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/FetchCommitsAction.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/FetchCommitsAction.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/FetchCommitsAction.java Thu Nov 15 14:18:41 2012
@@ -52,7 +52,6 @@ public class FetchCommitsAction extends 
      * Constructs a new {@link FetchCommitsAction}
      *
      * @param nodeStore Node store.
-     * @param toRevisionId To revision id.
      */
     public FetchCommitsAction(MongoNodeStore nodeStore) {
         this(nodeStore, -1L, -1L);

Modified: jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/ReadAndIncHeadRevisionAction.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/ReadAndIncHeadRevisionAction.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/ReadAndIncHeadRevisionAction.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/ReadAndIncHeadRevisionAction.java Thu Nov 15 14:18:41 2012
@@ -39,15 +39,14 @@ public class ReadAndIncHeadRevisionActio
 
     @Override
     public MongoSync execute() throws Exception {
-        DBObject query = new BasicDBObject();
-        DBObject inc = new BasicDBObject(MongoSync.KEY_NEXT_REVISION_ID, 1L);
-        DBObject update = new BasicDBObject("$inc", inc);
-        DBCollection headCollection = nodeStore.getSyncCollection();
-
-        DBObject dbObject = headCollection.findAndModify(query, null, null, false, update, true, false);
-        // FIXME - Not sure why but sometimes dbObject is null. Simply retry for now.
+        DBObject update = new BasicDBObject("$inc", new BasicDBObject(MongoSync.KEY_NEXT_REVISION_ID, 1L));
+        DBCollection collection = nodeStore.getSyncCollection();
+        DBObject dbObject = null;
+        // In high concurrency situations, increment does not work right away,
+        // so we need to keep retrying until it succeeds.
         while (dbObject == null) {
-            dbObject = headCollection.findAndModify(query, null, null, false, update, true, false);
+            dbObject = collection.findAndModify(null, null/*fields*/, null/*sort*/, false/*remove*/,
+                    update, true/*returnNew*/, false/*upsert*/);
         }
         return MongoSync.fromDBObject(dbObject);
     }
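
The rewritten action above boils down to an atomic $inc issued through findAndModify, repeated until the driver hands back a document. A minimal standalone sketch of that pattern using the same 2.x driver call; the collection handle and the "nextRevisionId" field name are placeholders, not the actual MongoSync constants:

    import com.mongodb.BasicDBObject;
    import com.mongodb.DBCollection;
    import com.mongodb.DBObject;

    public final class IncrementCounterSketch {

        /**
         * Atomically increments a counter field and returns the updated document,
         * retrying while findAndModify yields null, as the patched action does.
         */
        public static DBObject readAndIncrement(DBCollection syncCollection) {
            DBObject update = new BasicDBObject("$inc", new BasicDBObject("nextRevisionId", 1L));
            DBObject result = null;
            while (result == null) {
                // query = null matches the single sync document; returnNew = true, upsert = false
                result = syncCollection.findAndModify(null, null /*fields*/, null /*sort*/,
                        false /*remove*/, update, true /*returnNew*/, false /*upsert*/);
            }
            return result;
        }
    }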

Modified: jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/SaveAndSetHeadRevisionAction.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/SaveAndSetHeadRevisionAction.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/SaveAndSetHeadRevisionAction.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/action/SaveAndSetHeadRevisionAction.java Thu Nov 15 14:18:41 2012
@@ -48,10 +48,12 @@ public class SaveAndSetHeadRevisionActio
 
     @Override
     public MongoSync execute() throws Exception {
-        DBCollection headCollection = nodeStore.getSyncCollection();
         DBObject query = QueryBuilder.start(MongoSync.KEY_HEAD_REVISION_ID).is(oldHeadRevision).get();
-        DBObject update = new BasicDBObject("$set", new BasicDBObject(MongoSync.KEY_HEAD_REVISION_ID, newHeadRevision));
-        DBObject dbObject = headCollection.findAndModify(query, null, null, false, update, true, false);
+        DBObject update = new BasicDBObject("$set", new BasicDBObject(MongoSync.KEY_HEAD_REVISION_ID,
+                newHeadRevision));
+        DBCollection collection = nodeStore.getSyncCollection();
+        DBObject dbObject = collection.findAndModify(query, null/*fields*/,
+                null/*sort*/, false/*remove*/, update, true/*returnNew*/, false/*upsert*/);
         return MongoSync.fromDBObject(dbObject);
     }
 }
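
SaveAndSetHeadRevisionAction is effectively a compare-and-set on the head revision: the query matches only the expected old head, so a null result means another writer advanced the head first. A sketch of that idea with the same driver API; "headRevisionId" is an illustrative field name:

    import com.mongodb.BasicDBObject;
    import com.mongodb.DBCollection;
    import com.mongodb.DBObject;
    import com.mongodb.QueryBuilder;

    public final class CompareAndSetHeadSketch {

        /**
         * Moves the head revision from expectedHead to newHead only if nobody
         * else has moved it in the meantime; null signals a lost race.
         */
        public static DBObject compareAndSetHead(DBCollection syncCollection,
                long expectedHead, long newHead) {
            DBObject query = QueryBuilder.start("headRevisionId").is(expectedHead).get();
            DBObject update = new BasicDBObject("$set",
                    new BasicDBObject("headRevisionId", newHead));
            return syncCollection.findAndModify(query, null /*fields*/, null /*sort*/,
                    false /*remove*/, update, true /*returnNew*/, false /*upsert*/);
        }
    }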

Modified: jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/blob/MongoBlobStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/blob/MongoBlobStore.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/blob/MongoBlobStore.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/blob/MongoBlobStore.java Thu Nov 15 14:18:41 2012
@@ -20,6 +20,8 @@ import org.apache.jackrabbit.mk.blobs.Ab
 import org.apache.jackrabbit.mk.blobs.BlobStore;
 import org.apache.jackrabbit.mk.util.StringUtils;
 import org.apache.jackrabbit.mongomk.impl.model.MongoBlob;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.mongodb.BasicDBObject;
 import com.mongodb.DB;
@@ -31,16 +33,19 @@ import com.mongodb.WriteResult;
 /**
  * Implementation of {@link BlobStore} for the {@code MongoDB} extending from
  * {@link AbstractBlobStore}. Unlike {@link MongoGridFSBlobStore}, it saves blobs
- * into a separate collection in {@link MongoDB} instead of GridFS.
+ * into a separate collection in {@link MongoDB} instead of GridFS and it supports
+ * basic garbage collection.
  *
  * FIXME:
  * -Do we need to create commands for retry etc.?
- * -Implement GC
+ * -Not sure if this is going to work for multiple MKs talking to same MongoDB?
  */
 public class MongoBlobStore extends AbstractBlobStore {
 
     public static final String COLLECTION_BLOBS = "blobs";
 
+    private static final Logger LOG = LoggerFactory.getLogger(MongoBlobStore.class);
+
     private final DB db;
     private long minLastModified;
 
@@ -62,6 +67,7 @@ public class MongoBlobStore extends Abst
         mongoBlob.setId(id);
         mongoBlob.setData(data);
         mongoBlob.setLevel(level);
+        mongoBlob.setLastMod(System.currentTimeMillis());
         getBlobCollection().insert(mongoBlob);
     }
 
@@ -106,22 +112,22 @@ public class MongoBlobStore extends Abst
                 new BasicDBObject(MongoBlob.KEY_LAST_MOD, System.currentTimeMillis()));
         WriteResult writeResult = getBlobCollection().update(query, update);
         if (writeResult.getError() != null) {
-            // Handle
+            LOG.error("Mark failed for blob %s: %s", id, writeResult.getError());
         }
     }
 
     @Override
     public int sweep() throws Exception {
         DBObject query = getBlobQuery(null, minLastModified);
-        long beforeCount = getBlobCollection().count(query);
+        long countBefore = getBlobCollection().count(query);
         WriteResult writeResult = getBlobCollection().remove(query);
         if (writeResult.getError() != null) {
-            // Handle
+            LOG.error("Sweep failed: %s", writeResult.getError());
         }
 
-        long afterCount = getBlobCollection().count(query);
+        long countAfter = getBlobCollection().count(query);
         minLastModified = 0;
-        return (int)(beforeCount - afterCount);
+        return (int)(countBefore - countAfter);
     }
 
     private DBCollection getBlobCollection() {
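
The javadoc change notes that MongoBlobStore now supports basic garbage collection: marking bumps a last-modified timestamp on live blobs and sweeping removes everything older than the cut-off, which is what the lastMod and count changes in this hunk are for. A rough sketch of that mark/sweep shape; the "_id" and "lastMod" field names are hypothetical stand-ins for the real constants:

    import com.mongodb.BasicDBObject;
    import com.mongodb.DBCollection;
    import com.mongodb.DBObject;
    import com.mongodb.QueryBuilder;

    public final class BlobGcSketch {

        /** Marks a blob as still referenced by bumping its last-modified timestamp. */
        public static void mark(DBCollection blobs, String blobId) {
            DBObject query = QueryBuilder.start("_id").is(blobId).get();
            DBObject update = new BasicDBObject("$set",
                    new BasicDBObject("lastMod", System.currentTimeMillis()));
            blobs.update(query, update);
        }

        /** Sweeps blobs whose last-modified timestamp is older than the cut-off. */
        public static int sweep(DBCollection blobs, long minLastModified) {
            DBObject query = QueryBuilder.start("lastMod").lessThan(minLastModified).get();
            long countBefore = blobs.count(query);
            blobs.remove(query);
            long countAfter = blobs.count(query);
            return (int) (countBefore - countAfter);
        }
    }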

Modified: jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/blob/MongoGridFSBlobStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/blob/MongoGridFSBlobStore.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/blob/MongoGridFSBlobStore.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/blob/MongoGridFSBlobStore.java Thu Nov 15 14:18:41 2012
@@ -30,7 +30,8 @@ import com.mongodb.DB;
 import com.mongodb.gridfs.GridFS;
 
 /**
- * Implementation of {@link BlobStore} for the {@code MongoDB} using GridFS.
+ * Implementation of {@link BlobStore} for the {@code MongoDB} using GridFS. It
+ * does not support garbage collection at the moment.
  */
 public class MongoGridFSBlobStore implements BlobStore {
 

Modified: jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/CommitCommand.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/CommitCommand.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/CommitCommand.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/CommitCommand.java Thu Nov 15 14:18:41 2012
@@ -32,7 +32,6 @@ import org.apache.jackrabbit.mongomk.imp
 import org.apache.jackrabbit.mongomk.impl.action.SaveCommitAction;
 import org.apache.jackrabbit.mongomk.impl.action.SaveNodesAction;
 import org.apache.jackrabbit.mongomk.impl.command.exception.ConflictingCommitException;
-import org.apache.jackrabbit.mongomk.impl.exception.NotFoundException;
 import org.apache.jackrabbit.mongomk.impl.instruction.CommitCommandInstructionVisitor;
 import org.apache.jackrabbit.mongomk.impl.model.MongoCommit;
 import org.apache.jackrabbit.mongomk.impl.model.MongoNode;
@@ -42,6 +41,7 @@ import org.slf4j.LoggerFactory;
 
 import com.mongodb.BasicDBObject;
 import com.mongodb.DBCollection;
+import com.mongodb.DBCursor;
 import com.mongodb.DBObject;
 import com.mongodb.QueryBuilder;
 import com.mongodb.WriteResult;
@@ -57,8 +57,8 @@ public class CommitCommand extends BaseC
 
     private Set<String> affectedPaths;
     private List<MongoNode> existingNodes;
-    private MongoSync syncMongo;
-    private Set<MongoNode> nodeMongos;
+    private MongoSync mongoSync;
+    private Set<MongoNode> nodes;
     private Long revisionId;
     private String branchId;
 
@@ -77,26 +77,20 @@ public class CommitCommand extends BaseC
     public Long execute() throws Exception {
         logger.debug(String.format("Trying to commit: %s", commit.getDiff()));
 
-        readAndIncHeadRevision();
-        createRevision();
-        readBranchIdFromBaseCommit();
-        createMongoNodes();
-        prepareCommit();
-        readExistingNodes();
-        mergeNodes();
-        prepareMongoNodes();
-        saveNodes();
-        saveCommit();
-        boolean success = saveAndSetHeadRevision();
-
-        logger.debug(String.format("Success was: %b", success));
-
-        if (!success) {
-            markAsFailed();
-            throw new ConflictingCommitException();
-        }
-
-        addRevisionId();
+        boolean success = false;
+        do {
+            mongoSync = new ReadAndIncHeadRevisionAction(nodeStore).execute();
+            revisionId = mongoSync.getNextRevisionId() - 1;
+            readBranchIdFromBaseCommit();
+            createMongoNodes();
+            prepareCommit();
+            readExistingNodes();
+            mergeNodes();
+            prepareMongoNodes();
+            new SaveNodesAction(nodeStore, nodes).execute();
+            new SaveCommitAction(nodeStore, commit).execute();
+            success = saveAndSetHeadRevision();
+        } while (!success);
 
         return revisionId;
     }
@@ -108,49 +102,30 @@ public class CommitCommand extends BaseC
 
     @Override
     public boolean needsRetry(Exception e) {
-        // In createMongoNodes step, sometimes add operations could end up with
-        // not found exceptions in high concurrency situations.
-        return e instanceof ConflictingCommitException || e instanceof NotFoundException;
+        return e instanceof ConflictingCommitException;
     }
 
-    /**
-     * FIXME - Currently this assumes a conflict if there's an update but it
-     * should really check the affected paths before assuming a conflict. When
-     * this is fixed, lower the number of retries.
-     *
-     * This is protected for testing purposed only.
-     *
-     * @return True if the operation was successful.
-     * @throws Exception If an exception happens.
-     */
-    protected boolean saveAndSetHeadRevision() throws Exception {
-        MongoSync syncMongo = new SaveAndSetHeadRevisionAction(nodeStore,
-                this.syncMongo.getHeadRevisionId(), revisionId).execute();
-        if (syncMongo == null) {
-            logger.warn(String.format("Encounterd a conflicting update, thus can't commit"
-                    + " revision %s and will be retried with new revision", revisionId));
-            return false;
+    private void readBranchIdFromBaseCommit() throws Exception {
+        String commitBranchId = commit.getBranchId();
+        if (commitBranchId != null) {
+            // This is a newly created branch, so no need to check the base
+            // commit's branch id.
+            branchId = commitBranchId;
+            return;
         }
-        return true;
-    }
 
-    private void addRevisionId() {
-        commit.setRevisionId(revisionId);
-    }
-
-    private void prepareCommit() throws Exception {
-        commit.setAffectedPaths(new LinkedList<String>(affectedPaths));
-        commit.setBaseRevisionId(syncMongo.getHeadRevisionId());
-        commit.setRevisionId(revisionId);
-        if (commit.getBranchId() == null && branchId != null) {
-            commit.setBranchId(branchId);
+        Long baseRevisionId = commit.getBaseRevisionId();
+        if (baseRevisionId == null) {
+            return;
         }
-        commit.removeField("_id"); // In case this is a retry.
+
+        MongoCommit baseCommit = new FetchCommitAction(nodeStore, baseRevisionId).execute();
+        branchId = baseCommit.getBranchId();
     }
 
     private void createMongoNodes() throws Exception {
         CommitCommandInstructionVisitor visitor = new CommitCommandInstructionVisitor(
-                nodeStore, syncMongo.getHeadRevisionId());
+                nodeStore, mongoSync.getHeadRevisionId());
         visitor.setBranchId(branchId);
 
         for (Instruction instruction : commit.getInstructions()) {
@@ -158,35 +133,30 @@ public class CommitCommand extends BaseC
         }
 
         Map<String, MongoNode> pathNodeMap = visitor.getPathNodeMap();
-
         affectedPaths = pathNodeMap.keySet();
-        nodeMongos = new HashSet<MongoNode>(pathNodeMap.values());
-        for (MongoNode nodeMongo : nodeMongos) {
-            nodeMongo.setRevisionId(revisionId);
-            if (branchId != null) {
-                nodeMongo.setBranchId(branchId);
-            }
-        }
+        nodes = new HashSet<MongoNode>(pathNodeMap.values());
     }
 
-    private void createRevision() {
-        revisionId = syncMongo.getNextRevisionId() - 1;
+    private void prepareCommit() throws Exception {
+        commit.setAffectedPaths(new LinkedList<String>(affectedPaths));
+        commit.setBaseRevisionId(mongoSync.getHeadRevisionId());
+        commit.setRevisionId(revisionId);
+        if (commit.getBranchId() == null && branchId != null) {
+            commit.setBranchId(branchId);
+        }
+        commit.removeField("_id"); // In case this is a retry.
     }
 
-    private void markAsFailed() throws Exception {
-        DBCollection commitCollection = nodeStore.getCommitCollection();
-        DBObject query = QueryBuilder.start("_id").is(commit.getObjectId("_id")).get();
-        DBObject update = new BasicDBObject("$set", new BasicDBObject(MongoCommit.KEY_FAILED, Boolean.TRUE));
-        WriteResult writeResult = commitCollection.update(query, update);
-        if (writeResult.getError() != null) {
-            // FIXME This is potentially a bug that we need to handle.
-            throw new Exception(String.format("Update wasn't successful: %s", writeResult));
-        }
+    private void readExistingNodes() {
+        FetchNodesAction action = new FetchNodesAction(nodeStore, affectedPaths,
+                mongoSync.getHeadRevisionId());
+        action.setBranchId(branchId);
+        existingNodes = action.execute();
     }
 
     private void mergeNodes() {
         for (MongoNode existingNode : existingNodes) {
-            for (MongoNode committingNode : nodeMongos) {
+            for (MongoNode committingNode : nodes) {
                 if (existingNode.getPath().equals(committingNode.getPath())) {
                     logger.debug(String.format("Found existing node to merge: %s", existingNode.getPath()));
                     logger.debug(String.format("Existing node: %s", existingNode));
@@ -218,7 +188,7 @@ public class CommitCommand extends BaseC
     }
 
     private void prepareMongoNodes() {
-        for (MongoNode committingNode : nodeMongos) {
+        for (MongoNode committingNode : nodes) {
             logger.debug(String.format("Preparing children (added and removed) of %s", committingNode.getPath()));
             logger.debug(String.format("Committing node: %s", committingNode));
 
@@ -264,49 +234,75 @@ public class CommitCommand extends BaseC
                 committingNode.setProperties(null);
             }
 
+            committingNode.setRevisionId(revisionId);
+            if (branchId != null) {
+                committingNode.setBranchId(branchId);
+            }
+
             logger.debug(String.format("Prepared committing node: %s", committingNode));
         }
     }
 
-    private void readBranchIdFromBaseCommit() throws Exception {
-        String commitBranchId = commit.getBranchId();
-        if (commitBranchId != null) {
-            // This is a newly created branch, so no need to check the base
-            // commit's branch id.
-            branchId = commitBranchId;
-            return;
-        }
-
-        Long baseRevisionId = commit.getBaseRevisionId();
-        if (baseRevisionId == null) {
-            return;
+    /**
+     * Protected for testing purposed only.
+     *
+     * @return True if the operation was successful.
+     * @throws Exception If an exception happens.
+     */
+    protected boolean saveAndSetHeadRevision() throws Exception {
+        long assumedHeadRevision = this.mongoSync.getHeadRevisionId();
+        MongoSync mongoSync = new SaveAndSetHeadRevisionAction(nodeStore,
+                assumedHeadRevision, revisionId).execute();
+        if (mongoSync == null) {
+            // There have been commit(s) in the meantime. If it's a conflicting
+            // update, retry the whole operation and count against number of retries.
+            // If not, need to retry again (in order to write commits and nodes properly)
+            // but don't count these retries against number of retries.
+            if (conflictingCommitsExist(assumedHeadRevision)) {
+                String message = String.format("Encountered a concurrent conflicting update"
+                        + ", thus can't commit revision %s and will be retried with new revision", revisionId);
+                logger.warn(message);
+                markAsFailed();
+                throw new ConflictingCommitException(message);
+            } else {
+                String message = String.format("Encountered a concurrent but non-conflicting update"
+                        + ", thus can't commit revision %s and will be retried with new revision", revisionId);
+                logger.warn(message);
+                markAsFailed();
+                return false;
+            }
         }
-
-        MongoCommit baseCommit = new FetchCommitAction(nodeStore, baseRevisionId).execute();
-        branchId = baseCommit.getBranchId();
-    }
-
-    private void readAndIncHeadRevision() throws Exception {
-        syncMongo = new ReadAndIncHeadRevisionAction(nodeStore).execute();
+        return true;
     }
 
-    private void readExistingNodes() {
-        Set<String> paths = new HashSet<String>();
-        for (MongoNode nodeMongo : nodeMongos) {
-            paths.add(nodeMongo.getPath());
+    private boolean conflictingCommitsExist(long baseRevisionId) {
+        QueryBuilder queryBuilder = QueryBuilder.start(MongoCommit.KEY_FAILED).notEquals(Boolean.TRUE);
+        queryBuilder = queryBuilder.and(MongoCommit.KEY_BASE_REVISION_ID).is(baseRevisionId);
+        DBObject query = queryBuilder.get();
+        DBCollection collection = nodeStore.getCommitCollection();
+        DBCursor dbCursor = collection.find(query);
+        while (dbCursor.hasNext()) {
+            MongoCommit commit = (MongoCommit)dbCursor.next();
+            if (this.commit.getRevisionId().equals(commit.getRevisionId())) {
+                continue;
+            }
+            for (String affectedPath : commit.getAffectedPaths()) {
+                if (affectedPaths.contains(affectedPath)) {
+                    return true;
+                }
+            }
         }
-
-        FetchNodesAction action = new FetchNodesAction(nodeStore, paths,
-                syncMongo.getHeadRevisionId());
-        action.setBranchId(branchId);
-        existingNodes = action.execute();
-    }
-
-    private void saveCommit() throws Exception {
-        new SaveCommitAction(nodeStore, commit).execute();
+        return false;
     }
 
-    private void saveNodes() throws Exception {
-        new SaveNodesAction(nodeStore, nodeMongos).execute();
+    private void markAsFailed() throws Exception {
+        DBCollection commitCollection = nodeStore.getCommitCollection();
+        DBObject query = QueryBuilder.start("_id").is(commit.getObjectId("_id")).get();
+        DBObject update = new BasicDBObject("$set", new BasicDBObject(MongoCommit.KEY_FAILED, Boolean.TRUE));
+        WriteResult writeResult = commitCollection.update(query, update);
+        if (writeResult.getError() != null) {
+            // FIXME This is potentially a bug that we need to handle.
+            throw new Exception(String.format("Update wasn't successful: %s", writeResult));
+        }
     }
-}
+}
\ No newline at end of file
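
The reworked execute() now loops until saveAndSetHeadRevision() succeeds, and a lost head-revision race only counts as a real conflict when another commit based on the same head touched one of the same affected paths; otherwise the commit is retried without counting against the retry budget. The path-intersection decision, stripped of the Mongo plumbing (types and names below are illustrative):

    import java.util.Set;

    public final class ConflictCheckSketch {

        /**
         * A concurrent commit conflicts with ours only if it touched at least
         * one path that our commit also touches.
         */
        public static boolean conflictsWith(Set<String> ourAffectedPaths,
                Set<String> otherAffectedPaths) {
            for (String path : otherAffectedPaths) {
                if (ourAffectedPaths.contains(path)) {
                    return true;
                }
            }
            return false;
        }
    }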

Modified: jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/DiffCommand.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/DiffCommand.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/DiffCommand.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/DiffCommand.java Thu Nov 15 14:18:41 2012
@@ -46,9 +46,7 @@ public class DiffCommand extends BaseCom
 
         long fromRevisionId, toRevisionId;
         if (fromRevision == null || toRevision == null) {
-            FetchHeadRevisionIdAction query = new FetchHeadRevisionIdAction(nodeStore);
-            query.includeBranchCommits(true);
-            long head = query.execute();
+            long head = new FetchHeadRevisionIdAction(nodeStore).execute();
             fromRevisionId = fromRevision == null? head : MongoUtil.toMongoRepresentation(fromRevision);
             toRevisionId = toRevision == null ? head : MongoUtil.toMongoRepresentation(toRevision);;
         } else {

Modified: jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/GetJournalCommand.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/GetJournalCommand.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/GetJournalCommand.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/GetJournalCommand.java Thu Nov 15 14:18:41 2012
@@ -46,9 +46,7 @@ public class GetJournalCommand extends B
         long fromRevision = MongoUtil.toMongoRepresentation(fromRevisionId);
         long toRevision;
         if (toRevisionId == null) {
-            FetchHeadRevisionIdAction query = new FetchHeadRevisionIdAction(nodeStore);
-            query.includeBranchCommits(true);
-            toRevision = query.execute();
+            toRevision = new FetchHeadRevisionIdAction(nodeStore).execute();
         } else {
             toRevision = MongoUtil.toMongoRepresentation(toRevisionId);
         }

Modified: jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/blob/ReadBlobCommandGridFS.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/blob/ReadBlobCommandGridFS.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/blob/ReadBlobCommandGridFS.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/command/blob/ReadBlobCommandGridFS.java Thu Nov 15 14:18:41 2012
@@ -37,7 +37,7 @@ public class ReadBlobCommandGridFS exten
     private final long blobOffset;
     private final byte[] buffer;
     private final int bufferOffset;
-    private final int length;
+    private int length;
 
     /**
      * Constructs a new {@code ReadBlobCommandMongo}.
@@ -68,12 +68,12 @@ public class ReadBlobCommandGridFS exten
     private int fetchBlobFromMongo() throws Exception {
         GridFSDBFile gridFile = gridFS.findOne(new BasicDBObject("md5", blobId));
         long fileLength = gridFile.getLength();
-
         long start = blobOffset;
         long end = blobOffset + length;
         if (end > fileLength) {
             end = fileLength;
         }
+        length = (int)(end - start);
 
         if (start < end) {
             InputStream is = gridFile.getInputStream();

Modified: jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/instruction/CommitCommandInstructionVisitor.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/instruction/CommitCommandInstructionVisitor.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/instruction/CommitCommandInstructionVisitor.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/impl/instruction/CommitCommandInstructionVisitor.java Thu Nov 15 14:18:41 2012
@@ -253,7 +253,7 @@ public class CommitCommandInstructionVis
         } catch (Exception ignore) {}
 
         if (!exists) {
-            throw new NotFoundException(path);
+            throw new NotFoundException(path + " @rev" + headRevisionId);
         }
 
         // Fetch the node without its descendants.

Modified: jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mk/blobs/MongoBlobStoreTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mk/blobs/MongoBlobStoreTest.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mk/blobs/MongoBlobStoreTest.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mk/blobs/MongoBlobStoreTest.java Thu Nov 15 14:18:41 2012
@@ -39,7 +39,6 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.mongodb.DB;
@@ -196,7 +195,6 @@ public class MongoBlobStoreTest /*extend
     }
 
     @Test
-    @Ignore // FIXME - GC is not implemented in MongoBlobStore yet.
     public void testGarbageCollection() throws Exception {
         HashMap<String, byte[]> map = new HashMap<String, byte[]>();
         ArrayList<String> mem = new ArrayList<String>();

Modified: jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKCommitAddTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKCommitAddTest.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKCommitAddTest.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKCommitAddTest.java Thu Nov 15 14:18:41 2012
@@ -110,4 +110,19 @@ public class MongoMKCommitAddTest extend
             fail("Exception expected");
         } catch (Exception expected) {}
     }
+
+    @Test
+    public void setOverwritingProperty() throws Exception {
+        String rev1 = mk.commit("/", "+\"a\" : {} ^\"a/key1\" : \"value1\"", null, null);
+
+        // Commit with rev1
+        mk.commit("/", "^\"a/key1\" : \"value2\"", rev1, null);
+
+        // Commit with rev1 again (to overwrite rev2)
+        mk.commit("/", "^\"a/key1\" : \"value3\"", rev1, null);
+
+        String nodes = mk.getNodes("/", null, 1 /*depth*/, 0 /*offset*/, -1 /*maxChildNodes*/, null /*filter*/);
+        JSONObject obj = parseJSONObject(nodes);
+        assertPropertyValue(obj, "a/key1", "value3");
+   }
 }
\ No newline at end of file

Modified: jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKReadTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKReadTest.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKReadTest.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKReadTest.java Thu Nov 15 14:18:41 2012
@@ -22,10 +22,10 @@ import java.util.Arrays;
 import junit.framework.Assert;
 
 import org.apache.jackrabbit.mk.blobs.BlobStore;
+import org.apache.jackrabbit.mk.util.MicroKernelInputStream;
 import org.apache.jackrabbit.mongomk.BaseMongoMicroKernelTest;
 import org.apache.jackrabbit.mongomk.impl.blob.MongoBlobStore;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.mongodb.DB;
@@ -52,32 +52,36 @@ public class MongoMKReadTest extends Bas
 
     @Test
     public void small() throws Exception {
-        read(1024);
+        read(1024, false);
     }
 
     @Test
     public void medium() throws Exception {
-        read(1024 * 1024);
+        read(1024 * 1024, false);
     }
 
     @Test
-    @Ignore // FIXME - Add it back when OAK-430 is fixed.
     public void large() throws Exception {
-        read(20 * 1024 * 1024);
+        // Skip range tests for large blobs for now as it's complicated with
+        // block size.
+        read(20 * 1024 * 1024, true);
     }
 
-    private void read(int blobLength) throws Exception {
+    private void read(int blobLength, boolean skipRangeTests) throws Exception {
         createAndWriteBlob(blobLength);
 
         // Complete read.
-        byte[] buffer = new byte[blob.length];
-        int totalBytes = mk.read(blobId, 0, buffer, 0, blob.length);
-        Assert.assertEquals(blob.length, totalBytes);
+        byte[] buffer = MicroKernelInputStream.readFully(mk, blobId);
+        Assert.assertEquals(blob.length, buffer.length);
         Assert.assertTrue(Arrays.equals(blob, buffer));
 
+        if (skipRangeTests) {
+            return;
+        }
+
         // Range end from end.
         buffer = new byte[blob.length / 2];
-        totalBytes = mk.read(blobId, (blob.length / 2) - 1, buffer, 0, blob.length / 2);
+        int totalBytes = mk.read(blobId, (blob.length / 2) - 1, buffer, 0, blob.length / 2);
         Assert.assertEquals(blob.length / 2, totalBytes);
         for (int i = 0; i < buffer.length; i++) {
             Assert.assertEquals(blob[((blob.length / 2) - 1) + i], buffer[i]);
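
The read tests now use MicroKernelInputStream.readFully for the full-blob assertion and keep mk.read only for the range checks. Reduced to its essentials, with the blob id treated as an opaque placeholder, the two read styles look roughly like this:

    import org.apache.jackrabbit.mk.api.MicroKernel;
    import org.apache.jackrabbit.mk.util.MicroKernelInputStream;

    public final class BlobReadSketch {

        /** Reads a whole blob in one call, as the updated full-read assertion does. */
        public static byte[] readAll(MicroKernel mk, String blobId) throws Exception {
            return MicroKernelInputStream.readFully(mk, blobId);
        }

        /** Reads a sub-range of a blob, as the remaining range assertions do. */
        public static byte[] readRange(MicroKernel mk, String blobId, long offset, int length) {
            byte[] buffer = new byte[length];
            int read = mk.read(blobId, offset, buffer, 0, length);
            // The kernel may return fewer bytes than requested near the end of the blob.
            byte[] result = new byte[read];
            System.arraycopy(buffer, 0, result, 0, read);
            return result;
        }
    }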

Modified: jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKWriteTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKWriteTest.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKWriteTest.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/MongoMKWriteTest.java Thu Nov 15 14:18:41 2012
@@ -23,11 +23,11 @@ import java.io.ByteArrayInputStream;
 import java.util.Arrays;
 
 import org.apache.jackrabbit.mk.blobs.BlobStore;
+import org.apache.jackrabbit.mk.util.MicroKernelInputStream;
 import org.apache.jackrabbit.mongomk.BaseMongoMicroKernelTest;
 import org.apache.jackrabbit.mongomk.MongoAssert;
 import org.apache.jackrabbit.mongomk.impl.blob.MongoBlobStore;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.mongodb.DB;
@@ -61,7 +61,6 @@ public class MongoMKWriteTest extends Ba
     }
 
     @Test
-    @Ignore // FIXME - Add it back when OAK-430 is fixed.
     public void large() throws Exception {
         write(20 * 1024 * 1024);
     }
@@ -71,8 +70,7 @@ public class MongoMKWriteTest extends Ba
         String blobId = mk.write(new ByteArrayInputStream(blob));
         assertNotNull(blobId);
 
-        byte[] readBlob = new byte[blobLength];
-        mk.read(blobId, 0, readBlob, 0, readBlob.length);
+        byte[] readBlob = MicroKernelInputStream.readFully(mk, blobId);
         assertTrue(Arrays.equals(blob, readBlob));
     }
 

Modified: jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/command/ConcurrentConflictingCommitCommandTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/command/ConcurrentConflictingCommitCommandTest.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/command/ConcurrentConflictingCommitCommandTest.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/command/ConcurrentConflictingCommitCommandTest.java Thu Nov 15 14:18:41 2012
@@ -17,6 +17,7 @@
 package org.apache.jackrabbit.mongomk.impl.command;
 
 import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
@@ -25,7 +26,6 @@ import org.apache.jackrabbit.mongomk.Bas
 import org.apache.jackrabbit.mongomk.api.model.Commit;
 import org.apache.jackrabbit.mongomk.impl.MongoNodeStore;
 import org.apache.jackrabbit.mongomk.impl.model.CommitBuilder;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import static org.junit.Assert.fail;
@@ -40,70 +40,137 @@ public class ConcurrentConflictingCommit
      */
     @Test
     public void rootUpdate() throws Exception {
-        Commit commit1 = CommitBuilder.build("/", "+\"a\" : {}", null);
-        Commit commit2 = CommitBuilder.build("/", "+\"b\" : {}", null);
-        Object waitLock = new Object();
-        CommitCommand cmd1 = new WaitingCommitCommand(getNodeStore(), commit1, waitLock);
-        CommitCommand cmd2 = new NotifyingCommitCommand(getNodeStore(), commit2, waitLock);
+        int n = 2;
+        CountDownLatch latch = new CountDownLatch(n - 1);
+        CommitCommand cmd1 = new WaitingCommitCommand(getNodeStore(),
+                CommitBuilder.build("/", "+\"a1\" : {}", null), latch);
+        CommitCommand cmd2 = new NotifyingCommitCommand(getNodeStore(),
+                CommitBuilder.build("/", "+\"a2\" : {}", null), latch);
 
-        ExecutorService executorService = Executors.newFixedThreadPool(2);
+        ExecutorService executorService = Executors.newFixedThreadPool(n);
         Future<Long> future1 = executorService.submit(new CommitCallable(cmd1));
         Thread.sleep(1000);
-        executorService.submit(new CommitCallable(cmd2));
+        Future<Long> future2 = executorService.submit(new CommitCallable(cmd2));
         try {
             future1.get();
+            future2.get();
+        } catch (Exception expected) {
+            // cmd2 updated root by adding /a2, so this is expected.
         }
-        catch (Exception expected) {
-            // commit2 updated root by adding /b, so this is expected.
+    }
+
+    /**
+     * Test that a commit does not end up with a conflict exception when there
+     * is another concurrent commit with a disjoint affected path.
+     */
+    @Test
+    public void subPathUpdate1() throws Exception {
+        mk.commit("/", "+\"a1\" : {}", null, null);
+        mk.commit("/", "+\"a2\" : {}", null, null);
+
+        int n = 2;
+        CountDownLatch latch = new CountDownLatch(n - 1);
+        CommitCommand cmd1 = new WaitingCommitCommand(getNodeStore(),
+                CommitBuilder.build("/", "+\"a1/b1\" : {}", null), latch);
+        CommitCommand cmd2 = new NotifyingCommitCommand(getNodeStore(),
+                CommitBuilder.build("/", "+\"a2/b1\" : {}", null), latch);
+
+        ExecutorService executorService = Executors.newFixedThreadPool(n);
+        Future<Long> future1 = executorService.submit(new CommitCallable(cmd1));
+        Thread.sleep(1000);
+        Future<Long> future2 = executorService.submit(new CommitCallable(cmd2));
+        try {
+            future1.get();
+            future2.get();
+        } catch (Exception e) {
+            fail("Not expected: " + e);
         }
     }
 
     /**
-     * Test that concurrent update to subpaths does not end up with a conflict
-     * exception.
+     * Test that a commit does not end up with a conflict exception when there
+     * are two concurrent commits with disjoint affected paths.
      */
     @Test
-    @Ignore // FIXME - See OAK-440
-    public void subPathUpdate() throws Exception {
-        mk.commit("/", "+\"a\" : {}", null, null);
-        mk.commit("/", "+\"b\" : {}", null, null);
-
-        Commit commit1 = CommitBuilder.build("/", "+\"a/c\" : {}", null);
-        Commit commit2 = CommitBuilder.build("/", "+\"b/d\" : {}", null);
-        Object waitLock = new Object();
-        CommitCommand cmd1 = new WaitingCommitCommand(getNodeStore(), commit1, waitLock);
-        CommitCommand cmd2 = new NotifyingCommitCommand(getNodeStore(), commit2, waitLock);
+    public void subPathUpdate2() throws Exception {
+        mk.commit("/", "+\"a1\" : {}", null, null);
+        mk.commit("/", "+\"a2\" : {}", null, null);
+        mk.commit("/", "+\"a3\" : {}", null, null);
+
+        int n = 3;
+        CountDownLatch latch = new CountDownLatch(n - 1);
+        CommitCommand cmd1 = new WaitingCommitCommand(getNodeStore(),
+                CommitBuilder.build("/", "+\"a1/b1\" : {}", null), latch);
+        CommitCommand cmd2 = new NotifyingCommitCommand(getNodeStore(),
+                CommitBuilder.build("/", "+\"a2/b1\" : {}", null), latch);
+        CommitCommand cmd3 = new NotifyingCommitCommand(getNodeStore(),
+                CommitBuilder.build("/", "+\"a3/b1\" : {}", null), latch);
 
-        ExecutorService executorService = Executors.newFixedThreadPool(2);
+
+        ExecutorService executorService = Executors.newFixedThreadPool(n);
         Future<Long> future1 = executorService.submit(new CommitCallable(cmd1));
         Thread.sleep(1000);
         Future<Long> future2 = executorService.submit(new CommitCallable(cmd2));
+        Future<Long> future3 = executorService.submit(new CommitCallable(cmd3));
         try {
             future1.get();
             future2.get();
+            future3.get();
         } catch (Exception e) {
             fail("Not expected: " + e);
         }
     }
 
     /**
+     * Test that a commit ends up with a conflict exception when there are two
+     * concurrent commits with one disjoint but other overlapping affected path.
+     */
+    @Test
+    public void subPathUpdate3() throws Exception {
+        mk.commit("/", "+\"a1\" : {}", null, null);
+        mk.commit("/", "+\"a2\" : {}", null, null);
+
+        int n = 3;
+        CountDownLatch latch = new CountDownLatch(n - 1);
+        CommitCommand cmd1 = new WaitingCommitCommand(getNodeStore(),
+                CommitBuilder.build("/", "+\"a1/b1\" : {}", null), latch);
+        CommitCommand cmd2 = new NotifyingCommitCommand(getNodeStore(),
+                CommitBuilder.build("/", "+\"a2/b1\" : {}", null), latch);
+        CommitCommand cmd3 = new NotifyingCommitCommand(getNodeStore(),
+                CommitBuilder.build("/", "+\"a1/b2\" : {}", null), latch);
+
+
+        ExecutorService executorService = Executors.newFixedThreadPool(n);
+        Future<Long> future1 = executorService.submit(new CommitCallable(cmd1));
+        Thread.sleep(1000);
+        Future<Long> future2 = executorService.submit(new CommitCallable(cmd2));
+        Future<Long> future3 = executorService.submit(new CommitCallable(cmd3));
+        try {
+            future1.get();
+            future2.get();
+            future3.get();
+        } catch (Exception expected) {
+            // cmd1 and cmd3 update the same root, so this is expected.
+        }
+    }
+
+    /**
      * A CommitCommand that simply waits on the waitLock until notified.
      */
     private static class WaitingCommitCommand extends CommitCommand {
 
-        private final Object waitLock;
+        private final CountDownLatch latch;
 
-        public WaitingCommitCommand(MongoNodeStore nodeStore, Commit commit, Object waitLock) {
+        public WaitingCommitCommand(MongoNodeStore nodeStore, Commit commit,
+                CountDownLatch latch) {
             super(nodeStore, commit);
-            this.waitLock = waitLock;
+            this.latch = latch;
         }
 
         @Override
         protected boolean saveAndSetHeadRevision() throws Exception {
             try {
-                synchronized (waitLock) {
-                    waitLock.wait();
-                }
+                latch.await();
                 return super.saveAndSetHeadRevision();
             } catch (InterruptedException e) {
                 e.printStackTrace();
@@ -117,24 +184,24 @@ public class ConcurrentConflictingCommit
      */
     private static class NotifyingCommitCommand extends CommitCommand {
 
-        private final Object waitLock;
+        private final CountDownLatch latch;
 
-        public NotifyingCommitCommand(MongoNodeStore nodeStore, Commit commit, Object waitLock) {
+        public NotifyingCommitCommand(MongoNodeStore nodeStore, Commit commit,
+                CountDownLatch latch) {
             super(nodeStore, commit);
-            this.waitLock = waitLock;
+            this.latch = latch;
         }
 
         @Override
         protected boolean saveAndSetHeadRevision() throws Exception {
             try {
                 boolean result = super.saveAndSetHeadRevision();
-                synchronized (waitLock) {
-                    waitLock.notifyAll();
-                }
                 return result;
             } catch (InterruptedException e) {
                 e.printStackTrace();
                 return false;
+            } finally {
+                latch.countDown();
             }
         }
     }
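The WaitingCommitCommand / NotifyingCommitCommand pair above coordinates the concurrent commits with a java.util.concurrent.CountDownLatch: the waiting commit blocks in saveAndSetHeadRevision() until the other commits have counted the latch down in their finally blocks. A minimal, self-contained sketch of that coordination pattern (class and task names are illustrative only, not part of the commit):

    import java.util.concurrent.Callable;
    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    // Illustrative sketch: one task awaits the latch, the other two count it
    // down when they finish, so the waiting task only proceeds afterwards.
    public class LatchCoordinationSketch {

        public static void main(String[] args) throws Exception {
            final int n = 3;
            final CountDownLatch latch = new CountDownLatch(n - 1);
            ExecutorService executor = Executors.newFixedThreadPool(n);

            // The "waiting" task blocks until the latch reaches zero.
            executor.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    latch.await();
                    System.out.println("waiting task proceeds");
                    return null;
                }
            });

            // The "notifying" tasks release the latch in a finally block, so
            // the waiting task is unblocked even if their work fails.
            for (int i = 0; i < n - 1; i++) {
                executor.submit(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            System.out.println("notifying task done");
                        } finally {
                            latch.countDown();
                        }
                    }
                });
            }

            executor.shutdown();
            executor.awaitTermination(1, TimeUnit.MINUTES);
        }
    }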

Modified: jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/command/ConcurrentWriteMultipleMkMongoTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/command/ConcurrentWriteMultipleMkMongoTest.java?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/command/ConcurrentWriteMultipleMkMongoTest.java (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/impl/command/ConcurrentWriteMultipleMkMongoTest.java Thu Nov 15 14:18:41 2012
@@ -1,7 +1,5 @@
 package org.apache.jackrabbit.mongomk.impl.command;
 
-import java.io.InputStream;
-import java.util.Properties;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
@@ -11,51 +9,49 @@ import org.apache.jackrabbit.mongomk.Bas
 import org.apache.jackrabbit.mongomk.impl.MongoMicroKernel;
 import org.apache.jackrabbit.mongomk.impl.MongoNodeStore;
 import org.apache.jackrabbit.mongomk.impl.blob.MongoGridFSBlobStore;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import com.mongodb.DB;
 
+/**
+ * Tests for multiple MongoMKs writing against the same DB in separate trees.
+ */
 public class ConcurrentWriteMultipleMkMongoTest extends BaseMongoMicroKernelTest {
 
     @Test
-    public void testConcurrency() throws NumberFormatException, Exception {
+    public void testHundredNodes() throws Exception {
+        doTest(100);
+    }
 
-        String diff1 = buildPyramidDiff("/", 0, 10, 100, "N",
-                new StringBuilder()).toString();
-        String diff2 = buildPyramidDiff("/", 0, 10, 100, "P",
-                new StringBuilder()).toString();
-        String diff3 = buildPyramidDiff("/", 0, 10, 100, "R",
-                new StringBuilder()).toString();
-
-        // System.out.println(diff1);
-        // System.out.println(diff2);
-        // System.out.println(diff3);
-
-        InputStream is = BaseMongoMicroKernelTest.class.getResourceAsStream("/config.cfg");
-        Properties properties = new Properties();
-        properties.load(is);
-
-        DB db = mongoConnection.getDB();
-        MongoMicroKernel mongo1 = new MongoMicroKernel(mongoConnection,
-                new MongoNodeStore(db), new MongoGridFSBlobStore(db));
-        MongoMicroKernel mongo2 = new MongoMicroKernel(mongoConnection,
-                new MongoNodeStore(db), new MongoGridFSBlobStore(db));
-        MongoMicroKernel mongo3 = new MongoMicroKernel(mongoConnection,
-                new MongoNodeStore(db), new MongoGridFSBlobStore(db));
-
-        GenericWriteTask task1 = new GenericWriteTask(mongo1, diff1, 0);
-        GenericWriteTask task2 = new GenericWriteTask(mongo2, diff2, 0);
-        GenericWriteTask task3 = new GenericWriteTask(mongo3, diff3, 0);
-
-        ExecutorService threadExecutor = Executors.newFixedThreadPool(3);
-        threadExecutor.execute(task1);
-        threadExecutor.execute(task2);
-        threadExecutor.execute(task3);
-        threadExecutor.shutdown();
-        threadExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
+    @Test
+    @Ignore // Ignored only because it takes a while to complete.
+    public void testThousandNodes() throws Exception {
+        doTest(1000);
+    }
+
+    private void doTest(int numberOfNodes) throws Exception {
+
+        int numberOfChildren = 10;
+        int numberOfMks = 3;
+        String[] prefixes = new String[]{"a", "b", "c", "d", "e", "f"};
+
+        ExecutorService executor = Executors.newFixedThreadPool(numberOfMks);
+        for (int i = 0; i < numberOfMks; i++) {
+            String diff = buildPyramidDiff("/", 0, numberOfChildren,
+                    numberOfNodes, prefixes[i], new StringBuilder()).toString();
+            //System.out.println(diff);
+            DB db = mongoConnection.getDB();
+            MongoMicroKernel mk = new MongoMicroKernel(mongoConnection,
+                    new MongoNodeStore(db), new MongoGridFSBlobStore(db));
+            GenericWriteTask task = new GenericWriteTask("mk-" + i, mk, diff, 10);
+            executor.execute(task);
+        }
+        executor.shutdown();
+        executor.awaitTermination(10, TimeUnit.MINUTES);
     }
 
-    private static StringBuilder buildPyramidDiff(String startingPoint,
+    private StringBuilder buildPyramidDiff(String startingPoint,
             int index, int numberOfChildren, long nodesNumber,
             String nodePrefixName, StringBuilder diff) {
         if (numberOfChildren == 0) {
@@ -63,8 +59,11 @@ public class ConcurrentWriteMultipleMkMo
                 diff.append(addNodeToDiff(startingPoint, nodePrefixName + i));
             return diff;
         }
-        if (index >= nodesNumber)
+
+        if (index >= nodesNumber) {
             return diff;
+        }
+
         diff.append(addNodeToDiff(startingPoint, nodePrefixName + index));
         for (int i = 1; i <= numberOfChildren; i++) {
             if (!startingPoint.endsWith("/"))
@@ -76,51 +75,52 @@ public class ConcurrentWriteMultipleMkMo
         return diff;
     }
 
-    private static String addNodeToDiff(String startingPoint, String nodeName) {
-        if (!startingPoint.endsWith("/"))
+    private String addNodeToDiff(String startingPoint, String nodeName) {
+        if (!startingPoint.endsWith("/")) {
             startingPoint = startingPoint + "/";
-
-        return ("+\"" + startingPoint + nodeName + "\" : {\"key\":\"00000000000000000000\"} \n");
+        }
+        return ("+\"" + startingPoint + nodeName + "\" : {} \n");
     }
-}
 
-class GenericWriteTask implements Runnable {
+    private static class GenericWriteTask implements Runnable {
 
-    MicroKernel mk;
-    String diff;
-    int nodesPerCommit;
-
-    public GenericWriteTask(MongoMicroKernel mk, String diff, int nodesPerCommit) {
-
-        this.diff = diff;
-        this.mk = mk;
-    }
-
-    @Override
-    public void run() {
-        commit(mk, diff, 10);
-    }
+        private String id;
+        private MicroKernel mk;
+        private String diff;
+        private int nodesPerCommit;
+
+        public GenericWriteTask(String id, MongoMicroKernel mk, String diff,
+                int nodesPerCommit) {
+            this.id = id;
+            this.mk = mk;
+            this.diff = diff;
+            this.nodesPerCommit = nodesPerCommit;
+        }
 
-    private void commit(MicroKernel mk, String diff, int nodesPerCommit) {
+        @Override
+        public void run() {
+            if (nodesPerCommit == 0) {
+                mk.commit("", diff.toString(), null, "");
+                return;
+            }
 
-        if (nodesPerCommit == 0) {
-            mk.commit("", diff.toString(), null, "");
-            return;
-        }
-        String[] string = diff.split(System.getProperty("line.separator"));
-        int i = 0;
-        StringBuilder finalCommit = new StringBuilder();
-        for (String line : string) {
-            finalCommit.append(line);
-            i++;
-            if (i == nodesPerCommit) {
-                mk.commit("", finalCommit.toString(), null, "");
-                finalCommit.setLength(0);
-                i = 0;
+            int i = 0;
+            StringBuilder currentCommit = new StringBuilder();
+            String[] lines = diff.split("\n");
+            for (String line : lines) {
+                currentCommit.append(line);
+                i++;
+                if (i == nodesPerCommit) {
+                    //System.out.println("[" + id + "] Committing: " + currentCommit.toString());
+                    String rev = mk.commit("", currentCommit.toString(), null, null);
+                    //System.out.println("[" + id + "] Committed-" + rev + ":" + currentCommit.toString());
+                    currentCommit.setLength(0);
+                    i = 0;
+                }
             }
+            // Commit remaining nodes
+            if (currentCommit.length() > 0) {
+                mk.commit("", currentCommit.toString(), null, null);
+            }
         }
-        // commit remaining nodes
-        if (finalCommit.length() > 0)
-            mk.commit("", finalCommit.toString(), null, "");
     }
-}
+}
\ No newline at end of file
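The reworked GenericWriteTask above splits its newline-separated JSOP diff into batches of at most nodesPerCommit lines and commits each batch separately. A minimal standalone sketch of that batching step, assuming the batches are simply collected instead of being passed to MicroKernel.commit():

    import java.util.ArrayList;
    import java.util.List;

    // Illustrative sketch of the per-commit batching used by GenericWriteTask.
    public class BatchedCommitSketch {

        static List<String> toBatches(String diff, int nodesPerCommit) {
            List<String> batches = new ArrayList<String>();
            if (nodesPerCommit == 0) {
                batches.add(diff);          // one commit with the whole diff
                return batches;
            }
            StringBuilder current = new StringBuilder();
            int count = 0;
            for (String line : diff.split("\n")) {
                current.append(line).append('\n');
                if (++count == nodesPerCommit) {
                    batches.add(current.toString());
                    current.setLength(0);
                    count = 0;
                }
            }
            if (current.length() > 0) {     // the remaining lines
                batches.add(current.toString());
            }
            return batches;
        }

        public static void main(String[] args) {
            String diff = "+\"/a0\" : {} \n+\"/a1\" : {} \n+\"/a2\" : {} \n";
            for (String batch : toBatches(diff, 2)) {
                System.out.println("commit:\n" + batch);
            }
        }
    }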

Added: jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/multitenancy/MultiTenancyTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/multitenancy/MultiTenancyTest.java?rev=1409798&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/multitenancy/MultiTenancyTest.java (added)
+++ jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/multitenancy/MultiTenancyTest.java Thu Nov 15 14:18:41 2012
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.mongomk.multitenancy;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.InputStream;
+import java.util.Properties;
+
+import org.apache.jackrabbit.mk.api.MicroKernel;
+import org.apache.jackrabbit.mongomk.impl.MongoConnection;
+import org.apache.jackrabbit.mongomk.impl.MongoMicroKernel;
+import org.apache.jackrabbit.mongomk.impl.MongoNodeStore;
+import org.apache.jackrabbit.mongomk.impl.blob.MongoBlobStore;
+import org.apache.jackrabbit.mongomk.impl.blob.MongoGridFSBlobStore;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.mongodb.DB;
+
+/**
+ * Tests for multi-tenancy.
+ */
+public class MultiTenancyTest {
+
+    private static MicroKernel mk1;
+    private static MicroKernel mk2;
+    private static MicroKernel mk3;
+    private static MongoConnection mongoConnection1;
+    private static MongoConnection mongoConnection2;
+    private static MongoConnection mongoConnection3;
+
+    @BeforeClass
+    public static void setUpBeforeClass() throws Exception {
+        createMongoConnections();
+    }
+
+    @Before
+    public void setUp() throws Exception {
+        dropCollections();
+        setupMicroKernels();
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        dropCollections();
+    }
+
+    @AfterClass
+    public static void tearDownAfterClass() throws Exception {
+        dropDatabases();
+    }
+
+    /**
+     * Scenario: 3 MKs total, 2 MKs point to DB1, 1 points to DB2.
+     */
+    @Test
+    public void basicMultiTenancy() {
+        mk1.commit("/", "+\"a\" : {}", null, null);
+        assertEquals(1, mk1.getChildNodeCount("/", null));
+        assertEquals(0, mk2.getChildNodeCount("/", null));
+        assertEquals(1, mk3.getChildNodeCount("/", null));
+
+        mk2.commit("/", "+\"b\" : {}", null, null);
+        mk2.commit("/", "+\"c\" : {}", null, null);
+        assertEquals(1, mk1.getChildNodeCount("/", null));
+        assertEquals(2, mk2.getChildNodeCount("/", null));
+        assertEquals(1, mk3.getChildNodeCount("/", null));
+
+        assertTrue(mk1.nodeExists("/a", null));
+        assertFalse(mk1.nodeExists("/b", null));
+        assertFalse(mk1.nodeExists("/c", null));
+
+        assertFalse(mk2.nodeExists("/a", null));
+        assertTrue(mk2.nodeExists("/b", null));
+        assertTrue(mk2.nodeExists("/c", null));
+
+        assertTrue(mk3.nodeExists("/a", null));
+        assertFalse(mk3.nodeExists("/b", null));
+        assertFalse(mk3.nodeExists("/c", null));
+    }
+
+    private static void createMongoConnections() throws Exception {
+        InputStream is = MultiTenancyTest.class.getResourceAsStream("/config.cfg");
+        Properties properties = new Properties();
+        properties.load(is);
+
+        String host = properties.getProperty("host");
+        int port = Integer.parseInt(properties.getProperty("port"));
+        String db1 = properties.getProperty("db");
+        String db2 = properties.getProperty("db2");
+
+        mongoConnection1 = new MongoConnection(host, port, db1);
+        mongoConnection2 = new MongoConnection(host, port, db2);
+        mongoConnection3 = new MongoConnection(host, port, db1);
+    }
+
+    private static void dropDatabases() {
+        mongoConnection1.getDB().dropDatabase();
+        mongoConnection2.getDB().dropDatabase();
+        mongoConnection3.getDB().dropDatabase();
+    }
+
+    private void dropCollections() {
+        doDropCollections(mongoConnection1.getDB());
+        doDropCollections(mongoConnection2.getDB());
+        doDropCollections(mongoConnection3.getDB());
+    }
+
+    private void doDropCollections(DB db) {
+        db.getCollection(MongoBlobStore.COLLECTION_BLOBS).drop();
+        db.getCollection(MongoNodeStore.COLLECTION_COMMITS).drop();
+        db.getCollection(MongoNodeStore.COLLECTION_NODES).drop();
+        db.getCollection(MongoNodeStore.COLLECTION_SYNC).drop();
+    }
+
+    private void setupMicroKernels() {
+        DB db = mongoConnection1.getDB();
+        mk1 = new MongoMicroKernel(mongoConnection1, new MongoNodeStore(db),
+                new MongoGridFSBlobStore(db));
+
+        DB db2 = mongoConnection2.getDB();
+        mk2 = new MongoMicroKernel(mongoConnection2, new MongoNodeStore(db2),
+                new MongoGridFSBlobStore(db2));
+
+        DB db3 = mongoConnection3.getDB();
+        mk3 = new MongoMicroKernel(mongoConnection3, new MongoNodeStore(db3),
+                new MongoGridFSBlobStore(db3));
+    }
+}
\ No newline at end of file

Propchange: jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/multitenancy/MultiTenancyTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/oak/trunk/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/multitenancy/MultiTenancyTest.java
------------------------------------------------------------------------------
    svn:keywords = Author Date Id Revision Rev URL

Modified: jackrabbit/oak/trunk/oak-mongomk/src/test/resources/config.cfg
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-mongomk/src/test/resources/config.cfg?rev=1409798&r1=1409797&r2=1409798&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-mongomk/src/test/resources/config.cfg (original)
+++ jackrabbit/oak/trunk/oak-mongomk/src/test/resources/config.cfg Thu Nov 15 14:18:41 2012
@@ -25,4 +25,7 @@ host = 127.0.0.1
 port = 27017
 
 # The database to use
-db = MongoMicroKernelTest
\ No newline at end of file
+db = MongoMKDB
+
+# The secondary database to use for multi-tenancy tests
+db2 = MongoMKDB2
\ No newline at end of file
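With the two database properties in place, the tail of config.cfg read by the tests is expected to look as follows (only the lines visible in the hunk above; the comments preceding host and port are omitted here, and the earlier part of the file is unchanged):

    host = 127.0.0.1
    port = 27017

    # The database to use
    db = MongoMKDB

    # The secondary database to use for multi-tenancy tests
    db2 = MongoMKDB2

MultiTenancyTest reads db and db2 from this file and points two of its three MicroKernels at the first database and the remaining one at the second.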


