jackrabbit-oak-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mreut...@apache.org
Subject svn commit: r1447645 - in /jackrabbit/oak/trunk/oak-core/src: main/java/org/apache/jackrabbit/oak/kernel/KernelNodeState.java test/java/org/apache/jackrabbit/oak/kernel/KernelNodeStoreCacheTest.java
Date Tue, 19 Feb 2013 09:29:45 GMT
Author: mreutegg
Date: Tue Feb 19 09:29:44 2013
New Revision: 1447645

URL: http://svn.apache.org/r1447645
Log:
OAK-591: Improve KernelNodeStore cache efficiency

Added:
    jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/kernel/KernelNodeStoreCacheTest.java
  (with props)
Modified:
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/kernel/KernelNodeState.java

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/kernel/KernelNodeState.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/kernel/KernelNodeState.java?rev=1447645&r1=1447644&r2=1447645&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/kernel/KernelNodeState.java
(original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/kernel/KernelNodeState.java
Tue Feb 19 09:29:44 2013
@@ -39,6 +39,7 @@ import org.apache.jackrabbit.mk.json.Jso
 import org.apache.jackrabbit.mk.json.JsopTokenizer;
 import org.apache.jackrabbit.oak.api.PropertyState;
 import org.apache.jackrabbit.oak.api.Type;
+import org.apache.jackrabbit.oak.commons.PathUtils;
 import org.apache.jackrabbit.oak.plugins.memory.BinaryPropertyState;
 import org.apache.jackrabbit.oak.plugins.memory.BooleanPropertyState;
 import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeBuilder;
@@ -69,7 +70,7 @@ public final class KernelNodeState exten
 
     private final String path;
 
-    private final String revision;
+    private String revision;
 
     private Map<String, PropertyState> properties;
 
@@ -77,6 +78,8 @@ public final class KernelNodeState exten
 
     private String hash;
 
+    private String id;
+
     private Map<String, String> childPaths;
 
     private final LoadingCache<String, KernelNodeState> cache;
@@ -100,42 +103,78 @@ public final class KernelNodeState exten
         this.cache = checkNotNull(cache);
     }
 
-    private synchronized void init() {
-        if (properties == null) {
-            String json = kernel.getNodes(
-                    path, revision, 0, 0, MAX_CHILD_NODE_NAMES,
-                    "{\"properties\":[\"*\",\":hash\"]}");
-
-            JsopReader reader = new JsopTokenizer(json);
-            reader.read('{');
-            properties = new LinkedHashMap<String, PropertyState>();
-            childPaths = new LinkedHashMap<String, String>();
-            do {
-                String name = StringCache.get(reader.readString());
-                reader.read(':');
-                if (":childNodeCount".equals(name)) {
-                    childNodeCount =
-                            Long.valueOf(reader.read(JsopReader.NUMBER));
-                } else if (":hash".equals(name)) {
-                    hash = new String(reader.read(JsopReader.STRING));
-                } else if (reader.matches('{')) {
-                    reader.read('}');
-                    String childPath = path + '/' + name;
-                    if ("/".equals(path)) {
-                        childPath = '/' + name;
+    private void init() {
+        boolean initialized = false;
+        synchronized (this) {
+            if (properties == null) {
+                String json = kernel.getNodes(
+                        path, revision, 0, 0, MAX_CHILD_NODE_NAMES,
+                        "{\"properties\":[\"*\",\":hash\",\":id\"]}");
+
+                JsopReader reader = new JsopTokenizer(json);
+                reader.read('{');
+                properties = new LinkedHashMap<String, PropertyState>();
+                childPaths = new LinkedHashMap<String, String>();
+                do {
+                    String name = StringCache.get(reader.readString());
+                    reader.read(':');
+                    if (":childNodeCount".equals(name)) {
+                        childNodeCount =
+                                Long.valueOf(reader.read(JsopReader.NUMBER));
+                    } else if (":hash".equals(name)) {
+                        hash = new String(reader.read(JsopReader.STRING));
+                        if (hash.equals(id)) {
+                            // save some memory
+                            hash = id;
+                        }
+                    } else if (":id".equals(name)) {
+                        id = new String(reader.read(JsopReader.STRING));
+                        if (id.equals(hash)) {
+                            // save some memory
+                            id = hash;
+                        }
+                    } else if (reader.matches('{')) {
+                        reader.read('}');
+                        String childPath = path + '/' + name;
+                        if ("/".equals(path)) {
+                            childPath = '/' + name;
+                        }
+                        childPaths.put(name, childPath);
+                    } else if (reader.matches('[')) {
+                        properties.put(name, readArrayProperty(name, reader));
+                    } else {
+                        properties.put(name, readProperty(name, reader));
                     }
-                    childPaths.put(name, childPath);
-                } else if (reader.matches('[')) {
-                    properties.put(name, readArrayProperty(name, reader));
+                } while (reader.matches(','));
+                reader.read('}');
+                reader.read(JsopReader.END);
+                // optimize for empty childNodes
+                if (childPaths.isEmpty()) {
+                    childPaths = Collections.emptyMap();
+                }
+                initialized = true;
+            }
+        }
+        if (initialized && !PathUtils.denotesRoot(path)) {
+            // OAK-591: check if we can re-use a previous revision
+            // by looking up the node state by hash or id (if available)
+            // introducing this secondary lookup basically means we point
+            // back to a subtree in an older revision, in case it didn't change
+            String hashOrId = null;
+            if (hash != null) {
+                // hash takes precedence
+                hashOrId = hash;
+            } else if (id != null) {
+                hashOrId = id;
+            }
+            if (hashOrId != null) {
+                KernelNodeState cached = cache.getIfPresent(hashOrId);
+                if (cached != null && cached.path.equals(this.path)) {
+                    this.revision = cached.revision;
                 } else {
-                    properties.put(name, readProperty(name, reader));
+                    // store under secondary key
+                    cache.put(hashOrId, this);
                 }
-            } while (reader.matches(','));
-            reader.read('}');
-            reader.read(JsopReader.END);
-            // optimize for empty childNodes
-            if (childPaths.isEmpty()) {
-                childPaths = Collections.emptyMap();
             }
         }
     }
@@ -232,6 +271,8 @@ public final class KernelNodeState exten
                     kbase.init();
                     if (hash != null && hash.equals(kbase.hash)) {
                         return; // no differences
+                    } else if (id != null && id.equals(kbase.id)) {
+                        return; // no differences
                     } else if (path.equals(kbase.path) && !path.equals("/")) {
                         String jsonDiff = kernel.diff(kbase.getRevision(), revision, path, 0);
                         if (!hasChanges(jsonDiff)) {
@@ -269,6 +310,11 @@ public final class KernelNodeState exten
                     that.init();
                     if (hash != null && that.hash != null) {
                         return hash.equals(that.hash);
+                    } else if (id != null && id.equals(that.id)) {
+                        // only return result of equals if ids are equal
+                        // different ids doesn't mean the node states are
+                        // definitively different.
+                        return true;
                     } else if (path.equals(that.path) && !path.equals("/")) {
                         String jsonDiff = kernel.diff(that.getRevision(), revision, path, 0);
                         return !hasChanges(jsonDiff);

Added: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/kernel/KernelNodeStoreCacheTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/kernel/KernelNodeStoreCacheTest.java?rev=1447645&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/kernel/KernelNodeStoreCacheTest.java
(added)
+++ jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/kernel/KernelNodeStoreCacheTest.java
Tue Feb 19 09:29:44 2013
@@ -0,0 +1,273 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.kernel;
+
+import java.io.InputStream;
+
+import javax.annotation.Nonnull;
+
+import org.apache.jackrabbit.mk.api.MicroKernel;
+import org.apache.jackrabbit.mk.api.MicroKernelException;
+import org.apache.jackrabbit.mk.core.MicroKernelImpl;
+import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
+import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.apache.jackrabbit.oak.spi.state.NodeStoreBranch;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests if cache is used for repeated reads on unmodified subtree.
+ * See also OAK-591.
+ */
+public class KernelNodeStoreCacheTest {
+
+    private static final String PROP_FILTER = "{\"properties\":[\"*\"]}";
+    private static final String PROP_FILTER_WITH_HASH = "{\"properties\":[\"*\",\":hash\"]}";
+    private static final String PROP_FILTER_WITH_ID = "{\"properties\":[\"*\",\":id\"]}";
+
+    private KernelNodeStore store;
+
+    private MicroKernelWrapper wrapper;
+
+    @Before
+    public void setUp() throws Exception {
+        wrapper = new MicroKernelWrapper(new MicroKernelImpl());
+        store = new KernelNodeStore(wrapper);
+        NodeStoreBranch branch = store.branch();
+
+        NodeBuilder builder = branch.getRoot().builder();
+        builder.child("a");
+        NodeBuilder b = builder.child("b");
+        b.child("c");
+        b.child("d");
+        b.child("e");
+        branch.setRoot(builder.getNodeState());
+
+        branch.merge(EmptyHook.INSTANCE);
+    }
+
+    /**
+     * Provide both :hash and :id
+     */
+    @Test
+    public void withDefaultFilter() throws Exception {
+        int uncachedReads = readTreeWithCleanedCache();
+        modifyContent();
+        int cachedReads = readTreeWithCache();
+        assertTrue(cachedReads < uncachedReads);
+    }
+
+    /**
+     * Don't provide :hash nor :id. This will not reduce the number of
+     * MK.getNodes() after a commit.
+     */
+    @Test
+    public void withSimpleFilter() throws Exception {
+        wrapper.filter = PROP_FILTER;
+        int uncachedReads = readTreeWithCleanedCache();
+        modifyContent();
+        int cachedReads = readTreeWithCache();
+        assertEquals(cachedReads, uncachedReads);
+    }
+
+    /**
+     * Only provide :hash in MK.getNodes()
+     */
+    @Test
+    public void withHashFilter() throws Exception {
+        wrapper.filter = PROP_FILTER_WITH_HASH;
+        int uncachedReads = readTreeWithCleanedCache();
+        modifyContent();
+        int cachedReads = readTreeWithCache();
+        assertTrue(cachedReads < uncachedReads);
+    }
+
+    /**
+     * Only provide :id in MK.getNodes()
+     */
+    @Test
+    public void withIdFilter() throws Exception {
+        wrapper.filter = PROP_FILTER_WITH_ID;
+        int uncachedReads = readTreeWithCleanedCache();
+        System.out.println("Uncached reads: " + uncachedReads);
+
+        modifyContent();
+
+        int cachedReads = readTreeWithCache();
+
+        System.out.println("Cached reads: " + cachedReads);
+        assertTrue(cachedReads < uncachedReads);
+    }
+
+    //---------------------------< internal >-----------------------------------
+
+    private int readTreeWithCache() {
+        NodeState root = store.getRoot();
+        int cachedReads = wrapper.numGetNodes;
+        readTree(root);
+        return wrapper.numGetNodes - cachedReads;
+    }
+
+    private int readTreeWithCleanedCache() {
+        // start with virgin store / empty cache
+        store = new KernelNodeStore(wrapper);
+        KernelNodeState root = store.getRoot();
+        int uncachedReads = wrapper.numGetNodes;
+        readTree(root);
+        return wrapper.numGetNodes - uncachedReads;
+    }
+
+    private void modifyContent() throws Exception {
+        NodeStoreBranch branch = store.branch();
+        NodeBuilder builder = branch.getRoot().builder();
+        builder.child("a").setProperty("foo", "bar");
+        branch.setRoot(builder.getNodeState());
+        branch.merge(EmptyHook.INSTANCE);
+    }
+
+    private void readTree(NodeState root) {
+        for (ChildNodeEntry cne : root.getChildNodeEntries()) {
+            readTree(cne.getNodeState());
+        }
+    }
+
+    private static final class MicroKernelWrapper implements MicroKernel {
+
+        private final MicroKernel kernel;
+
+        String filter = null;
+        int numGetNodes = 0;
+
+        MicroKernelWrapper(MicroKernel kernel) {
+            this.kernel = kernel;
+        }
+
+        @Override
+        public String getHeadRevision() throws MicroKernelException {
+            return kernel.getHeadRevision();
+        }
+
+        @Override
+        public String getRevisionHistory(long since,
+                                         int maxEntries,
+                                         String path)
+                throws MicroKernelException {
+            return kernel.getRevisionHistory(since, maxEntries, path);
+        }
+
+        @Override
+        public String waitForCommit(String oldHeadRevisionId, long timeout)
+                throws MicroKernelException, InterruptedException {
+            return kernel.waitForCommit(oldHeadRevisionId, timeout);
+        }
+
+        @Override
+        public String getJournal(String fromRevisionId,
+                                 String toRevisionId,
+                                 String path) throws MicroKernelException {
+            return kernel.getJournal(fromRevisionId, toRevisionId, path);
+        }
+
+        @Override
+        public String diff(String fromRevisionId,
+                           String toRevisionId,
+                           String path,
+                           int depth) throws MicroKernelException {
+            return kernel.diff(fromRevisionId, toRevisionId, path, depth);
+        }
+
+        @Override
+        public boolean nodeExists(String path, String revisionId)
+                throws MicroKernelException {
+            return kernel.nodeExists(path, revisionId);
+        }
+
+        @Override
+        public long getChildNodeCount(String path, String revisionId)
+                throws MicroKernelException {
+            return kernel.getChildNodeCount(path, revisionId);
+        }
+
+        @Override
+        public String getNodes(String path,
+                               String revisionId,
+                               int depth,
+                               long offset,
+                               int maxChildNodes,
+                               String filter) throws MicroKernelException {
+            numGetNodes++;
+            if (this.filter != null) {
+                filter = this.filter;
+            }
+            return kernel.getNodes(path, revisionId, depth, offset, maxChildNodes, filter);
+        }
+
+        @Override
+        public String commit(String path,
+                             String jsonDiff,
+                             String revisionId,
+                             String message) throws MicroKernelException {
+            return kernel.commit(path, jsonDiff, revisionId, message);
+        }
+
+        @Override
+        public String branch(String trunkRevisionId)
+                throws MicroKernelException {
+            return kernel.branch(trunkRevisionId);
+        }
+
+        @Override
+        public String merge(String branchRevisionId, String message)
+                throws MicroKernelException {
+            return kernel.merge(branchRevisionId, message);
+        }
+
+        @Nonnull
+        @Override
+        public String rebase(@Nonnull String branchRevisionId,
+                             String newBaseRevisionId)
+                throws MicroKernelException {
+            return kernel.rebase(branchRevisionId, newBaseRevisionId);
+        }
+
+        @Override
+        public long getLength(String blobId) throws MicroKernelException {
+            return kernel.getLength(blobId);
+        }
+
+        @Override
+        public int read(String blobId,
+                        long pos,
+                        byte[] buff,
+                        int off,
+                        int length) throws MicroKernelException {
+            return kernel.read(blobId, pos, buff, off, length);
+        }
+
+        @Override
+        public String write(InputStream in) throws MicroKernelException {
+            return kernel.write(in);
+        }
+    }
+}

Propchange: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/kernel/KernelNodeStoreCacheTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/kernel/KernelNodeStoreCacheTest.java
------------------------------------------------------------------------------
    svn:keywords = Author Date Id Revision Rev URL



Mime
View raw message