jackrabbit-oak-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From chet...@apache.org
Subject svn commit: r1577448 - in /jackrabbit/oak/trunk: oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/ oak-core/ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore...
Date Fri, 14 Mar 2014 08:41:27 GMT
Author: chetanm
Date: Fri Mar 14 08:41:27 2014
New Revision: 1577448

URL: http://svn.apache.org/r1577448
Log:
OAK-805 - Support for existing Jackrabbit 2.x DataStores

Refactored the implementation

-- Added a new method in BlobStore to obtain an InputStream for a given blobId.
    By default it is implemented using BlobStoreInputStream
-- DataStoreBlobStore uses the InputStream obtained from DataStore
-- Added support for inlining binaries if their size is small

Added:
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreBlob.java   (with props)
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecord.java   (with props)
    jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/
    jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStoreTest.java   (with props)
    jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecordTest.java   (with props)
Removed:
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentBlob.java
Modified:
    jackrabbit/oak/trunk/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java
    jackrabbit/oak/trunk/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/BlobStore.java
    jackrabbit/oak/trunk/oak-core/pom.xml
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/gridfs/MongoGridFSBlobStore.java

Modified: jackrabbit/oak/trunk/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java?rev=1577448&r1=1577447&r2=1577448&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java (original)
+++ jackrabbit/oak/trunk/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java Fri Mar 14 08:41:27 2014
@@ -35,6 +35,7 @@ import java.util.NoSuchElementException;
 import java.util.WeakHashMap;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.jackrabbit.oak.commons.cache.Cache;
 import org.apache.jackrabbit.oak.commons.IOUtils;
 import org.apache.jackrabbit.oak.commons.StringUtils;
@@ -133,10 +134,8 @@ public abstract class AbstractBlobStore 
             in = new FileInputStream(file);
             return writeBlob(in);
         } finally {
-            if (in != null) {
-                in.close();
-            }
-            file.delete();
+            org.apache.commons.io.IOUtils.closeQuietly(in);
+            FileUtils.forceDelete(file);
         }
     }
 
@@ -159,6 +158,11 @@ public abstract class AbstractBlobStore 
         }
     }
 
+    public InputStream getInputStream(String blobId) throws IOException {
+    //Marking would be handled by the next call to store.readBlob
+        return new BlobStoreInputStream(this, blobId, 0);
+    }
+
     protected void usesBlobId(String blobId) {
         inUse.put(blobId, new WeakReference<String>(blobId));
     }

Modified: jackrabbit/oak/trunk/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/BlobStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/BlobStore.java?rev=1577448&r1=1577447&r2=1577448&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/BlobStore.java (original)
+++ jackrabbit/oak/trunk/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/BlobStore.java Fri Mar 14 08:41:27 2014
@@ -53,4 +53,16 @@ public interface BlobStore {
      */
     long getBlobLength(String blobId) throws IOException;
 
+    /**
+     * Returns a new stream for given blobId. The streams returned from
+     * multiple calls to this method are byte-wise equal. That is,
+     * subsequent calls to {@link java.io.InputStream#read() read}
+     * return the same sequence of bytes as long as neither call throws
+     * an exception.
+     *
+     * @param blobId the blob id
+     * @return a new stream for given blobId
+     */
+    InputStream getInputStream(String blobId) throws IOException;
+
 }

Modified: jackrabbit/oak/trunk/oak-core/pom.xml
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/pom.xml?rev=1577448&r1=1577447&r2=1577448&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/pom.xml (original)
+++ jackrabbit/oak/trunk/oak-core/pom.xml Fri Mar 14 08:41:27 2014
@@ -290,6 +290,12 @@
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>1.9.5</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.easymock</groupId>
       <artifactId>easymock</artifactId>
       <scope>test</scope>

Added: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreBlob.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreBlob.java?rev=1577448&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreBlob.java (added)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreBlob.java Fri Mar 14 08:41:27 2014
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import javax.annotation.CheckForNull;
+import javax.annotation.Nonnull;
+
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+import org.apache.jackrabbit.oak.api.Blob;
+
+/**
+ * A blob implementation.
+ */
+public class BlobStoreBlob implements Blob {
+    
+    private final BlobStore blobStore;
+    private final String id;
+    
+    public BlobStoreBlob(BlobStore blobStore, String id) {
+        this.blobStore = blobStore;
+        this.id = id;
+    }
+
+    @Override
+    @Nonnull
+    public InputStream getNewStream() {
+        try {
+            return blobStore.getInputStream(id);
+        } catch (IOException e) {
+            throw new RuntimeException("Error occurred while obtaining " +
+                    "InputStream for blobId ["+id+"]",e);
+        }
+    }
+
+    @Override
+    public long length() {
+        try {
+            return blobStore.getBlobLength(id);
+        } catch (Exception e) {
+            throw new IllegalArgumentException("Invalid blob id: " + id, e);
+        }
+    }
+
+    @Override @CheckForNull
+    public String getReference() {
+        return id;
+    }
+
+    //------------------------------------------------------------< Object >--
+
+    @Override
+    public String toString() {
+        return id;
+    }
+    
+    @Override
+    public int hashCode() {
+        return id.hashCode();
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        } 
+        if (other instanceof BlobStoreBlob) {
+            BlobStoreBlob b = (BlobStoreBlob) other;
+            // theoretically, the data could be the same  
+            // even if the id is different
+            return b.id.equals(id);
+        }
+        return false;
+    }
+
+}

Propchange: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreBlob.java
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java?rev=1577448&r1=1577447&r2=1577448&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java Fri Mar 14 08:41:27 2014
@@ -1,474 +1,173 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
 package org.apache.jackrabbit.oak.plugins.blob.datastore;
 
+import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.Arrays;
+import java.io.SequenceInputStream;
 import java.util.Iterator;
-import java.util.NoSuchElementException;
-import java.util.concurrent.ExecutionException;
 
-import org.apache.jackrabbit.core.data.CachingDataStore;
+import javax.annotation.Nullable;
+import javax.jcr.RepositoryException;
+
+import com.google.common.base.Function;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Iterators;
+import org.apache.commons.io.FileUtils;
 import org.apache.jackrabbit.core.data.DataIdentifier;
 import org.apache.jackrabbit.core.data.DataRecord;
 import org.apache.jackrabbit.core.data.DataStore;
 import org.apache.jackrabbit.core.data.DataStoreException;
 import org.apache.jackrabbit.core.data.MultiDataStoreAware;
-import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
-import org.apache.jackrabbit.oak.commons.cache.Cache;
 import org.apache.jackrabbit.oak.commons.IOUtils;
-import org.apache.jackrabbit.oak.commons.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Strings;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import com.google.common.cache.RemovalListener;
-import com.google.common.cache.RemovalNotification;
-import com.google.common.collect.Iterators;
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
 
 /**
- * A {@link BlobStore} implementation which is a compatibility wrapper for
- * Jackrabbit {@link DataStore}.
- * <p>
- * Uses a 2 level cache to improve random read performance.
- * 
- * Caches the {@link InputStream} until fully read or closed. Number of streams
- * cached are controlled by the
- * {@link DataStoreConfiguration#getStreamCacheSize()} parameter
- * 
- * Also, uses a 16MB bytes[] cache.
- * 
+ * BlobStore wrapper for DataStore. Wraps a Jackrabbit 2 DataStore and exposes it as a BlobStore.
+ * It also handles inlining binaries if their size is smaller than
+ * {@link org.apache.jackrabbit.core.data.DataStore#getMinRecordLength()}
  */
-public class DataStoreBlobStore implements GarbageCollectableBlobStore,
-        Cache.Backend<DataStoreBlobStore.LogicalBlockId, DataStoreBlobStore.Data> {
-
-    /**
-     * Logger instance.
-     */
-    private static final Logger LOG = LoggerFactory.getLogger(DataStoreBlobStore.class);
-
-    protected static final int BLOCK_SIZE_LIMIT = 40;
-
-    private static final int DEFAULT_STREAM_CACHE_SIZE = 256;
-
-    /**
-     * The size of a block. 128 KB has been found to be as fast as larger
-     * values, and faster than smaller values. 2 MB results in less files.
-     */
-    private int blockSize = 2 * 1024 * 1024;
-
-    /**
-     * The block cache (16 MB). Caches blocks up to blockSize.
-     */
-    private Cache<LogicalBlockId, Data> blockCache = Cache.newInstance(this, 16 * 1024 * 1024);
-
-    /** The stream cache size. */
-    protected int streamCacheSize;
+public class DataStoreBlobStore implements DataStore, BlobStore, GarbageCollectableBlobStore {
+    private final DataStore delegate;
 
-    /**
-     * The stream cache caches a number of streams to avoid opening a new stream
-     * on every random access read.
-     */
-    private LoadingCache<String, InputStream> streamCache;
-
-    private LoadingCache<String, Long> fileLengthCache;
-
-    /** The data store. */
-    private DataStore dataStore;
-
-    /**
-     * Gets the stream cache size.
-     * 
-     * @return the stream cache size
-     */
-    protected int getStreamCacheSize() {
-        return streamCacheSize;
+    public DataStoreBlobStore(DataStore delegate) {
+        this.delegate = delegate;
     }
 
-    /**
-     * Sets the stream cache size.
-     * 
-     * @param streamCacheSize
-     *            the new stream cache size
-     */
-    protected void setStreamCacheSize(int streamCacheSize) {
-        this.streamCacheSize = streamCacheSize;
-    }
+    //~----------------------------------< DataStore >
 
-    /**
-     * Sets the block size.
-     * 
-     * @param x
-     *            the new block size
-     */
-    public final void setBlockSize(final int x) {
-        validateBlockSize(x);
-        this.blockSize = x;
+    @Override
+    public DataRecord getRecordIfStored(DataIdentifier identifier) throws DataStoreException {
+        if(isInMemoryRecord(identifier)){
+            return getDataRecord(identifier.toString());
+        }
+        return delegate.getRecordIfStored(identifier);
     }
 
-    /**
-     * Validate block size.
-     * 
-     * @param x
-     *            the x
-     */
-    private static void validateBlockSize(final int x) {
-        if (x < BLOCK_SIZE_LIMIT) {
-            throw new IllegalArgumentException("The minimum size must be bigger "
-                    + "than a content hash itself; limit = " + BLOCK_SIZE_LIMIT);
+    @Override
+    public DataRecord getRecord(DataIdentifier identifier) throws DataStoreException {
+        if(isInMemoryRecord(identifier)){
+            return getDataRecord(identifier.toString());
         }
+        return delegate.getRecord(identifier);
     }
 
-    /**
-     * Initialized the blob store.
-     * 
-     * @param dataStore
-     *            the data store
-     * @param streamCacheSize
-     *            the stream cache size
-     */
-    public void init(DataStore dataStore) {
-        if (streamCacheSize <= 0) {
-            streamCacheSize = DEFAULT_STREAM_CACHE_SIZE;
-        }
-
-        streamCache = CacheBuilder.newBuilder().maximumSize(streamCacheSize)
-                .removalListener(new RemovalListener<String, InputStream>() {
-                    public void onRemoval(RemovalNotification<String, InputStream> removal) {
-                        InputStream stream = removal.getValue();
-                        IOUtils.closeQuietly(stream);
-                    }
-                }).build(new CacheLoader<String, InputStream>() {
-                    public InputStream load(String key) throws Exception {
-                        return loadStream(key);
-                    }
-                });
-        fileLengthCache = CacheBuilder.newBuilder().maximumSize(streamCacheSize)
-                .build(new CacheLoader<String, Long>() {
-                    @Override
-                    public Long load(String key) throws Exception {
-                        return getBlobLength(key);
-                    }
-                });
-        this.dataStore = dataStore;
+    @Override
+    public DataRecord getRecordFromReference(String reference) throws DataStoreException {
+        return delegate.getRecordFromReference(reference);
     }
 
-    /**
-     * Writes the input stream to the data store.
-     */
     @Override
-    public String writeBlob(InputStream in) throws IOException {
+    public DataRecord addRecord(InputStream stream) throws DataStoreException {
         try {
-            // add the record in the data store
-            DataRecord dataRec = dataStore.addRecord(in);
-            return dataRec.getIdentifier().toString();
-        } catch (DataStoreException e) {
-            throw new IOException(e);
-        } finally {
-            IOUtils.closeQuietly(in);
+            return writeStream(stream);
+        } catch (IOException e) {
+            throw new DataStoreException(e);
         }
     }
 
-    /**
-     * Reads the blob with the given blob id and range.
-     */
     @Override
-    public int readBlob(String blobId, long pos, byte[] buff, int off, int length) throws IOException {
-        if (Strings.isNullOrEmpty(blobId)) {
-            return -1;
-        }
+    public void updateModifiedDateOnAccess(long before) {
+        delegate.updateModifiedDateOnAccess(before);
+    }
 
-        long blobLength;
-        try {
-            blobLength = fileLengthCache.get(blobId);
-        } catch (ExecutionException e) {
-            LOG.debug("File length cache error", e);
-            blobLength = getBlobLength(blobId);
-        }
-        LOG.debug("read {" + blobId + "}, {" + blobLength + "}");
-
-        long position = pos;
-        int offset = off;
-
-        if (position < blobLength) {
-            int totalLength = 0;
-            long bytesLeft = ((position + length) > blobLength ? blobLength - position : length);
-
-            // Reads all the logical blocks satisfying the required range
-            while (bytesLeft > 0) {
-                long posBlockStart = position / blockSize;
-                int posOffsetInBlock = (int) (position - posBlockStart * blockSize);
-
-                byte[] block = readBlock(blobId, posBlockStart);
-
-                long bytesToRead = Math.min(bytesLeft,
-                        Math.min((blobLength - posOffsetInBlock), (blockSize - posOffsetInBlock)));
-                System.arraycopy(block, posOffsetInBlock, buff, offset, (int) bytesToRead);
-
-                position += bytesToRead;
-                offset += bytesToRead;
-                totalLength += bytesToRead;
-                bytesLeft -= bytesToRead;
-            }
-            return totalLength;
-        } else {
-            LOG.trace("Blob read for pos " + pos + "," + (pos + length - 1) + " out of range");
-            return -1;
-        }
+    @Override
+    public int deleteAllOlderThan(long min) throws DataStoreException {
+        return delegate.deleteAllOlderThan(min);
     }
 
-    /**
-     * Gets the data store.
-     * 
-     * @return the data store
-     */
-    public DataStore getDataStore() {
-        return dataStore;
+    @Override
+    public Iterator<DataIdentifier> getAllIdentifiers() throws DataStoreException {
+        return delegate.getAllIdentifiers();
     }
 
-    /**
-     * Sets the data store.
-     * 
-     * @param dataStore
-     *            the data store
-     */
-    protected void setDataStore(DataStore dataStore) {
-        this.dataStore = dataStore;
+    @Override
+    public void init(String homeDir) throws RepositoryException {
+        throw new UnsupportedOperationException("DataStore cannot be initialized again");
     }
 
-    /**
-     * Load the block to the cache.
-     */
     @Override
-    public final Data load(final LogicalBlockId id) {
-        byte[] data;
-        try {
-            data = readBlockFromBackend(id);
-        } catch (Exception e) {
-            throw new RuntimeException("failed to read block from backend, id " + id, e);
-        }
-        if (data == null) {
-            throw new IllegalArgumentException("The block with id " + id + " was not found");
-        }
-        LOG.debug("Read from backend (Cache Miss): " + id);
-        return new Data(data);
+    public int getMinRecordLength() {
+        return delegate.getMinRecordLength();
     }
 
-    /**
-     * Gets the length of the blob identified by the blobId.
-     */
     @Override
-    public final long getBlobLength(final String blobId) throws IOException {
-        if (Strings.isNullOrEmpty(blobId)) {
-            return 0;
-        }
+    public void close() throws DataStoreException {
+        delegate.close();
+    }
 
-        Long length = null;
+    //~-------------------------------------------< BlobStore >
+
+    @Override
+    public String writeBlob(InputStream in) throws IOException {
         try {
-            if (dataStore instanceof CachingDataStore) {
-                length = ((CachingDataStore) dataStore).getLength(new DataIdentifier(blobId));
-            } else {
-                length = dataStore.getRecord(new DataIdentifier(blobId)).getLength();
-            }
-            return length;
+            return writeStream(in).getIdentifier().toString();
         } catch (DataStoreException e) {
-            throw new IOException("Could not get length of blob for id " + blobId, e);
+            throw new IOException(e);
         }
     }
 
-    /**
-     * Reads block from backend.
-     * 
-     * @param id
-     *            the id
-     * @return the byte[]
-     * @throws IOException
-     *             Signals that an I/O exception has occurred.
-     */
-    private byte[] readBlockFromBackend(final LogicalBlockId id) throws IOException {
-        String key = StringUtils.convertBytesToHex(id.digest);
-        InputStream stream = null;
+    @Override
+    public int readBlob(String blobId, long pos, byte[] buff, int off, int length) throws IOException {
+        //This is inefficient as repeated calls for the same blobId would involve opening a new Stream
+        //instead clients should directly access the stream from DataRecord by special casing for
+        //BlobStore which implements DataStore
+        InputStream in = getStream(blobId);
         try {
-            stream = streamCache.get(key);
-        } catch (ExecutionException e) {
-            LOG.debug("Error retrieving from stream cache : " + key, e);
-        }
-
-        byte[] block = new byte[blockSize];
-        org.apache.commons.io.IOUtils.read(stream, block, 0, blockSize);
-
-        if ((stream != null) && (stream.available() <= 0)) {
-            streamCache.invalidate(key);
+            long skip = pos;
+            while (skip > 0) {
+                long skipped = in.skip(skip);
+                if (skipped <= 0) {
+                    return -1;
+                }
+                skip -= skipped;
+            }
+            return IOUtils.readFully(in, buff,off,length);
+        } finally {
+            in.close();
         }
-        return block;
     }
 
-    /**
-     * Loads the stream from the data store.
-     * 
-     * @param key
-     *            the key
-     * @return the input stream
-     * @throws IOException
-     *             Signals that an I/O exception has occurred.
-     */
-    private InputStream loadStream(String key) throws IOException {
-        InputStream stream = null;
+    @Override
+    public long getBlobLength(String blobId) throws IOException {
         try {
-            stream = dataStore.getRecord(new DataIdentifier(key)).getStream();
+            return getDataRecord(blobId).getLength();
         } catch (DataStoreException e) {
-            throw new IOException("Could not read blob for id " + key, e);
+            throw new IOException(e);
         }
-        return stream;
     }
 
-    /**
-     * Reads block.
-     * 
-     * @param blobId
-     *            the blob id
-     * @param posStart
-     *            the pos start
-     * @return the byte[]
-     * @throws Exception
-     *             the exception
-     */
-    private byte[] readBlock(final String blobId, final long posStart) throws IOException {
-        byte[] digest = StringUtils.convertHexToBytes(blobId);
-        LogicalBlockId id = new LogicalBlockId(digest, posStart);
-
-        LOG.debug("Trying to read from cache : " + blobId + ", " + posStart);
-
-        return blockCache.get(id).data;
-    }
-
-    /**
-     * Delete all blobs older than.
-     * 
-     * @param time
-     *            the time
-     * @return the int
-     * @throws Exception
-     *             the exception
-     */
-    public int deleteAllOlderThan(long time) throws Exception {
-        return dataStore.deleteAllOlderThan(time);
-    }
-
-    /**
-     * A file is divided into logical chunks. Blocks are small enough to fit in
-     * memory, so they can be cached.
-     */
-    public static class LogicalBlockId {
-
-        /** The digest. */
-        final byte[] digest;
-
-        /** The starting pos. */
-        final long pos;
-
-        /**
-         * Instantiates a new logical block id.
-         * 
-         * @param digest
-         *            the digest
-         * @param pos
-         *            the starting position of the block
-         */
-        LogicalBlockId(final byte[] digest, final long pos) {
-            this.digest = digest;
-            this.pos = pos;
-        }
-
-        @Override
-        public final boolean equals(final Object other) {
-            if (this == other) {
-                return true;
-            }
-            if (other == null || !(other instanceof LogicalBlockId)) {
-                return false;
-            }
-            LogicalBlockId o = (LogicalBlockId) other;
-            return Arrays.equals(digest, o.digest) && pos == o.pos;
-        }
-
-        @Override
-        public final int hashCode() {
-            return Arrays.hashCode(digest) ^ (int) (pos >> 32) ^ (int) pos;
-        }
-
-        @Override
-        public final String toString() {
-            return StringUtils.convertBytesToHex(digest) + "@" + pos;
-        }
-
-        /**
-         * Gets the digest.
-         * 
-         * @return the digest
-         */
-        public final byte[] getDigest() {
-            return digest;
-        }
-
-        /**
-         * Gets the starting position.
-         * 
-         * @return the starting position
-         */
-        public final long getPos() {
-            return pos;
-        }
+    @Override
+    public InputStream getInputStream(String blobId) throws IOException {
+        return getStream(blobId);
     }
 
-    /**
-     * The data for a block.
-     */
-    public static class Data implements Cache.Value {
+    //~-------------------------------------------< GarbageCollectableBlobStore >
 
-        /** The data. */
-        final byte[] data;
+    @Override
+    public void setBlockSize(int x) {
 
-        /**
-         * Instantiates a new data.
-         * 
-         * @param data
-         *            the data
-         */
-        Data(final byte[] data) {
-            this.data = data;
-        }
-
-        @Override
-        public final String toString() {
-            String s = StringUtils.convertBytesToHex(data);
-            return s.length() > 100 ? s.substring(0, 100) + ".. (len=" + data.length + ")" : s;
-        }
-
-        @Override
-        public final int getMemory() {
-            return data.length;
-        }
     }
 
     @Override
@@ -479,56 +178,56 @@ public class DataStoreBlobStore implemen
             in = new FileInputStream(file);
             return writeBlob(in);
         } finally {
-            if (in != null) {
-                in.close();
-            }
-            file.delete();
+            org.apache.commons.io.IOUtils.closeQuietly(in);
+            FileUtils.forceDelete(file);
         }
     }
 
     @Override
     public int sweep() throws IOException {
-        // no-op
         return 0;
     }
 
     @Override
     public void startMark() throws IOException {
+
     }
 
     @Override
     public void clearInUse() {
-        dataStore.clearInUse();
+        delegate.clearInUse();
     }
 
     @Override
     public void clearCache() {
-        // no-op
+
     }
 
     @Override
     public long getBlockSizeMin() {
-        // no-op
         return 0;
     }
 
-    /**
-     * Ignores the maxLastModifiedTime currently.
-     */
     @Override
-    public Iterator<String> getAllChunkIds(
-            long maxLastModifiedTime) throws Exception {
-        return new DataStoreIterator(dataStore.getAllIdentifiers());
+    public Iterator<String> getAllChunkIds(long maxLastModifiedTime) throws Exception {
+        //TODO Ignores the maxLastModifiedTime currently.
+        return Iterators.transform(delegate.getAllIdentifiers(), new Function<DataIdentifier, String>() {
+            @Nullable
+            @Override
+            public String apply(@Nullable DataIdentifier input) {
+                return input.toString();
+            }
+        });
     }
 
     @Override
-    public boolean deleteChunk(String blobId, long maxLastModifiedTime) throws Exception {
-        if (dataStore instanceof MultiDataStoreAware) {
-            DataIdentifier identifier = new DataIdentifier(blobId);
-            DataRecord dataRecord = dataStore.getRecord(identifier);
-            if ((maxLastModifiedTime <= 0) 
+    public boolean deleteChunk(String chunkId, long maxLastModifiedTime) throws Exception {
+        if (delegate instanceof MultiDataStoreAware) {
+            DataIdentifier identifier = new DataIdentifier(chunkId);
+            DataRecord dataRecord = delegate.getRecord(identifier);
+            if ((maxLastModifiedTime <= 0)
                     || dataRecord.getLastModified() <= maxLastModifiedTime) {
-                ((MultiDataStoreAware) dataStore).deleteRecord(identifier);
+                ((MultiDataStoreAware) delegate).deleteRecord(identifier);
                 return true;
             }
         }
@@ -540,29 +239,68 @@ public class DataStoreBlobStore implemen
         return Iterators.singletonIterator(blobId);
     }
 
-    class DataStoreIterator implements Iterator<String> {
-        Iterator<DataIdentifier> backingIterator;
+    /** Identifies which DataStore implementation backs this BlobStore. */
+    @Override
+    public String toString() {
+        return "DataStore backed BlobStore [" + delegate.getClass().getName() + "]";
+    }
 
-        public DataStoreIterator(Iterator<DataIdentifier> backingIterator) {
-            this.backingIterator = backingIterator;
-        }
+    /**
+     * Returns the underlying {@link DataStore} this BlobStore delegates to.
+     */
+    public DataStore getDataStore() {
+        return delegate;
+    }
 
-        @Override
-        public boolean hasNext() {
-            return backingIterator.hasNext();
+    /**
+     * Opens a stream for the given blobId, resolving in-memory records locally
+     * and everything else via the delegate (see getDataRecord).
+     */
+    private InputStream getStream(String blobId) throws IOException {
+        try {
+            return getDataRecord(blobId).getStream();
+        } catch (DataStoreException e) {
+            // Wrap store failures so callers only have to deal with IOException.
+            throw new IOException(e);
         }
+    }
+
+    private DataRecord getDataRecord(String blobId) throws DataStoreException {
+        DataRecord id;
+        if(InMemoryDataRecord.isInstance(blobId)){
+            id = InMemoryDataRecord.getInstance(blobId);
+        }else{
+            id = delegate.getRecord(new DataIdentifier(blobId));
+            Preconditions.checkNotNull(id, "No DataRecord found for blodId [%s]", blobId);
+        }
+        return id;
+    }
+
+    /** An in-memory record is recognised solely by its identifier encoding. */
+    private boolean isInMemoryRecord(DataIdentifier identifier) {
+        String id = identifier.toString();
+        return InMemoryDataRecord.isInstance(id);
+    }
 
-        @Override
-        public String next() {
-            if (!hasNext()) {
-                throw new NoSuchElementException("No more elements");
+    /**
+     * Create a BLOB value from an input stream. Content that fits within the
+     * data store's minimum record length is kept as an in-memory record, while
+     * larger content is stored in the backing data store.
+     *
+     * @param in the input stream
+     * @return the data record holding the stream's content
+     */
+    private DataRecord writeStream(InputStream in) throws IOException, DataStoreException {
+        // Read one byte more than the inline threshold so we can tell whether
+        // the stream exceeds it.
+        int maxMemorySize = Math.max(0, delegate.getMinRecordLength() + 1);
+        byte[] buffer = new byte[maxMemorySize];
+        int pos = 0, len = maxMemorySize;
+        while (pos < maxMemorySize) {
+            int l = in.read(buffer, pos, len);
+            if (l < 0) {
+                break;
             }
-            return backingIterator.next().toString();
+            pos += l;
+            len -= l;
         }
-
-        @Override
-        public void remove() {
-            throw new UnsupportedOperationException();
+        DataRecord record;
+        if (pos < maxMemorySize) {
+            // Stream ended below the threshold: shrink the buffer and inline it.
+            byte[] data = new byte[pos];
+            System.arraycopy(buffer, 0, data, 0, pos);
+            record = InMemoryDataRecord.getInstance(data);
+        } else {
+            // A few bytes were already consumed; re-attach them in front of the
+            // remaining stream before handing it to the data store.
+            in = new SequenceInputStream(new ByteArrayInputStream(buffer, 0, pos), in);
+            record = delegate.addRecord(in);
         }
+        return record;
+    }
 }

Added: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecord.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecord.java?rev=1577448&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecord.java (added)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecord.java Fri Mar 14 08:41:27 2014
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob.datastore;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.util.Arrays;
+
+import org.apache.jackrabbit.core.data.DataIdentifier;
+import org.apache.jackrabbit.core.data.DataRecord;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Represents binary data which is backed by a byte[] (in memory).
+ */
+class InMemoryDataRecord implements DataRecord {
+
+    /**
+     * Logger instance for this class
+     */
+    private static Logger log = LoggerFactory.getLogger(InMemoryDataRecord.class);
+
+    /**
+     * the prefix of the string representation of this value
+     */
+    private static final String PREFIX = "0x";
+
+    /**
+     * the data
+     */
+    private final byte[] data;
+
+    private DataIdentifier identifier;
+
+    /**
+     * empty array
+     */
+    private static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
+
+    /**
+     * empty instance
+     */
+    private static final InMemoryDataRecord EMPTY = new InMemoryDataRecord(EMPTY_BYTE_ARRAY);
+
+    /**
+     * Creates a new instance from a
+     * <code>byte[]</code> array.
+     *
+     * @param data the byte array
+     */
+    private InMemoryDataRecord(byte[] data) {
+        this.data = data;
+    }
+
+    /**
+     * Creates a new instance from a
+     * <code>byte[]</code> array.
+     *
+     * @param data the byte array
+     */
+    static InMemoryDataRecord getInstance(byte[] data) {
+        if (data.length == 0) {
+            return EMPTY;
+        } else {
+            return new InMemoryDataRecord(data);
+        }
+    }
+
+    /**
+     * Checks if String can be converted to an instance of this class.
+     * @param id DataRecord identifier
+     * @return true if it can be converted
+     */
+    static boolean isInstance(String id) {
+        return id.startsWith(PREFIX);
+    }
+
+    /**
+     * Convert a String to an instance of this class.
+     * @param id DataRecord identifier
+     * @return the instance
+     */
+    static InMemoryDataRecord getInstance(String id) throws IllegalArgumentException {
+        assert id.startsWith(PREFIX);
+        id = id.substring(PREFIX.length());
+        int len = id.length();
+        if (len % 2 != 0) {
+            String msg = "unable to deserialize byte array " + id + " , length=" + id.length();
+            log.debug(msg);
+            throw new IllegalArgumentException(msg);
+        }
+        len /= 2;
+        byte[] data = new byte[len];
+        try {
+            for (int i = 0; i < len; i++) {
+                data[i] = (byte) ((Character.digit(id.charAt(2 * i), 16) << 4) | (Character.digit(id.charAt(2 * i + 1), 16)));
+            }
+        } catch (NumberFormatException e) {
+            String msg = "unable to deserialize byte array " + id;
+            log.debug(msg);
+            throw new IllegalArgumentException(msg);
+        }
+        return InMemoryDataRecord.getInstance(data);
+    }
+
+    @Override
+    public DataIdentifier getIdentifier() {
+        if(identifier == null){
+            identifier = new DataIdentifier(toString());
+        }
+        return identifier;
+    }
+
+    @Override
+    public String getReference() {
+        return null;
+    }
+
+    @Override
+    public long getLength() throws DataStoreException {
+        return data.length;
+    }
+
+    public InputStream getStream() {
+        return new ByteArrayInputStream(data);
+    }
+
+    @Override
+    public long getLastModified() {
+        return 0;
+    }
+
+    public String toString() {
+        StringBuilder buff = new StringBuilder(PREFIX.length() + 2 * data.length);
+        buff.append(PREFIX);
+        for (byte aData : data) {
+            int c = aData & 0xff;
+            buff.append(Integer.toHexString(c >> 4));
+            buff.append(Integer.toHexString(c & 0xf));
+        }
+        return buff.toString();
+    }
+
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj instanceof InMemoryDataRecord) {
+            InMemoryDataRecord other = (InMemoryDataRecord) obj;
+            return Arrays.equals(data, other.data);
+        }
+        return false;
+    }
+
+    /**
+     * Returns zero to satisfy the Object equals/hashCode contract.
+     * This class is mutable and not meant to be used as a hash key.
+     *
+     * @return always zero
+     * @see Object#hashCode()
+     */
+    public int hashCode() {
+        return 0;
+    }
+}

Propchange: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecord.java
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java?rev=1577448&r1=1577447&r2=1577448&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java Fri Mar 14 08:41:27 2014
@@ -58,6 +58,7 @@ import org.apache.jackrabbit.mk.api.Micr
 import org.apache.jackrabbit.oak.api.PropertyState;
 import org.apache.jackrabbit.oak.commons.json.JsopReader;
 import org.apache.jackrabbit.oak.commons.json.JsopTokenizer;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBlob;
 import org.apache.jackrabbit.oak.spi.blob.BlobStore;
 import org.apache.jackrabbit.oak.commons.json.JsopStream;
 import org.apache.jackrabbit.oak.commons.json.JsopWriter;
@@ -266,7 +267,7 @@ public final class DocumentNodeStore
     private final BlobSerializer blobSerializer = new BlobSerializer() {
         @Override
         public String serialize(Blob blob) {
-            if (blob instanceof DocumentBlob) {
+            if (blob instanceof BlobStoreBlob) {
                 return blob.toString();
             }
             String id;
@@ -1232,7 +1233,7 @@ public final class DocumentNodeStore
         } catch (Exception e) {
             throw new IOException("Could not write blob", e);
         }
-        return new DocumentBlob(blobStore, id);
+        return new BlobStoreBlob(blobStore, id);
     }
 
     /**
@@ -1244,7 +1245,7 @@ public final class DocumentNodeStore
     @Override
     @Nonnull
     public Blob getBlob(String blobId) {
-        return new DocumentBlob(blobStore, blobId);
+        return new BlobStoreBlob(blobStore, blobId);
     }
 
     @Nonnull

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/gridfs/MongoGridFSBlobStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/gridfs/MongoGridFSBlobStore.java?rev=1577448&r1=1577447&r2=1577448&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/gridfs/MongoGridFSBlobStore.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/gridfs/MongoGridFSBlobStore.java Fri Mar 14 08:41:27 2014
@@ -23,6 +23,7 @@ import org.apache.jackrabbit.oak.spi.blo
 
 import com.mongodb.DB;
 import com.mongodb.gridfs.GridFS;
+import org.apache.jackrabbit.oak.spi.blob.BlobStoreInputStream;
 
 /**
  * Implementation of {@link BlobStore} for MongoDB using GridFS. It does not
@@ -50,6 +51,11 @@ public class MongoGridFSBlobStore implem
     }
 
     @Override
+    public InputStream getInputStream(String blobId) throws IOException {
+        return new BlobStoreInputStream(this, blobId, 0);
+    }
+
+    @Override
     public int readBlob(String blobId, long blobOffset, byte[] buffer, int bufferOffset, int length) throws IOException {
         Command<Integer> command = new ReadBlobCommandGridFS(gridFS, blobId, blobOffset,
                 buffer, bufferOffset, length);

Added: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStoreTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStoreTest.java?rev=1577448&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStoreTest.java (added)
+++ jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStoreTest.java Fri Mar 14 08:41:27 2014
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.blob.datastore;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Random;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.jackrabbit.core.data.DataIdentifier;
+import org.apache.jackrabbit.core.data.DataRecord;
+import org.apache.jackrabbit.core.data.DataStore;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.apache.jackrabbit.oak.spi.blob.BlobStoreInputStream;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class DataStoreBlobStoreTest {
+
+    @Test
+    public void testInlineBinary() throws DataStoreException, IOException {
+        int maxInlineSize = 300;
+
+        DataStore mockedDS = mock(DataStore.class);
+        when(mockedDS.getMinRecordLength()).thenReturn(maxInlineSize);
+        DataStoreBlobStore ds = new DataStoreBlobStore(mockedDS);
+
+        byte[] data = new byte[maxInlineSize];
+        new Random().nextBytes(data);
+
+        DataRecord dr = ds.addRecord(new ByteArrayInputStream(data));
+        assertTrue(InMemoryDataRecord.isInstance(dr.getIdentifier().toString()));
+        assertTrue(IOUtils.contentEquals(new ByteArrayInputStream(data), dr.getStream()));
+        assertTrue(IOUtils.contentEquals(new ByteArrayInputStream(data),
+                new BlobStoreInputStream(ds, dr.getIdentifier().toString(), 0)));
+
+        assertEquals(dr, ds.getRecordIfStored(dr.getIdentifier()));
+        assertEquals(dr, ds.getRecord(dr.getIdentifier()));
+
+        //Check for BlobStore methods
+        assertEquals(maxInlineSize, ds.getBlobLength(dr.getIdentifier().toString()));
+        assertEquals(dr.getIdentifier().toString(), ds.writeBlob(new ByteArrayInputStream(data)));
+    }
+
+    @Test
+    public void testExternalBinary() throws DataStoreException, IOException {
+        int maxInlineSize = 300;
+        int actualSize = maxInlineSize + 10;
+
+        byte[] data = new byte[actualSize];
+        new Random().nextBytes(data);
+
+        DataIdentifier testDI = new DataIdentifier("test");
+        DataRecord testDR = new ByteArrayDataRecord(data, testDI);
+
+        DataStore mockedDS = mock(DataStore.class);
+        when(mockedDS.getMinRecordLength()).thenReturn(maxInlineSize);
+        when(mockedDS.getRecord(testDI)).thenReturn(testDR);
+        when(mockedDS.getRecordIfStored(testDI)).thenReturn(testDR);
+        when(mockedDS.addRecord(any(InputStream.class))).thenReturn(testDR);
+        DataStoreBlobStore ds = new DataStoreBlobStore(mockedDS);
+
+
+        DataRecord dr = ds.addRecord(new ByteArrayInputStream(data));
+        assertFalse(InMemoryDataRecord.isInstance(dr.getIdentifier().toString()));
+        assertEquals(testDI, dr.getIdentifier());
+        assertTrue(IOUtils.contentEquals(new ByteArrayInputStream(data), dr.getStream()));
+        assertTrue(IOUtils.contentEquals(new ByteArrayInputStream(data),
+                new BlobStoreInputStream(ds, dr.getIdentifier().toString(), 0)));
+
+        assertEquals(dr, ds.getRecordIfStored(dr.getIdentifier()));
+        assertEquals(dr, ds.getRecord(dr.getIdentifier()));
+
+        assertEquals(actualSize, ds.getBlobLength(dr.getIdentifier().toString()));
+        assertEquals(testDI.toString(), ds.writeBlob(new ByteArrayInputStream(data)));
+    }
+
+    private static class ByteArrayDataRecord implements DataRecord {
+        private final byte[] data;
+        private final DataIdentifier identifier;
+
+        private ByteArrayDataRecord(byte[] data, DataIdentifier di) {
+            this.data = data;
+            this.identifier = di;
+        }
+
+        @Override
+        public DataIdentifier getIdentifier() {
+            return identifier;
+        }
+
+        @Override
+        public String getReference() {
+            return null;
+        }
+
+        @Override
+        public long getLength() throws DataStoreException {
+            return data.length;
+        }
+
+        @Override
+        public InputStream getStream() throws DataStoreException {
+            return new ByteArrayInputStream(data);
+        }
+
+        @Override
+        public long getLastModified() {
+            return 0;
+        }
+    }
+}

Propchange: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStoreTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecordTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecordTest.java?rev=1577448&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecordTest.java (added)
+++ jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecordTest.java Fri Mar 14 08:41:27 2014
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.blob.datastore;
+
+import java.io.ByteArrayInputStream;
+import java.util.Random;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.jackrabbit.core.data.DataRecord;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class InMemoryDataRecordTest {
+    @Test
+    public void testGetInstance() throws Exception {
+        int length = 400;
+        byte[] content = new byte[length];
+        new Random().nextBytes(content);
+
+        // Round-trip: byte[] -> record -> identifier string -> record.
+        DataRecord original = InMemoryDataRecord.getInstance(content);
+        String id = original.getIdentifier().toString();
+        assertTrue(InMemoryDataRecord.isInstance(id));
+
+        DataRecord decoded = InMemoryDataRecord.getInstance(id);
+
+        // Both records must reproduce the original bytes.
+        assertTrue(IOUtils.contentEquals(original.getStream(), decoded.getStream()));
+        assertTrue(IOUtils.contentEquals(original.getStream(), new ByteArrayInputStream(content)));
+
+        assertEquals(length, original.getLength());
+        assertEquals(decoded.getLength(), original.getLength());
+
+        assertEquals(original, decoded);
+    }
+}

Propchange: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/InMemoryDataRecordTest.java
------------------------------------------------------------------------------
    svn:eol-style = native



Mime
View raw message