jackrabbit-commits mailing list archives

From resc...@apache.org
Subject svn commit: r1576690 [4/10] - in /jackrabbit/trunk: examples/jackrabbit-firsthops/ examples/jackrabbit-firsthops/src/main/resources/ jackrabbit-aws-ext/ jackrabbit-aws-ext/src/main/java/org/apache/jackrabbit/aws/ext/ jackrabbit-aws-ext/src/main/java/or...
Date Wed, 12 Mar 2014 11:05:08 GMT
Modified: jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/FileDataStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/FileDataStore.java?rev=1576690&r1=1576689&r2=1576690&view=diff
==============================================================================
--- jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/FileDataStore.java (original)
+++ jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/FileDataStore.java Wed Mar 12 11:05:06 2014
@@ -1,481 +1,481 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.core.data;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.RandomAccessFile;
-import java.lang.ref.WeakReference;
-import java.security.DigestOutputStream;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.WeakHashMap;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Simple file-based data store. Data records are stored as normal files
- * named using a message digest of the contained binary stream.
- *
- * Configuration:
- * <pre>
- * &lt;DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
- *     &lt;param name="{@link #setPath(String) path}" value="/data/datastore"/>
- *     &lt;param name="{@link #setMinRecordLength(int) minRecordLength}" value="1024"/>
- * &lt;/DataStore>
- * </pre>
- * <p>
- * If the directory is not set, the directory &lt;repository home&gt;/repository/datastore is used.
- * <p>
- * A three level directory structure is used to avoid placing too many
- * files in a single directory. The chosen structure is designed to scale
- * up to billions of distinct records.
- * <p>
- * This implementation relies on the underlying file system to support
- * atomic O(1) move operations with {@link File#renameTo(File)}.
- */
-public class FileDataStore extends AbstractDataStore
-        implements MultiDataStoreAware {
-
-    /**
-     * Logger instance
-     */
-    private static Logger log = LoggerFactory.getLogger(FileDataStore.class);
-
-    /**
-     * The digest algorithm used to uniquely identify records.
-     */
-    private static final String DIGEST = "SHA-1";
-
-    /**
-     * The default value for the minimum object size.
-     */
-    private static final int DEFAULT_MIN_RECORD_LENGTH = 100;
-
-    /**
-     * The maximum last modified time resolution of the file system.
-     */
-    private static final int ACCESS_TIME_RESOLUTION = 2000;
-
-    /**
-     * Name of the directory used for temporary files.
-     * Must be at least 3 characters.
-     */
-    private static final String TMP = "tmp";
-
-    /**
-     * The minimum modified date. If a file is accessed (read or write) with a modified date
-     * older than this value, the modified date is updated to the current time.
-     */
-    private long minModifiedDate;
-
-    /**
-     * The directory that contains all the data record files. The structure
-     * of content within this directory is controlled by this class.
-     */
-    private File directory;
-
-    /**
-     * The name of the directory that contains all the data record files. The structure
-     * of content within this directory is controlled by this class.
-     */
-    private String path;
-
-    /**
-     * The minimum size of an object that should be stored in this data store.
-     */
-    private int minRecordLength = DEFAULT_MIN_RECORD_LENGTH;
-
-    /**
-     * All data identifiers that are currently in use are in this set until they are garbage collected.
-     */
-    protected Map<DataIdentifier, WeakReference<DataIdentifier>> inUse =
-        Collections.synchronizedMap(new WeakHashMap<DataIdentifier, WeakReference<DataIdentifier>>());
-
-    /**
-     * Initializes the data store.
-     * If the path is not set, &lt;repository home&gt;/repository/datastore is used.
-     * This directory is automatically created if it does not yet exist.
-     *
-     * @param homeDir the repository home directory
-     */
-    public void init(String homeDir) {
-        if (path == null) {
-            path = homeDir + "/repository/datastore";
-        }
-        directory = new File(path);
-        directory.mkdirs();
-    }
-
-    /**
-     * Get a data record for the given identifier.
-     *
-     * @param identifier the identifier
-     * @return the data record or null
-     */
-    public DataRecord getRecordIfStored(DataIdentifier identifier) throws DataStoreException {
-        File file = getFile(identifier);
-        synchronized (this) {
-            if (!file.exists()) {
-                return null;
-            }
-            if (minModifiedDate != 0) {
-                // only check when running garbage collection
-                if (getLastModified(file) < minModifiedDate) {
-                    setLastModified(file, System.currentTimeMillis() + ACCESS_TIME_RESOLUTION);
-                }
-            }
-            usesIdentifier(identifier);
-            return new FileDataRecord(this, identifier, file);
-        }
-    }
-
-    private void usesIdentifier(DataIdentifier identifier) {
-        inUse.put(identifier, new WeakReference<DataIdentifier>(identifier));
-    }
-
-    /**
-     * Creates a new data record.
-     * The stream is first consumed: the contents are saved in a temporary file
-     * while the SHA-1 message digest of the stream is calculated. If a
-     * record with the same SHA-1 digest (and length) already exists, it is
-     * returned. Otherwise the temporary file is moved into place and becomes
-     * the new data record that is returned.
-     *
-     * @param input binary stream
-     * @return data record that contains the given stream
-     * @throws DataStoreException if the record could not be created
-     */
-    public DataRecord addRecord(InputStream input) throws DataStoreException {
-        File temporary = null;
-        try {
-            temporary = newTemporaryFile();
-            DataIdentifier tempId = new DataIdentifier(temporary.getName());
-            usesIdentifier(tempId);
-            // Copy the stream to the temporary file and calculate the
-            // stream length and the message digest of the stream
-            long length = 0;
-            MessageDigest digest = MessageDigest.getInstance(DIGEST);
-            OutputStream output = new DigestOutputStream(
-                    new FileOutputStream(temporary), digest);
-            try {
-                length = IOUtils.copyLarge(input, output);
-            } finally {
-                output.close();
-            }
-            DataIdentifier identifier =
-                    new DataIdentifier(encodeHexString(digest.digest()));
-            File file;
-
-            synchronized (this) {
-                // Check if the same record already exists, or
-                // move the temporary file in place if needed
-                usesIdentifier(identifier);
-                file = getFile(identifier);
-                if (!file.exists()) {
-                    File parent = file.getParentFile();
-                    parent.mkdirs();
-                    if (temporary.renameTo(file)) {
-                        // no longer need to delete the temporary file
-                        temporary = null;
-                    } else {
-                        throw new IOException(
-                                "Can not rename " + temporary.getAbsolutePath()
-                                + " to " + file.getAbsolutePath()
-                                + " (media read only?)");
-                    }
-                } else {
-                    long now = System.currentTimeMillis();
-                    if (getLastModified(file) < now + ACCESS_TIME_RESOLUTION) {
-                        setLastModified(file, now + ACCESS_TIME_RESOLUTION);
-                    }
-                }
-                if (file.length() != length) {
-                    // Sanity checks on the record file. These should never fail,
-                    // but better safe than sorry...
-                    if (!file.isFile()) {
-                        throw new IOException("Not a file: " + file);
-                    }
-                    throw new IOException(DIGEST + " collision: " + file);
-                }
-            }
-            // this will also make sure that
-            // tempId is not garbage collected until here
-            inUse.remove(tempId);
-            return new FileDataRecord(this, identifier, file);
-        } catch (NoSuchAlgorithmException e) {
-            throw new DataStoreException(DIGEST + " not available", e);
-        } catch (IOException e) {
-            throw new DataStoreException("Could not add record", e);
-        } finally {
-            if (temporary != null) {
-                temporary.delete();
-            }
-        }
-    }
-
-    /**
-     * Returns the identified file. This method implements the pattern
-     * used to avoid problems with too many files in a single directory.
-     * <p>
-     * No sanity checks are performed on the given identifier.
-     *
-     * @param identifier data identifier
-     * @return identified file
-     */
-    private File getFile(DataIdentifier identifier) {
-        usesIdentifier(identifier);
-        String string = identifier.toString();
-        File file = directory;
-        file = new File(file, string.substring(0, 2));
-        file = new File(file, string.substring(2, 4));
-        file = new File(file, string.substring(4, 6));
-        return new File(file, string);
-    }
-
-    /**
-     * Returns a unique temporary file to be used for creating a new
-     * data record.
-     *
-     * @return temporary file
-     * @throws IOException
-     */
-    private File newTemporaryFile() throws IOException {
-        // the directory is already created in the init method
-        return File.createTempFile(TMP, null, directory);
-    }
-
-    public void updateModifiedDateOnAccess(long before) {
-        minModifiedDate = before;
-    }
-
-    public void deleteRecord(DataIdentifier identifier)
-            throws DataStoreException {
-        File file = getFile(identifier);
-        synchronized (this) {
-            if (file.exists()) {
-                if (!file.delete()) {
-                    log.warn("Failed to delete file " + file.getAbsolutePath());
-                }
-            }
-        }
-    }
-
-    public int deleteAllOlderThan(long min) {
-        int count = 0;
-        for (File file : directory.listFiles()) {
-            if (file.isDirectory()) { // skip top-level files
-                count += deleteOlderRecursive(file, min);
-            }
-        }
-        return count;
-    }
-
-    private int deleteOlderRecursive(File file, long min) {
-        int count = 0;
-        if (file.isFile() && file.exists() && file.canWrite()) {
-            synchronized (this) {
-                long lastModified;
-                try {
-                    lastModified = getLastModified(file);
-                } catch (DataStoreException e) {
-                    log.warn("Failed to read modification date; file not deleted", e);
-                    // don't delete the file, since the lastModified date is uncertain
-                    lastModified = min;
-                }
-                if (lastModified < min) {
-                    DataIdentifier id = new DataIdentifier(file.getName());
-                    if (!inUse.containsKey(id)) {
-                        if (log.isInfoEnabled()) {
-                            log.info("Deleting old file " + file.getAbsolutePath() +
-                                    " modified: " + new Timestamp(lastModified).toString() +
-                                    " length: " + file.length());
-                        }
-                        if (!file.delete()) {
-                            log.warn("Failed to delete old file " + file.getAbsolutePath());
-                        }
-                        count++;
-                    }
-                }
-            }
-        } else if (file.isDirectory()) {
-            File[] list = file.listFiles();
-            if (list != null) {
-                for (File f: list) {
-                    count += deleteOlderRecursive(f, min);
-                }
-            }
-
-            // JCR-1396: FileDataStore Garbage Collector and empty directories
-            // Automatic removal of empty directories (but not the root!)
-            synchronized (this) {
-                list = file.listFiles();
-                if (list != null && list.length == 0) {
-                    file.delete();
-                }
-            }
-        }
-        return count;
-    }
-
-    private void listRecursive(List<File> list, File file) {
-        File[] files = file.listFiles();
-        if (files != null) {
-            for (File f : files) {
-                if (f.isDirectory()) {
-                    listRecursive(list, f);
-                } else {
-                    list.add(f);
-                }
-            }
-        }
-    }
-
-    public Iterator<DataIdentifier> getAllIdentifiers() {
-        ArrayList<File> files = new ArrayList<File>();
-        for (File file : directory.listFiles()) {
-            if (file.isDirectory()) { // skip top-level files
-                listRecursive(files, file);
-            }
-        }
-
-        ArrayList<DataIdentifier> identifiers = new ArrayList<DataIdentifier>();
-        for (File f: files) {
-            String name = f.getName();
-            identifiers.add(new DataIdentifier(name));
-        }
-        log.debug("Found " + identifiers.size() + " identifiers.");
-        return identifiers.iterator();
-    }
-
-    public void clearInUse() {
-        inUse.clear();
-    }
-
-    /**
-     * Get the name of the directory where this data store keeps the files.
-     *
-     * @return the full path name
-     */
-    public String getPath() {
-        return path;
-    }
-
-    /**
-     * Set the name of the directory where this data store keeps the files.
-     *
-     * @param directoryName the path name
-     */
-    public void setPath(String directoryName) {
-        this.path = directoryName;
-    }
-
-    public int getMinRecordLength() {
-        return minRecordLength;
-    }
-
-    /**
-     * Set the minimum object length.
-     *
-     * @param minRecordLength the length
-     */
-    public void setMinRecordLength(int minRecordLength) {
-        this.minRecordLength = minRecordLength;
-    }
-
-    public void close() {
-        // nothing to do
-    }
-
-    //---------------------------------------------------------< protected >--
-
-    @Override
-    protected byte[] getOrCreateReferenceKey() throws DataStoreException {
-        File file = new File(directory, "reference.key");
-        try {
-            if (file.exists()) {
-                return FileUtils.readFileToByteArray(file);
-            } else {
-                byte[] key = super.getOrCreateReferenceKey();
-                FileUtils.writeByteArrayToFile(file, key);
-                return key;
-            }
-        } catch (IOException e) {
-            throw new DataStoreException(
-                    "Unable to access reference key file " + file.getPath(), e);
-        }
-    }
-
-    //-----------------------------------------------------------< private >--
-
-    /**
-     * Get the last modified date of a file.
-     *
-     * @param file the file
-     * @return the last modified date
-     * @throws DataStoreException if reading fails
-     */
-    private static long getLastModified(File file) throws DataStoreException {
-        long lastModified = file.lastModified();
-        if (lastModified == 0) {
-            throw new DataStoreException("Failed to read record modified date: " + file.getAbsolutePath());
-        }
-        return lastModified;
-    }
-
-    /**
-     * Set the last modified date of a file, if the file is writable.
-     *
-     * @param file the file
-     * @param time the new last modified date
-     * @throws DataStoreException if the file is writable but modifying the date fails
-     */
-    private static void setLastModified(File file, long time) throws DataStoreException {
-        if (!file.setLastModified(time)) {
-            if (!file.canWrite()) {
-                // if we can't write to the file, garbage collection will also not delete it
-                // (read only files or file systems)
-                return;
-            }
-            try {
-                // workaround for Windows: if the file is already open for reading
-                // (in this or another process), then setting the last modified date
-                // doesn't work - see also JCR-2872
-                RandomAccessFile r = new RandomAccessFile(file, "rw");
-                try {
-                    r.setLength(r.length());
-                } finally {
-                    r.close();
-                }
-            } catch (IOException e) {
-                throw new DataStoreException("An IO Exception occurred while trying to set the last modified date: " + file.getAbsolutePath(), e);
-            }
-        }
-    }
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.core.data;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.RandomAccessFile;
+import java.lang.ref.WeakReference;
+import java.security.DigestOutputStream;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.WeakHashMap;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Simple file-based data store. Data records are stored as normal files
+ * named using a message digest of the contained binary stream.
+ *
+ * Configuration:
+ * <pre>
+ * &lt;DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
+ *     &lt;param name="{@link #setPath(String) path}" value="/data/datastore"/>
+ *     &lt;param name="{@link #setMinRecordLength(int) minRecordLength}" value="1024"/>
+ * &lt;/DataStore>
+ * </pre>
+ * <p>
+ * If the directory is not set, the directory &lt;repository home&gt;/repository/datastore is used.
+ * <p>
+ * A three level directory structure is used to avoid placing too many
+ * files in a single directory. The chosen structure is designed to scale
+ * up to billions of distinct records.
+ * <p>
+ * This implementation relies on the underlying file system to support
+ * atomic O(1) move operations with {@link File#renameTo(File)}.
+ */
+public class FileDataStore extends AbstractDataStore
+        implements MultiDataStoreAware {
+
+    /**
+     * Logger instance
+     */
+    private static Logger log = LoggerFactory.getLogger(FileDataStore.class);
+
+    /**
+     * The digest algorithm used to uniquely identify records.
+     */
+    private static final String DIGEST = "SHA-1";
+
+    /**
+     * The default value for the minimum object size.
+     */
+    private static final int DEFAULT_MIN_RECORD_LENGTH = 100;
+
+    /**
+     * The maximum last modified time resolution of the file system.
+     */
+    private static final int ACCESS_TIME_RESOLUTION = 2000;
+
+    /**
+     * Name of the directory used for temporary files.
+     * Must be at least 3 characters.
+     */
+    private static final String TMP = "tmp";
+
+    /**
+     * The minimum modified date. If a file is accessed (read or write) with a modified date
+     * older than this value, the modified date is updated to the current time.
+     */
+    private long minModifiedDate;
+
+    /**
+     * The directory that contains all the data record files. The structure
+     * of content within this directory is controlled by this class.
+     */
+    private File directory;
+
+    /**
+     * The name of the directory that contains all the data record files. The structure
+     * of content within this directory is controlled by this class.
+     */
+    private String path;
+
+    /**
+     * The minimum size of an object that should be stored in this data store.
+     */
+    private int minRecordLength = DEFAULT_MIN_RECORD_LENGTH;
+
+    /**
+     * All data identifiers that are currently in use are in this set until they are garbage collected.
+     */
+    protected Map<DataIdentifier, WeakReference<DataIdentifier>> inUse =
+        Collections.synchronizedMap(new WeakHashMap<DataIdentifier, WeakReference<DataIdentifier>>());
+
+    /**
+     * Initializes the data store.
+     * If the path is not set, &lt;repository home&gt;/repository/datastore is used.
+     * This directory is automatically created if it does not yet exist.
+     *
+     * @param homeDir the repository home directory
+     */
+    public void init(String homeDir) {
+        if (path == null) {
+            path = homeDir + "/repository/datastore";
+        }
+        directory = new File(path);
+        directory.mkdirs();
+    }
+
+    /**
+     * Get a data record for the given identifier.
+     *
+     * @param identifier the identifier
+     * @return the data record or null
+     */
+    public DataRecord getRecordIfStored(DataIdentifier identifier) throws DataStoreException {
+        File file = getFile(identifier);
+        synchronized (this) {
+            if (!file.exists()) {
+                return null;
+            }
+            if (minModifiedDate != 0) {
+                // only check when running garbage collection
+                if (getLastModified(file) < minModifiedDate) {
+                    setLastModified(file, System.currentTimeMillis() + ACCESS_TIME_RESOLUTION);
+                }
+            }
+            usesIdentifier(identifier);
+            return new FileDataRecord(this, identifier, file);
+        }
+    }
+
+    private void usesIdentifier(DataIdentifier identifier) {
+        inUse.put(identifier, new WeakReference<DataIdentifier>(identifier));
+    }
+
+    /**
+     * Creates a new data record.
+     * The stream is first consumed: the contents are saved in a temporary file
+     * while the SHA-1 message digest of the stream is calculated. If a
+     * record with the same SHA-1 digest (and length) already exists, it is
+     * returned. Otherwise the temporary file is moved into place and becomes
+     * the new data record that is returned.
+     *
+     * @param input binary stream
+     * @return data record that contains the given stream
+     * @throws DataStoreException if the record could not be created
+     */
+    public DataRecord addRecord(InputStream input) throws DataStoreException {
+        File temporary = null;
+        try {
+            temporary = newTemporaryFile();
+            DataIdentifier tempId = new DataIdentifier(temporary.getName());
+            usesIdentifier(tempId);
+            // Copy the stream to the temporary file and calculate the
+            // stream length and the message digest of the stream
+            long length = 0;
+            MessageDigest digest = MessageDigest.getInstance(DIGEST);
+            OutputStream output = new DigestOutputStream(
+                    new FileOutputStream(temporary), digest);
+            try {
+                length = IOUtils.copyLarge(input, output);
+            } finally {
+                output.close();
+            }
+            DataIdentifier identifier =
+                    new DataIdentifier(encodeHexString(digest.digest()));
+            File file;
+
+            synchronized (this) {
+                // Check if the same record already exists, or
+                // move the temporary file in place if needed
+                usesIdentifier(identifier);
+                file = getFile(identifier);
+                if (!file.exists()) {
+                    File parent = file.getParentFile();
+                    parent.mkdirs();
+                    if (temporary.renameTo(file)) {
+                        // no longer need to delete the temporary file
+                        temporary = null;
+                    } else {
+                        throw new IOException(
+                                "Can not rename " + temporary.getAbsolutePath()
+                                + " to " + file.getAbsolutePath()
+                                + " (media read only?)");
+                    }
+                } else {
+                    long now = System.currentTimeMillis();
+                    if (getLastModified(file) < now + ACCESS_TIME_RESOLUTION) {
+                        setLastModified(file, now + ACCESS_TIME_RESOLUTION);
+                    }
+                }
+                if (file.length() != length) {
+                    // Sanity checks on the record file. These should never fail,
+                    // but better safe than sorry...
+                    if (!file.isFile()) {
+                        throw new IOException("Not a file: " + file);
+                    }
+                    throw new IOException(DIGEST + " collision: " + file);
+                }
+            }
+            // this will also make sure that
+            // tempId is not garbage collected until here
+            inUse.remove(tempId);
+            return new FileDataRecord(this, identifier, file);
+        } catch (NoSuchAlgorithmException e) {
+            throw new DataStoreException(DIGEST + " not available", e);
+        } catch (IOException e) {
+            throw new DataStoreException("Could not add record", e);
+        } finally {
+            if (temporary != null) {
+                temporary.delete();
+            }
+        }
+    }
+
+    /**
+     * Returns the identified file. This method implements the pattern
+     * used to avoid problems with too many files in a single directory.
+     * <p>
+     * No sanity checks are performed on the given identifier.
+     *
+     * @param identifier data identifier
+     * @return identified file
+     */
+    private File getFile(DataIdentifier identifier) {
+        usesIdentifier(identifier);
+        String string = identifier.toString();
+        File file = directory;
+        file = new File(file, string.substring(0, 2));
+        file = new File(file, string.substring(2, 4));
+        file = new File(file, string.substring(4, 6));
+        return new File(file, string);
+    }
+
+    /**
+     * Returns a unique temporary file to be used for creating a new
+     * data record.
+     *
+     * @return temporary file
+     * @throws IOException
+     */
+    private File newTemporaryFile() throws IOException {
+        // the directory is already created in the init method
+        return File.createTempFile(TMP, null, directory);
+    }
+
+    public void updateModifiedDateOnAccess(long before) {
+        minModifiedDate = before;
+    }
+
+    public void deleteRecord(DataIdentifier identifier)
+            throws DataStoreException {
+        File file = getFile(identifier);
+        synchronized (this) {
+            if (file.exists()) {
+                if (!file.delete()) {
+                    log.warn("Failed to delete file " + file.getAbsolutePath());
+                }
+            }
+        }
+    }
+
+    public int deleteAllOlderThan(long min) {
+        int count = 0;
+        for (File file : directory.listFiles()) {
+            if (file.isDirectory()) { // skip top-level files
+                count += deleteOlderRecursive(file, min);
+            }
+        }
+        return count;
+    }
+
+    private int deleteOlderRecursive(File file, long min) {
+        int count = 0;
+        if (file.isFile() && file.exists() && file.canWrite()) {
+            synchronized (this) {
+                long lastModified;
+                try {
+                    lastModified = getLastModified(file);
+                } catch (DataStoreException e) {
+                    log.warn("Failed to read modification date; file not deleted", e);
+                    // don't delete the file, since the lastModified date is uncertain
+                    lastModified = min;
+                }
+                if (lastModified < min) {
+                    DataIdentifier id = new DataIdentifier(file.getName());
+                    if (!inUse.containsKey(id)) {
+                        if (log.isInfoEnabled()) {
+                            log.info("Deleting old file " + file.getAbsolutePath() +
+                                    " modified: " + new Timestamp(lastModified).toString() +
+                                    " length: " + file.length());
+                        }
+                        if (!file.delete()) {
+                            log.warn("Failed to delete old file " + file.getAbsolutePath());
+                        }
+                        count++;
+                    }
+                }
+            }
+        } else if (file.isDirectory()) {
+            File[] list = file.listFiles();
+            if (list != null) {
+                for (File f: list) {
+                    count += deleteOlderRecursive(f, min);
+                }
+            }
+
+            // JCR-1396: FileDataStore Garbage Collector and empty directories
+            // Automatic removal of empty directories (but not the root!)
+            synchronized (this) {
+                list = file.listFiles();
+                if (list != null && list.length == 0) {
+                    file.delete();
+                }
+            }
+        }
+        return count;
+    }
+
+    private void listRecursive(List<File> list, File file) {
+        File[] files = file.listFiles();
+        if (files != null) {
+            for (File f : files) {
+                if (f.isDirectory()) {
+                    listRecursive(list, f);
+                } else {
+                    list.add(f);
+                }
+            }
+        }
+    }
+
+    public Iterator<DataIdentifier> getAllIdentifiers() {
+        ArrayList<File> files = new ArrayList<File>();
+        for (File file : directory.listFiles()) {
+            if (file.isDirectory()) { // skip top-level files
+                listRecursive(files, file);
+            }
+        }
+
+        ArrayList<DataIdentifier> identifiers = new ArrayList<DataIdentifier>();
+        for (File f: files) {
+            String name = f.getName();
+            identifiers.add(new DataIdentifier(name));
+        }
+        log.debug("Found " + identifiers.size() + " identifiers.");
+        return identifiers.iterator();
+    }
+
+    public void clearInUse() {
+        inUse.clear();
+    }
+
+    /**
+     * Get the name of the directory where this data store keeps the files.
+     *
+     * @return the full path name
+     */
+    public String getPath() {
+        return path;
+    }
+
+    /**
+     * Set the name of the directory where this data store keeps the files.
+     *
+     * @param directoryName the path name
+     */
+    public void setPath(String directoryName) {
+        this.path = directoryName;
+    }
+
+    public int getMinRecordLength() {
+        return minRecordLength;
+    }
+
+    /**
+     * Set the minimum object length.
+     *
+     * @param minRecordLength the length
+     */
+    public void setMinRecordLength(int minRecordLength) {
+        this.minRecordLength = minRecordLength;
+    }
+
+    public void close() {
+        // nothing to do
+    }
+
+    //---------------------------------------------------------< protected >--
+
+    @Override
+    protected byte[] getOrCreateReferenceKey() throws DataStoreException {
+        File file = new File(directory, "reference.key");
+        try {
+            if (file.exists()) {
+                return FileUtils.readFileToByteArray(file);
+            } else {
+                byte[] key = super.getOrCreateReferenceKey();
+                FileUtils.writeByteArrayToFile(file, key);
+                return key;
+            }
+        } catch (IOException e) {
+            throw new DataStoreException(
+                    "Unable to access reference key file " + file.getPath(), e);
+        }
+    }
+
+    //-----------------------------------------------------------< private >--
+
+    /**
+     * Get the last modified date of a file.
+     *
+     * @param file the file
+     * @return the last modified date
+     * @throws DataStoreException if reading fails
+     */
+    private static long getLastModified(File file) throws DataStoreException {
+        long lastModified = file.lastModified();
+        if (lastModified == 0) {
+            throw new DataStoreException("Failed to read record modified date: " + file.getAbsolutePath());
+        }
+        return lastModified;
+    }
+
+    /**
+     * Set the last modified date of a file, if the file is writable.
+     *
+     * @param file the file
+     * @param time the new last modified date
+     * @throws DataStoreException if the file is writable but modifying the date fails
+     */
+    private static void setLastModified(File file, long time) throws DataStoreException {
+        if (!file.setLastModified(time)) {
+            if (!file.canWrite()) {
+                // if we can't write to the file, garbage collection will also not delete it
+                // (read only files or file systems)
+                return;
+            }
+            try {
+                // workaround for Windows: if the file is already open for reading
+                // (in this or another process), then setting the last modified date
+                // doesn't work - see also JCR-2872
+                RandomAccessFile r = new RandomAccessFile(file, "rw");
+                try {
+                    r.setLength(r.length());
+                } finally {
+                    r.close();
+                }
+            } catch (IOException e) {
+                throw new DataStoreException("An IO Exception occurred while trying to set the last modified date: " + file.getAbsolutePath(), e);
+            }
+        }
+    }
+}

Propchange: jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/FileDataStore.java
------------------------------------------------------------------------------
    svn:eol-style = native
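
For context: the FileDataStore above is content-addressed. addRecord() streams the input into a temporary file while computing its SHA-1 digest, then renames the temporary file to &lt;path&gt;/xx/yy/zz/&lt;digest&gt;, where xx, yy and zz are the first three pairs of hex characters of the digest (see getFile()). Below is a minimal, self-contained sketch of that digest-then-rename pattern; the class and method names are hypothetical, chosen only for illustration, and the sketch is not part of the commit.

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.security.DigestOutputStream;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class ContentAddressedStoreSketch {

        private final File directory;

        public ContentAddressedStoreSketch(File directory) {
            this.directory = directory;
        }

        // Copy the stream to a temporary file while digesting it,
        // then rename the temporary file to aa/bb/cc/<full digest>.
        public File add(InputStream input) throws IOException, NoSuchAlgorithmException {
            File temporary = File.createTempFile("tmp", null, directory);
            MessageDigest digest = MessageDigest.getInstance("SHA-1");
            try (OutputStream out = new DigestOutputStream(
                    new FileOutputStream(temporary), digest)) {
                byte[] buffer = new byte[8192];
                for (int n; (n = input.read(buffer)) != -1; ) {
                    out.write(buffer, 0, n);
                }
            }
            String hex = toHex(digest.digest());
            File target = new File(new File(new File(new File(directory,
                    hex.substring(0, 2)), hex.substring(2, 4)), hex.substring(4, 6)), hex);
            if (target.exists()) {
                temporary.delete();      // identical content is already stored
            } else {
                target.getParentFile().mkdirs();
                if (!temporary.renameTo(target)) {
                    temporary.delete();
                    throw new IOException("Cannot rename " + temporary + " to " + target);
                }
            }
            return target;
        }

        private static String toHex(byte[] bytes) {
            StringBuilder sb = new StringBuilder(bytes.length * 2);
            for (byte b : bytes) {
                sb.append(String.format("%02x", b));
            }
            return sb.toString();
        }
    }

The real addRecord() above additionally tracks in-use identifiers, bumps the modified time of an already existing file so the garbage collector does not remove it, and treats a length mismatch as a digest collision.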

Modified: jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/LazyFileInputStream.java
URL: http://svn.apache.org/viewvc/jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/LazyFileInputStream.java?rev=1576690&r1=1576689&r2=1576690&view=diff
==============================================================================
--- jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/LazyFileInputStream.java (original)
+++ jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/LazyFileInputStream.java Wed Mar 12 11:05:06 2014
@@ -1,167 +1,167 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.core.data;
-
-import java.io.File;
-import java.io.FileDescriptor;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-
-import org.apache.commons.io.input.AutoCloseInputStream;
-
-/**
- * This input stream delays opening the file until the first byte is read, and
- * closes and discards the underlying stream as soon as the end of input has
- * been reached or when the stream is explicitly closed.
- */
-public class LazyFileInputStream extends AutoCloseInputStream {
-
-    /**
-     * The file descriptor to use.
-     */
-    protected final FileDescriptor fd;
-
-    /**
-     * The file to read from.
-     */
-    protected final File file;
-
-    /**
-     * True if the input stream was opened. It is also set to true if the stream
-     * was closed without reading (to avoid opening the file after the stream
-     * was closed).
-     */
-    protected boolean opened;
-
-    /**
-     * Creates a new <code>LazyFileInputStream</code> for the given file. If the
-     * file is unreadable, a FileNotFoundException is thrown.
-     * The file is not opened until the first byte is read from the stream.
-     *
-     * @param file the file
-     * @throws java.io.FileNotFoundException
-     */
-    public LazyFileInputStream(File file)
-            throws FileNotFoundException {
-        super(null);
-        if (!file.canRead()) {
-            throw new FileNotFoundException(file.getPath());
-        }
-        this.file = file;
-        this.fd = null;
-    }
-
-    /**
-     * Creates a new <code>LazyFileInputStream</code> for the given file
-     * descriptor.
-     * The file is not opened until the first byte is read from the stream.
-     *
-     * @param fd
-     */
-    public LazyFileInputStream(FileDescriptor fd) {
-        super(null);
-        this.file = null;
-        this.fd = fd;
-    }
-
-    /**
-     * Creates a new <code>LazyFileInputStream</code> for the given file. If the
-     * file is unreadable, a FileNotFoundException is thrown.
-     *
-     * @param name
-     * @throws java.io.FileNotFoundException
-     */
-    public LazyFileInputStream(String name) throws FileNotFoundException {
-        this(new File(name));
-    }
-
-    /**
-     * Open the stream if required.
-     *
-     * @throws java.io.IOException
-     */
-    protected void open() throws IOException {
-        if (!opened) {
-            opened = true;
-            if (fd != null) {
-                in = new FileInputStream(fd);
-            } else {
-                in = new FileInputStream(file);
-            }
-        }
-    }
-
-    public int read() throws IOException {
-        open();
-        return super.read();
-    }
-
-    public int available() throws IOException {
-        open();
-        return super.available();
-    }
-
-    public void close() throws IOException {
-        // make sure the file is not opened afterwards
-        opened = true;
-        
-        // only close the file if it was in fact opened
-        if (in != null) {
-            super.close();
-        }
-    }
-
-    public synchronized void reset() throws IOException {
-        open();
-        super.reset();
-    }
-
-    public boolean markSupported() {
-        try {
-            open();
-        } catch (IOException e) {
-            throw new IllegalStateException(e.toString());
-        }
-        return super.markSupported();
-    }
-
-    public synchronized void mark(int readlimit) {
-        try {
-            open();
-        } catch (IOException e) {
-            throw new IllegalStateException(e.toString());
-        }
-        super.mark(readlimit);
-    }
-
-    public long skip(long n) throws IOException {
-        open();
-        return super.skip(n);
-    }
-
-    public int read(byte[] b) throws IOException {
-        open();
-        return super.read(b, 0, b.length);
-    }
-
-    public int read(byte[] b, int off, int len) throws IOException {
-        open();
-        return super.read(b, off, len);
-    }
-
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.core.data;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+import org.apache.commons.io.input.AutoCloseInputStream;
+
+/**
+ * This input stream delays opening the file until the first byte is read, and
+ * closes and discards the underlying stream as soon as the end of input has
+ * been reached or when the stream is explicitly closed.
+ */
+public class LazyFileInputStream extends AutoCloseInputStream {
+
+    /**
+     * The file descriptor to use.
+     */
+    protected final FileDescriptor fd;
+
+    /**
+     * The file to read from.
+     */
+    protected final File file;
+
+    /**
+     * True if the input stream was opened. It is also set to true if the stream
+     * was closed without reading (to avoid opening the file after the stream
+     * was closed).
+     */
+    protected boolean opened;
+
+    /**
+     * Creates a new <code>LazyFileInputStream</code> for the given file. If the
+     * file is unreadable, a FileNotFoundException is thrown.
+     * The file is not opened until the first byte is read from the stream.
+     *
+     * @param file the file
+     * @throws java.io.FileNotFoundException
+     */
+    public LazyFileInputStream(File file)
+            throws FileNotFoundException {
+        super(null);
+        if (!file.canRead()) {
+            throw new FileNotFoundException(file.getPath());
+        }
+        this.file = file;
+        this.fd = null;
+    }
+
+    /**
+     * Creates a new <code>LazyFileInputStream</code> for the given file
+     * descriptor.
+     * The file is not opened until the first byte is read from the stream.
+     *
+     * @param fd
+     */
+    public LazyFileInputStream(FileDescriptor fd) {
+        super(null);
+        this.file = null;
+        this.fd = fd;
+    }
+
+    /**
+     * Creates a new <code>LazyFileInputStream</code> for the given file. If the
+     * file is unreadable, a FileNotFoundException is thrown.
+     *
+     * @param name
+     * @throws java.io.FileNotFoundException
+     */
+    public LazyFileInputStream(String name) throws FileNotFoundException {
+        this(new File(name));
+    }
+
+    /**
+     * Open the stream if required.
+     *
+     * @throws java.io.IOException
+     */
+    protected void open() throws IOException {
+        if (!opened) {
+            opened = true;
+            if (fd != null) {
+                in = new FileInputStream(fd);
+            } else {
+                in = new FileInputStream(file);
+            }
+        }
+    }
+
+    public int read() throws IOException {
+        open();
+        return super.read();
+    }
+
+    public int available() throws IOException {
+        open();
+        return super.available();
+    }
+
+    public void close() throws IOException {
+        // make sure the file is not opened afterwards
+        opened = true;
+        
+        // only close the file if it was in fact opened
+        if (in != null) {
+            super.close();
+        }
+    }
+
+    public synchronized void reset() throws IOException {
+        open();
+        super.reset();
+    }
+
+    public boolean markSupported() {
+        try {
+            open();
+        } catch (IOException e) {
+            throw new IllegalStateException(e.toString());
+        }
+        return super.markSupported();
+    }
+
+    public synchronized void mark(int readlimit) {
+        try {
+            open();
+        } catch (IOException e) {
+            throw new IllegalStateException(e.toString());
+        }
+        super.mark(readlimit);
+    }
+
+    public long skip(long n) throws IOException {
+        open();
+        return super.skip(n);
+    }
+
+    public int read(byte[] b) throws IOException {
+        open();
+        return super.read(b, 0, b.length);
+    }
+
+    public int read(byte[] b, int off, int len) throws IOException {
+        open();
+        return super.read(b, off, len);
+    }
+
+}

Propchange: jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/LazyFileInputStream.java
------------------------------------------------------------------------------
    svn:eol-style = native
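
The LazyFileInputStream above defers opening the underlying FileInputStream until the first read (or skip/available/mark call), and because it extends Commons IO AutoCloseInputStream it also closes itself once the end of input is reached. A short usage sketch, assuming the class is on the classpath and a readable file exists at the hypothetical path /tmp/example.bin:

    import java.io.File;
    import java.io.IOException;

    import org.apache.jackrabbit.core.data.LazyFileInputStream;

    public class LazyStreamDemo {
        public static void main(String[] args) throws IOException {
            File file = new File("/tmp/example.bin"); // hypothetical path, must be readable
            // The constructor only checks file.canRead(); no file descriptor is opened yet.
            LazyFileInputStream in = new LazyFileInputStream(file);
            try {
                int first = in.read();                // the file is actually opened here
                System.out.println("first byte: " + first);
            } finally {
                in.close();                           // closes the descriptor only if it was opened
            }
        }
    }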

Modified: jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/LocalCache.java
URL: http://svn.apache.org/viewvc/jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/LocalCache.java?rev=1576690&r1=1576689&r2=1576690&view=diff
==============================================================================
--- jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/LocalCache.java (original)
+++ jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/LocalCache.java Wed Mar 12 11:05:06 2014
@@ -1,535 +1,535 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.jackrabbit.core.data;
-
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Set;
-
-import javax.jcr.RepositoryException;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.jackrabbit.core.data.LazyFileInputStream;
-import org.apache.jackrabbit.util.TransientFileFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * This class implements an LRU cache used by {@link CachingDataStore}. If the
- * cache size exceeds the limit, the cache goes into purge mode. In purge mode
- * any operation on the cache is a no-op. After purging, the cache size is less
- * than cachePurgeResizeFactor * maximum size.
- */
-public class LocalCache {
-
-    /**
-     * Logger instance.
-     */
-    static final Logger LOG = LoggerFactory.getLogger(LocalCache.class);
-
-    /**
-     * The file names of the files that need to be deleted.
-     */
-    final Set<String> toBeDeleted = new HashSet<String>();
-
-    /**
-     * The LRU cache mapping file names to file sizes.
-     */
-    LRUCache cache;
-
-    /**
-     * The directory where the files are created.
-     */
-    private final File directory;
-
-    /**
-     * The directory where tmp files are created.
-     */
-    private final File tmp;
-
-    /**
-     * The maximum size of cache in bytes.
-     */
-    private long maxSize;
-
-    /**
-     * If true cache is in purgeMode and not available. All operation would be
-     * no-op.
-     */
-    private volatile boolean purgeMode;
-
-    /**
-     * Builds an LRU cache of the files located at 'path', using the lastModified
-     * property of each file to order the cache. If the cache size exceeds the
-     * limit, the cache goes into purge mode. In purge mode any operation on the
-     * cache is a no-op.
-     *
-     * @param path file system path
-     * @param tmpPath temporary directory used by the cache.
-     * @param maxSize maximum size of the cache.
-     * @param cachePurgeTrigFactor factor which triggers purge mode: if the
-     * current size exceeds (cachePurgeTrigFactor * maxSize), the cache goes
-     * into auto-purge mode.
-     * @param cachePurgeResizeFactor after a purge the cache size will be just
-     * less than (cachePurgeResizeFactor * maxSize).
-     * @throws RepositoryException
-     */
-    public LocalCache(final String path, final String tmpPath,
-            final long maxSize, final double cachePurgeTrigFactor,
-            final double cachePurgeResizeFactor) throws RepositoryException {
-        this.maxSize = maxSize;
-        directory = new File(path);
-        tmp = new File(tmpPath);
-        cache = new LRUCache(maxSize, cachePurgeTrigFactor,
-            cachePurgeResizeFactor);
-        ArrayList<File> allFiles = new ArrayList<File>();
-
-        Iterator<File> it = FileUtils.iterateFiles(directory, null, true);
-        while (it.hasNext()) {
-            File f = it.next();
-            allFiles.add(f);
-        }
-        Collections.sort(allFiles, new Comparator<File>() {
-            @Override
-            public int compare(final File o1, final File o2) {
-                long l1 = o1.lastModified(), l2 = o2.lastModified();
-                return l1 < l2 ? -1 : l1 > l2 ? 1 : 0;
-            }
-        });
-        String dataStorePath = directory.getAbsolutePath();
-        long time = System.currentTimeMillis();
-        int count = 0;
-        int deletecount = 0;
-        for (File f : allFiles) {
-            if (f.exists()) {
-                long length = f.length();
-                String name = f.getPath();
-                if (name.startsWith(dataStorePath)) {
-                    name = name.substring(dataStorePath.length());
-                }
-                // convert to java path format
-                name = name.replace("\\", "/");
-                if (name.startsWith("/") || name.startsWith("\\")) {
-                    name = name.substring(1);
-                }
-                if ((cache.currentSizeInBytes + length) < cache.maxSizeInBytes) {
-                    count++;
-                    cache.put(name, length);
-                } else {
-                    if (tryDelete(name)) {
-                        deletecount++;
-                    }
-                }
-                long now = System.currentTimeMillis();
-                if (now > time + 5000) {
-                    LOG.info("Processed {" + (count + deletecount) + "}/{"
-                        + allFiles.size() + "}");
-                    time = now;
-                }
-            }
-        }
-        LOG.info("Cached {" + count + "}/{" + allFiles.size()
-            + "} , currentSizeInBytes = " + cache.currentSizeInBytes);
-        LOG.info("Deleted {" + deletecount + "}/{" + allFiles.size()
-            + "} files .");
-    }
-
-    /**
-     * Store an item in the cache and return the input stream. If the cache is in
-     * purge mode or the file doesn't exist, an input stream from a
-     * {@link TransientFileFactory#createTransientFile(String, String, File)} is
-     * returned. Otherwise an input stream from the cached file is returned. This
-     * method doesn't close the incoming input stream.
-     * 
-     * @param fileName the key of cache.
-     * @param in the inputstream.
-     * @return the (new) input stream.
-     */
-    public synchronized InputStream store(String fileName, final InputStream in)
-            throws IOException {
-        fileName = fileName.replace("\\", "/");
-        File f = getFile(fileName);
-        long length = 0;
-        if (!f.exists() || isInPurgeMode()) {
-            OutputStream out = null;
-            File transFile = null;
-            try {
-                TransientFileFactory tff = TransientFileFactory.getInstance();
-                transFile = tff.createTransientFile("s3-", "tmp", tmp);
-                out = new BufferedOutputStream(new FileOutputStream(transFile));
-                length = IOUtils.copyLarge(in, out);
-            } finally {
-                IOUtils.closeQuietly(out);
-            }
-            // rename the file to local fs cache
-            if (canAdmitFile(length)
-                && (f.getParentFile().exists() || f.getParentFile().mkdirs())
-                && transFile.renameTo(f) && f.exists()) {
-                if (transFile.exists() && !transFile.delete()) {
-                    LOG.warn("tmp file = " + transFile.getAbsolutePath()
-                        + " not deleted successfully");
-                }
-                transFile = null;
-                toBeDeleted.remove(fileName);
-                if (cache.get(fileName) == null) {
-                    cache.put(fileName, f.length());
-                }
-            } else {
-                f = transFile;
-            }
-        } else {
-            // f.exists and not in purge mode
-            f.setLastModified(System.currentTimeMillis());
-            toBeDeleted.remove(fileName);
-            if (cache.get(fileName) == null) {
-                cache.put(fileName, f.length());
-            }
-        }
-        cache.tryPurge();
-        return new LazyFileInputStream(f);
-    }
-
-    /**
-     * Store an item along with its file in the cache. The cache size is
-     * increased by {@link File#length()}. If the file already exists in the
-     * cache, {@link File#setLastModified(long)} is updated with the current time.
-     * 
-     * @param fileName the key of cache.
-     * @param src file to be added to cache.
-     * @throws IOException
-     */
-    public synchronized void store(String fileName, final File src)
-            throws IOException {
-        fileName = fileName.replace("\\", "/");
-        File dest = getFile(fileName);
-        File parent = dest.getParentFile();
-        if (src.exists() && !dest.exists() && !src.equals(dest)
-            && canAdmitFile(src.length())
-            && (parent.exists() || parent.mkdirs()) && (src.renameTo(dest))) {
-            toBeDeleted.remove(fileName);
-            if (cache.get(fileName) == null) {
-                cache.put(fileName, dest.length());
-            }
-
-        } else if (dest.exists()) {
-            dest.setLastModified(System.currentTimeMillis());
-            toBeDeleted.remove(fileName);
-            if (cache.get(fileName) == null) {
-                cache.put(fileName, dest.length());
-            }
-        }
-        cache.tryPurge();
-    }
-
-    /**
-     * Return the inputstream from from cache, or null if not in the cache.
-     * 
-     * @param fileName name of file.
-     * @return  stream or null.
-     */
-    public InputStream getIfStored(String fileName) throws IOException {
-
-        fileName = fileName.replace("\\", "/");
-        File f = getFile(fileName);
-        synchronized (this) {
-            if (!f.exists() || isInPurgeMode()) {
-                log("purgeMode true or file doesn't exists: getIfStored returned");
-                return null;
-            }
-            f.setLastModified(System.currentTimeMillis());
-            return new LazyFileInputStream(f);
-        }
-    }
-
-    /**
-     * Delete file from cache. Size of cache is reduced by file length. The
-     * method is no-op if file doesn't exist in cache.
-     * 
-     * @param fileName file name that need to be removed from cache.
-     */
-    public synchronized void delete(String fileName) {
-        if (isInPurgeMode()) {
-            log("purgeMode true :delete returned");
-            return;
-        }
-        fileName = fileName.replace("\\", "/");
-        cache.remove(fileName);
-    }
-
-    /**
-     * Returns length of file if exists in cache else returns null.
-     * @param fileName name of the file.
-     */
-    public Long getFileLength(String fileName) {
-        fileName = fileName.replace("\\", "/");
-        File f = getFile(fileName);
-        synchronized (this) {
-            if (!f.exists() || isInPurgeMode()) {
-                log("purgeMode true or file doesn't exists: getFileLength returned");
-                return null;
-            }
-            f.setLastModified(System.currentTimeMillis());
-            return f.length();
-        }
-    }
-
-    /**
-     * Close the cache. Cache maintain set of files which it was not able to
-     * delete successfully. This method will an attempt to delete all
-     * unsuccessful delete files.
-     */
-    public void close() {
-        log("close");
-        deleteOldFiles();
-    }
-
-    /**
-     * Check if cache can admit file of given length.
-     * @param length of the file.
-     * @return true if yes else return false.
-     */
-    private synchronized boolean canAdmitFile(final long length) {
-        // order is important here
-        boolean value = !isInPurgeMode() && cache.canAdmitFile(length);
-        if (!value) {
-            log("cannot admit file of length=" + length
-                + " and currentSizeInBytes=" + cache.currentSizeInBytes);
-        }
-        return value;
-    }
-
-    /**
-     * Return true if cache is in purge mode else return false.
-     */
-    synchronized boolean isInPurgeMode() {
-        return purgeMode || maxSize == 0;
-    }
-
-    /**
-     * Set purge mode. If set to true all cache operation will be no-op. If set
-     * to false, all operations to cache are available.
-     * 
-     * @param purgeMode purge mode
-     */
-    synchronized void setPurgeMode(final boolean purgeMode) {
-        this.purgeMode = purgeMode;
-    }
-
-    File getFile(final String fileName) {
-        return new File(directory, fileName);
-    }
-
-    private void deleteOldFiles() {
-        int initialSize = toBeDeleted.size();
-        int count = 0;
-        for (String n : new ArrayList<String>(toBeDeleted)) {
-            if (tryDelete(n)) {
-                count++;
-            }
-        }
-        LOG.info("deleted [" + count + "]/[" + initialSize + "] files");
-    }
-
-    /**
-     * This method tries to delete a file. If it is not able to delete file due
-     * to any reason, it add it toBeDeleted list.
-     * 
-     * @param fileName name of the file which will be deleted.
-     * @return true if this method deletes file successfuly else return false.
-     */
-    boolean tryDelete(final String fileName) {
-        log("cache delete " + fileName);
-        File f = getFile(fileName);
-        if (f.exists() && f.delete()) {
-            log(fileName + "  deleted successfully");
-            toBeDeleted.remove(fileName);
-            while (true) {
-                f = f.getParentFile();
-                if (f.equals(directory) || f.list().length > 0) {
-                    break;
-                }
-                // delete empty parent folders (except the main directory)
-                f.delete();
-            }
-            return true;
-        } else if (f.exists()) {
-            LOG.info("not able to delete file = " + f.getAbsolutePath());
-            toBeDeleted.add(fileName);
-            return false;
-        }
-        return true;
-    }
-
-    static int maxSizeElements(final long bytes) {
-        // after a CQ installation, the average item in
-        // the data store is about 52 KB
-        int count = (int) (bytes / 65535);
-        count = Math.max(1024, count);
-        count = Math.min(64 * 1024, count);
-        return count;
-    }
-
-    static void log(final String s) {
-        LOG.debug(s);
-    }
-
-    /**
-     * A LRU based extension {@link LinkedHashMap}. The key is file name and
-     * value is length of file.
-     */
-    private class LRUCache extends LinkedHashMap<String, Long> {
-        private static final long serialVersionUID = 1L;
-
-        volatile long currentSizeInBytes;
-
-        final long maxSizeInBytes;
-
-        long cachePurgeResize;
-        
-        private long cachePurgeTrigSize;
-
-        public LRUCache(final long maxSizeInBytes,
-                final double cachePurgeTrigFactor,
-                final double cachePurgeResizeFactor) {
-            super(maxSizeElements(maxSizeInBytes), (float) 0.75, true);
-            this.maxSizeInBytes = maxSizeInBytes;
-            this.cachePurgeTrigSize = new Double(cachePurgeTrigFactor
-                * maxSizeInBytes).longValue();
-            this.cachePurgeResize = new Double(cachePurgeResizeFactor
-                * maxSizeInBytes).longValue();
-        }
-
-        /**
-         * Overridden {@link Map#remove(Object)} to delete corresponding file
-         * from file system.
-         */
-        @Override
-        public synchronized Long remove(final Object key) {
-            String fileName = (String) key;
-            fileName = fileName.replace("\\", "/");
-            Long flength = null;
-            if (tryDelete(fileName)) {
-                flength = super.remove(key);
-                if (flength != null) {
-                    log("cache entry { " + fileName + "} with size {" + flength
-                        + "} removed.");
-                    currentSizeInBytes -= flength.longValue();
-                }
-            } else if (!getFile(fileName).exists()) {
-                // second attempt. remove from cache if file doesn't exists
-                flength = super.remove(key);
-                if (flength != null) {
-                    log(" file not exists. cache entry { " + fileName
-                        + "} with size {" + flength + "} removed.");
-                    currentSizeInBytes -= flength.longValue();
-                }
-            }
-            return flength;
-        }
-
-        @Override
-        public synchronized Long put(final String key, final Long value) {
-            long flength = value.longValue();
-            currentSizeInBytes += flength;
-            return super.put(key.replace("\\", "/"), value);
-        }
-
-        /**
-         * This method tries purging of local cache. It checks if local cache
-         * has exceeded the defined limit then it triggers purge cache job in a
-         * seperate thread.
-         */
-        synchronized void tryPurge() {
-            if (currentSizeInBytes > cachePurgeTrigSize && !isInPurgeMode()) {
-                setPurgeMode(true);
-                LOG.info("currentSizeInBytes[" + cache.currentSizeInBytes
-                    + "] exceeds (cachePurgeTrigSize)["
-                    + cache.cachePurgeTrigSize + "]");
-                new Thread(new PurgeJob()).start();
-            }
-        }
-        /**
-         * This method check if cache can admit file of given length. 
-         * @param length length of file.
-         * @return true if cache size + length is less than maxSize.
-         */
-        synchronized boolean canAdmitFile(final long length) {
-            return cache.currentSizeInBytes + length < cache.maxSizeInBytes;
-        }
-    }
-
-    /**
-     * This class performs purging of local cache. It implements
-     * {@link Runnable} and should be invoked in a separate thread.
-     */
-    private class PurgeJob implements Runnable {
-        public PurgeJob() {
-            // TODO Auto-generated constructor stub
-        }
-
-        /**
-         * This method purges local cache till its size is less than
-         * cacheResizefactor * maxSize
-         */
-        @Override
-        public void run() {
-            try {
-                synchronized (cache) {
-                    LOG.info(" cache purge job started");
-                    // first try to delete toBeDeleted files
-                    int initialSize = cache.size();
-                    for (String fileName : new ArrayList<String>(toBeDeleted)) {
-                        cache.remove(fileName);
-                    }
-                    Iterator<Map.Entry<String, Long>> itr = cache.entrySet().iterator();
-                    while (itr.hasNext()) {
-                        Map.Entry<String, Long> entry = itr.next();
-                        if (entry.getKey() != null) {
-                            if (cache.currentSizeInBytes > cache.cachePurgeResize) {
-                                itr.remove();
-
-                            } else {
-                                break;
-                            }
-                        }
-
-                    }
-                    LOG.info(" cache purge job completed: cleaned ["
-                        + (initialSize - cache.size())
-                        + "] files and currentSizeInBytes = [ "
-                        + cache.currentSizeInBytes + "]");
-                }
-            } catch (Exception e) {
-                LOG.error("error in purge jobs:", e);
-            } finally {
-                setPurgeMode(false);
-            }
-        }
-    }
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jackrabbit.core.data;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import javax.jcr.RepositoryException;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.jackrabbit.core.data.LazyFileInputStream;
+import org.apache.jackrabbit.util.TransientFileFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class implements an LRU cache used by {@link CachingDataStore}. If the
+ * cache size exceeds its limit, the cache enters purge mode. While purging,
+ * any cache operation is a no-op. After a purge the cache size is below
+ * cachePurgeResizeFactor * maxSize.
+ */
+public class LocalCache {
+
+    /**
+     * Logger instance.
+     */
+    static final Logger LOG = LoggerFactory.getLogger(LocalCache.class);
+
+    /**
+     * The file names of the files that need to be deleted.
+     */
+    final Set<String> toBeDeleted = new HashSet<String>();
+
+    /**
+     * The file name to file size LRU cache.
+     */
+    LRUCache cache;
+
+    /**
+     * The directory where the files are created.
+     */
+    private final File directory;
+
+    /**
+     * The directory where tmp files are created.
+     */
+    private final File tmp;
+
+    /**
+     * The maximum size of cache in bytes.
+     */
+    private long maxSize;
+
+    /**
+     * If true, the cache is in purge mode and not available; all operations
+     * are no-ops.
+     */
+    private volatile boolean purgeMode;
+
+    /**
+     * Builds an LRU cache of the files located under 'path', using each
+     * file's lastModified property to determine recency. If the cache size
+     * exceeds its limit, the cache enters purge mode. While purging, any
+     * cache operation is a no-op.
+     *
+     * @param path file system path of the cache directory.
+     * @param tmpPath temporary directory used by the cache.
+     * @param maxSize maximum size of the cache in bytes.
+     * @param cachePurgeTrigFactor factor that triggers purge mode: if the
+     * current size exceeds (cachePurgeTrigFactor * maxSize), the cache goes
+     * into auto-purge mode.
+     * @param cachePurgeResizeFactor after a purge the cache size will be just
+     * below (cachePurgeResizeFactor * maxSize).
+     * @throws RepositoryException
+     */
+    public LocalCache(final String path, final String tmpPath,
+            final long maxSize, final double cachePurgeTrigFactor,
+            final double cachePurgeResizeFactor) throws RepositoryException {
+        this.maxSize = maxSize;
+        directory = new File(path);
+        tmp = new File(tmpPath);
+        cache = new LRUCache(maxSize, cachePurgeTrigFactor,
+            cachePurgeResizeFactor);
+        ArrayList<File> allFiles = new ArrayList<File>();
+
+        Iterator<File> it = FileUtils.iterateFiles(directory, null, true);
+        while (it.hasNext()) {
+            File f = it.next();
+            allFiles.add(f);
+        }
+        Collections.sort(allFiles, new Comparator<File>() {
+            @Override
+            public int compare(final File o1, final File o2) {
+                long l1 = o1.lastModified(), l2 = o2.lastModified();
+                return l1 < l2 ? -1 : l1 > l2 ? 1 : 0;
+            }
+        });
+        String dataStorePath = directory.getAbsolutePath();
+        long time = System.currentTimeMillis();
+        int count = 0;
+        int deletecount = 0;
+        for (File f : allFiles) {
+            if (f.exists()) {
+                long length = f.length();
+                String name = f.getPath();
+                if (name.startsWith(dataStorePath)) {
+                    name = name.substring(dataStorePath.length());
+                }
+                // convert to java path format
+                name = name.replace("\\", "/");
+                if (name.startsWith("/") || name.startsWith("\\")) {
+                    name = name.substring(1);
+                }
+                if ((cache.currentSizeInBytes + length) < cache.maxSizeInBytes) {
+                    count++;
+                    cache.put(name, length);
+                } else {
+                    if (tryDelete(name)) {
+                        deletecount++;
+                    }
+                }
+                long now = System.currentTimeMillis();
+                if (now > time + 5000) {
+                    LOG.info("Processed {" + (count + deletecount) + "}/{"
+                        + allFiles.size() + "}");
+                    time = now;
+                }
+            }
+        }
+        LOG.info("Cached {" + count + "}/{" + allFiles.size()
+            + "} , currentSizeInBytes = " + cache.currentSizeInBytes);
+        LOG.info("Deleted {" + deletecount + "}/{" + allFiles.size()
+            + "} files .");
+    }
+
+    /**
+     * Stores an item in the cache and returns an input stream. If the cache
+     * is in purge mode or the file doesn't exist, an input stream backed by a
+     * {@link TransientFileFactory#createTransientFile(String, String, File)}
+     * is returned. Otherwise an input stream over the cached file is
+     * returned. This method does not close the incoming input stream.
+     *
+     * @param fileName the cache key.
+     * @param in the input stream.
+     * @return the (new) input stream.
+     */
+    public synchronized InputStream store(String fileName, final InputStream in)
+            throws IOException {
+        fileName = fileName.replace("\\", "/");
+        File f = getFile(fileName);
+        long length = 0;
+        if (!f.exists() || isInPurgeMode()) {
+            OutputStream out = null;
+            File transFile = null;
+            try {
+                TransientFileFactory tff = TransientFileFactory.getInstance();
+                transFile = tff.createTransientFile("s3-", "tmp", tmp);
+                out = new BufferedOutputStream(new FileOutputStream(transFile));
+                length = IOUtils.copyLarge(in, out);
+            } finally {
+                IOUtils.closeQuietly(out);
+            }
+            // rename the file to local fs cache
+            if (canAdmitFile(length)
+                && (f.getParentFile().exists() || f.getParentFile().mkdirs())
+                && transFile.renameTo(f) && f.exists()) {
+                // if the temporary file somehow survived the rename, try to
+                // remove it and warn only when that delete fails
+                if (transFile.exists() && !transFile.delete()) {
+                    LOG.warn("tmp file = " + transFile.getAbsolutePath()
+                        + " not deleted successfully");
+                }
+                transFile = null;
+                toBeDeleted.remove(fileName);
+                if (cache.get(fileName) == null) {
+                    cache.put(fileName, f.length());
+                }
+            } else {
+                f = transFile;
+            }
+        } else {
+            // f.exists and not in purge mode
+            f.setLastModified(System.currentTimeMillis());
+            toBeDeleted.remove(fileName);
+            if (cache.get(fileName) == null) {
+                cache.put(fileName, f.length());
+            }
+        }
+        cache.tryPurge();
+        return new LazyFileInputStream(f);
+    }
+
+    /**
+     * Stores an item together with its file in the cache. The cache size is
+     * increased by {@link File#length()}. If the file already exists in the
+     * cache, {@link File#setLastModified(long)} is updated with the current
+     * time.
+     *
+     * @param fileName the cache key.
+     * @param src file to be added to the cache.
+     * @throws IOException
+     */
+    public synchronized void store(String fileName, final File src)
+            throws IOException {
+        fileName = fileName.replace("\\", "/");
+        File dest = getFile(fileName);
+        File parent = dest.getParentFile();
+        if (src.exists() && !dest.exists() && !src.equals(dest)
+            && canAdmitFile(src.length())
+            && (parent.exists() || parent.mkdirs()) && (src.renameTo(dest))) {
+            toBeDeleted.remove(fileName);
+            if (cache.get(fileName) == null) {
+                cache.put(fileName, dest.length());
+            }
+
+        } else if (dest.exists()) {
+            dest.setLastModified(System.currentTimeMillis());
+            toBeDeleted.remove(fileName);
+            if (cache.get(fileName) == null) {
+                cache.put(fileName, dest.length());
+            }
+        }
+        cache.tryPurge();
+    }
+
+    /**
+     * Returns the input stream from the cache, or null if the file is not in
+     * the cache.
+     *
+     * @param fileName name of the file.
+     * @return stream, or null if not cached.
+     */
+    public InputStream getIfStored(String fileName) throws IOException {
+
+        fileName = fileName.replace("\\", "/");
+        File f = getFile(fileName);
+        synchronized (this) {
+            if (!f.exists() || isInPurgeMode()) {
+                log("purgeMode true or file doesn't exists: getIfStored returned");
+                return null;
+            }
+            f.setLastModified(System.currentTimeMillis());
+            return new LazyFileInputStream(f);
+        }
+    }
+
+    /**
+     * Deletes the file from the cache. The cache size is reduced by the file
+     * length. The method is a no-op if the file doesn't exist in the cache.
+     *
+     * @param fileName file name that needs to be removed from the cache.
+     */
+    public synchronized void delete(String fileName) {
+        if (isInPurgeMode()) {
+            log("purgeMode true :delete returned");
+            return;
+        }
+        fileName = fileName.replace("\\", "/");
+        cache.remove(fileName);
+    }
+
+    /**
+     * Returns the length of the file if it exists in the cache, otherwise null.
+     * @param fileName name of the file.
+     */
+    public Long getFileLength(String fileName) {
+        fileName = fileName.replace("\\", "/");
+        File f = getFile(fileName);
+        synchronized (this) {
+            if (!f.exists() || isInPurgeMode()) {
+                log("purgeMode true or file doesn't exists: getFileLength returned");
+                return null;
+            }
+            f.setLastModified(System.currentTimeMillis());
+            return f.length();
+        }
+    }
+
+    /**
+     * Closes the cache. The cache maintains a set of files which it was not
+     * able to delete successfully. This method makes one more attempt to
+     * delete those files.
+     */
+    public void close() {
+        log("close");
+        deleteOldFiles();
+    }
+
+    /**
+     * Checks if the cache can admit a file of the given length.
+     * @param length length of the file.
+     * @return true if it can, false otherwise.
+     */
+    private synchronized boolean canAdmitFile(final long length) {
+        // order is important here
+        boolean value = !isInPurgeMode() && cache.canAdmitFile(length);
+        if (!value) {
+            log("cannot admit file of length=" + length
+                + " and currentSizeInBytes=" + cache.currentSizeInBytes);
+        }
+        return value;
+    }
+
+    /**
+     * Returns true if the cache is in purge mode, false otherwise.
+     */
+    synchronized boolean isInPurgeMode() {
+        return purgeMode || maxSize == 0;
+    }
+
+    /**
+     * Sets the purge mode. If set to true, all cache operations are no-ops.
+     * If set to false, all cache operations are available again.
+     * 
+     * @param purgeMode purge mode
+     */
+    synchronized void setPurgeMode(final boolean purgeMode) {
+        this.purgeMode = purgeMode;
+    }
+
+    File getFile(final String fileName) {
+        return new File(directory, fileName);
+    }
+
+    private void deleteOldFiles() {
+        int initialSize = toBeDeleted.size();
+        int count = 0;
+        for (String n : new ArrayList<String>(toBeDeleted)) {
+            if (tryDelete(n)) {
+                count++;
+            }
+        }
+        LOG.info("deleted [" + count + "]/[" + initialSize + "] files");
+    }
+
+    /**
+     * Tries to delete a file. If the file cannot be deleted for any reason,
+     * it is added to the toBeDeleted list.
+     *
+     * @param fileName name of the file to be deleted.
+     * @return true if the file was deleted successfully, false otherwise.
+     */
+    boolean tryDelete(final String fileName) {
+        log("cache delete " + fileName);
+        File f = getFile(fileName);
+        if (f.exists() && f.delete()) {
+            log(fileName + "  deleted successfully");
+            toBeDeleted.remove(fileName);
+            while (true) {
+                f = f.getParentFile();
+                if (f.equals(directory) || f.list().length > 0) {
+                    break;
+                }
+                // delete empty parent folders (except the main directory)
+                f.delete();
+            }
+            return true;
+        } else if (f.exists()) {
+            LOG.info("not able to delete file = " + f.getAbsolutePath());
+            toBeDeleted.add(fileName);
+            return false;
+        }
+        return true;
+    }
+
+    static int maxSizeElements(final long bytes) {
+        // after a CQ installation, the average item in
+        // the data store is about 52 KB
+        int count = (int) (bytes / 65535);
+        count = Math.max(1024, count);
+        count = Math.min(64 * 1024, count);
+        return count;
+    }
+
+    static void log(final String s) {
+        LOG.debug(s);
+    }
+
+    /**
+     * An LRU-based extension of {@link LinkedHashMap}. The key is the file
+     * name and the value is the file length.
+     */
+    private class LRUCache extends LinkedHashMap<String, Long> {
+        private static final long serialVersionUID = 1L;
+
+        volatile long currentSizeInBytes;
+
+        final long maxSizeInBytes;
+
+        long cachePurgeResize;
+        
+        private long cachePurgeTrigSize;
+
+        public LRUCache(final long maxSizeInBytes,
+                final double cachePurgeTrigFactor,
+                final double cachePurgeResizeFactor) {
+            super(maxSizeElements(maxSizeInBytes), (float) 0.75, true);
+            this.maxSizeInBytes = maxSizeInBytes;
+            this.cachePurgeTrigSize = (long) (cachePurgeTrigFactor
+                * maxSizeInBytes);
+            this.cachePurgeResize = (long) (cachePurgeResizeFactor
+                * maxSizeInBytes);
+        }
+
+        /**
+         * Overridden {@link Map#remove(Object)} to delete corresponding file
+         * from file system.
+         */
+        @Override
+        public synchronized Long remove(final Object key) {
+            String fileName = (String) key;
+            fileName = fileName.replace("\\", "/");
+            Long flength = null;
+            if (tryDelete(fileName)) {
+                flength = super.remove(key);
+                if (flength != null) {
+                    log("cache entry { " + fileName + "} with size {" + flength
+                        + "} removed.");
+                    currentSizeInBytes -= flength.longValue();
+                }
+            } else if (!getFile(fileName).exists()) {
+                // second attempt. remove from cache if file doesn't exists
+                flength = super.remove(key);
+                if (flength != null) {
+                    log(" file not exists. cache entry { " + fileName
+                        + "} with size {" + flength + "} removed.");
+                    currentSizeInBytes -= flength.longValue();
+                }
+            }
+            return flength;
+        }
+
+        @Override
+        public synchronized Long put(final String key, final Long value) {
+            long flength = value.longValue();
+            currentSizeInBytes += flength;
+            return super.put(key.replace("\\", "/"), value);
+        }
+
+        /**
+         * Tries to purge the local cache: if the cache has exceeded the
+         * defined trigger size, a purge job is started in a separate thread.
+         */
+        synchronized void tryPurge() {
+            if (currentSizeInBytes > cachePurgeTrigSize && !isInPurgeMode()) {
+                setPurgeMode(true);
+                LOG.info("currentSizeInBytes[" + cache.currentSizeInBytes
+                    + "] exceeds (cachePurgeTrigSize)["
+                    + cache.cachePurgeTrigSize + "]");
+                new Thread(new PurgeJob()).start();
+            }
+        }
+        /**
+         * Checks if the cache can admit a file of the given length.
+         * @param length length of the file.
+         * @return true if cache size + length is less than maxSize.
+         */
+        synchronized boolean canAdmitFile(final long length) {
+            return cache.currentSizeInBytes + length < cache.maxSizeInBytes;
+        }
+    }
+
+    /**
+     * This class performs the purging of the local cache. It implements
+     * {@link Runnable} and should be invoked in a separate thread.
+     */
+    private class PurgeJob implements Runnable {
+        public PurgeJob() {
+            // no initialization required
+        }
+
+        /**
+         * Purges the local cache until its size is less than
+         * cachePurgeResizeFactor * maxSize.
+         */
+        @Override
+        public void run() {
+            try {
+                synchronized (cache) {
+                    LOG.info(" cache purge job started");
+                    // first try to delete toBeDeleted files
+                    int initialSize = cache.size();
+                    for (String fileName : new ArrayList<String>(toBeDeleted)) {
+                        cache.remove(fileName);
+                    }
+                    Iterator<Map.Entry<String, Long>> itr = cache.entrySet().iterator();
+                    while (itr.hasNext()) {
+                        Map.Entry<String, Long> entry = itr.next();
+                        if (entry.getKey() != null) {
+                            if (cache.currentSizeInBytes > cache.cachePurgeResize) {
+                                itr.remove();
+
+                            } else {
+                                break;
+                            }
+                        }
+
+                    }
+                    LOG.info(" cache purge job completed: cleaned ["
+                        + (initialSize - cache.size())
+                        + "] files and currentSizeInBytes = [ "
+                        + cache.currentSizeInBytes + "]");
+                }
+            } catch (Exception e) {
+                LOG.error("error in purge jobs:", e);
+            } finally {
+                setPurgeMode(false);
+            }
+        }
+    }
+}
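
For reference, a minimal usage sketch of the relocated LocalCache follows. It is not part of the commit; the class name LocalCacheExample, the directory paths, the 64 MB limit, the purge factors and the key "ab/cd/abcd1234" are hypothetical values chosen only for illustration. Only the LocalCache constructor, store(), getIfStored() and close() shown here come from the code above.

    import java.io.ByteArrayInputStream;
    import java.io.File;
    import java.io.InputStream;

    import org.apache.commons.io.IOUtils;
    import org.apache.jackrabbit.core.data.LocalCache;

    public class LocalCacheExample {

        public static void main(String[] args) throws Exception {
            // hypothetical directories; the cache directory is scanned at startup
            new File("/tmp/ds-cache").mkdirs();
            new File("/tmp/ds-cache-tmp").mkdirs();

            LocalCache cache = new LocalCache(
                "/tmp/ds-cache",       // cache directory
                "/tmp/ds-cache-tmp",   // directory for transient files
                64L * 1024 * 1024,     // maxSize: 64 MB (illustrative)
                0.95d,                 // cachePurgeTrigFactor
                0.85d);                // cachePurgeResizeFactor

            byte[] data = "hello, data store".getBytes("UTF-8");

            // store() copies the stream into the cache (or into a transient file
            // while purging) and returns a new stream over the stored copy
            InputStream copy = cache.store("ab/cd/abcd1234",
                new ByteArrayInputStream(data));
            IOUtils.closeQuietly(copy);

            // later reads can be served from the cache; null means "not cached"
            InputStream cached = cache.getIfStored("ab/cd/abcd1234");
            if (cached != null) {
                System.out.println(IOUtils.toString(cached, "UTF-8"));
                cached.close();
            }

            // close() retries deletion of files that could not be removed earlier
            cache.close();
        }
    }

With these hypothetical numbers, a trigger factor of 0.95 starts a purge once the cache grows past roughly 60.8 MB, and a resize factor of 0.85 lets the purge job shrink the cache back below roughly 54.4 MB; while the purge runs, new writes fall back to transient files instead of being admitted to the cache.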

Propchange: jackrabbit/trunk/jackrabbit-data/src/main/java/org/apache/jackrabbit/core/data/LocalCache.java
------------------------------------------------------------------------------
    svn:eol-style = native


