commons-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From t.@apache.org
Subject svn commit: r1780805 [1/2] - in /commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src: main/java/org/apache/commons/jcs/auxiliary/disk/ main/java/org/apache/commons/jcs/auxiliary/disk/block/ main/java/org/apache/commons/jcs/auxiliary/dis...
Date Sun, 29 Jan 2017 15:16:25 GMT
Author: tv
Date: Sun Jan 29 15:16:25 2017
New Revision: 1780805

URL: http://svn.apache.org/viewvc?rev=1780805&view=rev
Log:
Intermediate commit

Removed:
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/util/DefaultMemoryElementDescriptor.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/AbstractLRUMap.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/LRUElementDescriptor.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/LRUMap.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/LRUMapEntry.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/indexed/LRUMapSizeVsCount.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/utils/struct/JCSvsCommonsLRUMapPerformanceTest.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/utils/struct/LRUMapConcurrentTest.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/utils/struct/LRUMapConcurrentUnitTest.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/utils/struct/LRUMapPerformanceTest.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/utils/struct/LRUMapUnitTest.java
Modified:
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/AbstractDiskCache.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/PurgatoryElement.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDisk.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCache.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCacheAttributes.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskKeyStore.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCache.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCacheConfigurator.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractDoubleLinkedListMemoryCache.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractMemoryCache.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/lru/LHMLRUMemoryCache.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/soft/SoftReferenceMemoryCache.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/util/MemoryElementDescriptor.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/util/SoftReferenceElementDescriptor.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/DoubleLinkedList.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/JCSConcurrentCacheAccessUnitTest.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/MockAuxiliaryCache.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCacheKeyStoreUnitTest.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexDiskCacheSizeUnitTest.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/lateral/socket/tcp/TestTCPLateralUnitTest.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/engine/control/CompositeCacheDiskUsageUnitTest.java
    commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/test/java/org/apache/commons/jcs/engine/control/CompositeCacheUnitTest.java

Modified: commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/AbstractDiskCache.java
URL: http://svn.apache.org/viewvc/commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/AbstractDiskCache.java?rev=1780805&r1=1780804&r2=1780805&view=diff
==============================================================================
--- commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/AbstractDiskCache.java (original)
+++ commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/AbstractDiskCache.java Sun Jan 29 15:16:25 2017
@@ -25,6 +25,9 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.apache.commons.jcs.auxiliary.AbstractAuxiliaryCacheEventLogging;
@@ -41,7 +44,7 @@ import org.apache.commons.jcs.engine.sta
 import org.apache.commons.jcs.engine.stats.Stats;
 import org.apache.commons.jcs.engine.stats.behavior.IStatElement;
 import org.apache.commons.jcs.engine.stats.behavior.IStats;
-import org.apache.commons.jcs.utils.struct.LRUMap;
+import org.apache.commons.jcs.utils.clhm.ConcurrentLinkedHashMap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -74,7 +77,7 @@ public abstract class AbstractDiskCache<
      * If the elements are pulled into the memory cache while the are still in purgatory, writing to
      * disk can be canceled.
      */
-    private Map<K, PurgatoryElement<K, V>> purgatory;
+    private ConcurrentMap<K, PurgatoryElement<K, V>> purgatory;
 
     /**
      * The CacheEventQueue where changes will be queued for asynchronous updating of the persistent
@@ -86,13 +89,13 @@ public abstract class AbstractDiskCache<
      * Indicates whether the cache is 'alive': initialized, but not yet disposed. Child classes must
      * set this to true.
      */
-    private boolean alive = false;
+    private final AtomicBoolean alive = new AtomicBoolean(false);
 
     /** Every cache will have a name, subclasses must set this when they are initialized. */
     private String cacheName;
 
     /** DEBUG: Keeps a count of the number of purgatory hits for debug messages */
-    private int purgHits = 0;
+    private final AtomicLong purgHits = new AtomicLong(0);
 
     /**
      * We lock here, so that we cannot get an update after a remove all. an individual removal locks
@@ -115,7 +118,8 @@ public abstract class AbstractDiskCache<
 
         // create queue
         CacheEventQueueFactory<K, V> fact = new CacheEventQueueFactory<K, V>();
-        this.cacheEventQueue = fact.createCacheEventQueue( new MyCacheListener(), CacheInfo.listenerId, cacheName,
+        this.cacheEventQueue = fact.createCacheEventQueue( new MyCacheListener(), CacheInfo.listenerId,
+                                                           cacheName,
                                                            diskCacheAttributes.getEventQueuePoolName(),
                                                            diskCacheAttributes.getEventQueueType() );
 
@@ -128,7 +132,7 @@ public abstract class AbstractDiskCache<
      */
     public boolean isAlive()
     {
-        return alive;
+        return alive.get();
     }
 
     /**
@@ -136,7 +140,7 @@ public abstract class AbstractDiskCache<
      */
     public void setAlive(boolean alive)
     {
-        this.alive = alive;
+        this.alive.set(alive);
     }
 
     /**
@@ -155,17 +159,16 @@ public abstract class AbstractDiskCache<
 
         try
         {
-            synchronized (this)
+            long maxPurgatorySize = Long.MAX_VALUE;
+
+            if ( diskCacheAttributes.getMaxPurgatorySize() >= 0 )
             {
-                if ( diskCacheAttributes.getMaxPurgatorySize() >= 0 )
-                {
-                    purgatory = new LRUMap<K, PurgatoryElement<K, V>>( diskCacheAttributes.getMaxPurgatorySize() );
-                }
-                else
-                {
-                    purgatory = new HashMap<K, PurgatoryElement<K, V>>();
-                }
+                maxPurgatorySize = diskCacheAttributes.getMaxPurgatorySize();
             }
+
+            purgatory = new ConcurrentLinkedHashMap.Builder<K, PurgatoryElement<K, V>>()
+                    .maximumWeightedCapacity(maxPurgatorySize) // key count
+                    .build();
         }
         finally
         {
@@ -206,10 +209,7 @@ public abstract class AbstractDiskCache<
             pe.setSpoolable( true );
 
             // Add the element to purgatory
-            synchronized ( purgatory )
-            {
-                purgatory.put( pe.getKey(), pe );
-            }
+            purgatory.put( pe.getKey(), pe );
 
             // Queue element for serialization
             cacheEventQueue.addPutEvent( pe );
@@ -234,8 +234,7 @@ public abstract class AbstractDiskCache<
     public final ICacheElement<K, V> get( K key )
     {
         // If not alive, always return null.
-
-        if ( !alive )
+        if ( !isAlive() )
         {
             if ( log.isDebugEnabled() )
             {
@@ -244,20 +243,16 @@ public abstract class AbstractDiskCache<
             return null;
         }
 
-        PurgatoryElement<K, V> pe = null;
-        synchronized ( purgatory )
-        {
-            pe = purgatory.get( key );
-        }
+        PurgatoryElement<K, V> pe = purgatory.get( key );
 
         // If the element was found in purgatory
         if ( pe != null )
         {
-            purgHits++;
+            purgHits.incrementAndGet();
 
             if ( log.isDebugEnabled() )
             {
-                if ( purgHits % 100 == 0 )
+                if ( purgHits.get() % 100 == 0 )
                 {
                     log.debug( "Purgatory hits = " + purgHits );
                 }
@@ -290,7 +285,7 @@ public abstract class AbstractDiskCache<
         {
             return doGet( key );
         }
-        catch ( Exception e )
+        catch ( IOException e )
         {
             log.error( e );
 
@@ -319,14 +314,7 @@ public abstract class AbstractDiskCache<
         throws IOException
     {
         // Get the keys from purgatory
-        Set<K> keyArray = null;
-
-        // this avoids locking purgatory, but it uses more memory
-        synchronized ( purgatory )
-        {
-            keyArray = new HashSet<K>(purgatory.keySet());
-        }
-
+        Set<K> keyArray = new HashSet<K>(purgatory.keySet());
         Set<K> matchingKeys = getKeyMatcher().getMatchingKeysFromArray( pattern, keyArray );
 
         // call getMultiple with the set
@@ -388,23 +376,15 @@ public abstract class AbstractDiskCache<
     public final boolean remove( K key )
         throws IOException
     {
-        PurgatoryElement<K, V> pe = null;
-
-        synchronized ( purgatory )
-        {
-            // I'm getting the object, so I can lock on the element
-            // Remove element from purgatory if it is there
-            pe = purgatory.get( key );
-        }
+        // I'm getting the object, so I can lock on the element
+        // Remove element from purgatory if it is there
+        PurgatoryElement<K, V> pe = purgatory.get( key );
 
         if ( pe != null )
         {
             synchronized ( pe.getCacheElement() )
             {
-                synchronized ( purgatory )
-                {
-                    purgatory.remove( key );
-                }
+                purgatory.remove( key );
 
                 // no way to remove from queue, just make sure it doesn't get on
                 // disk and then removed right afterwards
@@ -433,8 +413,7 @@ public abstract class AbstractDiskCache<
     {
         if ( this.diskCacheAttributes.isAllowRemoveAll() )
         {
-            // Replace purgatory with a new empty hashtable
-            initPurgatory();
+            purgatory.clear();
 
             // Remove all from persistent store immediately
             doRemoveAll();
@@ -510,7 +489,7 @@ public abstract class AbstractDiskCache<
         // need to handle the disposal first.
         doDispose();
 
-        alive = false;
+        alive.set(false);
     }
 
     /**
@@ -547,7 +526,7 @@ public abstract class AbstractDiskCache<
 
         ArrayList<IStatElement<?>> elems = new ArrayList<IStatElement<?>>();
 
-        elems.add(new StatElement<Integer>( "Purgatory Hits", Integer.valueOf(purgHits) ) );
+        elems.add(new StatElement<AtomicLong>( "Purgatory Hits", purgHits ) );
         elems.add(new StatElement<Integer>( "Purgatory Size", Integer.valueOf(purgatory.size()) ) );
 
         // get the stats from the event queue too
@@ -566,7 +545,7 @@ public abstract class AbstractDiskCache<
     @Override
     public CacheStatus getStatus()
     {
-        return ( alive ? CacheStatus.ALIVE : CacheStatus.DISPOSED );
+        return ( isAlive() ? CacheStatus.ALIVE : CacheStatus.DISPOSED );
     }
 
     /**
@@ -635,7 +614,7 @@ public abstract class AbstractDiskCache<
         public void handlePut( ICacheElement<K, V> element )
             throws IOException
         {
-            if ( alive )
+            if ( isAlive() )
             {
                 // If the element is a PurgatoryElement<K, V> we must check to see
                 // if it is still spoolable, and remove it from purgatory.
@@ -654,19 +633,15 @@ public abstract class AbstractDiskCache<
                         try
                         {
                             // TODO consider changing purgatory sync
-                            // String keyAsString = element.getKey().toString();
-                            synchronized ( purgatory )
+                            // If the element has already been removed from
+                            // purgatory do nothing
+                            if ( !purgatory.containsKey( pe.getKey() ) )
                             {
-                                // If the element has already been removed from
-                                // purgatory do nothing
-                                if ( !purgatory.containsKey( pe.getKey() ) )
-                                {
-                                    return;
-                                }
-
-                                element = pe.getCacheElement();
+                                return;
                             }
 
+                            element = pe.getCacheElement();
+
                             // I took this out of the purgatory sync block.
                             // If the element is still eligible, spool it.
                             if ( pe.isSpoolable() )
@@ -679,12 +654,9 @@ public abstract class AbstractDiskCache<
                             removeAllLock.readLock().unlock();
                         }
 
-                        synchronized ( purgatory )
-                        {
-                            // After the update has completed, it is safe to
-                            // remove the element from purgatory.
-                            purgatory.remove( element.getKey() );
-                        }
+                        // After the update has completed, it is safe to
+                        // remove the element from purgatory.
+                        purgatory.remove( element.getKey() );
                     }
                 }
                 else
@@ -701,10 +673,7 @@ public abstract class AbstractDiskCache<
                  * done before it went in the queue. This block handles the case where the disk
                  * cache fails during normal operations.
                  */
-                synchronized ( purgatory )
-                {
-                    purgatory.remove( element.getKey() );
-                }
+                purgatory.remove( element.getKey() );
             }
         }
 
@@ -718,7 +687,7 @@ public abstract class AbstractDiskCache<
         public void handleRemove( String cacheName, K key )
             throws IOException
         {
-            if ( alive )
+            if ( isAlive() )
             {
                 if ( doRemove( key ) )
                 {
@@ -736,7 +705,7 @@ public abstract class AbstractDiskCache<
         public void handleRemoveAll( String cacheName )
             throws IOException
         {
-            if ( alive )
+            if ( isAlive() )
             {
                 doRemoveAll();
             }
@@ -751,7 +720,7 @@ public abstract class AbstractDiskCache<
         public void handleDispose( String cacheName )
             throws IOException
         {
-            if ( alive )
+            if ( isAlive() )
             {
                 doDispose();
             }

Modified: commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/PurgatoryElement.java
URL: http://svn.apache.org/viewvc/commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/PurgatoryElement.java?rev=1780805&r1=1780804&r2=1780805&view=diff
==============================================================================
--- commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/PurgatoryElement.java (original)
+++ commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/PurgatoryElement.java Sun Jan 29 15:16:25 2017
@@ -21,11 +21,10 @@ package org.apache.commons.jcs.auxiliary
 
 import org.apache.commons.jcs.engine.CacheElement;
 import org.apache.commons.jcs.engine.behavior.ICacheElement;
-import org.apache.commons.jcs.engine.behavior.IElementAttributes;
 
 /**
  * Implementation of cache elements in purgatory.
- * 
+ *
  * Elements are stored in purgatory when they are spooled to the auxiliary cache, but have not yet
  * been written to disk.
  */
@@ -38,12 +37,9 @@ public class PurgatoryElement<K, V>
     /** Is the element ready to be spooled? */
     private boolean spoolable = false;
 
-    /** Wrapped cache Element */
-    private ICacheElement<K, V> cacheElement;
-
     /**
      * Constructor for the PurgatoryElement&lt;K, V&gt; object
-     * 
+     *
      * @param cacheElement CacheElement
      */
     public PurgatoryElement( ICacheElement<K, V> cacheElement )
@@ -51,12 +47,11 @@ public class PurgatoryElement<K, V>
         super(cacheElement.getCacheName(),
                 cacheElement.getKey(), cacheElement.getVal(),
                 cacheElement.getElementAttributes());
-        this.cacheElement = cacheElement;
     }
 
     /**
      * Gets the spoolable property.
-     * 
+     *
      * @return The spoolable value
      */
     public boolean isSpoolable()
@@ -66,7 +61,7 @@ public class PurgatoryElement<K, V>
 
     /**
      * Sets the spoolable property.
-     * 
+     *
      * @param spoolable The new spoolable value
      */
     public void setSpoolable( boolean spoolable )
@@ -76,67 +71,17 @@ public class PurgatoryElement<K, V>
 
     /**
      * Get the wrapped cache element.
-     * 
+     *
      * @return ICacheElement
      */
     public ICacheElement<K, V> getCacheElement()
     {
-        return cacheElement;
+        return this;
     }
 
     // ------------------------------------------------ interface ICacheElement
 
     /**
-     * @return cacheElement.getCacheName();
-     * @see ICacheElement#getCacheName
-     */
-    @Override
-    public String getCacheName()
-    {
-        return cacheElement.getCacheName();
-    }
-
-    /**
-     * @return cacheElement.getKey();
-     * @see ICacheElement#getKey
-     */
-    @Override
-    public K getKey()
-    {
-        return cacheElement.getKey();
-    }
-
-    /**
-     * @return cacheElement.getVal();
-     * @see ICacheElement#getVal
-     */
-    @Override
-    public V getVal()
-    {
-        return cacheElement.getVal();
-    }
-
-    /**
-     * @return cacheElement.getElementAttributes();
-     * @see ICacheElement#getElementAttributes
-     */
-    @Override
-    public IElementAttributes getElementAttributes()
-    {
-        return cacheElement.getElementAttributes();
-    }
-
-    /**
-     * @param attr
-     * @see ICacheElement#setElementAttributes
-     */
-    @Override
-    public void setElementAttributes( IElementAttributes attr )
-    {
-        cacheElement.setElementAttributes( attr );
-    }
-
-    /**
      * @return debug string
      */
     @Override
@@ -145,7 +90,7 @@ public class PurgatoryElement<K, V>
         StringBuilder buf = new StringBuilder();
         buf.append( "[PurgatoryElement: " );
         buf.append( " isSpoolable = " + isSpoolable() );
-        buf.append( " CacheElement = " + getCacheElement() );
+        buf.append( " CacheElement = " + super.toString() );
         buf.append( " CacheName = " + getCacheName() );
         buf.append( " Key = " + getKey() );
         buf.append( " Value = " + getVal() );

Modified: commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDisk.java
URL: http://svn.apache.org/viewvc/commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDisk.java?rev=1780805&r1=1780804&r2=1780805&view=diff
==============================================================================
--- commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDisk.java (original)
+++ commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDisk.java Sun Jan 29 15:16:25 2017
@@ -50,9 +50,6 @@ public class BlockDisk
     public static final byte HEADER_SIZE_BYTES = 4;
     // N.B. 4 bytes is the size used for ByteBuffer.putInt(int value) and ByteBuffer.getInt()
 
-    /** defaults to 4kb */
-    private static final int DEFAULT_BLOCK_SIZE_BYTES = 4 * 1024;
-
     /** Size of the blocks */
     private final int blockSizeBytes;
 
@@ -90,7 +87,7 @@ public class BlockDisk
     public BlockDisk( File file, IElementSerializer elementSerializer )
         throws IOException
     {
-        this( file, DEFAULT_BLOCK_SIZE_BYTES, elementSerializer );
+        this( file, BlockDiskCacheAttributes.DEFAULT_BLOCK_SIZE_BYTES, elementSerializer );
     }
 
     /**

Modified: commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCache.java
URL: http://svn.apache.org/viewvc/commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCache.java?rev=1780805&r1=1780804&r2=1780805&view=diff
==============================================================================
--- commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCache.java (original)
+++ commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCache.java Sun Jan 29 15:16:25 2017
@@ -21,11 +21,9 @@ package org.apache.commons.jcs.auxiliary
 
 import java.io.File;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -44,7 +42,6 @@ import org.apache.commons.jcs.engine.beh
 import org.apache.commons.jcs.engine.control.group.GroupAttrName;
 import org.apache.commons.jcs.engine.control.group.GroupId;
 import org.apache.commons.jcs.engine.stats.StatElement;
-import org.apache.commons.jcs.engine.stats.Stats;
 import org.apache.commons.jcs.engine.stats.behavior.IStatElement;
 import org.apache.commons.jcs.engine.stats.behavior.IStats;
 import org.apache.commons.logging.Log;
@@ -128,17 +125,9 @@ public class BlockDiskCache<K, V>
 
         try
         {
-            if ( this.blockDiskCacheAttributes.getBlockSizeBytes() > 0 )
-            {
-                this.dataFile = new BlockDisk( new File( rootDirectory, fileName + ".data" ),
-                                               this.blockDiskCacheAttributes.getBlockSizeBytes(),
-                                               getElementSerializer() );
-            }
-            else
-            {
-                this.dataFile = new BlockDisk( new File( rootDirectory, fileName + ".data" ),
-                                               getElementSerializer() );
-            }
+            this.dataFile = new BlockDisk( new File( rootDirectory, fileName + ".data" ),
+                                           this.blockDiskCacheAttributes.getBlockSizeBytes(),
+                                           getElementSerializer() );
 
             keyStore = new BlockDiskKeyStore<K>( this.blockDiskCacheAttributes, this );
 
@@ -206,11 +195,14 @@ public class BlockDiskCache<K, V>
         {
             int maxToTest = 100;
             int count = 0;
-            Iterator<Map.Entry<K, int[]>> it = this.keyStore.entrySet().iterator();
-            while ( it.hasNext() && count < maxToTest )
+            for (Map.Entry<K, int[]> entry : this.keyStore.entrySet())
             {
+                if (count >= maxToTest)
+                {
+                    break;
+                }
+
                 count++;
-                Map.Entry<K, int[]> entry = it.next();
                 Object data = this.dataFile.read( entry.getValue() );
                 if ( data == null )
                 {
@@ -296,7 +288,6 @@ public class BlockDiskCache<K, V>
     /**
      * Returns the number of keys.
      * <p>
-     * (non-Javadoc)
      * @see org.apache.commons.jcs.auxiliary.disk.AbstractDiskCache#getSize()
      */
     @Override
@@ -308,9 +299,11 @@ public class BlockDiskCache<K, V>
     /**
      * Gets the ICacheElement&lt;K, V&gt; for the key if it is in the cache. The program flow is as follows:
      * <ol>
-     * <li>Make sure the disk cache is alive.</li> <li>Get a read lock.</li> <li>See if the key is
-     * in the key store.</li> <li>If we found a key, ask the BlockDisk for the object at the
-     * blocks..</li> <li>Release the lock.</li>
+     * <li>Make sure the disk cache is alive.</li>
+     * <li>Get a read lock.</li>
+     * <li>See if the key is in the key store.</li>
+     * <li>If we found a key, ask the BlockDisk for the object at the blocks.</li>
+     * <li>Release the lock.</li>
      * </ol>
      * @param key
      * @return ICacheElement
@@ -335,17 +328,19 @@ public class BlockDiskCache<K, V>
 
         ICacheElement<K, V> object = null;
 
-
         try
         {
             storageLock.readLock().lock();
-            try {
+            try
+            {
                 int[] ded = this.keyStore.get( key );
                 if ( ded != null )
                 {
                     object = this.dataFile.read( ded );
                 }
-            } finally {
+            }
+            finally
+            {
                 storageLock.readLock().unlock();
             }
 
@@ -365,12 +360,14 @@ public class BlockDiskCache<K, V>
     /**
      * Writes an element to disk. The program flow is as follows:
      * <ol>
-     * <li>Acquire write lock.</li> <li>See id an item exists for this key.</li> <li>If an item
-     * already exists, add its blocks to the remove list.</li> <li>Have the Block disk write the
-     * item.</li> <li>Create a descriptor and add it to the key map.</li> <li>Release the write
-     * lock.</li>
+     * <li>Acquire write lock.</li>
+     * <li>See id an item exists for this key.</li>
+     * <li>If an item already exists, add its blocks to the remove list.</li>
+     * <li>Have the Block disk write the item.</li>
+     * <li>Create a descriptor and add it to the key map.</li>
+     * <li>Release the write lock.</li>
      * </ol>
-     * @param element
+     * @param element the cache element to write
      * @see org.apache.commons.jcs.auxiliary.disk.AbstractDiskCache#update(ICacheElement)
      */
     @Override
@@ -443,7 +440,6 @@ public class BlockDiskCache<K, V>
             return false;
         }
 
-        boolean reset = false;
         boolean removed = false;
 
         storageLock.writeLock().lock();
@@ -463,21 +459,11 @@ public class BlockDiskCache<K, V>
                 removed = performSingleKeyRemoval(key);
             }
         }
-        catch ( Exception e )
-        {
-            log.error( logCacheName + "Problem removing element.", e );
-            reset = true;
-        }
         finally
         {
             storageLock.writeLock().unlock();
         }
 
-        if ( reset )
-        {
-            reset();
-        }
-
         return removed;
     }
 
@@ -559,7 +545,8 @@ public class BlockDiskCache<K, V>
     }
 
 
-	private boolean performSingleKeyRemoval(K key) {
+	private boolean performSingleKeyRemoval(K key)
+	{
 		boolean removed;
 		// remove single item.
 		int[] ded = this.keyStore.remove( key );
@@ -740,10 +727,11 @@ public class BlockDiskCache<K, V>
     @Override
     public IStats getStatistics()
     {
-        IStats stats = new Stats();
+        // get the stats from the super too
+        IStats stats = super.getStatistics();
         stats.setTypeName( "Block Disk Cache" );
 
-        ArrayList<IStatElement<?>> elems = new ArrayList<IStatElement<?>>();
+        List<IStatElement<?>> elems = stats.getStatElements();
 
         elems.add(new StatElement<Boolean>( "Is Alive", Boolean.valueOf(isAlive()) ) );
         elems.add(new StatElement<Integer>( "Key Map Size", Integer.valueOf(this.keyStore.size()) ) );
@@ -769,10 +757,6 @@ public class BlockDiskCache<K, V>
                     Integer.valueOf(this.dataFile.getEmptyBlocks()) ) );
         }
 
-        // get the stats from the super too
-        IStats sStats = super.getStatistics();
-        elems.addAll(sStats.getStatElements());
-
         stats.setStatElements( elems );
 
         return stats;

Modified: commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCacheAttributes.java
URL: http://svn.apache.org/viewvc/commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCacheAttributes.java?rev=1780805&r1=1780804&r2=1780805&view=diff
==============================================================================
--- commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCacheAttributes.java (original)
+++ commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCacheAttributes.java Sun Jan 29 15:16:25 2017
@@ -32,17 +32,20 @@ public class BlockDiskCacheAttributes
     /** Don't change */
     private static final long serialVersionUID = 6568840097657265989L;
 
+    /** Defaults to 4 KB */
+    public static final int DEFAULT_BLOCK_SIZE_BYTES = 4 * 1024;
+
     /** The size per block in bytes. */
-    private int blockSizeBytes;
+    private int blockSizeBytes = DEFAULT_BLOCK_SIZE_BYTES;
 
     /** Maximum number of keys to be kept in memory */
-    private static final int DEFAULT_MAX_KEY_SIZE = 5000;
+    public static final int DEFAULT_MAX_KEY_SIZE = 5000;
 
     /** -1 means no limit. */
     private int maxKeySize = DEFAULT_MAX_KEY_SIZE;
 
     /** How often should we persist the keys. */
-    private static final long DEFAULT_KEY_PERSISTENCE_INTERVAL_SECONDS = 5 * 60;
+    public static final long DEFAULT_KEY_PERSISTENCE_INTERVAL_SECONDS = 5 * 60;
 
     /** The keys will be persisted at this interval.  -1 mean never. */
     private long keyPersistenceIntervalSeconds = DEFAULT_KEY_PERSISTENCE_INTERVAL_SECONDS;

Modified: commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskKeyStore.java
URL: http://svn.apache.org/viewvc/commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskKeyStore.java?rev=1780805&r1=1780804&r2=1780805&view=diff
==============================================================================
--- commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskKeyStore.java (original)
+++ commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskKeyStore.java Sun Jan 29 15:16:25 2017
@@ -34,12 +34,12 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeMap;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.commons.jcs.auxiliary.disk.behavior.IDiskCacheAttributes.DiskLimitType;
 import org.apache.commons.jcs.io.ObjectInputStreamClassLoaderAware;
-import org.apache.commons.jcs.utils.struct.AbstractLRUMap;
-import org.apache.commons.jcs.utils.struct.LRUMap;
+import org.apache.commons.jcs.utils.clhm.ConcurrentLinkedHashMap;
+import org.apache.commons.jcs.utils.clhm.EntryWeigher;
+import org.apache.commons.jcs.utils.clhm.EvictionListener;
 import org.apache.commons.jcs.utils.timing.ElapsedTimer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -216,13 +216,50 @@ public class BlockDiskKeyStore<K>
         keyHash = null;
         if (maxKeySize >= 0)
         {
+            EvictionListener<K, int[]> listener = new EvictionListener<K, int[]>()
+            {
+                @Override public void onEviction(K key, int[] value)
+                {
+                    blockDiskCache.freeBlocks(value);
+                    if (log.isDebugEnabled())
+                    {
+                        log.debug(logCacheName + "Removing key: [" + key + "] from key store.");
+                        log.debug(logCacheName + "Key store size: [" + keyHash.size() + "].");
+                    }
+                }
+            };
+
             if (this.diskLimitType == DiskLimitType.SIZE)
             {
-                keyHash = new LRUMapSizeLimited(maxKeySize);
+                EntryWeigher<K, int[]> sizeWeigher = new EntryWeigher<K, int[]>()
+                {
+                    @Override
+                    public int weightOf(K key, int[] value)
+                    {
+                        int size = value != null ? value.length * blockSize : 1;
+
+                        if (size == 0)
+                        {
+                            return 1;
+                        }
+                        else
+                        {
+                            return size;
+                        }
+                    }
+                };
+                keyHash = new ConcurrentLinkedHashMap.Builder<K, int[]>()
+                        .maximumWeightedCapacity(maxKeySize * 1024L) // maxKeySize is in kB; capacity in bytes
+                        .weigher(sizeWeigher)
+                        .listener(listener)
+                        .build();
             }
             else
             {
-                keyHash = new LRUMapCountLimited(maxKeySize);
+                keyHash = new ConcurrentLinkedHashMap.Builder<K, int[]>()
+                        .maximumWeightedCapacity(maxKeySize) // count
+                        .listener(listener)
+                        .build();
             }
             if (log.isInfoEnabled())
             {
@@ -233,8 +270,9 @@ public class BlockDiskKeyStore<K>
         {
             // If no max size, use a plain map for memory and processing
             // efficiency.
-            keyHash = new HashMap<K, int[]>();
-            // keyHash = Collections.synchronizedMap( new HashMap() );
+            keyHash = new ConcurrentLinkedHashMap.Builder<K, int[]>()
+                    .maximumWeightedCapacity(Long.MAX_VALUE) // unlimited key count
+                    .build();
             if (log.isInfoEnabled())
             {
                 log.info(logCacheName + "Set maxKeySize to unlimited'");
@@ -423,162 +461,4 @@ public class BlockDiskKeyStore<K>
             return ok;
         }
     }
-
-    /**
-     * Class for recycling and lru. This implements the LRU size overflow
-     * callback, so we can mark the blocks as free.
-     */
-    public class LRUMapSizeLimited extends AbstractLRUMap<K, int[]>
-    {
-        /**
-         * <code>tag</code> tells us which map we are working on.
-         */
-        public final static String TAG = "orig-lru-size";
-
-        // size of the content in kB
-        private AtomicInteger contentSize;
-        private int maxSize;
-
-        /**
-         * Default
-         */
-        public LRUMapSizeLimited()
-        {
-            this(-1);
-        }
-
-        /**
-         * @param maxSize
-         *            maximum cache size in kB
-         */
-        public LRUMapSizeLimited(int maxSize)
-        {
-            super();
-            this.maxSize = maxSize;
-            this.contentSize = new AtomicInteger(0);
-        }
-
-        // keep the content size in kB, so 2^31 kB is reasonable value
-        private void subLengthFromCacheSize(int[] value)
-        {
-            contentSize.addAndGet(value.length * blockSize / -1024 - 1);
-        }
-
-        // keep the content size in kB, so 2^31 kB is reasonable value
-        private void addLengthToCacheSize(int[] value)
-        {
-            contentSize.addAndGet(value.length * blockSize / 1024 + 1);
-        }
-
-        @Override
-        public int[] put(K key, int[] value)
-        {
-            int[] oldValue = null;
-
-            try
-            {
-                oldValue = super.put(key, value);
-            }
-            finally
-            {
-                if (value != null)
-                {
-                    addLengthToCacheSize(value);
-                }
-                if (oldValue != null)
-                {
-                    subLengthFromCacheSize(oldValue);
-                }
-            }
-
-            return oldValue;
-        }
-
-        @Override
-        public int[] remove(Object key)
-        {
-            int[] value = null;
-
-            try
-            {
-                value = super.remove(key);
-                return value;
-            }
-            finally
-            {
-                if (value != null)
-                {
-                    subLengthFromCacheSize(value);
-                }
-            }
-        }
-
-        /**
-         * This is called when the may key size is reached. The least recently
-         * used item will be passed here. We will store the position and size of
-         * the spot on disk in the recycle bin.
-         * <p>
-         *
-         * @param key
-         * @param value
-         */
-        @Override
-        protected void processRemovedLRU(K key, int[] value)
-        {
-            blockDiskCache.freeBlocks(value);
-            if (log.isDebugEnabled())
-            {
-                log.debug(logCacheName + "Removing key: [" + key + "] from key store.");
-                log.debug(logCacheName + "Key store size: [" + super.size() + "].");
-            }
-
-            if (value != null)
-            {
-                subLengthFromCacheSize(value);
-            }
-        }
-
-        @Override
-        protected boolean shouldRemove()
-        {
-            return maxSize > 0 && contentSize.get() > maxSize && this.size() > 1;
-        }
-    }
-
-    /**
-     * Class for recycling and lru. This implements the LRU overflow callback,
-     * so we can mark the blocks as free.
-     */
-    public class LRUMapCountLimited extends LRUMap<K, int[]>
-    {
-        /**
-         * <code>tag</code> tells us which map we are working on.
-         */
-        public final static String TAG = "orig-lru-count";
-
-        public LRUMapCountLimited(int maxKeySize)
-        {
-            super(maxKeySize);
-        }
-
-        /**
-         * This is called when the may key size is reached. The least recently
-         * used item will be passed here. We will store the position and size of
-         * the spot on disk in the recycle bin.
-         * <p>
-         *
-         * @param key
-         * @param value
-         */
-        @Override
-        protected void processRemovedLRU(K key, int[] value)
-        {
-            blockDiskCache.freeBlocks(value);
-            if (log.isDebugEnabled())
-            {
-                log.debug(logCacheName + "Removing key: [" + key + "] from key store.");
-                log.debug(logCacheName + "Key store size: [" + super.size() + "].");
-            }
-        }
-    }
 }

Modified: commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java
URL: http://svn.apache.org/viewvc/commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java?rev=1780805&r1=1780804&r2=1780805&view=diff
==============================================================================
--- commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java (original)
+++ commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java Sun Jan 29 15:16:25 2017
@@ -48,11 +48,11 @@ import org.apache.commons.jcs.engine.con
 import org.apache.commons.jcs.engine.logging.behavior.ICacheEvent;
 import org.apache.commons.jcs.engine.logging.behavior.ICacheEventLogger;
 import org.apache.commons.jcs.engine.stats.StatElement;
-import org.apache.commons.jcs.engine.stats.Stats;
 import org.apache.commons.jcs.engine.stats.behavior.IStatElement;
 import org.apache.commons.jcs.engine.stats.behavior.IStats;
-import org.apache.commons.jcs.utils.struct.AbstractLRUMap;
-import org.apache.commons.jcs.utils.struct.LRUMap;
+import org.apache.commons.jcs.utils.clhm.ConcurrentLinkedHashMap;
+import org.apache.commons.jcs.utils.clhm.EntryWeigher;
+import org.apache.commons.jcs.utils.clhm.EvictionListener;
 import org.apache.commons.jcs.utils.timing.ElapsedTimer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -994,13 +994,51 @@ public class IndexedDiskCache<K, V> exte
         keyHash = null;
         if (maxKeySize >= 0)
         {
-            if (this.diskLimitType == DiskLimitType.COUNT)
+            EvictionListener<K, IndexedDiskElementDescriptor> listener = new EvictionListener<K, IndexedDiskElementDescriptor>()
             {
-                keyHash = new LRUMapCountLimited(maxKeySize);
+                @Override
+                public void onEviction(K key, IndexedDiskElementDescriptor value)
+                {
+                    addToRecycleBin(value);
+                    if (log.isDebugEnabled())
+                    {
+                        log.debug(logCacheName + "Removing key: [" + key + "] from key store.");
+                        log.debug(logCacheName + "Key store size: [" + keyHash.size() + "].");
+                    }
+
+                    doOptimizeRealTime();
+                }
+            };
+
+            if (this.diskLimitType == DiskLimitType.SIZE)
+            {
+                EntryWeigher<K, IndexedDiskElementDescriptor> sizeWeigher = new EntryWeigher<K, IndexedDiskElementDescriptor>()
+                {
+                    @Override
+                    public int weightOf(K key, IndexedDiskElementDescriptor value)
+                    {
+                        if (value != null)
+                        {
+                            return value.len + IndexedDisk.HEADER_SIZE_BYTES;
+                        }
+                        else
+                        {
+                            return 1;
+                        }
+                    }
+                };
+                keyHash = new ConcurrentLinkedHashMap.Builder<K, IndexedDiskElementDescriptor>()
+                        .maximumWeightedCapacity(maxKeySize * 1024L) // maxKeySize is in kB; capacity in bytes
+                        .weigher(sizeWeigher)
+                        .listener(listener)
+                        .build();
             }
             else
             {
-                keyHash = new LRUMapSizeLimited(maxKeySize);
+                keyHash = new ConcurrentLinkedHashMap.Builder<K, IndexedDiskElementDescriptor>()
+                        .maximumWeightedCapacity(maxKeySize) // key count
+                        .listener(listener)
+                        .build();
             }
 
             if (log.isInfoEnabled())
@@ -1011,8 +1049,9 @@ public class IndexedDiskCache<K, V> exte
         else
         {
             // If no max size, use a plain map for memory and processing efficiency.
-            keyHash = new HashMap<K, IndexedDiskElementDescriptor>();
-            // keyHash = Collections.synchronizedMap( new HashMap() );
+            keyHash = new ConcurrentLinkedHashMap.Builder<K, IndexedDiskElementDescriptor>()
+                    .maximumWeightedCapacity(Long.MAX_VALUE) // unlimited key count
+                    .build();
             if (log.isInfoEnabled())
             {
                 log.info(logCacheName + "Set maxKeySize to unlimited'");
@@ -1540,7 +1579,8 @@ public class IndexedDiskCache<K, V> exte
     @Override
     public synchronized IStats getStatistics()
     {
-        IStats stats = new Stats();
+        // get the stats from the super too
+        IStats stats = super.getStatistics();
         stats.setTypeName("Indexed Disk Cache");
 
         ArrayList<IStatElement<?>> elems = new ArrayList<IStatElement<?>>();
@@ -1549,8 +1589,8 @@ public class IndexedDiskCache<K, V> exte
         elems.add(new StatElement<Integer>("Key Map Size", Integer.valueOf(this.keyHash != null ? this.keyHash.size() : -1)));
         try
         {
-            elems
-                .add(new StatElement<Long>("Data File Length", Long.valueOf(this.dataFile != null ? this.dataFile.length() : -1L)));
+            elems.add(new StatElement<Long>("Data File Length",
+                Long.valueOf(this.dataFile != null ? this.dataFile.length() : -1L)));
         }
         catch (IOException e)
         {
@@ -1565,10 +1605,6 @@ public class IndexedDiskCache<K, V> exte
         elems.add(new StatElement<Integer>("Recycle Bin Size", Integer.valueOf(this.recycle.size())));
         elems.add(new StatElement<Integer>("Startup Size", Integer.valueOf(this.startupSize)));
 
-        // get the stats from the super too
-        IStats sStats = super.getStatistics();
-        elems.addAll(sStats.getStatElements());
-
         stats.setStatElements(elems);
 
         return stats;
@@ -1629,162 +1665,4 @@ public class IndexedDiskCache<K, V> exte
             }
         }
     }
-
-    /**
-     * Class for recycling and lru. This implements the LRU overflow callback, so we can add items
-     * to the recycle bin. This class counts the size element to decide, when to throw away an element
-     */
-    public class LRUMapSizeLimited extends AbstractLRUMap<K, IndexedDiskElementDescriptor>
-    {
-        /**
-         * <code>tag</code> tells us which map we are working on.
-         */
-        public static final String TAG = "orig";
-
-        // size of the content in kB
-        private AtomicInteger contentSize;
-        private int maxSize;
-
-        /**
-         * Default
-         */
-        public LRUMapSizeLimited()
-        {
-            this(-1);
-        }
-
-        /**
-         * @param maxKeySize
-         */
-        public LRUMapSizeLimited(int maxKeySize)
-        {
-            super();
-            this.maxSize = maxKeySize;
-            this.contentSize = new AtomicInteger(0);
-        }
-
-        // keep the content size in kB, so 2^31 kB is reasonable value
-        private void subLengthFromCacheSize(IndexedDiskElementDescriptor value)
-        {
-            contentSize.addAndGet((value.len + IndexedDisk.HEADER_SIZE_BYTES) / -1024 - 1);
-        }
-
-        // keep the content size in kB, so 2^31 kB is reasonable value
-        private void addLengthToCacheSize(IndexedDiskElementDescriptor value)
-        {
-            contentSize.addAndGet((value.len + IndexedDisk.HEADER_SIZE_BYTES) / 1024 + 1);
-        }
-
-        @Override
-        public IndexedDiskElementDescriptor put(K key, IndexedDiskElementDescriptor value)
-        {
-            IndexedDiskElementDescriptor oldValue = null;
-
-            try
-            {
-                oldValue = super.put(key, value);
-            }
-            finally
-            {
-                // keep the content size in kB, so 2^31 kB is reasonable value
-                if (value != null)
-                {
-                    addLengthToCacheSize(value);
-                }
-                if (oldValue != null)
-                {
-                    subLengthFromCacheSize(oldValue);
-                }
-            }
-
-            return oldValue;
-        }
-
-        @Override
-        public IndexedDiskElementDescriptor remove(Object key)
-        {
-            IndexedDiskElementDescriptor value = null;
-
-            try
-            {
-                value = super.remove(key);
-                return value;
-            }
-            finally
-            {
-                if (value != null)
-                {
-                    subLengthFromCacheSize(value);
-                }
-            }
-        }
-
-        /**
-         * This is called when the may key size is reached. The least recently used item will be
-         * passed here. We will store the position and size of the spot on disk in the recycle bin.
-         * <p>
-         *
-         * @param key
-         * @param value
-         */
-        @Override
-        protected void processRemovedLRU(K key, IndexedDiskElementDescriptor value)
-        {
-            if (value != null)
-            {
-                subLengthFromCacheSize(value);
-            }
-
-            addToRecycleBin(value);
-
-            if (log.isDebugEnabled())
-            {
-                log.debug(logCacheName + "Removing key: [" + key + "] from key store.");
-                log.debug(logCacheName + "Key store size: [" + this.size() + "].");
-            }
-
-            doOptimizeRealTime();
-        }
-
-        @Override
-        protected boolean shouldRemove()
-        {
-            return maxSize > 0 && contentSize.get() > maxSize && this.size() > 0;
-        }
-    }
-
-    /**
-     * Class for recycling and lru. This implements the LRU overflow callback, so we can add items
-     * to the recycle bin. This class counts the elements to decide, when to throw away an element
-     */
-
-    public class LRUMapCountLimited extends LRUMap<K, IndexedDiskElementDescriptor>
-    // implements Serializable
-    {
-        public LRUMapCountLimited(int maxKeySize)
-        {
-            super(maxKeySize);
-        }
-
-        /**
-         * This is called when the may key size is reached. The least recently used item will be
-         * passed here. We will store the position and size of the spot on disk in the recycle bin.
-         * <p>
-         *
-         * @param key
-         * @param value
-         */
-        @Override
-        protected void processRemovedLRU(K key, IndexedDiskElementDescriptor value)
-        {
-            addToRecycleBin(value);
-            if (log.isDebugEnabled())
-            {
-                log.debug(logCacheName + "Removing key: [" + key + "] from key store.");
-                log.debug(logCacheName + "Key store size: [" + this.size() + "].");
-            }
-
-            doOptimizeRealTime();
-        }
-    }
 }

Modified: commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCache.java
URL: http://svn.apache.org/viewvc/commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCache.java?rev=1780805&r1=1780804&r2=1780805&view=diff
==============================================================================
--- commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCache.java (original)
+++ commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCache.java Sun Jan 29 15:16:25 2017
@@ -24,13 +24,16 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.List;
+import java.util.ListIterator;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.jcs.access.exception.CacheException;
 import org.apache.commons.jcs.access.exception.ObjectNotFoundException;
@@ -81,8 +84,8 @@ public class CompositeCache<K, V>
     private IElementEventQueue elementEventQ;
 
     /** Auxiliary caches. */
-    @SuppressWarnings("unchecked") // OK because this is an empty array
-    private AuxiliaryCache<K, V>[] auxCaches = new AuxiliaryCache[0];
+    private CopyOnWriteArrayList<AuxiliaryCache<K, V>> auxCaches =
+            new CopyOnWriteArrayList<AuxiliaryCache<K, V>>();
 
     /** is this alive? */
     private AtomicBoolean alive;
@@ -94,22 +97,22 @@ public class CompositeCache<K, V>
     private ICompositeCacheAttributes cacheAttr;
 
     /** How many times update was called. */
-    private AtomicInteger updateCount;
+    private AtomicLong updateCount;
 
     /** How many times remove was called. */
-    private AtomicInteger removeCount;
+    private AtomicLong removeCount;
 
     /** Memory cache hit count */
-    private AtomicInteger hitCountRam;
+    private AtomicLong hitCountRam;
 
     /** Auxiliary cache hit count (number of times found in ANY auxiliary) */
-    private AtomicInteger hitCountAux;
+    private AtomicLong hitCountAux;
 
     /** Count of misses where element was not found. */
-    private AtomicInteger missCountNotFound;
+    private AtomicLong missCountNotFound;
 
     /** Count of misses where element was expired. */
-    private AtomicInteger missCountExpired;
+    private AtomicLong missCountExpired;
 
     /**
      * The cache hub can only have one memory cache. This could be made more flexible in the future,
@@ -133,12 +136,12 @@ public class CompositeCache<K, V>
         this.attr = attr;
         this.cacheAttr = cattr;
         this.alive = new AtomicBoolean(true);
-        this.updateCount = new AtomicInteger(0);
-        this.removeCount = new AtomicInteger(0);
-        this.hitCountRam = new AtomicInteger(0);
-        this.hitCountAux = new AtomicInteger(0);
-        this.missCountNotFound = new AtomicInteger(0);
-        this.missCountExpired = new AtomicInteger(0);
+        this.updateCount = new AtomicLong(0);
+        this.removeCount = new AtomicLong(0);
+        this.hitCountRam = new AtomicLong(0);
+        this.hitCountAux = new AtomicLong(0);
+        this.missCountNotFound = new AtomicLong(0);
+        this.missCountExpired = new AtomicLong(0);
 
         createMemoryCache( cattr );
 
@@ -177,9 +180,20 @@ public class CompositeCache<K, V>
      * <p>
      * @param auxCaches
      */
+    @Deprecated
     public void setAuxCaches( AuxiliaryCache<K, V>[] auxCaches )
     {
-        this.auxCaches = auxCaches;
+        this.auxCaches = new CopyOnWriteArrayList<AuxiliaryCache<K,V>>(auxCaches);
+    }
+
+    /**
+     * This sets the list of auxiliary caches for this region.
+     * <p>
+     * @param auxCaches
+     */
+    public void setAuxCaches( List<? extends AuxiliaryCache<K, V>> auxCaches )
+    {
+        this.auxCaches = new CopyOnWriteArrayList<AuxiliaryCache<K,V>>(auxCaches);
     }
 
     /**
@@ -187,8 +201,20 @@ public class CompositeCache<K, V>
      * <p>
      * @return an array of auxiliary caches, may be empty, never null
      */
+    @SuppressWarnings("unchecked") // no generic arrays in Java
+    @Deprecated
     public AuxiliaryCache<K, V>[] getAuxCaches()
     {
+        return auxCaches.toArray( new AuxiliaryCache[0] );
+    }
+
+    /**
+     * Get the list of auxiliary caches for this region.
+     * <p>
+     * @return a list of auxiliary caches, may be empty, never null
+     */
+    public List<AuxiliaryCache<K, V>> getAuxCachesAsList()
+    {
         return this.auxCaches;
     }
 
@@ -247,11 +273,8 @@ public class CompositeCache<K, V>
 
         updateCount.incrementAndGet();
 
-        synchronized ( this )
-        {
-            memCache.update( cacheElement );
-            updateAuxiliaries( cacheElement, localOnly );
-        }
+        memCache.update( cacheElement );
+        updateAuxiliaries( cacheElement, localOnly );
 
         cacheElement.getElementAttributes().setLastAccessTimeNow();
     }
@@ -283,13 +306,13 @@ public class CompositeCache<K, V>
         // The types would describe the purpose.
         if ( log.isDebugEnabled() )
         {
-            if ( auxCaches.length > 0 )
+            if ( auxCaches.isEmpty() )
             {
-                log.debug( "Updating auxiliary caches" );
+                log.debug( "No auxiliary cache to update" );
             }
             else
             {
-                log.debug( "No auxiliary cache to update" );
+                log.debug( "Updating auxiliary caches" );
             }
         }
 
@@ -496,116 +519,113 @@ public class CompositeCache<K, V>
             log.debug( "get: key = " + key + ", localOnly = " + localOnly );
         }
 
-        synchronized (this)
+        try
         {
-            try
-            {
-                // First look in memory cache
-                element = memCache.get( key );
+            // First look in memory cache
+            element = memCache.get( key );
 
-                if ( element != null )
+            if ( element != null )
+            {
+                // Found in memory cache
+                if ( isExpired( element ) )
                 {
-                    // Found in memory cache
-                    if ( isExpired( element ) )
+                    if ( log.isDebugEnabled() )
                     {
-                        if ( log.isDebugEnabled() )
-                        {
-                            log.debug( cacheAttr.getCacheName() + " - Memory cache hit, but element expired" );
-                        }
-
-                        missCountExpired.incrementAndGet();
-                        remove( key );
-                        element = null;
+                        log.debug( cacheAttr.getCacheName() + " - Memory cache hit, but element expired" );
                     }
-                    else
-                    {
-                        if ( log.isDebugEnabled() )
-                        {
-                            log.debug( cacheAttr.getCacheName() + " - Memory cache hit" );
-                        }
 
-                        // Update counters
-                        hitCountRam.incrementAndGet();
+                    missCountExpired.incrementAndGet();
+                    remove( key );
+                    element = null;
+                }
+                else
+                {
+                    if ( log.isDebugEnabled() )
+                    {
+                        log.debug( cacheAttr.getCacheName() + " - Memory cache hit" );
                     }
 
-                    found = true;
+                    // Update counters
+                    hitCountRam.incrementAndGet();
                 }
-                else
+
+                found = true;
+            }
+            else
+            {
+                // Item not found in memory. If local invocation look in aux
+                // caches, even if not local look in disk auxiliaries
+                for (AuxiliaryCache<K, V> aux : auxCaches)
                 {
-                    // Item not found in memory. If local invocation look in aux
-                    // caches, even if not local look in disk auxiliaries
-                    for (AuxiliaryCache<K, V> aux : auxCaches)
+                    if ( aux != null )
                     {
-                        if ( aux != null )
-                        {
-                            CacheType cacheType = aux.getCacheType();
+                        CacheType cacheType = aux.getCacheType();
 
-                            if ( !localOnly || cacheType == CacheType.DISK_CACHE )
+                        if ( !localOnly || cacheType == CacheType.DISK_CACHE )
+                        {
+                            if ( log.isDebugEnabled() )
                             {
-                                if ( log.isDebugEnabled() )
-                                {
-                                    log.debug( "Attempting to get from aux [" + aux.getCacheName() + "] which is of type: "
-                                        + cacheType );
-                                }
-
-                                try
-                                {
-                                    element = aux.get( key );
-                                }
-                                catch ( IOException e )
-                                {
-                                    log.error( "Error getting from aux", e );
-                                }
+                                log.debug( "Attempting to get from aux [" + aux.getCacheName() + "] which is of type: "
+                                    + cacheType );
                             }
 
-                            if ( log.isDebugEnabled() )
+                            try
+                            {
+                                element = aux.get( key );
+                            }
+                            catch ( IOException e )
                             {
-                                log.debug( "Got CacheElement: " + element );
+                                log.error( "Error getting from aux", e );
                             }
+                        }
+
+                        if ( log.isDebugEnabled() )
+                        {
+                            log.debug( "Got CacheElement: " + element );
+                        }
 
-                            // Item found in one of the auxiliary caches.
-                            if ( element != null )
+                        // Item found in one of the auxiliary caches.
+                        if ( element != null )
+                        {
+                            if ( isExpired( element ) )
                             {
-                                if ( isExpired( element ) )
+                                if ( log.isDebugEnabled() )
                                 {
-                                    if ( log.isDebugEnabled() )
-                                    {
-                                        log.debug( cacheAttr.getCacheName() + " - Aux cache[" + aux.getCacheName() + "] hit, but element expired." );
-                                    }
-
-                                    missCountExpired.incrementAndGet();
-
-                                    // This will tell the remotes to remove the item
-                                    // based on the element's expiration policy. The elements attributes
-                                    // associated with the item when it created govern its behavior
-                                    // everywhere.
-                                    remove( key );
-                                    element = null;
+                                    log.debug( cacheAttr.getCacheName() + " - Aux cache[" + aux.getCacheName() + "] hit, but element expired." );
                                 }
-                                else
+
+                                missCountExpired.incrementAndGet();
+
+                                // This will tell the remotes to remove the item
+                                // based on the element's expiration policy. The elements attributes
+                                // associated with the item when it was created govern its behavior
+                                // everywhere.
+                                remove( key );
+                                element = null;
+                            }
+                            else
+                            {
+                                if ( log.isDebugEnabled() )
                                 {
-                                    if ( log.isDebugEnabled() )
-                                    {
-                                        log.debug( cacheAttr.getCacheName() + " - Aux cache[" + aux.getCacheName() + "] hit" );
-                                    }
-
-                                    // Update counters
-                                    hitCountAux.incrementAndGet();
-                                    copyAuxiliaryRetrievedItemToMemory( element );
+                                    log.debug( cacheAttr.getCacheName() + " - Aux cache[" + aux.getCacheName() + "] hit" );
                                 }
 
-                                found = true;
-
-                                break;
+                                // Update counters
+                                hitCountAux.incrementAndGet();
+                                copyAuxiliaryRetrievedItemToMemory( element );
                             }
+
+                            found = true;
+
+                            break;
                         }
                     }
                 }
             }
-            catch ( IOException e )
-            {
-                log.error( "Problem encountered getting element.", e );
-            }
+        }
+        catch ( IOException e )
+        {
+            log.error( "Problem encountered getting element.", e );
         }
 
         if ( !found )
@@ -892,9 +912,7 @@ public class CompositeCache<K, V>
         throws IOException
     {
         // find matches in key array
-        // this avoids locking the memory cache, but it uses more memory
         Set<K> keyArray = memCache.getKeySet();
-
         Set<K> matchingKeys = getKeyMatcher().getMatchingKeysFromArray( pattern, keyArray );
 
         // call get multiple
@@ -918,9 +936,9 @@ public class CompositeCache<K, V>
     {
         Map<K, ICacheElement<K, V>> elements = new HashMap<K, ICacheElement<K, V>>();
 
-        for ( int i = auxCaches.length - 1; i >= 0; i-- )
+        for (ListIterator<AuxiliaryCache<K, V>> i = auxCaches.listIterator(auxCaches.size()); i.hasPrevious();)
         {
-            AuxiliaryCache<K, V> aux = auxCaches[i];
+            AuxiliaryCache<K, V> aux = i.previous();
 
             if ( aux != null )
             {
@@ -1078,18 +1096,15 @@ public class CompositeCache<K, V>
         allKeys.addAll( memCache.getKeySet() );
         for ( AuxiliaryCache<K, V> aux : auxCaches )
         {
-            if ( aux != null )
+            if ( aux != null && (!localOnly || aux.getCacheType() == CacheType.DISK_CACHE))
             {
-                if(!localOnly || aux.getCacheType() == CacheType.DISK_CACHE)
+                try
                 {
-                    try
-                    {
-                        allKeys.addAll( aux.getKeySet() );
-                    }
-                    catch ( IOException e )
-                    {
-                        // ignore
-                    }
+                    allKeys.addAll( aux.getKeySet() );
+                }
+                catch ( IOException e )
+                {
+                    // ignore
                 }
             }
         }
@@ -1143,53 +1158,50 @@ public class CompositeCache<K, V>
 
         boolean removed = false;
 
-        synchronized (this)
+        try
         {
-            try
-            {
-                removed = memCache.remove( key );
-            }
-            catch ( IOException e )
-            {
-                log.error( e );
-            }
+            removed = memCache.remove( key );
+        }
+        catch ( IOException e )
+        {
+            log.error( e );
+        }
 
-            // Removes from all auxiliary caches.
-            for ( ICache<K, V> aux : auxCaches )
+        // Removes from all auxiliary caches.
+        for ( ICache<K, V> aux : auxCaches )
+        {
+            if ( aux == null )
             {
-                if ( aux == null )
-                {
-                    continue;
-                }
+                continue;
+            }
 
-                CacheType cacheType = aux.getCacheType();
+            CacheType cacheType = aux.getCacheType();
 
-                // for now let laterals call remote remove but not vice versa
+            // for now let laterals call remote remove but not vice versa
 
-                if ( localOnly && ( cacheType == CacheType.REMOTE_CACHE || cacheType == CacheType.LATERAL_CACHE ) )
+            if ( localOnly && ( cacheType == CacheType.REMOTE_CACHE || cacheType == CacheType.LATERAL_CACHE ) )
+            {
+                continue;
+            }
+            try
+            {
+                if ( log.isDebugEnabled() )
                 {
-                    continue;
+                    log.debug( "Removing " + key + " from cacheType" + cacheType );
                 }
-                try
-                {
-                    if ( log.isDebugEnabled() )
-                    {
-                        log.debug( "Removing " + key + " from cacheType" + cacheType );
-                    }
 
-                    boolean b = aux.remove( key );
+                boolean b = aux.remove( key );
 
-                    // Don't take the remote removal into account.
-                    if ( !removed && cacheType != CacheType.REMOTE_CACHE )
-                    {
-                        removed = b;
-                    }
-                }
-                catch ( IOException ex )
+                // Don't take the remote removal into account.
+                if ( !removed && cacheType != CacheType.REMOTE_CACHE )
                 {
-                    log.error( "Failure removing from aux", ex );
+                    removed = b;
                 }
             }
+            catch ( IOException ex )
+            {
+                log.error( "Failure removing from aux", ex );
+            }
         }
 
         return removed;
@@ -1391,31 +1403,30 @@ public class CompositeCache<K, V>
             return;
         }
 
-        synchronized ( this )
+        Set<K> keySet = new HashSet<K>(memCache.getKeySet());
+        for ( ICache<K, V> aux : auxCaches )
         {
-            for ( ICache<K, V> aux : auxCaches )
+            try
             {
-                try
+                if ( aux.getStatus() == CacheStatus.ALIVE )
                 {
-                    if ( aux.getStatus() == CacheStatus.ALIVE )
+                    for (K key : keySet)
                     {
-                        for (K key : memCache.getKeySet())
-                        {
-                            ICacheElement<K, V> ce = memCache.get(key);
+                        ICacheElement<K, V> ce = memCache.get(key);
 
-                            if (ce != null)
-                            {
-                                aux.update( ce );
-                            }
+                        if (ce != null)
+                        {
+                            aux.update( ce );
                         }
                     }
                 }
-                catch ( IOException ex )
-                {
-                    log.error( "Failure saving aux caches.", ex );
-                }
+            }
+            catch ( IOException ex )
+            {
+                log.error( "Failure saving aux caches.", ex );
             }
         }
+
         if ( log.isDebugEnabled() )
         {
             log.debug( "Called save for [" + cacheAttr.getCacheName() + "]" );
@@ -1480,14 +1491,13 @@ public class CompositeCache<K, V>
         // store the composite cache stats first
         ArrayList<IStatElement<?>> elems = new ArrayList<IStatElement<?>>();
 
-        elems.add(new StatElement<Integer>( "HitCountRam", Integer.valueOf(getHitCountRam()) ) );
-        elems.add(new StatElement<Integer>( "HitCountAux", Integer.valueOf(getHitCountAux()) ) );
+        elems.add(new StatElement<AtomicLong>( "HitCountRam", hitCountRam ) );
+        elems.add(new StatElement<AtomicLong>( "HitCountAux", hitCountAux ) );
 
         stats.setStatElements( elems );
 
         // memory + aux, memory is not considered an auxiliary internally
-        int total = auxCaches.length + 1;
-        ArrayList<IStats> auxStats = new ArrayList<IStats>(total);
+        ArrayList<IStats> auxStats = new ArrayList<IStats>(auxCaches.size() + 1);
 
         auxStats.add(getMemoryCache().getStatistics());
 
@@ -1749,7 +1759,7 @@ public class CompositeCache<K, V>
      */
     public int getHitCountRam()
     {
-        return hitCountRam.get();
+        return hitCountRam.intValue();
     }
 
     /**
@@ -1758,7 +1768,7 @@ public class CompositeCache<K, V>
      */
     public int getHitCountAux()
     {
-        return hitCountAux.get();
+        return hitCountAux.intValue();
     }
 
     /**
@@ -1767,7 +1777,7 @@ public class CompositeCache<K, V>
      */
     public int getMissCountNotFound()
     {
-        return missCountNotFound.get();
+        return missCountNotFound.intValue();
     }
 
     /**
@@ -1776,7 +1786,7 @@ public class CompositeCache<K, V>
      */
     public int getMissCountExpired()
     {
-        return missCountExpired.get();
+        return missCountExpired.intValue();
     }
 
     /**
@@ -1784,6 +1794,51 @@ public class CompositeCache<K, V>
      */
     public int getUpdateCount()
     {
+        return updateCount.intValue();
+    }
+
+    /**
+     * Number of times a requested item was found in the memory cache.
+     * <p>
+     * @return number of hits in memory
+     */
+    public long getHitCountRamLong()
+    {
+        return hitCountRam.get();
+    }
+
+    /**
+     * Number of times a requested item was found in an auxiliary cache.
+     * @return number of auxiliary hits.
+     */
+    public long getHitCountAuxLong()
+    {
+        return hitCountAux.get();
+    }
+
+    /**
+     * Number of times a requested element was not found.
+     * @return number of misses.
+     */
+    public long getMissCountNotFoundLong()
+    {
+        return missCountNotFound.get();
+    }
+
+    /**
+     * Number of times a requested element was found but was expired.
+     * @return number of found but expired gets.
+     */
+    public long getMissCountExpiredLong()
+    {
+        return missCountExpired.get();
+    }
+
+    /**
+     * @return Returns the updateCount.
+     */
+    public long getUpdateCountLong()
+    {
         return updateCount.get();
     }
 

Modified: commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCacheConfigurator.java
URL: http://svn.apache.org/viewvc/commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCacheConfigurator.java?rev=1780805&r1=1780804&r2=1780805&view=diff
==============================================================================
--- commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCacheConfigurator.java (original)
+++ commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCacheConfigurator.java Sun Jan 29 15:16:25 2017
@@ -270,9 +270,7 @@ public class CompositeCacheConfigurator
             }
 
             // Associate the auxiliaries with the cache
-            @SuppressWarnings("unchecked") // No generic arrays in java
-            AuxiliaryCache<K, V>[] auxArray = auxList.toArray( new AuxiliaryCache[0] );
-            cache.setAuxCaches( auxArray );
+            cache.setAuxCaches( auxList );
         }
 
         // Return the new cache

Modified: commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractDoubleLinkedListMemoryCache.java
URL: http://svn.apache.org/viewvc/commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractDoubleLinkedListMemoryCache.java?rev=1780805&r1=1780804&r2=1780805&view=diff
==============================================================================
--- commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractDoubleLinkedListMemoryCache.java (original)
+++ commons/proper/jcs/branches/jcs-core-with-clhm/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractDoubleLinkedListMemoryCache.java Sun Jan 29 15:16:25 2017
@@ -32,7 +32,6 @@ import org.apache.commons.jcs.engine.Cac
 import org.apache.commons.jcs.engine.behavior.ICacheElement;
 import org.apache.commons.jcs.engine.control.CompositeCache;
 import org.apache.commons.jcs.engine.control.group.GroupAttrName;
-import org.apache.commons.jcs.engine.memory.util.DefaultMemoryElementDescriptor;
 import org.apache.commons.jcs.engine.memory.util.MemoryElementDescriptor;
 import org.apache.commons.jcs.engine.stats.StatElement;
 import org.apache.commons.jcs.engine.stats.behavior.IStatElement;
@@ -101,24 +100,16 @@ public abstract class AbstractDoubleLink
     {
         putCnt.incrementAndGet();
 
-        lock.lock();
-        try
-        {
-            MemoryElementDescriptor<K, V> newNode = adjustListForUpdate(ce);
+        MemoryElementDescriptor<K, V> newNode = adjustListForUpdate(ce);
 
-            // this should be synchronized if we were not using a ConcurrentHashMap
-            final K key = newNode.getCacheElement().getKey();
-            MemoryElementDescriptor<K, V> oldNode = map.put(key, newNode);
+        // this should be synchronized if we were not using a ConcurrentHashMap
+        final K key = newNode.getCacheElement().getKey();
+        MemoryElementDescriptor<K, V> oldNode = map.put(key, newNode);
 
-            // If the node was the same as an existing node, remove it.
-            if (oldNode != null && key.equals(oldNode.getCacheElement().getKey()))
-            {
-                list.remove(oldNode);
-            }
-        }
-        finally
+        // If the node was the same as an existing node, remove it.
+        if (oldNode != null && key.equals(oldNode.getCacheElement().getKey()))
         {
-            lock.unlock();
+            list.remove(oldNode);
         }
 
         // If we are over the max spool some
@@ -168,34 +159,20 @@ public abstract class AbstractDoubleLink
         // The spool will put them in a disk event queue, so there is no
         // need to pre-queue the queuing. This would be a bit wasteful
         // and wouldn't save much time in this synchronous call.
-        lock.lock();
-
-        try
+        for (int i = 0; i < chunkSizeCorrected; i++)
         {
-            for (int i = 0; i < chunkSizeCorrected; i++)
+            ICacheElement<K, V> lastElement = spoolLastElement();
+            if (lastElement == null)
             {
-                ICacheElement<K, V> lastElement = spoolLastElement();
-                if (lastElement == null)
-                {
-                    break;
-                }
-            }
-
-            // If this is out of the sync block it can detect a mismatch
-            // where there is none.
-            if (log.isDebugEnabled() && map.size() != list.size())
-            {
-                log.debug("update: After spool, size mismatch: map.size() = " + map.size() + ", linked list size = " + list.size());
+                break;
             }
         }
-        finally
-        {
-            lock.unlock();
-        }
 
-        if (log.isDebugEnabled())
+        // If this is out of the sync block it can detect a mismatch
+        // where there is none.
+        if (log.isDebugEnabled() && map.size() != list.size())
         {
-            log.debug("update: After spool map size: " + map.size() + " linked list size = " + list.size());
+            log.debug("update: After spool, size mismatch: map.size() = " + map.size() + ", linked list size = " + list.size());
         }
     }
 
@@ -211,7 +188,7 @@ public abstract class AbstractDoubleLink
     @Override
     public final ICacheElement<K, V> get(K key) throws IOException
     {
-        ICacheElement<K, V> ce = null;
+        ICacheElement<K, V> ce;
 
         if (log.isDebugEnabled())
         {
@@ -224,17 +201,9 @@ public abstract class AbstractDoubleLink
         {
             hitCnt.incrementAndGet();
 
-            lock.lock();
-            try
-            {
-                ce = me.getCacheElement();
-                // ABSTRACT
-                adjustListForGet(me);
-            }
-            finally
-            {
-                lock.unlock();
-            }
+            ce = me.getCacheElement();
+            // ABSTRACT
+            adjustListForGet(me);
 
             if (log.isDebugEnabled())
             {
@@ -245,6 +214,8 @@ public abstract class AbstractDoubleLink
         {
             missCnt.incrementAndGet();
 
+            ce = null;
+
             if (log.isDebugEnabled())
             {
                 log.debug(getCacheName() + ": LRUMemoryCache miss for " + key);
@@ -281,25 +252,16 @@ public abstract class AbstractDoubleLink
     @Override
     public int freeElements(int numberToFree) throws IOException
     {
-        int freed = 0;
-
-        lock.lock();
+        int freed;
 
-        try
+        for (freed = 0; freed < numberToFree; freed++)
         {
-            for (; freed < numberToFree; freed++)
+            ICacheElement<K, V> element = spoolLastElement();
+            if (element == null)
             {
-                ICacheElement<K, V> element = spoolLastElement();
-                if (element == null)
-                {
-                    break;
-                }
+                break;
             }
         }
-        finally
-        {
-            lock.unlock();
-        }
 
         return freed;
     }
@@ -315,13 +277,13 @@ public abstract class AbstractDoubleLink
     {
         ICacheElement<K, V> toSpool = null;
 
-        final MemoryElementDescriptor<K, V> last = list.getLast();
+        final MemoryElementDescriptor<K, V> last = list.removeLast();
         if (last != null)
         {
             toSpool = last.getCacheElement();
             if (toSpool != null)
             {
-                getCompositeCache().spoolToDisk(toSpool);
+                waterfal(toSpool);
                 if (map.remove(toSpool.getKey()) == null)
                 {
                     log.warn("update: remove failed for key: " + toSpool.getKey());
@@ -336,8 +298,6 @@ public abstract class AbstractDoubleLink
             {
                 throw new Error("update: last.ce is null!");
             }
-
-            list.remove(last);
         }
 
         return toSpool;
@@ -374,17 +334,9 @@ public abstract class AbstractDoubleLink
 
                 if (k instanceof String && ((String) k).startsWith(key.toString()))
                 {
-                    lock.lock();
-                    try
-                    {
-                        list.remove(entry.getValue());
-                        itr.remove();
-                        removed = true;
-                    }
-                    finally
-                    {
-                        lock.unlock();
-                    }
+                    itr.remove();
+                    list.remove(entry.getValue());
+                    removed = true;
                 }
             }
         }
@@ -398,36 +350,20 @@ public abstract class AbstractDoubleLink
 
                 if (k instanceof GroupAttrName && ((GroupAttrName<?>) k).groupId.equals(((GroupAttrName<?>) key).groupId))
                 {
-                    lock.lock();
-                    try
-                    {
-                        list.remove(entry.getValue());
-                        itr.remove();
-                        removed = true;
-                    }
-                    finally
-                    {
-                        lock.unlock();
-                    }
+                    itr.remove();
+                    list.remove(entry.getValue());
+                    removed = true;
                 }
             }
         }
         else
         {
             // remove single item.
-            lock.lock();
-            try
-            {
-                MemoryElementDescriptor<K, V> me = map.remove(key);
-                if (me != null)
-                {
-                    list.remove(me);
-                    removed = true;
-                }
-            }
-            finally
+            MemoryElementDescriptor<K, V> me = map.remove(key);
+            if (me != null)
             {
-                lock.unlock();
+                list.remove(me);
+                removed = true;
             }
         }
 
@@ -444,16 +380,8 @@ public abstract class AbstractDoubleLink
     @Override
     public void removeAll() throws IOException
     {
-        lock.lock();
-        try
-        {
-            list.removeAll();
-            map.clear();
-        }
-        finally
-        {
-            lock.unlock();
-        }
+        super.removeAll();
+        list.removeAll();
     }
 
     // --------------------------- internal methods (linked list implementation)
@@ -467,21 +395,13 @@ public abstract class AbstractDoubleLink
      */
     protected MemoryElementDescriptor<K, V> addFirst(ICacheElement<K, V> ce)
     {
-        lock.lock();
-        try
-        {
-            MemoryElementDescriptor<K, V> me = new DefaultMemoryElementDescriptor<K, V>(ce);
-            list.addFirst(me);
-            if ( log.isDebugEnabled() )
-            {
-                verifyCache(ce.getKey());
-            }
-            return me;
-        }
-        finally
+        MemoryElementDescriptor<K, V> me = new MemoryElementDescriptor<K, V>(ce);
+        list.addFirst(me);
+        if ( log.isDebugEnabled() )
         {
-            lock.unlock();
+            verifyCache(ce.getKey());
         }
+        return me;
     }
 
     /**
@@ -494,21 +414,13 @@ public abstract class AbstractDoubleLink
      */
     protected MemoryElementDescriptor<K, V> addLast(ICacheElement<K, V> ce)
     {
-        lock.lock();
-        try
-        {
-            MemoryElementDescriptor<K, V> me = new DefaultMemoryElementDescriptor<K, V>(ce);
-            list.addLast(me);
-            if ( log.isDebugEnabled() )
-            {
-                verifyCache(ce.getKey());
-            }
-            return me;
-        }
-        finally
+        MemoryElementDescriptor<K, V> me = new MemoryElementDescriptor<K, V>(ce);
+        list.addLast(me);
+        if ( log.isDebugEnabled() )
         {
-            lock.unlock();
+            verifyCache(ce.getKey());
         }
+        return me;
     }
 
     // ---------------------------------------------------------- debug methods



Mime
View raw message