On 5/12/14, 12:46 PM, rmannibu...@apache.org wrote:
> Author: rmannibucau
> Date: Mon May 12 19:46:08 2014
> New Revision: 1594073
>
> URL: http://svn.apache.org/r1594073
> Log:
> removing a bunch of synchronized thanks to ConcurrentHashMap - still 
> removeAll to review since it can be not that good but using any synch would 
> make it slower and not scalable
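
For anyone skimming the diff below: the change swaps a monitor-guarded map for a
ConcurrentHashMap, so single map calls stay thread safe without any synchronized
block; only compound actions that touch the map and the linked list together lose
their atomicity. A minimal stand-alone sketch of the basic swap (illustrative
names, not the JCS classes):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Illustrative sketch only: ConcurrentHashMap makes each individual call
    // (put, get, remove, clear) thread safe on its own, so the per-method
    // synchronized keyword can be dropped. Anything spanning several calls or
    // several structures still needs its own coordination.
    public class ConcurrentMapSketch<K, V>
    {
        private final Map<K, V> map = new ConcurrentHashMap<K, V>();

        public void put( K key, V value )
        {
            map.put( key, value );   // no lock held by the caller
        }

        public V get( K key )
        {
            return map.get( key );   // no lock held by the caller
        }
    }
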
>
> Modified:
>     commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/remote/server/RemoteCacheServerFactory.java
>     commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractDoubleLinkedListMemoryCache.java
>     commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/util/MemoryElementDescriptor.java
>
> Modified: commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/remote/server/RemoteCacheServerFactory.java
> URL: http://svn.apache.org/viewvc/commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/remote/server/RemoteCacheServerFactory.java?rev=1594073&r1=1594072&r2=1594073&view=diff
> ==============================================================================
> --- commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/remote/server/RemoteCacheServerFactory.java (original)
> +++ commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/remote/server/RemoteCacheServerFactory.java Mon May 12 19:46:08 2014
> @@ -81,7 +81,7 @@ public class RemoteCacheServerFactory
>       * @return Returns the remoteCacheServer.
>       */
>      @SuppressWarnings("unchecked") // Need cast to specific RemoteCacheServer
> -    public static <K extends Serializable, V extends Serializable> RemoteCacheServer<K, V> getRemoteCacheServer()
> +    public static <K, V> RemoteCacheServer<K, V> getRemoteCacheServer()
>      {
>          return (RemoteCacheServer<K, V>)remoteCacheServer;
>      }
>
> Modified: commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractDoubleLinkedListMemoryCache.java
> URL: http://svn.apache.org/viewvc/commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractDoubleLinkedListMemoryCache.java?rev=1594073&r1=1594072&r2=1594073&view=diff
> ==============================================================================
> --- commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractDoubleLinkedListMemoryCache.java (original)
> +++ commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractDoubleLinkedListMemoryCache.java Mon May 12 19:46:08 2014
> @@ -86,8 +86,11 @@ public abstract class AbstractDoubleLink
>  
>      /**
>       * This is called by super initialize.
> +     *
> +     * NOTE: should return a thread safe map
> +     *
>       * <p>
> -     * @return new Hashtable()
> +     * @return new ConcurrentHashMap()
>       */
>      @Override
>      public Map<K, MemoryElementDescriptor<K, V>> createMap()
> @@ -109,19 +112,15 @@ public abstract class AbstractDoubleLink
>      {
>          putCnt++;
>  
> -        synchronized ( this )

I am not really familiar with this code, so this may be a needless concern;
but removing this synchronized block means adjustListForUpdate is no longer
atomic with the map.put below.  Is this OK?
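
For illustration only (this is not the committed code, and it assumes a Java 8
ConcurrentHashMap, i.e. Map.compute and lambdas), one way to keep the list
adjustment and the map update atomic per key without a cache-wide lock could be:

    // Sketch only. compute() runs the remapping function atomically for the
    // given key, so the list adjustment, the map update and the removal of the
    // stale descriptor cannot interleave with another update of the same key.
    // The DoubleLinkedList itself would still need to be safe for concurrent
    // addFirst()/remove() calls.
    map.compute( ce.getKey(), ( key, oldNode ) -> {
        MemoryElementDescriptor<K, V> newNode = adjustListForUpdate( ce );
        if ( oldNode != null )
        {
            list.remove( oldNode );
        }
        return newNode;
    } );

Without Java 8 the alternative is probably a narrow synchronized block around
just those two calls, which at least keeps the get() path lock free.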

Phil
> -        {
> -            // ABSTRACT
> -            MemoryElementDescriptor<K, V> newNode = adjustListForUpdate( ce );
> +        MemoryElementDescriptor<K, V> newNode = adjustListForUpdate( ce );
>  
> -            // this must be synchronized
> -            MemoryElementDescriptor<K, V> oldNode = map.put( newNode.ce.getKey(), newNode );
> +        // this should be synchronized if we were not using a ConcurrentHashMap
> +        MemoryElementDescriptor<K, V> oldNode = map.put( newNode.ce.getKey(), newNode );
>  
> -            // If the node was the same as an existing node, remove it.
> -            if ( oldNode != null && newNode.ce.getKey().equals( oldNode.ce.getKey() ) )
> -            {
> -                list.remove( oldNode );
> -            }
> +        // If the node was the same as an existing node, remove it.
> +        if ( oldNode != null && newNode.ce.getKey().equals( oldNode.ce.getKey() ) )
> +        {
> +            list.remove( oldNode );
>          }
>  
>          // If we are over the max spool some
> @@ -190,12 +189,14 @@ public abstract class AbstractDoubleLink
>       * @throws IOException
>       */
>      @Override
> -    public final synchronized ICacheElement<K, V> get( K key )
> +    public final ICacheElement<K, V> get( K key )
>          throws IOException
>      {
>          ICacheElement<K, V> ce = null;
>  
> -        if ( log.isDebugEnabled() )
> +        final boolean debugEnabled = log.isDebugEnabled();
> +
> +        if (debugEnabled)
>          {
>              log.debug( "getting item from cache " + cacheName + " for key " + key );
>          }
> @@ -206,7 +207,7 @@ public abstract class AbstractDoubleLink
>          {
>              ce = me.ce;
>              hitCnt++;
> -            if ( log.isDebugEnabled() )
> +            if (debugEnabled)
>              {
>                  log.debug( cacheName + ": LRUMemoryCache hit for " + ce.getKey() );
>              }
> @@ -217,7 +218,7 @@ public abstract class AbstractDoubleLink
>          else
>          {
>              missCnt++;
> -            if ( log.isDebugEnabled() )
> +            if (debugEnabled)
>              {
>                  log.debug( cacheName + ": LRUMemoryCache miss for " + key );
>              }
> @@ -270,46 +271,38 @@ public abstract class AbstractDoubleLink
>          throws Error
>      {
>          ICacheElement<K, V> toSpool = null;
> -        synchronized ( this )
> +        final MemoryElementDescriptor<K, V> last = list.getLast();
> +        if ( last != null )
>          {
> -            if ( list.getLast() != null )
> +            toSpool = last.ce;
> +            if ( toSpool != null )
>              {
> -                toSpool = list.getLast().ce;
> -                if ( toSpool != null )
> -                {
> -                    cache.spoolToDisk( list.getLast().ce );
> -                    if ( !map.containsKey( list.getLast().ce.getKey() ) )
> -                    {
> -                        log.error( "update: map does not contain key: "
> -                            + list.getLast().ce.getKey() );
> -                        verifyCache();
> -                    }
> -                    if ( map.remove( list.getLast().ce.getKey() ) == null )
> -                    {
> -                        log.warn( "update: remove failed for key: "
> -                            + list.getLast().ce.getKey() );
> -                        verifyCache();
> -                    }
> -                }
> -                else
> +                cache.spoolToDisk( last.ce );
> +                if ( map.remove( last.ce.getKey() ) == null )
>                  {
> -                    throw new Error( "update: last.ce is null!" );
> +                    log.warn( "update: remove failed for key: "
> +                        + last.ce.getKey() );
> +                    verifyCache();
>                  }
> -                list.removeLast();
>              }
>              else
>              {
> -                verifyCache();
> -                throw new Error( "update: last is null!" );
> +                throw new Error( "update: last.ce is null!" );
>              }
> +            list.remove(last);
> +        }
> +        else
> +        {
> +            verifyCache();
> +            throw new Error( "update: last is null!" );
> +        }
>  
> -            // If this is out of the sync block it can detect a mismatch
> -            // where there is none.
> -            if ( map.size() != dumpCacheSize() )
> -            {
> -                log.warn( "update: After spool, size mismatch: map.size() = " + map.size() + ", linked list size = "
> -                    + dumpCacheSize() );
> -            }
> +        // If this is out of the sync block it can detect a mismatch
> +        // where there is none.
> +        if ( map.size() != dumpCacheSize() )
> +        {
> +            log.warn( "update: After spool, size mismatch: map.size() = " + map.size() + ", linked list size = "
> +                + dumpCacheSize() );
>          }
>          return toSpool;
>      }
> @@ -324,7 +317,7 @@ public abstract class AbstractDoubleLink
>       * @throws IOException
>       */
>      @Override
> -    public synchronized boolean remove( K key )
> +    public boolean remove( K key )
>          throws IOException
>      {
>          if ( log.isDebugEnabled() )
> @@ -338,39 +331,33 @@ public abstract class AbstractDoubleLink
>          if ( key instanceof String && ( (String) key ).endsWith( CacheConstants.NAME_COMPONENT_DELIMITER ) )
>          {
>              // remove all keys of the same name hierarchy.
> -            synchronized ( map )
> +            for (Iterator<Map.Entry<K, MemoryElementDescriptor<K, V>>> itr = map.entrySet().iterator(); itr.hasNext(); )
>              {
> -                for (Iterator<Map.Entry<K, MemoryElementDescriptor<K, V>>> itr = map.entrySet().iterator(); itr.hasNext(); )
> -                {
> -                    Map.Entry<K, MemoryElementDescriptor<K, V>> entry = itr.next();
> -                    K k = entry.getKey();
> +                Map.Entry<K, MemoryElementDescriptor<K, V>> entry = itr.next();
> +                K k = entry.getKey();
>  
> -                    if ( k instanceof String && ( (String) k ).startsWith( key.toString() ) )
> -                    {
> -                        list.remove( entry.getValue() );
> -                        itr.remove();
> -                        removed = true;
> -                    }
> +                if ( k instanceof String && ( (String) k ).startsWith( key.toString() ) )
> +                {
> +                    list.remove( entry.getValue() );
> +                    itr.remove();
> +                    removed = true;
>                  }
>              }
>          }
>          else if ( key instanceof GroupAttrName )
>          {
>              // remove all keys of the same name hierarchy.
> -            synchronized ( map )
> +            for (Iterator<Map.Entry<K, MemoryElementDescriptor<K, V>>> itr = map.entrySet().iterator(); itr.hasNext(); )
>              {
> -                for (Iterator<Map.Entry<K, MemoryElementDescriptor<K, V>>> itr = map.entrySet().iterator(); itr.hasNext(); )
> -                {
> -                    Map.Entry<K, MemoryElementDescriptor<K, V>> entry = itr.next();
> -                    K k = entry.getKey();
> +                Map.Entry<K, MemoryElementDescriptor<K, V>> entry = itr.next();
> +                K k = entry.getKey();
>  
> -                    if ( k instanceof GroupAttrName &&
> -                        ((GroupAttrName<?>)k).groupId.equals(((GroupAttrName<?>)key).groupId))
> -                    {
> -                        list.remove( entry.getValue() );
> -                        itr.remove();
> -                        removed = true;
> -                    }
> +                if ( k instanceof GroupAttrName &&
> +                    ((GroupAttrName<?>)k).groupId.equals(((GroupAttrName<?>)key).groupId))
> +                {
> +                    list.remove( entry.getValue() );
> +                    itr.remove();
> +                    removed = true;
>                  }
>              }
>          }
> @@ -396,7 +383,7 @@ public abstract class AbstractDoubleLink
>       * @throws IOException
>       */
>      @Override
> -    public synchronized void removeAll()
> +    public void removeAll()
>          throws IOException
>      {
>          map.clear();
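
On the removeAll() point from the log message: as far as I can tell the concern
is that clearing the map and resetting the list are two separate steps, so a
concurrent put() can land in between and leave the two structures out of step.
A stand-alone sketch of that shape of problem (illustrative classes, not the
JCS ones):

    import java.util.Map;
    import java.util.Queue;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentLinkedQueue;

    // Illustrative sketch only. Each call below is individually thread safe,
    // but removeAll() is not atomic: a put() running between the two clear()
    // calls can leave an entry tracked in one structure and not the other.
    public class TwoStructureSketch<K, V>
    {
        private final Map<K, V> map = new ConcurrentHashMap<K, V>();
        private final Queue<K> order = new ConcurrentLinkedQueue<K>();

        public void put( K key, V value )
        {
            map.put( key, value );
            order.add( key );
        }

        public void removeAll()
        {
            map.clear();
            order.clear();
        }
    }
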
> @@ -410,7 +397,7 @@ public abstract class AbstractDoubleLink
>       * @param ce The feature to be added to the First
>       * @return MemoryElementDescriptor
>       */
> -    protected synchronized MemoryElementDescriptor<K, V> addFirst( ICacheElement<K, V> ce )
> +    protected MemoryElementDescriptor<K, V> addFirst( ICacheElement<K, V> ce )
>      {
>          MemoryElementDescriptor<K, V> me = new MemoryElementDescriptor<K, V>( ce );
>          list.addFirst( me );
> @@ -424,7 +411,7 @@ public abstract class AbstractDoubleLink
>       * @param ce The feature to be added to the First
>       * @return MemoryElementDescriptor
>       */
> -    protected synchronized MemoryElementDescriptor<K, V> addLast( ICacheElement<K, V> ce )
> +    protected MemoryElementDescriptor<K, V> addLast( ICacheElement<K, V> ce )
>      {
>          MemoryElementDescriptor<K, V> me = new MemoryElementDescriptor<K, V>( ce );
>          list.addLast( me );
>
> Modified: commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/util/MemoryElementDescriptor.java
> URL: http://svn.apache.org/viewvc/commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/util/MemoryElementDescriptor.java?rev=1594073&r1=1594072&r2=1594073&view=diff
> ==============================================================================
> --- commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/util/MemoryElementDescriptor.java (original)
> +++ commons/proper/jcs/trunk/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/util/MemoryElementDescriptor.java Mon May 12 19:46:08 2014
> @@ -32,7 +32,7 @@ public class MemoryElementDescriptor<K, 
>      private static final long serialVersionUID = -1905161209035522460L;
>  
>      /** The CacheElement wrapped by this descriptor */
> -    public ICacheElement<K, V> ce; // TODO privatise
> +    public final ICacheElement<K, V> ce; // TODO privatise
>  
>      /**
>       * Constructs a usable MemoryElementDescriptor.
>
>
>

