source.dussan.org Git - jgit.git/commitdiff
Reduce synchronized scope around ConcurrentHashMap 65/84565/2
authorPhilipp Marx <smigfu@googlemail.com>
Sat, 12 Nov 2016 10:11:19 +0000 (11:11 +0100)
committerPhilipp Marx <smigfu@googlemail.com>
Sat, 12 Nov 2016 10:11:19 +0000 (11:11 +0100)
Change-Id: I982a78070efb6bc2d3395330456d62e0d5ce6da7
Signed-off-by: Philipp Marx <smigfu@googlemail.com>
org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java

index 05627ed86baad404e1cd23219f8a354c773c5706..f7decf132471730918a315997f7c90ae6982ec5f 100644 (file)
@@ -263,20 +263,22 @@ public final class DfsBlockCache {
                // TODO This table grows without bound. It needs to clean up
                // entries that aren't in cache anymore, and aren't being used
                // by a live DfsObjDatabase reference.
-               synchronized (packCache) {
-                       DfsPackFile pack = packCache.get(dsc);
-                       if (pack != null && pack.invalid()) {
-                               packCache.remove(dsc);
-                               pack = null;
-                       }
-                       if (pack == null) {
-                               if (key == null)
-                                       key = new DfsPackKey();
-                               pack = new DfsPackFile(this, dsc, key);
-                               packCache.put(dsc, pack);
-                       }
+
+               DfsPackFile pack = packCache.get(dsc);
+               if (pack != null && !pack.invalid()) {
                        return pack;
                }
+
+               // 'pack' either didn't exist or was invalid. Compute a new
+               // entry atomically (guaranteed by ConcurrentHashMap).
+               return packCache.compute(dsc, (k, v) -> {
+                       if (v != null && !v.invalid()) { // valid value added by
+                               return v;                    // another thread
+                       } else {
+                               return new DfsPackFile(
+                                               this, dsc, key != null ? key : new DfsPackKey());
+                       }
+               });
        }
 
        private int hash(int packHash, long off) {
@@ -504,9 +506,7 @@ public final class DfsBlockCache {
        }
 
        void remove(DfsPackFile pack) {
-               synchronized (packCache) {
-                       packCache.remove(pack.getPackDescription());
-               }
+               packCache.remove(pack.getPackDescription());
        }
 
        private int slot(DfsPackKey pack, long position) {