		DfsBlockCache oc = cache;
		cache = nc;
-		if (oc != null && oc.readAheadService != null)
-			oc.readAheadService.shutdown();
+		if (oc != null) {
+			if (oc.readAheadService != null)
+				oc.readAheadService.shutdown();
+			for (DfsPackFile pack : oc.getPackFiles())
+				pack.key.cachedSize.set(0);
+		}
	}
	/** @return the currently active DfsBlockCache. */
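Installing a new cache abandons the old cache's blocks in one step rather than evicting them one at a time, so the per-pack byte counters are cleared here; otherwise they would keep reporting bytes that are no longer held. A minimal sketch of the observable effect, assuming a DfsRepository `repo` whose packs were read through the old cache; the accessors outside this diff (getObjectDatabase(), getPacks(), DfsBlockCacheConfig) are recalled from the surrounding JGit API, not shown in the change:

// Sketch only; lives in package org.eclipse.jgit.storage.dfs, needs java.io.IOException.
static void showReset(DfsRepository repo) throws IOException {
	DfsPackFile pack = repo.getObjectDatabase().getPacks()[0];
	long before = pack.getCachedSize(); // non-zero if this pack has blocks in the old cache

	DfsBlockCache.reconfigure(new DfsBlockCacheConfig());

	// The old cache was dropped wholesale; the loop above zeroed its per-pack
	// counters instead of decrementing them block by block during eviction.
	long after = pack.getCachedSize(); // now 0
	System.out.println(before + " -> " + after);
}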
				e2 = table.get(slot);
			}
+			key.cachedSize.addAndGet(v.size());
			Ref<DfsBlock> ref = new Ref<DfsBlock>(key, position, v.size(), v);
			ref.hot = true;
			for (;;) {
				dead.next = null;
				dead.value = null;
				live -= dead.size;
+				dead.pack.cachedSize.addAndGet(-dead.size);
				statEvict++;
			} while (maxBytes < live);
			clockHand = prev;
		}
	}
+			key.cachedSize.addAndGet(size);
			ref = new Ref<T>(key, pos, size, v);
			ref.hot = true;
			for (;;) {
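Both insertion paths (getOrLoad above and the generic put here) add the block size to the owning pack's counter just before the new Ref is published, and the clock sweep subtracts the same size when it evicts a Ref, so the per-pack counters sum to roughly the cache's live byte total. A sketch of that invariant, assuming getInstance() (documented in the first hunk) and getPackFiles() (used there) are reachable from the caller:

long perPackTotal = 0;
for (DfsPackFile pack : DfsBlockCache.getInstance().getPackFiles())
	perPackTotal += pack.getCachedSize();
// perPackTotal approximates the cache's live byte count; a concurrent reader
// can see a momentarily stale sum while blocks are being inserted or evicted.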
		return packDesc;
	}
+	/** @return bytes cached in memory for this pack, excluding the index. */
+	public long getCachedSize() {
+		return key.cachedSize.get();
+	}
+
	private String getPackName() {
		return packDesc.getPackName();
	}
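The new accessor makes it straightforward to report cache usage per pack, for example in a periodic statistics dump. A usage sketch, assuming a DfsRepository `repo`; getPackDescription() is the existing accessor whose tail is visible at the top of this hunk, and getPackName() on DfsPackDescription is the method the private helper above delegates to (getPacks() can throw IOException, which the caller must handle):

for (DfsPackFile pack : repo.getObjectDatabase().getPacks())
	System.out.printf("%s: %d bytes cached%n",
			pack.getPackDescription().getPackName(),
			pack.getCachedSize());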
package org.eclipse.jgit.storage.dfs;
+import java.util.concurrent.atomic.AtomicLong;
+
final class DfsPackKey {
	final int hash;
+	final AtomicLong cachedSize;
+
	DfsPackKey() {
		// Multiply by 31 here so we can more directly combine with another
		// value without doing the multiply there.
		//
		hash = System.identityHashCode(this) * 31;
+		cachedSize = new AtomicLong();
	}
}
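cachedSize is an AtomicLong so the insertion paths, the eviction sweep, and getCachedSize() readers can all touch it concurrently without extra synchronization. The pre-multiplied hash is the usual 31 * a + b combining trick with the multiply hoisted to construction time, so call sites fold in a second value with a plain addition. A hypothetical combiner in that spirit, not the exact DfsBlockCache formula:

static int slotHash(DfsPackKey key, long position, int blockSize) {
	// key.hash already carries the "* 31", so combining is a single add.
	return key.hash + (int) (position / blockSize);
}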