Diffstat (limited to 'org.eclipse.jgit/src/org')
 org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java | 12
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java | 16
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java | 54
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java | 126
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java | 139
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java | 49
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableStack.java | 35
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java | 1
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java | 58
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexConstants.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexLoader.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexPrettyPrinter.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexWriter.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/PackIndexMerger.java | 2
 org.eclipse.jgit/src/org/eclipse/jgit/revplot/PlotWalk.java | 7
 org.eclipse.jgit/src/org/eclipse/jgit/transport/AmazonS3.java | 9
 org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ChangedPathTreeFilter.java | 1
 org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java | 13
 19 files changed, 267 insertions(+), 265 deletions(-)
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java
index b033177e05..58b4d3dc56 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java
@@ -142,7 +142,17 @@ public class ManifestParser extends DefaultHandler {
xmlInRead++;
final XMLReader xr;
try {
- xr = SAXParserFactory.newInstance().newSAXParser().getXMLReader();
+ SAXParserFactory spf = SAXParserFactory.newInstance();
+ spf.setFeature(
+ "http://xml.org/sax/features/external-general-entities", //$NON-NLS-1$
+ false);
+ spf.setFeature(
+ "http://xml.org/sax/features/external-parameter-entities", //$NON-NLS-1$
+ false);
+ spf.setFeature(
+ "http://apache.org/xml/features/disallow-doctype-decl", //$NON-NLS-1$
+ true);
+ xr = spf.newSAXParser().getXMLReader();
} catch (SAXException | ParserConfigurationException e) {
throw new IOException(JGitText.get().noXMLParserAvailable, e);
}
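The ManifestParser hunk above (and the matching hunk in AmazonS3.java further down) hardens the SAX parser against XML external entity (XXE) attacks by disabling external entities and rejecting DOCTYPE declarations. The following is a minimal standalone sketch of the same configuration using standard JAXP/SAX APIs; the class and helper names are illustrative, not taken from the patch:

```java
import java.io.StringReader;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParserFactory;

import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;

public class SecureSaxExample {
	// Build an XMLReader with the same XXE protections as the patch applies.
	static XMLReader newSecureReader()
			throws ParserConfigurationException, SAXException {
		SAXParserFactory spf = SAXParserFactory.newInstance();
		// Do not resolve external general or parameter entities.
		spf.setFeature("http://xml.org/sax/features/external-general-entities", false);
		spf.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
		// Reject any document that declares a DOCTYPE at all.
		spf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
		return spf.newSAXParser().getXMLReader();
	}

	public static void main(String[] args) throws Exception {
		XMLReader xr = newSecureReader();
		xr.setContentHandler(new DefaultHandler());
		try {
			// A DOCTYPE now fails the parse instead of fetching the entity.
			xr.parse(new InputSource(new StringReader(
					"<!DOCTYPE foo [<!ENTITY x SYSTEM \"file:///etc/passwd\">]><foo>&x;</foo>")));
		} catch (SAXException e) {
			System.out.println("Rejected: " + e.getMessage());
		}
	}
}
```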
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
index 16315bf4f2..dd9e4b96a4 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
@@ -83,7 +83,6 @@ public class DfsInserter extends ObjectInserter {
DfsPackDescription packDsc;
PackStream packOut;
private boolean rollback;
- private boolean checkExisting = true;
/**
* Initialize a new inserter.
@@ -98,18 +97,6 @@ public class DfsInserter extends ObjectInserter {
ConfigConstants.CONFIG_KEY_MIN_BYTES_OBJ_SIZE_INDEX, -1);
}
- /**
- * Check existence
- *
- * @param check
- * if {@code false}, will write out possibly-duplicate objects
- * without first checking whether they exist in the repo; default
- * is true.
- */
- public void checkExisting(boolean check) {
- checkExisting = check;
- }
-
void setCompressionLevel(int compression) {
this.compression = compression;
}
@@ -130,8 +117,9 @@ public class DfsInserter extends ObjectInserter {
if (objectMap != null && objectMap.contains(id))
return id;
// Ignore unreachable (garbage) objects here.
- if (checkExisting && db.has(id, true))
+ if (db.has(id, true)) {
return id;
+ }
long offset = beginObject(type, len);
packOut.compress.write(data, off, len);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
index efd666ff27..1a873d1204 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
@@ -52,16 +52,6 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
boolean dirty() {
return true;
}
-
- @Override
- void clearDirty() {
- // Always dirty.
- }
-
- @Override
- public void markDirty() {
- // Always dirty.
- }
};
/**
@@ -534,7 +524,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
DfsPackFile[] packs = new DfsPackFile[1 + o.packs.length];
packs[0] = newPack;
System.arraycopy(o.packs, 0, packs, 1, o.packs.length);
- n = new PackListImpl(packs, o.reftables);
+ n = new PackList(packs, o.reftables);
} while (!packList.compareAndSet(o, n));
}
@@ -559,7 +549,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
}
}
tables.add(new DfsReftable(add));
- n = new PackListImpl(o.packs, tables.toArray(new DfsReftable[0]));
+ n = new PackList(o.packs, tables.toArray(new DfsReftable[0]));
} while (!packList.compareAndSet(o, n));
}
@@ -613,13 +603,12 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
}
if (newPacks.isEmpty() && newReftables.isEmpty())
- return new PackListImpl(NO_PACKS.packs, NO_PACKS.reftables);
+ return new PackList(NO_PACKS.packs, NO_PACKS.reftables);
if (!foundNew) {
- old.clearDirty();
return old;
}
Collections.sort(newReftables, reftableComparator());
- return new PackListImpl(
+ return new PackList(
newPacks.toArray(new DfsPackFile[0]),
newReftables.toArray(new DfsReftable[0]));
}
@@ -685,7 +674,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
}
/** Snapshot of packs scanned in a single pass. */
- public abstract static class PackList {
+ public static class PackList {
/** All known packs, sorted. */
public final DfsPackFile[] packs;
@@ -715,39 +704,8 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
return lastModified;
}
- abstract boolean dirty();
- abstract void clearDirty();
-
- /**
- * Mark pack list as dirty.
- * <p>
- * Used when the caller knows that new data might have been written to the
- * repository that could invalidate open readers depending on this pack list,
- * for example if refs are newly scanned.
- */
- public abstract void markDirty();
- }
-
- private static final class PackListImpl extends PackList {
- private volatile boolean dirty;
-
- PackListImpl(DfsPackFile[] packs, DfsReftable[] reftables) {
- super(packs, reftables);
- }
-
- @Override
boolean dirty() {
- return dirty;
- }
-
- @Override
- void clearDirty() {
- dirty = false;
- }
-
- @Override
- public void markDirty() {
- dirty = true;
+ return false;
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java
index f9c01b9d6e..6339b0326a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java
@@ -405,7 +405,7 @@ public class DfsPackCompactor {
pw.addObject(obj);
obj.add(added);
- src.representation(rep, id.offset, ctx, rev);
+ src.fillRepresentation(rep, id.offset, ctx, rev);
if (rep.getFormat() != PACK_DELTA)
continue;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
index 9a95ddc370..05b63eaca1 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
@@ -27,6 +27,9 @@ import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.text.MessageFormat;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
@@ -49,6 +52,7 @@ import org.eclipse.jgit.internal.storage.file.PackObjectSizeIndexLoader;
import org.eclipse.jgit.internal.storage.file.PackReverseIndex;
import org.eclipse.jgit.internal.storage.file.PackReverseIndexFactory;
import org.eclipse.jgit.internal.storage.pack.BinaryDelta;
+import org.eclipse.jgit.internal.storage.pack.ObjectToPack;
import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
import org.eclipse.jgit.internal.storage.pack.StoredObjectRepresentation;
import org.eclipse.jgit.lib.AbbreviatedObjectId;
@@ -59,6 +63,7 @@ import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.StoredConfig;
+import org.eclipse.jgit.util.BlockList;
import org.eclipse.jgit.util.LongList;
/**
@@ -71,6 +76,10 @@ public final class DfsPackFile extends BlockBasedFile {
private static final long REF_POSITION = 0;
+ private static final Comparator<DfsObjectToPack> OFFSET_SORT = (
+ DfsObjectToPack a,
+ DfsObjectToPack b) -> Long.signum(a.getOffset() - b.getOffset());
+
/**
* Loader for the default file-based {@link PackBitmapIndex} implementation.
*/
@@ -433,6 +442,10 @@ public final class DfsPackFile extends BlockBasedFile {
return 0 < offset && !isCorrupt(offset);
}
+ int findIdxPosition(DfsReader ctx, AnyObjectId id) throws IOException {
+ return idx(ctx).findPosition(id);
+ }
+
/**
* Get an object from this pack.
*
@@ -455,23 +468,43 @@ public final class DfsPackFile extends BlockBasedFile {
return idx(ctx).findOffset(id);
}
- void resolve(DfsReader ctx, Set<ObjectId> matches, AbbreviatedObjectId id,
- int matchLimit) throws IOException {
- idx(ctx).resolve(matches, id, matchLimit);
- }
-
/**
- * Obtain the total number of objects available in this pack. This method
- * relies on pack index, giving number of effectively available objects.
+ * Return objects in the list available in this pack, sorted in (pack,
+ * offset) order.
*
* @param ctx
- * current reader for the calling thread.
- * @return number of objects in index of this pack, likewise in this pack
+ * a reader
+ * @param objects
+ * objects we are looking for
+ * @param skipFound
+ * ignore objects already found.
+ * @return list of objects with pack and offset set.
* @throws IOException
- * the index file cannot be loaded into memory.
+ * an error occurred
*/
- long getObjectCount(DfsReader ctx) throws IOException {
- return idx(ctx).getObjectCount();
+ List<DfsObjectToPack> findAllFromPack(DfsReader ctx,
+ Iterable<ObjectToPack> objects, boolean skipFound)
+ throws IOException {
+ List<DfsObjectToPack> tmp = new BlockList<>();
+ for (ObjectToPack obj : objects) {
+ DfsObjectToPack otp = (DfsObjectToPack) obj;
+ if (skipFound && otp.isFound()) {
+ continue;
+ }
+ long p = idx(ctx).findOffset(otp);
+ if (p <= 0 || isCorrupt(p)) {
+ continue;
+ }
+ otp.setOffset(p);
+ tmp.add(otp);
+ }
+ Collections.sort(tmp, OFFSET_SORT);
+ return tmp;
+ }
+
+ void resolve(DfsReader ctx, Set<ObjectId> matches, AbbreviatedObjectId id,
+ int matchLimit) throws IOException {
+ idx(ctx).resolve(matches, id, matchLimit);
}
private byte[] decompress(long position, int sz, DfsReader ctx)
@@ -1135,31 +1168,29 @@ public final class DfsPackFile extends BlockBasedFile {
/**
* Return the size of the object from the object-size index. The object
* should be a blob. Any other type is not indexed and returns -1.
- *
- * Caller MUST be sure that the object is in the pack (e.g. with
- * {@link #hasObject(DfsReader, AnyObjectId)}) and the pack has object size
- * index (e.g. with {@link #hasObjectSizeIndex(DfsReader)}) before asking
- * the indexed size.
+ * <p>
+ * Caller MUST pass a valid index position, as returned by
+ * {@link #findIdxPosition(DfsReader, AnyObjectId)} and verify the pack has
+ * object size index (e.g. with {@link #hasObjectSizeIndex(DfsReader)})
+ * before asking the indexed size.
*
* @param ctx
* reader context to support reading from the backing store if
* the object size index is not already loaded in memory.
- * @param id
- * object id of an object in the pack
+ * @param idxPosition
+ * position in the primary index of the object we are looking
+ * for, as returned by findIdxPosition
* @return size of the object from the index. Negative if object is not in
* the index (below threshold or not a blob)
* @throws IOException
* could not read the object size index. IO problem or the pack
* doesn't have it.
*/
- long getIndexedObjectSize(DfsReader ctx, AnyObjectId id)
+ long getIndexedObjectSize(DfsReader ctx, int idxPosition)
throws IOException {
- int idxPosition = idx(ctx).findPosition(id);
if (idxPosition < 0) {
- throw new IllegalArgumentException(
- "Cannot get size from index since object is not in pack"); //$NON-NLS-1$
+ throw new IllegalArgumentException("Invalid index position"); //$NON-NLS-1$
}
-
PackObjectSizeIndex sizeIdx = getObjectSizeIndex(ctx);
if (sizeIdx == null) {
throw new IllegalStateException(
@@ -1169,12 +1200,47 @@ public final class DfsPackFile extends BlockBasedFile {
return sizeIdx.getSize(idxPosition);
}
- void representation(DfsObjectRepresentation r, final long pos,
+ /**
+ * Populates the representation object with the details of how the object at
+ * "pos" is stored in this pack (e.g. whole or deltified, its packed
+ * length).
+ *
+ * @param r
+ * representation object to carry data
+ * @param offset
+ * offset in this pack of the object
+ * @param ctx
+ * a reader
+ * @throws IOException
+ * an error reading the object from disk
+ */
+ void fillRepresentation(DfsObjectRepresentation r, long offset,
+ DfsReader ctx) throws IOException {
+ fillRepresentation(r, offset, ctx, getReverseIdx(ctx));
+ }
+
+ /**
+ * Populates the representation object with the details of how the object at
+ * "pos" is stored in this pack (e.g. whole or deltified, its packed
+ * length).
+ *
+ * @param r
+ * representation object to carry data
+ * @param offset
+ * offset in this pack of the object
+ * @param ctx
+ * a reader
+ * @param rev
+ * reverse index of this pack
+ * @throws IOException
+ * an error reading the object from disk
+ */
+ void fillRepresentation(DfsObjectRepresentation r, long offset,
DfsReader ctx, PackReverseIndex rev)
throws IOException {
- r.offset = pos;
+ r.offset = offset;
final byte[] ib = ctx.tempId;
- readFully(pos, ib, 0, 20, ctx);
+ readFully(offset, ib, 0, 20, ctx);
int c = ib[0] & 0xff;
int p = 1;
final int typeCode = (c >> 4) & 7;
@@ -1182,7 +1248,7 @@ public final class DfsPackFile extends BlockBasedFile {
c = ib[p++] & 0xff;
}
- long len = rev.findNextOffset(pos, length - 20) - pos;
+ long len = rev.findNextOffset(offset, length - 20) - offset;
switch (typeCode) {
case Constants.OBJ_COMMIT:
case Constants.OBJ_TREE:
@@ -1203,13 +1269,13 @@ public final class DfsPackFile extends BlockBasedFile {
ofs += (c & 127);
}
r.format = StoredObjectRepresentation.PACK_DELTA;
- r.baseId = rev.findObject(pos - ofs);
+ r.baseId = rev.findObject(offset - ofs);
r.length = len - p;
return;
}
case Constants.OBJ_REF_DELTA: {
- readFully(pos + p, ib, 0, 20, ctx);
+ readFully(offset + p, ib, 0, 20, ctx);
r.format = StoredObjectRepresentation.PACK_DELTA;
r.baseId = ObjectId.fromRaw(ib);
r.length = len - p - 20;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
index 62f6753e5d..f50cd597e5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
@@ -38,8 +38,6 @@ import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource;
import org.eclipse.jgit.internal.storage.dfs.DfsReader.PackLoadListener.DfsBlockData;
import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl;
import org.eclipse.jgit.internal.storage.file.PackBitmapIndex;
-import org.eclipse.jgit.internal.storage.file.PackIndex;
-import org.eclipse.jgit.internal.storage.file.PackReverseIndex;
import org.eclipse.jgit.internal.storage.pack.CachedPack;
import org.eclipse.jgit.internal.storage.pack.ObjectReuseAsIs;
import org.eclipse.jgit.internal.storage.pack.ObjectToPack;
@@ -58,7 +56,6 @@ import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.ProgressMonitor;
-import org.eclipse.jgit.util.BlockList;
/**
* Reader to access repository content through.
@@ -190,31 +187,44 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
@Override
public boolean has(AnyObjectId objectId) throws IOException {
+ return findPack(objectId) >= 0;
+ }
+
+ private int findPack(AnyObjectId objectId) throws IOException {
if (last != null
- && !skipGarbagePack(last)
- && last.hasObject(this, objectId))
- return true;
+ && !skipGarbagePack(last)) {
+ int idxPos = last.findIdxPosition(this, objectId);
+ if (idxPos >= 0) {
+ return idxPos;
+ }
+ }
+
PackList packList = db.getPackList();
- if (hasImpl(packList, objectId)) {
- return true;
+ int idxPos = findInPackList(packList, objectId);
+ if (idxPos >= 0) {
+ return idxPos;
} else if (packList.dirty()) {
stats.scanPacks++;
- return hasImpl(db.scanPacks(packList), objectId);
+ idxPos = findInPackList(db.scanPacks(packList), objectId);
+ return idxPos;
}
- return false;
+ return -1;
}
- private boolean hasImpl(PackList packList, AnyObjectId objectId)
+ // Leave "last" pointing to the pack and return the idx position of the
+ // object (-1 if not found)
+ private int findInPackList(PackList packList, AnyObjectId objectId)
throws IOException {
for (DfsPackFile pack : packList.packs) {
if (pack == last || skipGarbagePack(pack))
continue;
- if (pack.hasObject(this, objectId)) {
+ int idxPos = pack.findIdxPosition(this, objectId);
+ if (idxPos >= 0) {
last = pack;
- return true;
+ return idxPos;
}
}
- return false;
+ return -1;
}
@Override
@@ -502,8 +512,8 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
public long getObjectSize(AnyObjectId objectId, int typeHint)
throws MissingObjectException, IncorrectObjectTypeException,
IOException {
- DfsPackFile pack = findPackWithObject(objectId);
- if (pack == null) {
+ int idxPos = findPack(objectId);
+ if (idxPos < 0) {
if (typeHint == OBJ_ANY) {
throw new MissingObjectException(objectId.copy(),
JGitText.get().unknownObjectType2);
@@ -511,16 +521,15 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
throw new MissingObjectException(objectId.copy(), typeHint);
}
- if (typeHint != Constants.OBJ_BLOB || !safeHasObjectSizeIndex(pack)) {
- return pack.getObjectSize(this, objectId);
+ if (typeHint != Constants.OBJ_BLOB || !safeHasObjectSizeIndex(last)) {
+ return last.getObjectSize(this, objectId);
}
- Optional<Long> maybeSz = safeGetIndexedObjectSize(pack, objectId);
- long sz = maybeSz.orElse(-1L);
+ long sz = safeGetIndexedObjectSize(last, idxPos);
if (sz >= 0) {
return sz;
}
- return pack.getObjectSize(this, objectId);
+ return last.getObjectSize(this, objectId);
}
@@ -528,8 +537,8 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
public boolean isNotLargerThan(AnyObjectId objectId, int typeHint,
long limit) throws MissingObjectException,
IncorrectObjectTypeException, IOException {
- DfsPackFile pack = findPackWithObject(objectId);
- if (pack == null) {
+ int idxPos = findPack(objectId);
+ if (idxPos < 0) {
if (typeHint == OBJ_ANY) {
throw new MissingObjectException(objectId.copy(),
JGitText.get().unknownObjectType2);
@@ -538,28 +547,22 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
}
stats.isNotLargerThanCallCount += 1;
- if (typeHint != Constants.OBJ_BLOB || !safeHasObjectSizeIndex(pack)) {
- return pack.getObjectSize(this, objectId) <= limit;
+ if (typeHint != Constants.OBJ_BLOB || !safeHasObjectSizeIndex(last)) {
+ return last.getObjectSize(this, objectId) <= limit;
}
- Optional<Long> maybeSz = safeGetIndexedObjectSize(pack, objectId);
- if (maybeSz.isEmpty()) {
- // Exception in object size index
- return pack.getObjectSize(this, objectId) <= limit;
- }
-
- long sz = maybeSz.get();
+ long sz = safeGetIndexedObjectSize(last, idxPos);
if (sz >= 0) {
return sz <= limit;
}
- if (isLimitInsideIndexThreshold(pack, limit)) {
+ if (isLimitInsideIndexThreshold(last, limit)) {
// With threshold T, not-found means object < T
// If limit L > T, then object < T < L
return true;
}
- return pack.getObjectSize(this, objectId) <= limit;
+ return last.getObjectSize(this, objectId) <= limit;
}
private boolean safeHasObjectSizeIndex(DfsPackFile pack) {
@@ -570,21 +573,22 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
}
}
- private Optional<Long> safeGetIndexedObjectSize(DfsPackFile pack,
- AnyObjectId objectId) {
+ private long safeGetIndexedObjectSize(DfsPackFile pack,
+ int idxPos) {
long sz;
try {
- sz = pack.getIndexedObjectSize(this, objectId);
+ sz = pack.getIndexedObjectSize(this, idxPos);
} catch (IOException e) {
- // Do not count the exception as an index miss
- return Optional.empty();
+ // If there is any error in the index, we should have seen it
+ // on hasObjectSizeIndex.
+ throw new IllegalStateException(e);
}
if (sz < 0) {
stats.objectSizeIndexMiss += 1;
} else {
stats.objectSizeIndexHit += 1;
}
- return Optional.of(sz);
+ return sz;
}
private boolean isLimitInsideIndexThreshold(DfsPackFile pack, long limit) {
@@ -595,34 +599,11 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
}
}
- private DfsPackFile findPackWithObject(AnyObjectId objectId)
- throws IOException {
- if (last != null && !skipGarbagePack(last)
- && last.hasObject(this, objectId)) {
- return last;
- }
- PackList packList = db.getPackList();
- // hasImpl doesn't check "last", but leaves "last" pointing to the pack
- // with the object
- if (hasImpl(packList, objectId)) {
- return last;
- } else if (packList.dirty()) {
- if (hasImpl(db.getPackList(), objectId)) {
- return last;
- }
- }
- return null;
- }
-
@Override
public DfsObjectToPack newObjectToPack(AnyObjectId objectId, int type) {
return new DfsObjectToPack(objectId, type);
}
- private static final Comparator<DfsObjectToPack> OFFSET_SORT = (
- DfsObjectToPack a,
- DfsObjectToPack b) -> Long.signum(a.getOffset() - b.getOffset());
-
@Override
public void selectObjectRepresentation(PackWriter packer,
ProgressMonitor monitor, Iterable<ObjectToPack> objects)
@@ -642,16 +623,15 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
ProgressMonitor monitor, Iterable<ObjectToPack> objects,
List<DfsPackFile> packs, boolean skipFound) throws IOException {
for (DfsPackFile pack : packs) {
- List<DfsObjectToPack> tmp = findAllFromPack(pack, objects, skipFound);
- if (tmp.isEmpty())
+ List<DfsObjectToPack> inPack = pack.findAllFromPack(this, objects, skipFound);
+ if (inPack.isEmpty())
continue;
- Collections.sort(tmp, OFFSET_SORT);
- PackReverseIndex rev = pack.getReverseIdx(this);
DfsObjectRepresentation rep = new DfsObjectRepresentation(pack);
- for (DfsObjectToPack otp : tmp) {
- pack.representation(rep, otp.getOffset(), this, rev);
+ for (DfsObjectToPack otp : inPack) {
+ // Populate rep.{offset,length} from the pack
+ pack.fillRepresentation(rep, otp.getOffset(), this);
otp.setOffset(0);
- packer.select(otp, rep);
+ packer.select(otp, rep); // Set otp.offset from rep
if (!otp.isFound()) {
otp.setFound();
monitor.update(1);
@@ -698,24 +678,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
return false;
}
- private List<DfsObjectToPack> findAllFromPack(DfsPackFile pack,
- Iterable<ObjectToPack> objects, boolean skipFound)
- throws IOException {
- List<DfsObjectToPack> tmp = new BlockList<>();
- PackIndex idx = pack.getPackIndex(this);
- for (ObjectToPack obj : objects) {
- DfsObjectToPack otp = (DfsObjectToPack) obj;
- if (skipFound && otp.isFound()) {
- continue;
- }
- long p = idx.findOffset(otp);
- if (0 < p && !pack.isCorrupt(p)) {
- otp.setOffset(p);
- tmp.add(otp);
- }
- }
- return tmp;
- }
+
@Override
public void copyObjectAsIs(PackOutputStream out, ObjectToPack otp,
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java
index b9e9e661e9..64f8c9b0e3 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java
@@ -75,16 +75,11 @@ public class FileReftableDatabase extends RefDatabase {
private volatile boolean autoRefresh;
FileReftableDatabase(FileRepository repo) throws IOException {
- this(repo, new File(new File(repo.getCommonDirectory(), Constants.REFTABLE),
- Constants.TABLES_LIST));
- }
-
- FileReftableDatabase(FileRepository repo, File refstackName) throws IOException {
this.fileRepository = repo;
this.autoRefresh = repo.getConfig().getBoolean(
ConfigConstants.CONFIG_REFTABLE_SECTION,
ConfigConstants.CONFIG_KEY_AUTOREFRESH, false);
- this.reftableStack = new FileReftableStack(refstackName,
+ this.reftableStack = new FileReftableStack(
new File(fileRepository.getCommonDirectory(), Constants.REFTABLE),
() -> fileRepository.fireEvent(new RefsChangedEvent()),
() -> fileRepository.getConfig());
@@ -269,8 +264,14 @@ public class FileReftableDatabase extends RefDatabase {
public void refresh() {
try {
if (!reftableStack.isUpToDate()) {
- reftableDatabase.clearCache();
- reftableStack.reload();
+ ReentrantLock lock = getLock();
+ lock.lock();
+ try {
+ reftableDatabase.clearCache();
+ reftableStack.reload();
+ } finally {
+ lock.unlock();
+ }
}
} catch (IOException e) {
throw new UncheckedIOException(e);
@@ -683,32 +684,20 @@ public class FileReftableDatabase extends RefDatabase {
* the repository
* @param writeLogs
* whether to write reflogs
- * @return a reftable based RefDB from an existing repository.
* @throws IOException
* on IO error
*/
- public static FileReftableDatabase convertFrom(FileRepository repo,
- boolean writeLogs) throws IOException {
- FileReftableDatabase newDb = null;
- File reftableList = null;
- try {
- File reftableDir = new File(repo.getCommonDirectory(),
- Constants.REFTABLE);
- reftableList = new File(reftableDir, Constants.TABLES_LIST);
- if (!reftableDir.isDirectory()) {
- reftableDir.mkdir();
- }
+ public static void convertFrom(FileRepository repo, boolean writeLogs)
+ throws IOException {
+ File reftableDir = new File(repo.getCommonDirectory(),
+ Constants.REFTABLE);
+ if (!reftableDir.isDirectory()) {
+ reftableDir.mkdir();
+ }
- try (FileReftableStack stack = new FileReftableStack(reftableList,
- reftableDir, null, () -> repo.getConfig())) {
- stack.addReftable(rw -> writeConvertTable(repo, rw, writeLogs));
- }
- reftableList = null;
- } finally {
- if (reftableList != null) {
- reftableList.delete();
- }
+ try (FileReftableStack stack = new FileReftableStack(reftableDir, null,
+ () -> repo.getConfig())) {
+ stack.addReftable(rw -> writeConvertTable(repo, rw, writeLogs));
}
- return newDb;
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableStack.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableStack.java
index b2c88922b8..6658575fc5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableStack.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableStack.java
@@ -42,6 +42,7 @@ import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;
import org.eclipse.jgit.internal.storage.reftable.ReftableReader;
import org.eclipse.jgit.internal.storage.reftable.ReftableWriter;
import org.eclipse.jgit.lib.Config;
+import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig;
import org.eclipse.jgit.lib.CoreConfig.TrustStat;
import org.eclipse.jgit.util.FileUtils;
@@ -69,7 +70,7 @@ public class FileReftableStack implements AutoCloseable {
private long lastNextUpdateIndex;
- private final File stackPath;
+ private final File tablesListFile;
private final File reftableDir;
@@ -111,8 +112,6 @@ public class FileReftableStack implements AutoCloseable {
/**
* Creates a stack corresponding to the list of reftables in the argument
*
- * @param stackPath
- * the filename for the stack.
* @param reftableDir
* the dir holding the tables.
* @param onChange
@@ -122,10 +121,10 @@ public class FileReftableStack implements AutoCloseable {
* @throws IOException
* on I/O problems
*/
- public FileReftableStack(File stackPath, File reftableDir,
+ public FileReftableStack(File reftableDir,
@Nullable Runnable onChange, Supplier<Config> configSupplier)
throws IOException {
- this.stackPath = stackPath;
+ this.tablesListFile = new File(reftableDir, Constants.TABLES_LIST);
this.reftableDir = reftableDir;
this.stack = new ArrayList<>();
this.configSupplier = configSupplier;
@@ -244,7 +243,7 @@ public class FileReftableStack implements AutoCloseable {
}
if (!success) {
- throw new LockFailedException(stackPath);
+ throw new LockFailedException(tablesListFile);
}
mergedReftable = new MergedReftable(stack.stream()
@@ -288,14 +287,14 @@ public class FileReftableStack implements AutoCloseable {
List<String> names = new ArrayList<>(stack.size() + 1);
old = snapshot.get();
try (BufferedReader br = new BufferedReader(
- new InputStreamReader(new FileInputStream(stackPath), UTF_8))) {
+ new InputStreamReader(new FileInputStream(tablesListFile), UTF_8))) {
String line;
while ((line = br.readLine()) != null) {
if (!line.isEmpty()) {
names.add(line);
}
}
- snapshot.compareAndSet(old, FileSnapshot.save(stackPath));
+ snapshot.compareAndSet(old, FileSnapshot.save(tablesListFile));
} catch (FileNotFoundException e) {
// file isn't there: empty repository.
snapshot.compareAndSet(old, FileSnapshot.MISSING_FILE);
@@ -315,15 +314,16 @@ public class FileReftableStack implements AutoCloseable {
break;
case AFTER_OPEN:
try (InputStream stream = Files
- .newInputStream(stackPath.toPath())) {
- // open the tables.list file to refresh attributes (on some
- // NFS clients)
+ .newInputStream(reftableDir.toPath())) {
+ // open the refs/reftable/ directory to refresh attributes
+ // of reftable files and the tables.list file listing their
+ // names (on some NFS clients)
} catch (FileNotFoundException | NoSuchFileException e) {
// ignore
}
//$FALL-THROUGH$
case ALWAYS:
- if (!snapshot.get().isModified(stackPath)) {
+ if (!snapshot.get().isModified(tablesListFile)) {
return true;
}
break;
@@ -387,7 +387,7 @@ public class FileReftableStack implements AutoCloseable {
*/
@SuppressWarnings("nls")
public boolean addReftable(Writer w) throws IOException {
- LockFile lock = new LockFile(stackPath);
+ LockFile lock = new LockFile(tablesListFile);
try {
if (!lock.lockForAppend()) {
return false;
@@ -398,8 +398,7 @@ public class FileReftableStack implements AutoCloseable {
String fn = filename(nextUpdateIndex(), nextUpdateIndex());
- File tmpTable = File.createTempFile(fn + "_", ".ref",
- stackPath.getParentFile());
+ File tmpTable = File.createTempFile(fn + "_", ".ref", reftableDir);
ReftableWriter.Stats s;
try (FileOutputStream fos = new FileOutputStream(tmpTable)) {
@@ -453,7 +452,7 @@ public class FileReftableStack implements AutoCloseable {
String fn = filename(first, last);
File tmpTable = File.createTempFile(fn + "_", ".ref", //$NON-NLS-1$//$NON-NLS-2$
- stackPath.getParentFile());
+ reftableDir);
try (FileOutputStream fos = new FileOutputStream(tmpTable)) {
ReftableCompactor c = new ReftableCompactor(fos)
.setConfig(reftableConfig())
@@ -497,7 +496,7 @@ public class FileReftableStack implements AutoCloseable {
if (first >= last) {
return true;
}
- LockFile lock = new LockFile(stackPath);
+ LockFile lock = new LockFile(tablesListFile);
File tmpTable = null;
List<LockFile> subtableLocks = new ArrayList<>();
@@ -526,7 +525,7 @@ public class FileReftableStack implements AutoCloseable {
tmpTable = compactLocked(first, last);
- lock = new LockFile(stackPath);
+ lock = new LockFile(tablesListFile);
if (!lock.lock()) {
return false;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java
index 9f21481a13..3a6de4e8e2 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java
@@ -200,6 +200,7 @@ public class ObjectDirectory extends FileObjectDatabase {
loose.close();
packed.close();
+ preserved.close();
// Fully close all loaded alternates and clear the alternate list.
AlternateHandle[] alt = alternates.get();
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
index 05f1ef53a1..319a9ed710 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
@@ -701,41 +701,47 @@ public class RefDirectory extends RefDatabase {
}
String name = dst.getName();
- // Write the packed-refs file using an atomic update. We might
- // wind up reading it twice, before and after the lock, to ensure
- // we don't miss an edit made externally.
- PackedRefList packed = getPackedRefs();
- if (packed.contains(name)) {
- inProcessPackedRefsLock.lock();
+ // Get and keep the packed-refs lock while updating packed-refs and
+ // removing any loose ref
+ inProcessPackedRefsLock.lock();
+ try {
+ LockFile lck = lockPackedRefsOrThrow();
try {
- LockFile lck = lockPackedRefsOrThrow();
- try {
+ // Write the packed-refs file using an atomic update. We might
+ // wind up reading it twice, before and after checking if the
+ // ref to delete is included or not, to ensure
+ // we don't rely on a PackedRefList that is a result of in-memory
+ // or NFS caching.
+ PackedRefList packed = getPackedRefs();
+ if (packed.contains(name)) {
+ // Force update our packed-refs snapshot before writing
packed = refreshPackedRefs();
int idx = packed.find(name);
if (0 <= idx) {
commitPackedRefs(lck, packed.remove(idx), packed, true);
}
- } finally {
- lck.unlock();
}
- } finally {
- inProcessPackedRefsLock.unlock();
- }
- }
- RefList<LooseRef> curLoose, newLoose;
- do {
- curLoose = looseRefs.get();
- int idx = curLoose.find(name);
- if (idx < 0)
- break;
- newLoose = curLoose.remove(idx);
- } while (!looseRefs.compareAndSet(curLoose, newLoose));
+ RefList<LooseRef> curLoose, newLoose;
+ do {
+ curLoose = looseRefs.get();
+ int idx = curLoose.find(name);
+ if (idx < 0) {
+ break;
+ }
+ newLoose = curLoose.remove(idx);
+ } while (!looseRefs.compareAndSet(curLoose, newLoose));
- int levels = levelsIn(name) - 2;
- delete(logFor(name), levels);
- if (dst.getStorage().isLoose()) {
- deleteAndUnlock(fileFor(name), levels, update);
+ int levels = levelsIn(name) - 2;
+ delete(logFor(name), levels);
+ if (dst.getStorage().isLoose()) {
+ deleteAndUnlock(fileFor(name), levels, update);
+ }
+ } finally {
+ lck.unlock();
+ }
+ } finally {
+ inProcessPackedRefsLock.unlock();
}
modCnt.incrementAndGet();
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexConstants.java
index 0e45c9dfe5..5d86f44baf 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexConstants.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexConstants.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2025, Google Inc.
+ * Copyright (C) 2025, Google LLC
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexLoader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexLoader.java
index 0e51e90815..61caddc221 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexLoader.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexLoader.java
@@ -257,7 +257,7 @@ public class MultiPackIndexLoader {
MultiPackIndexBuilder addPackNames(byte[] buffer)
throws MultiPackIndexFormatException {
assertChunkNotSeenYet(packNames, MIDX_CHUNKID_PACKNAMES);
- packNames = new String(buffer, UTF_8).split("\u0000");
+ packNames = new String(buffer, UTF_8).split("\u0000"); //$NON-NLS-1$
return this;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexPrettyPrinter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexPrettyPrinter.java
index 795d39e375..948b7bc174 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexPrettyPrinter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexPrettyPrinter.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2025, Google Inc.
+ * Copyright (C) 2025, Google LLC
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexWriter.java
index 9d37450a46..b42c821a44 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexWriter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexWriter.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2025, Google Inc.
+ * Copyright (C) 2025, Google LLC
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/PackIndexMerger.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/PackIndexMerger.java
index 89814af107..f23665849e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/PackIndexMerger.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/PackIndexMerger.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2025, Google Inc.
+ * Copyright (C) 2025, Google LLC
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revplot/PlotWalk.java b/org.eclipse.jgit/src/org/eclipse/jgit/revplot/PlotWalk.java
index 0f6bd2d6cc..c8c454a228 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/revplot/PlotWalk.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/revplot/PlotWalk.java
@@ -169,8 +169,9 @@ public class PlotWalk extends RevWalk {
}
long timeof(RevObject o) {
- if (o instanceof RevCommit)
- return ((RevCommit) o).getCommitTime();
+ if (o instanceof RevCommit) {
+ return ((RevCommit) o).getCommitTime() * 1000L;
+ }
if (o instanceof RevTag) {
RevTag tag = (RevTag) o;
try {
@@ -179,7 +180,7 @@ public class PlotWalk extends RevWalk {
return 0;
}
PersonIdent who = tag.getTaggerIdent();
- return who != null ? who.getWhen().getTime() : 0;
+ return who != null ? who.getWhenAsInstant().toEpochMilli() : 0;
}
return 0;
}
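The PlotWalk change fixes a unit mismatch in timeof(): RevCommit.getCommitTime() returns whole seconds since the epoch as an int, while the tagger timestamp is taken in epoch milliseconds via PersonIdent.getWhenAsInstant().toEpochMilli(), so commit times must be scaled by 1000 to sort on a common scale. A small sketch of the conversion in isolation, using illustrative literal values rather than data from the patch:

```java
import java.time.Instant;

public class CommitTimeUnits {
	public static void main(String[] args) {
		// RevCommit.getCommitTime() yields whole seconds (int), e.g.:
		int commitTimeSeconds = 1_700_000_000;
		// PersonIdent.getWhenAsInstant().toEpochMilli() yields milliseconds:
		long taggerMillis = Instant.parse("2023-11-14T22:13:20Z").toEpochMilli();

		// Without the * 1000L scaling, the two values differ by a factor of 1000.
		long commitMillis = commitTimeSeconds * 1000L;
		System.out.println(commitMillis == taggerMillis); // true
	}
}
```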
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/AmazonS3.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/AmazonS3.java
index aaf9f8a08a..9d9f5495fe 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/AmazonS3.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/AmazonS3.java
@@ -760,6 +760,15 @@ public class AmazonS3 {
SAXParserFactory saxParserFactory = SAXParserFactory
.newInstance();
saxParserFactory.setNamespaceAware(true);
+ saxParserFactory.setFeature(
+ "http://xml.org/sax/features/external-general-entities", //$NON-NLS-1$
+ false);
+ saxParserFactory.setFeature(
+ "http://xml.org/sax/features/external-parameter-entities", //$NON-NLS-1$
+ false);
+ saxParserFactory.setFeature(
+ "http://apache.org/xml/features/disallow-doctype-decl", //$NON-NLS-1$
+ true);
xr = saxParserFactory.newSAXParser().getXMLReader();
} catch (SAXException | ParserConfigurationException e) {
throw new IOException(
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ChangedPathTreeFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ChangedPathTreeFilter.java
index 2400e12240..a74b9b617f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ChangedPathTreeFilter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ChangedPathTreeFilter.java
@@ -125,6 +125,7 @@ public class ChangedPathTreeFilter extends TreeFilter {
return paths;
}
+ @SuppressWarnings("nls")
@Override
public String toString() {
return "(CHANGED_PATH(" + pathFilter.toString() + ")" //
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java
index 59bbacfa76..6a40fad1db 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java
@@ -363,6 +363,7 @@ public abstract class FS {
private static FileStoreAttributes getFileStoreAttributes(Path dir) {
FileStore s;
+ CompletableFuture<Optional<FileStoreAttributes>> f = null;
try {
if (Files.exists(dir)) {
s = Files.getFileStore(dir);
@@ -385,7 +386,7 @@ public abstract class FS {
return FALLBACK_FILESTORE_ATTRIBUTES;
}
- CompletableFuture<Optional<FileStoreAttributes>> f = CompletableFuture
+ f = CompletableFuture
.supplyAsync(() -> {
Lock lock = locks.computeIfAbsent(s,
l -> new ReentrantLock());
@@ -455,10 +456,13 @@ public abstract class FS {
}
// fall through and return fallback
} catch (IOException | ExecutionException | CancellationException e) {
+ cancel(f);
LOG.error(e.getMessage(), e);
} catch (TimeoutException | SecurityException e) {
+ cancel(f);
// use fallback
} catch (InterruptedException e) {
+ cancel(f);
LOG.error(e.getMessage(), e);
Thread.currentThread().interrupt();
}
@@ -467,6 +471,13 @@ public abstract class FS {
return FALLBACK_FILESTORE_ATTRIBUTES;
}
+ private static void cancel(
+ CompletableFuture<Optional<FileStoreAttributes>> f) {
+ if (f != null) {
+ f.cancel(true);
+ }
+ }
+
@SuppressWarnings("boxing")
private static Duration measureMinimalRacyInterval(Path dir) {
LOG.debug("{}: start measure minimal racy interval in {}", //$NON-NLS-1$