Diffstat (limited to 'org.eclipse.jgit/src')
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java  3
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/CherryPickCommand.java  6
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java  3
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java  4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java  5
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/ResetCommand.java  4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/RevertCommand.java  6
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java  3
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/StatusCommand.java  4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/errors/CheckoutConflictException.java  4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/BareSuperprojectWriter.java  3
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java  4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java  3
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoProject.java  52
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java  1
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/diffmergetool/CommandExecutor.java  6
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/commitgraph/CommitGraphLoader.java  51
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/commitgraph/CommitGraphWriter.java  6
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java  563
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java  581
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java  340
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java  9
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java  28
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java  49
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java  15
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/BasePackBitmapIndex.java  2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java  7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/Pack.java  10
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndex.java  16
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexBuilder.java  69
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexRemapper.java  4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexV1.java  7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexWriterV1.java  6
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndex.java  54
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndexV1.java  18
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndexV2.java  16
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java  6
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SnapshottingRefDirectory.java  28
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/memory/TernarySearchTree.java  8
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaTask.java  7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackBitmapIndexWriter.java  33
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java  22
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/transport/ssh/OpenSshConfigFile.java  5
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/util/CleanupService.java  121
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/util/ShutdownHook.java  70
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/RebaseTodoFile.java  4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeAlgorithm.java  13
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/merge/ResolveMerger.java  78
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/patch/PatchApplier.java  163
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/revwalk/MergeBaseGenerator.java  5
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java  7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/Transport.java  6
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java  5
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkFetchConnection.java  66
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/http/JDKHttpConnection.java  4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ByteArraySet.java  8
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/IndexDiffFilter.java  17
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/PathFilterGroup.java  11
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/TreeFilter.java  2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/FS_POSIX.java  19
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/io/UnionInputStream.java  12
61 files changed, 1818 insertions(+), 864 deletions(-)
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java
index a03ea01864..c133219d4d 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java
@@ -17,7 +17,6 @@ import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashSet;
-import java.util.LinkedList;
import java.util.List;
import java.util.Set;
@@ -164,7 +163,7 @@ public class CheckoutCommand extends GitCommand<Ref> {
*/
protected CheckoutCommand(Repository repo) {
super(repo);
- this.paths = new LinkedList<>();
+ this.paths = new ArrayList<>();
}
@Override
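This series of changes swaps LinkedList for ArrayList across the api package. Both lists support the only operations these commands perform, appending and in-order iteration, but ArrayList stores its elements in one contiguous array instead of allocating a node per element. A minimal sketch of the access pattern, not part of the patch (class name and values are invented):

import java.util.ArrayList;
import java.util.List;

public class ListSwapSketch {
	public static void main(String[] args) {
		List<String> paths = new ArrayList<>(); // was: new LinkedList<>()
		for (int i = 0; i < 1_000_000; i++) {
			paths.add("path/" + i); // amortized O(1) append for both lists
		}
		int total = 0;
		for (String p : paths) { // ArrayList walks a flat array here;
			total += p.length(); // LinkedList would chase node pointers
		}
		System.out.println(total);
	}
}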
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CherryPickCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CherryPickCommand.java
index a1c64788bd..a353d1a135 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CherryPickCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CherryPickCommand.java
@@ -14,7 +14,7 @@ import static org.eclipse.jgit.lib.Constants.OBJECT_ID_ABBREV_STRING_LENGTH;
import java.io.IOException;
import java.text.MessageFormat;
-import java.util.LinkedList;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -63,7 +63,7 @@ import org.eclipse.jgit.treewalk.FileTreeIterator;
public class CherryPickCommand extends GitCommand<CherryPickResult> {
private String reflogPrefix = "cherry-pick:"; //$NON-NLS-1$
- private List<Ref> commits = new LinkedList<>();
+ private List<Ref> commits = new ArrayList<>();
private String ourCommitName = null;
@@ -102,7 +102,7 @@ public class CherryPickCommand extends GitCommand<CherryPickResult> {
UnmergedPathsException, ConcurrentRefUpdateException,
WrongRepositoryStateException, NoHeadException {
RevCommit newHead = null;
- List<Ref> cherryPickedRefs = new LinkedList<>();
+ List<Ref> cherryPickedRefs = new ArrayList<>();
checkCallable();
try (RevWalk revWalk = new RevWalk(repo)) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
index 483b9602da..a1a2cc09d2 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
@@ -16,7 +16,6 @@ import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
-import java.util.LinkedList;
import java.util.List;
import org.eclipse.jgit.annotations.NonNull;
@@ -109,7 +108,7 @@ public class CommitCommand extends GitCommand<RevCommit> {
* parents this commit should have. The current HEAD will be in this list
* and also all commits mentioned in .git/MERGE_HEAD
*/
- private List<ObjectId> parents = new LinkedList<>();
+ private List<ObjectId> parents = new ArrayList<>();
private String reflogComment;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java
index ed4a5342b3..7064f5a57a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java
@@ -13,9 +13,9 @@ package org.eclipse.jgit.api;
import java.io.IOException;
import java.text.MessageFormat;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
-import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -75,7 +75,7 @@ public class MergeCommand extends GitCommand<MergeResult> {
private ContentMergeStrategy contentStrategy;
- private List<Ref> commits = new LinkedList<>();
+ private List<Ref> commits = new ArrayList<>();
private Boolean squash;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java
index 757aff89c3..858bd961cd 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java
@@ -23,7 +23,6 @@ import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
@@ -1120,8 +1119,8 @@ public class RebaseCommand extends GitCommand<RebaseResult> {
private void popSteps(int numSteps) throws IOException {
if (numSteps == 0)
return;
- List<RebaseTodoLine> todoLines = new LinkedList<>();
- List<RebaseTodoLine> poppedLines = new LinkedList<>();
+ List<RebaseTodoLine> todoLines = new ArrayList<>();
+ List<RebaseTodoLine> poppedLines = new ArrayList<>();
for (RebaseTodoLine line : repo.readRebaseTodo(
rebaseState.getPath(GIT_REBASE_TODO), true)) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/ResetCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/ResetCommand.java
index 225c9996cf..47145a0563 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/ResetCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/ResetCommand.java
@@ -11,8 +11,8 @@ package org.eclipse.jgit.api;
import java.io.IOException;
import java.text.MessageFormat;
+import java.util.ArrayList;
import java.util.Collection;
-import java.util.LinkedList;
import org.eclipse.jgit.api.errors.CheckoutConflictException;
import org.eclipse.jgit.api.errors.GitAPIException;
@@ -90,7 +90,7 @@ public class ResetCommand extends GitCommand<Ref> {
private ResetType mode;
- private Collection<String> filepaths = new LinkedList<>();
+ private Collection<String> filepaths = new ArrayList<>();
private boolean isReflogDisabled;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/RevertCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/RevertCommand.java
index 5231d4a327..855c3b1cf3 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/RevertCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/RevertCommand.java
@@ -13,7 +13,7 @@ import static org.eclipse.jgit.lib.Constants.OBJECT_ID_ABBREV_STRING_LENGTH;
import java.io.IOException;
import java.text.MessageFormat;
-import java.util.LinkedList;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -58,13 +58,13 @@ import org.eclipse.jgit.treewalk.FileTreeIterator;
* >Git documentation about revert</a>
*/
public class RevertCommand extends GitCommand<RevCommit> {
- private List<Ref> commits = new LinkedList<>();
+ private List<Ref> commits = new ArrayList<>();
private String ourCommitName = null;
private boolean insertChangeId;
- private List<Ref> revertedRefs = new LinkedList<>();
+ private List<Ref> revertedRefs = new ArrayList<>();
private MergeResult failingResult;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java
index 656f36a81a..7459e7298f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java
@@ -13,7 +13,6 @@ import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.LinkedList;
import java.util.List;
import org.eclipse.jgit.api.errors.GitAPIException;
@@ -71,7 +70,7 @@ public class RmCommand extends GitCommand<DirCache> {
*/
public RmCommand(Repository repo) {
super(repo);
- filepatterns = new LinkedList<>();
+ filepatterns = new ArrayList<>();
}
/**
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/StatusCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/StatusCommand.java
index eab389460a..cdd078ea25 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/StatusCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/StatusCommand.java
@@ -10,7 +10,7 @@
package org.eclipse.jgit.api;
import java.io.IOException;
-import java.util.LinkedList;
+import java.util.ArrayList;
import java.util.List;
import org.eclipse.jgit.api.errors.GitAPIException;
@@ -83,7 +83,7 @@ public class StatusCommand extends GitCommand<Status> {
*/
public StatusCommand addPath(String path) {
if (paths == null)
- paths = new LinkedList<>();
+ paths = new ArrayList<>();
paths.add(path);
return this;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/CheckoutConflictException.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/CheckoutConflictException.java
index 0224b3dbb3..5538711192 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/CheckoutConflictException.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/CheckoutConflictException.java
@@ -37,7 +37,7 @@
*/
package org.eclipse.jgit.api.errors;
-import java.util.LinkedList;
+import java.util.ArrayList;
import java.util.List;
/**
@@ -99,7 +99,7 @@ public class CheckoutConflictException extends GitAPIException {
*/
CheckoutConflictException addConflictingPath(String conflictingPath) {
if (conflictingPaths == null)
- conflictingPaths = new LinkedList<>();
+ conflictingPaths = new ArrayList<>();
conflictingPaths.add(conflictingPath);
return this;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/BareSuperprojectWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/BareSuperprojectWriter.java
index 3ce97a4ff7..d191e23399 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/BareSuperprojectWriter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/BareSuperprojectWriter.java
@@ -156,6 +156,9 @@ class BareSuperprojectWriter {
ObjectId objectId;
if (ObjectId.isId(proj.getRevision())) {
objectId = ObjectId.fromString(proj.getRevision());
+ if (config.recordRemoteBranch && proj.getUpstream() != null) {
+ cfg.setString("submodule", name, "ref", proj.getUpstream());
+ }
} else {
objectId = callback.sha1(url, proj.getRevision());
if (objectId == null && !config.ignoreRemoteFailures) {
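When the revision is already a pinned object id and the manifest supplied an upstream ref, the writer now records that ref in the generated .gitmodules. Assuming the writer also emits path and url as it does for the other submodule keys, a resulting entry might look like this (submodule name and URL invented):

[submodule "plugins/example"]
	path = plugins/example
	url = https://example.com/plugins/example
	ref = refs/heads/main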
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java
index 957b3869f2..b033177e05 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java
@@ -176,6 +176,10 @@ public class ManifestParser extends DefaultHandler {
attributes.getValue("groups"));
currentProject
.setRecommendShallow(attributes.getValue("clone-depth"));
+ currentProject
+ .setUpstream(attributes.getValue("upstream"));
+ currentProject
+ .setDestBranch(attributes.getValue("dest-branch"));
break;
case "remote":
String alias = attributes.getValue("alias");
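The parser now also captures the upstream and dest-branch attributes of each project element. A repo manifest fragment exercising both might look like this (all values invented):

<project name="platform/example"
         path="example"
         revision="c133219d4d0fca0a5dfd62f8f1b8e1a03ea01864"
         upstream="refs/heads/main"
         dest-branch="main" />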
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java
index 95c1c8b22e..9979664ceb 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java
@@ -255,7 +255,7 @@ public class RepoCommand extends GitCommand<RevCommit> {
@SuppressWarnings("serial")
static class ManifestErrorException extends GitAPIException {
ManifestErrorException(Throwable cause) {
- super(RepoText.get().invalidManifest, cause);
+ super(RepoText.get().invalidManifest + " " + cause.getMessage(), cause);
}
}
@@ -615,6 +615,7 @@ public class RepoCommand extends GitCommand<RevCommit> {
p.setUrl(proj.getUrl());
p.addCopyFiles(proj.getCopyFiles());
p.addLinkFiles(proj.getLinkFiles());
+ p.setUpstream(proj.getUpstream());
ret.add(p);
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoProject.java b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoProject.java
index 8deb7386a6..b7a9ac5b73 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoProject.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoProject.java
@@ -38,6 +38,8 @@ public class RepoProject implements Comparable<RepoProject> {
private final Set<String> groups;
private final List<CopyFile> copyfiles;
private final List<LinkFile> linkfiles;
+ private String upstream;
+ private String destBranch;
private String recommendShallow;
private String url;
private String defaultRevision;
@@ -389,6 +391,56 @@ public class RepoProject implements Comparable<RepoProject> {
this.linkfiles.clear();
}
+ /**
+ * Return the upstream attribute of the project
+ *
+ * @return the upstream value if present, null otherwise.
+ *
+ * @since 6.10
+ */
+ public String getUpstream() {
+ return this.upstream;
+ }
+
+ /**
+ * Return the dest-branch attribute of the project
+ *
+ * @return the dest-branch value if present, null otherwise.
+ *
+ * @since 7.0
+ */
+ public String getDestBranch() {
+ return this.destBranch;
+ }
+
+ /**
+ * Set the upstream attribute of the project
+ *
+ * Name of the git ref in which a sha1 can be found, when the revision is a
+ * sha1.
+ *
+ * @param upstream value of the attribute in the manifest
+ *
+ * @since 6.10
+ */
+ public void setUpstream(String upstream) {
+ this.upstream = upstream;
+ }
+
+ /**
+ * Set the dest-branch attribute of the project
+ *
+ * Name of a Git branch.
+ *
+ * @param destBranch
+ * value of the attribute in the manifest
+ *
+ * @since 7.0
+ */
+ public void setDestBranch(String destBranch) {
+ this.destBranch = destBranch;
+ }
+
private String getPathWithSlash() {
if (path.endsWith("/")) { //$NON-NLS-1$
return path;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
index ef464e3172..700b54a7a6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
@@ -745,6 +745,7 @@ public class JGitText extends TranslationBundle {
/***/ public String shortReadOfOptionalDIRCExtensionExpectedAnotherBytes;
/***/ public String shortSkipOfBlock;
/***/ public String shutdownCleanup;
+ /***/ public String shutdownCleanupFailed;
/***/ public String shutdownCleanupListenerFailed;
/***/ public String signatureVerificationError;
/***/ public String signatureVerificationUnavailable;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/diffmergetool/CommandExecutor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/diffmergetool/CommandExecutor.java
index 25b7b8e5ba..c64a844af1 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/diffmergetool/CommandExecutor.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/diffmergetool/CommandExecutor.java
@@ -165,6 +165,9 @@ public class CommandExecutor {
if (fs instanceof FS_POSIX) {
commandArray = new String[1];
commandArray[0] = commandFile.getCanonicalPath();
+ } else if (fs instanceof FS_Win32_Cygwin) {
+ commandArray = new String[1];
+ commandArray[0] = commandFile.getCanonicalPath().replace("\\", "/"); //$NON-NLS-1$ //$NON-NLS-2$
} else if (fs instanceof FS_Win32) {
if (useMsys2) {
commandArray = new String[3];
@@ -176,9 +179,6 @@ public class CommandExecutor {
commandArray = new String[1];
commandArray[0] = commandFile.getCanonicalPath();
}
- } else if (fs instanceof FS_Win32_Cygwin) {
- commandArray = new String[1];
- commandArray[0] = commandFile.getCanonicalPath().replace("\\", "/"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
throw new ToolException(
"JGit: file system not supported: " + fs.toString()); //$NON-NLS-1$
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/commitgraph/CommitGraphLoader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/commitgraph/CommitGraphLoader.java
index 867d522e08..7e9220dc0d 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/commitgraph/CommitGraphLoader.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/commitgraph/CommitGraphLoader.java
@@ -98,6 +98,46 @@ public class CommitGraphLoader {
*/
public static CommitGraph read(InputStream fd)
throws CommitGraphFormatException, IOException {
+
+ boolean readChangedPathFilters;
+ try {
+ readChangedPathFilters = SystemReader.getInstance().getJGitConfig()
+ .getBoolean(ConfigConstants.CONFIG_COMMIT_GRAPH_SECTION,
+ ConfigConstants.CONFIG_KEY_READ_CHANGED_PATHS,
+ false);
+ } catch (ConfigInvalidException e) {
+ // Use the default value if, for some reason, the config couldn't be
+ // read.
+ readChangedPathFilters = false;
+ }
+
+ return read(fd, readChangedPathFilters);
+ }
+
+ /**
+ * Read an existing commit-graph file from a buffered stream.
+ * <p>
+ * The format of the file will be automatically detected and a proper access
+ * implementation for that format will be constructed and returned to the
+ * caller. The file may or may not be held open by the returned instance.
+ *
+ * @param fd
+ * stream to read the commit-graph file from. The stream must be
+ * buffered as some small IOs are performed against the stream.
+ * The caller is responsible for closing the stream.
+ *
+ * @param readChangedPathFilters
+ * enable reading bloom filter chunks.
+ *
+ * @return a copy of the commit-graph file in memory
+ * @throws CommitGraphFormatException
+ *             the commit-graph file's format is different from what we expected.
+ * @throws java.io.IOException
+ * the stream cannot be read.
+ */
+ public static CommitGraph read(InputStream fd,
+ boolean readChangedPathFilters)
+ throws CommitGraphFormatException, IOException {
byte[] hdr = new byte[8];
IO.readFully(fd, hdr, 0, hdr.length);
@@ -142,17 +182,6 @@ public class CommitGraphLoader {
chunks.add(new ChunkSegment(id, offset));
}
- boolean readChangedPathFilters;
- try {
- readChangedPathFilters = SystemReader.getInstance()
- .getJGitConfig()
- .getBoolean(ConfigConstants.CONFIG_COMMIT_GRAPH_SECTION,
- ConfigConstants.CONFIG_KEY_READ_CHANGED_PATHS, false);
- } catch (ConfigInvalidException e) {
- // Use the default value if, for some reason, the config couldn't be read.
- readChangedPathFilters = false;
- }
-
CommitGraphBuilder builder = CommitGraphBuilder.builder();
for (int i = 0; i < numberOfChunks; i++) {
long chunkOffset = chunks.get(i).offset;
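The new overload lets the caller decide whether to read the changed-path (Bloom filter) chunks instead of consulting the jgit config. A minimal usage sketch; these are internal JGit classes, the file path is invented, and the getCommitCnt() call assumes the current shape of the CommitGraph interface:

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;

import org.eclipse.jgit.internal.storage.commitgraph.CommitGraph;
import org.eclipse.jgit.internal.storage.commitgraph.CommitGraphLoader;

public class ReadGraphSketch {
	public static void main(String[] args) throws Exception {
		try (InputStream in = new BufferedInputStream(
				new FileInputStream(".git/objects/info/commit-graph"))) {
			// true: also load changed-path filter chunks if present
			CommitGraph graph = CommitGraphLoader.read(in, true);
			System.out.println(graph.getCommitCnt());
		}
	}
}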
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/commitgraph/CommitGraphWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/commitgraph/CommitGraphWriter.java
index 1f1c35ac19..0d9815eceb 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/commitgraph/CommitGraphWriter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/commitgraph/CommitGraphWriter.java
@@ -31,12 +31,12 @@ import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
-import java.util.Stack;
import org.eclipse.jgit.annotations.NonNull;
import org.eclipse.jgit.annotations.Nullable;
@@ -336,10 +336,10 @@ public class CommitGraphWriter {
continue;
}
- Stack<RevCommit> commitStack = new Stack<>();
+ ArrayDeque<RevCommit> commitStack = new ArrayDeque<>();
commitStack.push(cmit);
- while (!commitStack.empty()) {
+ while (!commitStack.isEmpty()) {
int maxGeneration = 0;
boolean allParentComputed = true;
RevCommit current = commitStack.peek();
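Stack is a legacy synchronized Vector subclass; ArrayDeque offers the same LIFO surface used here (push, peek, pop) without per-call locking, with isEmpty() replacing Stack's empty(). A minimal sketch:

import java.util.ArrayDeque;

public class DequeAsStackSketch {
	public static void main(String[] args) {
		ArrayDeque<Integer> stack = new ArrayDeque<>();
		stack.push(1);
		stack.push(2);
		while (!stack.isEmpty()) { // Stack used empty()
			System.out.println(stack.pop()); // prints 2, then 1 (LIFO)
		}
	}
}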
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java
new file mode 100644
index 0000000000..d0907bcc8d
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java
@@ -0,0 +1,563 @@
+/*
+ * Copyright (c) 2024, Google LLC and others
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import java.io.IOException;
+import java.time.Duration;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicReferenceArray;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Consumer;
+import java.util.stream.LongStream;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.ReadableChannelSupplier;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.Ref;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.RefLoader;
+import org.eclipse.jgit.internal.storage.pack.PackExt;
+
+/**
+ * Default implementation of the {@link DfsBlockCacheTable}.
+ * <p>
+ * This cache implements a clock replacement algorithm, giving each block at
+ * least one chance to have been accessed during a sweep of the cache to save
+ * itself from eviction. The number of sweep chances is configurable per pack
+ * extension.
+ * <p>
+ * Entities created by the cache are held under hard references, preventing the
+ * Java VM from clearing anything. Blocks are discarded by the replacement
+ * algorithm when adding a new block would cause the cache to exceed its
+ * configured maximum size.
+ * <p>
+ * Whenever a cache miss occurs, loading is invoked by exactly one thread for
+ * the given <code>(DfsStreamKey,position)</code> key tuple. This is ensured by
+ * an array of locks, with the tuple hashed to a lock instance.
+ * <p>
+ * The internal hash table does not expand at runtime, instead it is fixed in
+ * size at cache creation time. The internal lock table used to gate load
+ * invocations is also fixed in size.
+ */
+final class ClockBlockCacheTable implements DfsBlockCacheTable {
+ /** Number of entries in {@link #table}. */
+ private final int tableSize;
+
+ /** Maximum number of bytes the cache should hold. */
+ private final long maxBytes;
+
+ /**
+ * Used to reserve space for blocks.
+ * <p>
+ * The value for blockSize must be a power of 2.
+ */
+ private final int blockSize;
+
+ private final Hash hash;
+
+ /** Hash bucket directory; entries are chained below. */
+ private final AtomicReferenceArray<HashEntry> table;
+
+ /**
+ * Locks to prevent concurrent loads for same (PackFile,position) block. The
+ * number of locks is {@link DfsBlockCacheConfig#getConcurrencyLevel()} to
+ * cap the overall concurrent block loads.
+ */
+ private final ReentrantLock[] loadLocks;
+
+ /**
+ * A separate pool of locks per pack extension to prevent concurrent loads
+ * for same index or bitmap from PackFile.
+ */
+ private final ReentrantLock[][] refLocks;
+
+ /** Protects the clock and its related data. */
+ private final ReentrantLock clockLock;
+
+ /** Current position of the clock. */
+ private Ref clockHand;
+
+ private final DfsBlockCacheStats dfsBlockCacheStats;
+
+ /**
+ * A consumer of object reference lock wait time milliseconds. May be used
+ * to build a metric.
+ */
+ private final Consumer<Long> refLockWaitTime;
+
+ /** Consumer of loading and eviction events of indexes. */
+ private final DfsBlockCacheConfig.IndexEventConsumer indexEventConsumer;
+
+ /** Stores timestamps of the last eviction of indexes. */
+ private final Map<EvictKey, Long> indexEvictionMap = new ConcurrentHashMap<>();
+
+ ClockBlockCacheTable(DfsBlockCacheConfig cfg) {
+ this.tableSize = tableSize(cfg);
+ if (tableSize < 1) {
+ throw new IllegalArgumentException(
+ JGitText.get().tSizeMustBeGreaterOrEqual1);
+ }
+ int concurrencyLevel = cfg.getConcurrencyLevel();
+ this.maxBytes = cfg.getBlockLimit();
+ this.blockSize = cfg.getBlockSize();
+ int blockSizeShift = Integer.numberOfTrailingZeros(blockSize);
+ this.hash = new Hash(blockSizeShift);
+ table = new AtomicReferenceArray<>(tableSize);
+
+ loadLocks = new ReentrantLock[concurrencyLevel];
+ for (int i = 0; i < loadLocks.length; i++) {
+ loadLocks[i] = new ReentrantLock(/* fair= */ true);
+ }
+ refLocks = new ReentrantLock[PackExt.values().length][concurrencyLevel];
+ for (int i = 0; i < PackExt.values().length; i++) {
+ for (int j = 0; j < concurrencyLevel; ++j) {
+ refLocks[i][j] = new ReentrantLock(/* fair= */ true);
+ }
+ }
+
+ clockLock = new ReentrantLock(/* fair= */ true);
+ String none = ""; //$NON-NLS-1$
+ clockHand = new Ref<>(
+ DfsStreamKey.of(new DfsRepositoryDescription(none), none, null),
+ -1, 0, null);
+ clockHand.next = clockHand;
+
+ this.dfsBlockCacheStats = new DfsBlockCacheStats();
+ this.refLockWaitTime = cfg.getRefLockWaitTimeConsumer();
+ this.indexEventConsumer = cfg.getIndexEventConsumer();
+ }
+
+ @Override
+ public DfsBlockCacheStats getDfsBlockCacheStats() {
+ return dfsBlockCacheStats;
+ }
+
+ @Override
+ public boolean hasBlock0(DfsStreamKey key) {
+ HashEntry e1 = table.get(slot(key, 0));
+ DfsBlock v = scan(e1, key, 0);
+ return v != null && v.contains(key, 0);
+ }
+
+ @Override
+ public DfsBlock getOrLoad(BlockBasedFile file, long position, DfsReader ctx,
+ ReadableChannelSupplier fileChannel) throws IOException {
+ final long requestedPosition = position;
+ position = file.alignToBlock(position);
+
+ DfsStreamKey key = file.key;
+ int slot = slot(key, position);
+ HashEntry e1 = table.get(slot);
+ DfsBlock v = scan(e1, key, position);
+ if (v != null && v.contains(key, requestedPosition)) {
+ ctx.stats.blockCacheHit++;
+ dfsBlockCacheStats.incrementHit(key);
+ return v;
+ }
+
+ reserveSpace(blockSize, key);
+ ReentrantLock regionLock = lockFor(key, position);
+ regionLock.lock();
+ try {
+ HashEntry e2 = table.get(slot);
+ if (e2 != e1) {
+ v = scan(e2, key, position);
+ if (v != null) {
+ ctx.stats.blockCacheHit++;
+ dfsBlockCacheStats.incrementHit(key);
+ creditSpace(blockSize, key);
+ return v;
+ }
+ }
+
+ dfsBlockCacheStats.incrementMiss(key);
+ boolean credit = true;
+ try {
+ v = file.readOneBlock(position, ctx, fileChannel.get());
+ credit = false;
+ } finally {
+ if (credit) {
+ creditSpace(blockSize, key);
+ }
+ }
+ if (position != v.start) {
+ // The file discovered its blockSize and adjusted.
+ position = v.start;
+ slot = slot(key, position);
+ e2 = table.get(slot);
+ }
+
+ Ref<DfsBlock> ref = new Ref<>(key, position, v.size(), v);
+ ref.markHotter();
+ for (;;) {
+ HashEntry n = new HashEntry(HashEntry.clean(e2), ref);
+ if (table.compareAndSet(slot, e2, n)) {
+ break;
+ }
+ e2 = table.get(slot);
+ }
+ addToClock(ref, blockSize - v.size());
+ } finally {
+ regionLock.unlock();
+ }
+
+ // If the block size changed from the default, it is possible the block
+ // that was loaded is the wrong block for the requested position.
+ if (v.contains(file.key, requestedPosition)) {
+ return v;
+ }
+ return getOrLoad(file, requestedPosition, ctx, fileChannel);
+ }
+
+ @Override
+ public <T> Ref<T> getOrLoadRef(DfsStreamKey key, long position,
+ RefLoader<T> loader) throws IOException {
+ long start = System.nanoTime();
+ int slot = slot(key, position);
+ HashEntry e1 = table.get(slot);
+ Ref<T> ref = scanRef(e1, key, position);
+ if (ref != null) {
+ dfsBlockCacheStats.incrementHit(key);
+ reportIndexRequested(ref, true /* cacheHit= */, start);
+ return ref;
+ }
+
+ ReentrantLock regionLock = lockForRef(key);
+ long lockStart = System.currentTimeMillis();
+ regionLock.lock();
+ try {
+ HashEntry e2 = table.get(slot);
+ if (e2 != e1) {
+ ref = scanRef(e2, key, position);
+ if (ref != null) {
+ dfsBlockCacheStats.incrementHit(key);
+ reportIndexRequested(ref, true /* cacheHit= */, start);
+ return ref;
+ }
+ }
+
+ if (refLockWaitTime != null) {
+ refLockWaitTime.accept(
+ Long.valueOf(System.currentTimeMillis() - lockStart));
+ }
+ dfsBlockCacheStats.incrementMiss(key);
+ ref = loader.load();
+ ref.markHotter();
+ // Reserve after loading to get the size of the object
+ reserveSpace(ref.size, key);
+ for (;;) {
+ HashEntry n = new HashEntry(HashEntry.clean(e2), ref);
+ if (table.compareAndSet(slot, e2, n)) {
+ break;
+ }
+ e2 = table.get(slot);
+ }
+ addToClock(ref, 0);
+ } finally {
+ regionLock.unlock();
+ }
+ reportIndexRequested(ref, /* cacheHit= */ false, start);
+ return ref;
+ }
+
+ @Override
+ public void put(DfsBlock v) {
+ put(v.stream, v.start, v.size(), v);
+ }
+
+ @Override
+ public <T> Ref<T> put(DfsStreamKey key, long pos, long size, T v) {
+ int slot = slot(key, pos);
+ HashEntry e1 = table.get(slot);
+ Ref<T> ref = scanRef(e1, key, pos);
+ if (ref != null) {
+ return ref;
+ }
+
+ reserveSpace(size, key);
+ ReentrantLock regionLock = lockFor(key, pos);
+ regionLock.lock();
+ try {
+ HashEntry e2 = table.get(slot);
+ if (e2 != e1) {
+ ref = scanRef(e2, key, pos);
+ if (ref != null) {
+ creditSpace(size, key);
+ return ref;
+ }
+ }
+
+ ref = new Ref<>(key, pos, size, v);
+ ref.markHotter();
+ for (;;) {
+ HashEntry n = new HashEntry(HashEntry.clean(e2), ref);
+ if (table.compareAndSet(slot, e2, n)) {
+ break;
+ }
+ e2 = table.get(slot);
+ }
+ addToClock(ref, 0);
+ } finally {
+ regionLock.unlock();
+ }
+ return ref;
+ }
+
+ @Override
+ public <T> Ref<T> putRef(DfsStreamKey key, long size, T v) {
+ return put(key, 0, size, v);
+ }
+
+ @Override
+ public boolean contains(DfsStreamKey key, long position) {
+ return scan(table.get(slot(key, position)), key, position) != null;
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public <T> T get(DfsStreamKey key, long position) {
+ T val = (T) scan(table.get(slot(key, position)), key, position);
+ if (val == null) {
+ dfsBlockCacheStats.incrementMiss(key);
+ } else {
+ dfsBlockCacheStats.incrementHit(key);
+ }
+ return val;
+ }
+
+ private int slot(DfsStreamKey key, long position) {
+ return (hash.hash(key.hash, position) >>> 1) % tableSize;
+ }
+
+ @SuppressWarnings("unchecked")
+ private void reserveSpace(long reserve, DfsStreamKey key) {
+ clockLock.lock();
+ try {
+ long live = LongStream.of(dfsBlockCacheStats.getCurrentSize()).sum()
+ + reserve;
+ if (maxBytes < live) {
+ Ref prev = clockHand;
+ Ref hand = clockHand.next;
+ do {
+ if (hand.isHot()) {
+ // Value was recently touched. Cache is still hot so
+ // give it another chance, but cool it down a bit.
+ hand.markColder();
+ prev = hand;
+ hand = hand.next;
+ continue;
+ } else if (prev == hand) {
+ break;
+ }
+
+ // No recent access since last scan, kill
+ // value and remove from clock.
+ Ref dead = hand;
+ hand = hand.next;
+ prev.next = hand;
+ dead.next = null;
+ dead.value = null;
+ live -= dead.size;
+ dfsBlockCacheStats.addToLiveBytes(dead.key, -dead.size);
+ dfsBlockCacheStats.incrementEvict(dead.key);
+ reportIndexEvicted(dead);
+ } while (maxBytes < live);
+ clockHand = prev;
+ }
+ dfsBlockCacheStats.addToLiveBytes(key, reserve);
+ } finally {
+ clockLock.unlock();
+ }
+ }
+
+ private void creditSpace(long credit, DfsStreamKey key) {
+ clockLock.lock();
+ try {
+ dfsBlockCacheStats.addToLiveBytes(key, -credit);
+ } finally {
+ clockLock.unlock();
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void addToClock(Ref ref, long credit) {
+ clockLock.lock();
+ try {
+ if (credit != 0) {
+ dfsBlockCacheStats.addToLiveBytes(ref.key, -credit);
+ }
+ Ref ptr = clockHand;
+ ref.next = ptr.next;
+ ptr.next = ref;
+ clockHand = ref;
+ } finally {
+ clockLock.unlock();
+ }
+ }
+
+ private <T> T scan(HashEntry n, DfsStreamKey key, long position) {
+ Ref<T> r = scanRef(n, key, position);
+ return r != null ? r.get() : null;
+ }
+
+ @SuppressWarnings("unchecked")
+ private <T> Ref<T> scanRef(HashEntry n, DfsStreamKey key, long position) {
+ for (; n != null; n = n.next) {
+ Ref<T> r = n.ref;
+ if (r.position == position && r.key.equals(key)) {
+ return r.get() != null ? r : null;
+ }
+ }
+ return null;
+ }
+
+ private ReentrantLock lockFor(DfsStreamKey key, long position) {
+ return loadLocks[(hash.hash(key.hash, position) >>> 1)
+ % loadLocks.length];
+ }
+
+ private ReentrantLock lockForRef(DfsStreamKey key) {
+ int slot = (key.hash >>> 1) % refLocks[key.packExtPos].length;
+ return refLocks[key.packExtPos][slot];
+ }
+
+ private void reportIndexRequested(Ref<?> ref, boolean cacheHit,
+ long start) {
+ if (indexEventConsumer == null || !isIndexExtPos(ref.key.packExtPos)) {
+ return;
+ }
+ EvictKey evictKey = createEvictKey(ref);
+ Long prevEvictedTime = indexEvictionMap.get(evictKey);
+ long now = System.nanoTime();
+ long sinceLastEvictionNanos = prevEvictedTime == null ? 0L
+ : now - prevEvictedTime.longValue();
+ indexEventConsumer.acceptRequestedEvent(ref.key.packExtPos, cacheHit,
+ (now - start) / 1000L /* micros */, ref.size,
+ Duration.ofNanos(sinceLastEvictionNanos));
+ }
+
+ private void reportIndexEvicted(Ref<?> dead) {
+ if (indexEventConsumer == null
+ || !indexEventConsumer.shouldReportEvictedEvent()
+ || !isIndexExtPos(dead.key.packExtPos)) {
+ return;
+ }
+ EvictKey evictKey = createEvictKey(dead);
+ Long prevEvictedTime = indexEvictionMap.get(evictKey);
+ long now = System.nanoTime();
+ long sinceLastEvictionNanos = prevEvictedTime == null ? 0L
+ : now - prevEvictedTime.longValue();
+ indexEvictionMap.put(evictKey, Long.valueOf(now));
+ indexEventConsumer.acceptEvictedEvent(dead.key.packExtPos, dead.size,
+ dead.getTotalHitCount(),
+ Duration.ofNanos(sinceLastEvictionNanos));
+ }
+
+ private static final class HashEntry {
+ /** Next entry in the hash table's chain list. */
+ final HashEntry next;
+
+ /** The referenced object. */
+ final Ref ref;
+
+ HashEntry(HashEntry n, Ref r) {
+ next = n;
+ ref = r;
+ }
+
+ private static HashEntry clean(HashEntry top) {
+ while (top != null && top.ref.next == null) {
+ top = top.next;
+ }
+ if (top == null) {
+ return null;
+ }
+ HashEntry n = clean(top.next);
+ return n == top.next ? top : new HashEntry(n, top.ref);
+ }
+ }
+
+ private EvictKey createEvictKey(Ref<?> ref) {
+ return new EvictKey(hash, ref);
+ }
+
+ private static boolean isIndexExtPos(int packExtPos) {
+ return packExtPos == PackExt.INDEX.getPosition()
+ || packExtPos == PackExt.REVERSE_INDEX.getPosition()
+ || packExtPos == PackExt.BITMAP_INDEX.getPosition();
+ }
+
+ private static int tableSize(DfsBlockCacheConfig cfg) {
+ final int wsz = cfg.getBlockSize();
+ final long limit = cfg.getBlockLimit();
+ if (wsz <= 0) {
+ throw new IllegalArgumentException(
+ JGitText.get().invalidWindowSize);
+ }
+ if (limit < wsz) {
+ throw new IllegalArgumentException(
+ JGitText.get().windowSizeMustBeLesserThanLimit);
+ }
+ return (int) Math.min(5 * (limit / wsz) / 2, Integer.MAX_VALUE);
+ }
+
+ private static final class Hash {
+ /**
+ * As {@link #blockSize} is a power of 2, bits to shift for a /
+ * blockSize.
+ */
+ private final int blockSizeShift;
+
+ Hash(int blockSizeShift) {
+ this.blockSizeShift = blockSizeShift;
+ }
+
+ int hash(int packHash, long off) {
+ return packHash + (int) (off >>> blockSizeShift);
+ }
+ }
+
+ private static final class EvictKey {
+ /**
+ * Provides the hash function to be used for this key's hashCode method.
+ */
+ private final Hash hash;
+
+ private final int keyHash;
+
+ private final int packExtPos;
+
+ private final long position;
+
+ EvictKey(Hash hash, Ref<?> ref) {
+ this.hash = hash;
+ keyHash = ref.key.hash;
+ packExtPos = ref.key.packExtPos;
+ position = ref.position;
+ }
+
+ @Override
+ public boolean equals(Object object) {
+ if (object instanceof EvictKey) {
+ EvictKey other = (EvictKey) object;
+ return keyHash == other.keyHash
+ && packExtPos == other.packExtPos
+ && position == other.position;
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ return hash.hash(keyHash, position);
+ }
+ }
+}
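The javadoc above describes the clock replacement policy that reserveSpace() implements: a hand sweeps the circular list, cooling entries that were touched since the last pass and evicting cold ones until the cache fits within its byte limit. A toy model of that sweep, detached from the JGit types (all names invented):

import java.util.ArrayList;
import java.util.List;

public class ClockSweepSketch {
	static final class Entry {
		final String key;
		int hot; // > 0 means accessed since the last sweep

		Entry(String key, int hot) {
			this.key = key;
			this.hot = hot;
		}
	}

	public static void main(String[] args) {
		List<Entry> clock = new ArrayList<>(List.of(
				new Entry("a", 1), new Entry("b", 0), new Entry("c", 2)));
		int capacity = 2;
		int hand = 0;
		while (clock.size() > capacity) {
			Entry e = clock.get(hand % clock.size());
			if (e.hot > 0) {
				e.hot--; // second chance: cool down, advance the hand
				hand++;
			} else {
				clock.remove(hand % clock.size()); // cold: evict
			}
		}
		clock.forEach(e -> System.out.println(e.key)); // a and c survive
	}
}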
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
index f7c460c1a8..56719cf0f4 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
@@ -12,18 +12,9 @@
package org.eclipse.jgit.internal.storage.dfs;
import java.io.IOException;
-import java.time.Duration;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.atomic.AtomicReferenceArray;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.function.Consumer;
import java.util.stream.LongStream;
-import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.pack.PackExt;
/**
@@ -36,31 +27,14 @@ import org.eclipse.jgit.internal.storage.pack.PackExt;
* reads of only tiny slices of a file, the DfsBlockCache tries to smooth out
* these tiny reads into larger block-sized IO operations.
* <p>
- * Whenever a cache miss occurs, loading is invoked by exactly one thread for
- * the given <code>(DfsStreamKey,position)</code> key tuple. This is ensured by
- * an array of locks, with the tuple hashed to a lock instance.
- * <p>
* Its too expensive during object access to be accurate with a least recently
* used (LRU) algorithm. Strictly ordering every read is a lot of overhead that
- * typically doesn't yield a corresponding benefit to the application. This
- * cache implements a clock replacement algorithm, giving each block at least
- * one chance to have been accessed during a sweep of the cache to save itself
- * from eviction. The number of swipe chances is configurable per pack
- * extension.
- * <p>
- * Entities created by the cache are held under hard references, preventing the
- * Java VM from clearing anything. Blocks are discarded by the replacement
- * algorithm when adding a new block would cause the cache to exceed its
- * configured maximum size.
+ * typically doesn't yield a corresponding benefit to the application.
* <p>
* The key tuple is passed through to methods as a pair of parameters rather
* than as a single Object, thus reducing the transient memory allocations of
* callers. It is more efficient to avoid the allocation, as we can't be 100%
* sure that a JIT would be able to stack-allocate a key tuple.
- * <p>
- * The internal hash table does not expand at runtime, instead it is fixed in
- * size at cache creation time. The internal lock table used to gate load
- * invocations is also fixed in size.
*/
public final class DfsBlockCache {
private static volatile DfsBlockCache cache;
@@ -94,24 +68,7 @@ public final class DfsBlockCache {
return cache;
}
- /** Number of entries in {@link #table}. */
- private final int tableSize;
-
- /** Hash bucket directory; entries are chained below. */
- private final AtomicReferenceArray<HashEntry> table;
-
- /**
- * Locks to prevent concurrent loads for same (PackFile,position) block. The
- * number of locks is {@link DfsBlockCacheConfig#getConcurrencyLevel()} to
- * cap the overall concurrent block loads.
- */
- private final ReentrantLock[] loadLocks;
-
- /**
- * A separate pool of locks per pack extension to prevent concurrent loads
- * for same index or bitmap from PackFile.
- */
- private final ReentrantLock[][] refLocks;
+ private final DfsBlockCacheTable dfsBlockCacheTable;
/** Maximum number of bytes the cache should hold. */
private final long maxBytes;
@@ -131,89 +88,16 @@ public final class DfsBlockCache {
*/
private final int blockSize;
- /** As {@link #blockSize} is a power of 2, bits to shift for a / blockSize. */
- private final int blockSizeShift;
-
- /**
- * Number of times a block was found in the cache, per pack file extension.
- */
- private final AtomicReference<AtomicLong[]> statHit;
-
- /**
- * Number of times a block was not found, and had to be loaded, per pack
- * file extension.
- */
- private final AtomicReference<AtomicLong[]> statMiss;
-
- /**
- * Number of blocks evicted due to cache being full, per pack file
- * extension.
- */
- private final AtomicReference<AtomicLong[]> statEvict;
-
- /**
- * Number of bytes currently loaded in the cache, per pack file extension.
- */
- private final AtomicReference<AtomicLong[]> liveBytes;
-
- /** Protects the clock and its related data. */
- private final ReentrantLock clockLock;
-
- /**
- * A consumer of object reference lock wait time milliseconds. May be used to build a metric.
- */
- private final Consumer<Long> refLockWaitTime;
-
- /** Current position of the clock. */
- private Ref clockHand;
-
/** Limits of cache hot count per pack file extension. */
private final int[] cacheHotLimits = new int[PackExt.values().length];
- /** Consumer of loading and eviction events of indexes. */
- private final DfsBlockCacheConfig.IndexEventConsumer indexEventConsumer;
-
- /** Stores timestamps of the last eviction of indexes. */
- private final Map<EvictKey, Long> indexEvictionMap = new ConcurrentHashMap<>();
-
- @SuppressWarnings("unchecked")
private DfsBlockCache(DfsBlockCacheConfig cfg) {
- tableSize = tableSize(cfg);
- if (tableSize < 1) {
- throw new IllegalArgumentException(JGitText.get().tSizeMustBeGreaterOrEqual1);
- }
-
- table = new AtomicReferenceArray<>(tableSize);
- int concurrencyLevel = cfg.getConcurrencyLevel();
- loadLocks = new ReentrantLock[concurrencyLevel];
- for (int i = 0; i < loadLocks.length; i++) {
- loadLocks[i] = new ReentrantLock(true /* fair */);
- }
- refLocks = new ReentrantLock[PackExt.values().length][concurrencyLevel];
- for (int i = 0; i < PackExt.values().length; i++) {
- for (int j = 0; j < concurrencyLevel; ++j) {
- refLocks[i][j] = new ReentrantLock(true /* fair */);
- }
- }
-
maxBytes = cfg.getBlockLimit();
- maxStreamThroughCache = (long) (maxBytes * cfg.getStreamRatio());
blockSize = cfg.getBlockSize();
- blockSizeShift = Integer.numberOfTrailingZeros(blockSize);
-
- clockLock = new ReentrantLock(true /* fair */);
- String none = ""; //$NON-NLS-1$
- clockHand = new Ref<>(
- DfsStreamKey.of(new DfsRepositoryDescription(none), none, null),
- -1, 0, null);
- clockHand.next = clockHand;
-
- statHit = new AtomicReference<>(newCounters());
- statMiss = new AtomicReference<>(newCounters());
- statEvict = new AtomicReference<>(newCounters());
- liveBytes = new AtomicReference<>(newCounters());
+ double streamRatio = cfg.getStreamRatio();
+ maxStreamThroughCache = (long) (maxBytes * streamRatio);
- refLockWaitTime = cfg.getRefLockWaitTimeConsumer();
+ dfsBlockCacheTable = new ClockBlockCacheTable(cfg);
for (int i = 0; i < PackExt.values().length; ++i) {
Integer limit = cfg.getCacheHotMap().get(PackExt.values()[i]);
@@ -223,7 +107,6 @@ public final class DfsBlockCache {
cacheHotLimits[i] = DfsBlockCacheConfig.DEFAULT_CACHE_HOT_MAX;
}
}
- indexEventConsumer = cfg.getIndexEventConsumer();
}
boolean shouldCopyThroughCache(long length) {
@@ -236,7 +119,7 @@ public final class DfsBlockCache {
* @return total number of bytes in the cache, per pack file extension.
*/
public long[] getCurrentSize() {
- return getStatVals(liveBytes);
+ return dfsBlockCacheTable.getDfsBlockCacheStats().getCurrentSize();
}
/**
@@ -255,7 +138,7 @@ public final class DfsBlockCache {
* extension.
*/
public long[] getHitCount() {
- return getStatVals(statHit);
+ return dfsBlockCacheTable.getDfsBlockCacheStats().getHitCount();
}
/**
@@ -266,7 +149,7 @@ public final class DfsBlockCache {
* extension.
*/
public long[] getMissCount() {
- return getStatVals(statMiss);
+ return dfsBlockCacheTable.getDfsBlockCacheStats().getMissCount();
}
/**
@@ -275,16 +158,8 @@ public final class DfsBlockCache {
* @return total number of requests (hit + miss), per pack file extension.
*/
public long[] getTotalRequestCount() {
- AtomicLong[] hit = statHit.get();
- AtomicLong[] miss = statMiss.get();
- long[] cnt = new long[Math.max(hit.length, miss.length)];
- for (int i = 0; i < hit.length; i++) {
- cnt[i] += hit[i].get();
- }
- for (int i = 0; i < miss.length; i++) {
- cnt[i] += miss[i].get();
- }
- return cnt;
+ return dfsBlockCacheTable.getDfsBlockCacheStats()
+ .getTotalRequestCount();
}
/**
@@ -293,22 +168,7 @@ public final class DfsBlockCache {
* @return hit ratios
*/
public long[] getHitRatio() {
- AtomicLong[] hit = statHit.get();
- AtomicLong[] miss = statMiss.get();
- long[] ratio = new long[Math.max(hit.length, miss.length)];
- for (int i = 0; i < ratio.length; i++) {
- if (i >= hit.length) {
- ratio[i] = 0;
- } else if (i >= miss.length) {
- ratio[i] = 100;
- } else {
- long hitVal = hit[i].get();
- long missVal = miss[i].get();
- long total = hitVal + missVal;
- ratio[i] = total == 0 ? 0 : hitVal * 100 / total;
- }
- }
- return ratio;
+ return dfsBlockCacheTable.getDfsBlockCacheStats().getHitRatio();
}
/**
@@ -319,7 +179,7 @@ public final class DfsBlockCache {
* file extension.
*/
public long[] getEvictions() {
- return getStatVals(statEvict);
+ return dfsBlockCacheTable.getDfsBlockCacheStats().getEvictions();
}
/**
@@ -334,31 +194,13 @@ public final class DfsBlockCache {
* @return true if block 0 (the first block) is in the cache.
*/
public boolean hasBlock0(DfsStreamKey key) {
- HashEntry e1 = table.get(slot(key, 0));
- DfsBlock v = scan(e1, key, 0);
- return v != null && v.contains(key, 0);
- }
-
- private int hash(int packHash, long off) {
- return packHash + (int) (off >>> blockSizeShift);
+ return dfsBlockCacheTable.hasBlock0(key);
}
int getBlockSize() {
return blockSize;
}
- private static int tableSize(DfsBlockCacheConfig cfg) {
- final int wsz = cfg.getBlockSize();
- final long limit = cfg.getBlockLimit();
- if (wsz <= 0) {
- throw new IllegalArgumentException(JGitText.get().invalidWindowSize);
- }
- if (limit < wsz) {
- throw new IllegalArgumentException(JGitText.get().windowSizeMustBeLesserThanLimit);
- }
- return (int) Math.min(5 * (limit / wsz) / 2, Integer.MAX_VALUE);
- }
-
/**
* Look up a cached object, creating and loading it if it doesn't exist.
*
@@ -376,139 +218,11 @@ public final class DfsBlockCache {
*/
DfsBlock getOrLoad(BlockBasedFile file, long position, DfsReader ctx,
ReadableChannelSupplier fileChannel) throws IOException {
- final long requestedPosition = position;
- position = file.alignToBlock(position);
-
- DfsStreamKey key = file.key;
- int slot = slot(key, position);
- HashEntry e1 = table.get(slot);
- DfsBlock v = scan(e1, key, position);
- if (v != null && v.contains(key, requestedPosition)) {
- ctx.stats.blockCacheHit++;
- getStat(statHit, key).incrementAndGet();
- return v;
- }
-
- reserveSpace(blockSize, key);
- ReentrantLock regionLock = lockFor(key, position);
- regionLock.lock();
- try {
- HashEntry e2 = table.get(slot);
- if (e2 != e1) {
- v = scan(e2, key, position);
- if (v != null) {
- ctx.stats.blockCacheHit++;
- getStat(statHit, key).incrementAndGet();
- creditSpace(blockSize, key);
- return v;
- }
- }
-
- getStat(statMiss, key).incrementAndGet();
- boolean credit = true;
- try {
- v = file.readOneBlock(position, ctx, fileChannel.get());
- credit = false;
- } finally {
- if (credit) {
- creditSpace(blockSize, key);
- }
- }
- if (position != v.start) {
- // The file discovered its blockSize and adjusted.
- position = v.start;
- slot = slot(key, position);
- e2 = table.get(slot);
- }
-
- Ref<DfsBlock> ref = new Ref<>(key, position, v.size(), v);
- ref.markHotter();
- for (;;) {
- HashEntry n = new HashEntry(clean(e2), ref);
- if (table.compareAndSet(slot, e2, n)) {
- break;
- }
- e2 = table.get(slot);
- }
- addToClock(ref, blockSize - v.size());
- } finally {
- regionLock.unlock();
- }
-
- // If the block size changed from the default, it is possible the block
- // that was loaded is the wrong block for the requested position.
- if (v.contains(file.key, requestedPosition)) {
- return v;
- }
- return getOrLoad(file, requestedPosition, ctx, fileChannel);
- }
-
- @SuppressWarnings("unchecked")
- private void reserveSpace(long reserve, DfsStreamKey key) {
- clockLock.lock();
- try {
- long live = LongStream.of(getCurrentSize()).sum() + reserve;
- if (maxBytes < live) {
- Ref prev = clockHand;
- Ref hand = clockHand.next;
- do {
- if (hand.isHot()) {
- // Value was recently touched. Cache is still hot so
- // give it another chance, but cool it down a bit.
- hand.markColder();
- prev = hand;
- hand = hand.next;
- continue;
- } else if (prev == hand)
- break;
-
- // No recent access since last scan, kill
- // value and remove from clock.
- Ref dead = hand;
- hand = hand.next;
- prev.next = hand;
- dead.next = null;
- dead.value = null;
- live -= dead.size;
- getStat(liveBytes, dead.key).addAndGet(-dead.size);
- getStat(statEvict, dead.key).incrementAndGet();
- reportIndexEvicted(dead);
- } while (maxBytes < live);
- clockHand = prev;
- }
- getStat(liveBytes, key).addAndGet(reserve);
- } finally {
- clockLock.unlock();
- }
- }
-
- private void creditSpace(long credit, DfsStreamKey key) {
- clockLock.lock();
- try {
- getStat(liveBytes, key).addAndGet(-credit);
- } finally {
- clockLock.unlock();
- }
- }
-
- @SuppressWarnings("unchecked")
- private void addToClock(Ref ref, long credit) {
- clockLock.lock();
- try {
- if (credit != 0) {
- getStat(liveBytes, ref.key).addAndGet(-credit);
- }
- Ref ptr = clockHand;
- ref.next = ptr.next;
- ptr.next = ref;
- clockHand = ref;
- } finally {
- clockLock.unlock();
- }
+ return dfsBlockCacheTable.getOrLoad(file, position, ctx, fileChannel);
}
void put(DfsBlock v) {
- put(v.stream, v.start, v.size(), v);
+ dfsBlockCacheTable.put(v);
}
/**
@@ -524,252 +238,41 @@ public final class DfsBlockCache {
* @throws IOException
* the reference was not in the cache and could not be loaded.
*/
- <T> Ref<T> getOrLoadRef(
- DfsStreamKey key, long position, RefLoader<T> loader)
- throws IOException {
- long start = System.nanoTime();
- int slot = slot(key, position);
- HashEntry e1 = table.get(slot);
- Ref<T> ref = scanRef(e1, key, position);
- if (ref != null) {
- getStat(statHit, key).incrementAndGet();
- reportIndexRequested(ref, true /* cacheHit */, start);
- return ref;
- }
-
- ReentrantLock regionLock = lockForRef(key);
- long lockStart = System.currentTimeMillis();
- regionLock.lock();
- try {
- HashEntry e2 = table.get(slot);
- if (e2 != e1) {
- ref = scanRef(e2, key, position);
- if (ref != null) {
- getStat(statHit, key).incrementAndGet();
- reportIndexRequested(ref, true /* cacheHit */,
- start);
- return ref;
- }
- }
-
- if (refLockWaitTime != null) {
- refLockWaitTime.accept(
- Long.valueOf(System.currentTimeMillis() - lockStart));
- }
- getStat(statMiss, key).incrementAndGet();
- ref = loader.load();
- ref.markHotter();
- // Reserve after loading to get the size of the object
- reserveSpace(ref.size, key);
- for (;;) {
- HashEntry n = new HashEntry(clean(e2), ref);
- if (table.compareAndSet(slot, e2, n)) {
- break;
- }
- e2 = table.get(slot);
- }
- addToClock(ref, 0);
- } finally {
- regionLock.unlock();
- }
- reportIndexRequested(ref, false /* cacheHit */, start);
- return ref;
+ <T> Ref<T> getOrLoadRef(DfsStreamKey key, long position,
+ RefLoader<T> loader) throws IOException {
+ return dfsBlockCacheTable.getOrLoadRef(key, position, loader);
}
<T> Ref<T> putRef(DfsStreamKey key, long size, T v) {
- return put(key, 0, size, v);
+ return dfsBlockCacheTable.putRef(key, size, v);
}
<T> Ref<T> put(DfsStreamKey key, long pos, long size, T v) {
- int slot = slot(key, pos);
- HashEntry e1 = table.get(slot);
- Ref<T> ref = scanRef(e1, key, pos);
- if (ref != null) {
- return ref;
- }
-
- reserveSpace(size, key);
- ReentrantLock regionLock = lockFor(key, pos);
- regionLock.lock();
- try {
- HashEntry e2 = table.get(slot);
- if (e2 != e1) {
- ref = scanRef(e2, key, pos);
- if (ref != null) {
- creditSpace(size, key);
- return ref;
- }
- }
-
- ref = new Ref<>(key, pos, size, v);
- ref.markHotter();
- for (;;) {
- HashEntry n = new HashEntry(clean(e2), ref);
- if (table.compareAndSet(slot, e2, n)) {
- break;
- }
- e2 = table.get(slot);
- }
- addToClock(ref, 0);
- } finally {
- regionLock.unlock();
- }
- return ref;
+ return dfsBlockCacheTable.put(key, pos, size, v);
}
boolean contains(DfsStreamKey key, long position) {
- return scan(table.get(slot(key, position)), key, position) != null;
+ return dfsBlockCacheTable.contains(key, position);
}
- @SuppressWarnings("unchecked")
<T> T get(DfsStreamKey key, long position) {
- T val = (T) scan(table.get(slot(key, position)), key, position);
- if (val == null) {
- getStat(statMiss, key).incrementAndGet();
- } else {
- getStat(statHit, key).incrementAndGet();
- }
- return val;
- }
-
- private <T> T scan(HashEntry n, DfsStreamKey key, long position) {
- Ref<T> r = scanRef(n, key, position);
- return r != null ? r.get() : null;
- }
-
- @SuppressWarnings("unchecked")
- private <T> Ref<T> scanRef(HashEntry n, DfsStreamKey key, long position) {
- for (; n != null; n = n.next) {
- Ref<T> r = n.ref;
- if (r.position == position && r.key.equals(key)) {
- return r.get() != null ? r : null;
- }
- }
- return null;
- }
-
- private int slot(DfsStreamKey key, long position) {
- return (hash(key.hash, position) >>> 1) % tableSize;
- }
-
- private ReentrantLock lockFor(DfsStreamKey key, long position) {
- return loadLocks[(hash(key.hash, position) >>> 1) % loadLocks.length];
- }
-
- private ReentrantLock lockForRef(DfsStreamKey key) {
- int slot = (key.hash >>> 1) % refLocks[key.packExtPos].length;
- return refLocks[key.packExtPos][slot];
- }
-
- private static AtomicLong[] newCounters() {
- AtomicLong[] ret = new AtomicLong[PackExt.values().length];
- for (int i = 0; i < ret.length; i++) {
- ret[i] = new AtomicLong();
- }
- return ret;
- }
-
- private static AtomicLong getStat(AtomicReference<AtomicLong[]> stats,
- DfsStreamKey key) {
- int pos = key.packExtPos;
- while (true) {
- AtomicLong[] vals = stats.get();
- if (pos < vals.length) {
- return vals[pos];
- }
- AtomicLong[] expect = vals;
- vals = new AtomicLong[Math.max(pos + 1, PackExt.values().length)];
- System.arraycopy(expect, 0, vals, 0, expect.length);
- for (int i = expect.length; i < vals.length; i++) {
- vals[i] = new AtomicLong();
- }
- if (stats.compareAndSet(expect, vals)) {
- return vals[pos];
- }
- }
- }
-
- private static long[] getStatVals(AtomicReference<AtomicLong[]> stat) {
- AtomicLong[] stats = stat.get();
- long[] cnt = new long[stats.length];
- for (int i = 0; i < stats.length; i++) {
- cnt[i] = stats[i].get();
- }
- return cnt;
- }
-
- private static HashEntry clean(HashEntry top) {
- while (top != null && top.ref.next == null) {
- top = top.next;
- }
- if (top == null) {
- return null;
- }
- HashEntry n = clean(top.next);
- return n == top.next ? top : new HashEntry(n, top.ref);
- }
-
- private void reportIndexRequested(Ref<?> ref, boolean cacheHit,
- long start) {
- if (indexEventConsumer == null
- || !isIndexExtPos(ref.key.packExtPos)) {
- return;
- }
- EvictKey evictKey = new EvictKey(ref);
- Long prevEvictedTime = indexEvictionMap.get(evictKey);
- long now = System.nanoTime();
- long sinceLastEvictionNanos = prevEvictedTime == null ? 0L
- : now - prevEvictedTime.longValue();
- indexEventConsumer.acceptRequestedEvent(ref.key.packExtPos, cacheHit,
- (now - start) / 1000L /* micros */, ref.size,
- Duration.ofNanos(sinceLastEvictionNanos));
- }
-
- private void reportIndexEvicted(Ref<?> dead) {
- if (indexEventConsumer == null
- || !indexEventConsumer.shouldReportEvictedEvent()
- || !isIndexExtPos(dead.key.packExtPos)) {
- return;
- }
- EvictKey evictKey = new EvictKey(dead);
- Long prevEvictedTime = indexEvictionMap.get(evictKey);
- long now = System.nanoTime();
- long sinceLastEvictionNanos = prevEvictedTime == null ? 0L
- : now - prevEvictedTime.longValue();
- indexEvictionMap.put(evictKey, Long.valueOf(now));
- indexEventConsumer.acceptEvictedEvent(dead.key.packExtPos, dead.size,
- dead.totalHitCount.get(),
- Duration.ofNanos(sinceLastEvictionNanos));
- }
-
- private static boolean isIndexExtPos(int packExtPos) {
- return packExtPos == PackExt.INDEX.getPosition()
- || packExtPos == PackExt.REVERSE_INDEX.getPosition()
- || packExtPos == PackExt.BITMAP_INDEX.getPosition();
- }
-
- private static final class HashEntry {
- /** Next entry in the hash table's chain list. */
- final HashEntry next;
-
- /** The referenced object. */
- final Ref ref;
-
- HashEntry(HashEntry n, Ref r) {
- next = n;
- ref = r;
- }
+ return dfsBlockCacheTable.get(key, position);
}
static final class Ref<T> {
final DfsStreamKey key;
+
final long position;
+
final long size;
+
volatile T value;
+
Ref next;
private volatile int hotCount;
- private AtomicInteger totalHitCount = new AtomicInteger();
+
+ private final AtomicInteger totalHitCount = new AtomicInteger();
Ref(DfsStreamKey key, long position, long size, T v) {
this.key = key;
@@ -804,33 +307,9 @@ public final class DfsBlockCache {
boolean isHot() {
return hotCount > 0;
}
- }
-
- private static final class EvictKey {
- private final int keyHash;
- private final int packExtPos;
- private final long position;
-
- EvictKey(Ref<?> ref) {
- keyHash = ref.key.hash;
- packExtPos = ref.key.packExtPos;
- position = ref.position;
- }
-
- @Override
- public boolean equals(Object object) {
- if (object instanceof EvictKey) {
- EvictKey other = (EvictKey) object;
- return keyHash == other.keyHash
- && packExtPos == other.packExtPos
- && position == other.position;
- }
- return false;
- }
- @Override
- public int hashCode() {
- return DfsBlockCache.getInstance().hash(keyHash, position);
+ int getTotalHitCount() {
+ return totalHitCount.get();
}
}
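Taken together, the removals above strip the hashtable, clock, and stats machinery out of DfsBlockCache and leave it delegating every operation to a DfsBlockCacheTable. A minimal sketch of the resulting shape, assuming the extracted clock implementation lives in a table class built from the cache configuration (names outside this diff are illustrative, not the verbatim JGit source):

    // Sketch only: DfsBlockCache reduced to a facade over the table.
    public final class DfsBlockCache {
        private final DfsBlockCacheTable dfsBlockCacheTable;

        private DfsBlockCache(DfsBlockCacheConfig cfg) {
            // The former in-class clock/hash logic now lives behind the
            // DfsBlockCacheTable interface.
            dfsBlockCacheTable = new ClockBlockCacheTable(cfg);
        }

        boolean contains(DfsStreamKey key, long position) {
            return dfsBlockCacheTable.contains(key, position);
        }

        void put(DfsBlock v) {
            dfsBlockCacheTable.put(v);
        }
    }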
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java
new file mode 100644
index 0000000000..701d1fdce3
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java
@@ -0,0 +1,340 @@
+/*
+ * Copyright (c) 2024, Google LLC and others
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import java.io.IOException;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.eclipse.jgit.internal.storage.pack.PackExt;
+
+/**
+ * Block cache table.
+ */
+public interface DfsBlockCacheTable {
+ /**
+ * Quickly check if the cache contains block 0 of the given stream.
+ * <p>
+ * This can be useful for sophisticated pre-read algorithms to quickly
+ * determine if a file is likely already in cache, especially small
+ * reftables which may be smaller than a typical DFS block size.
+ *
+ * @param key
+ * the file to check.
+ * @return true if block 0 (the first block) is in the cache.
+ */
+ boolean hasBlock0(DfsStreamKey key);
+
+ /**
+ * Look up a cached object, creating and loading it if it doesn't exist.
+ *
+ * @param file
+ * the pack that "contains" the cached object.
+ * @param position
+ * offset within <code>file</code> of the object.
+ * @param dfsReader
+ * current thread's reader.
+ * @param fileChannel
+ * supplier for channel to read {@code file}.
+ * @return the object reference.
+ * @throws IOException
+ * the reference was not in the cache and could not be loaded.
+ */
+ DfsBlock getOrLoad(BlockBasedFile file, long position, DfsReader dfsReader,
+ DfsBlockCache.ReadableChannelSupplier fileChannel)
+ throws IOException;
+
+ /**
+ * Look up a cached object, creating and loading it if it doesn't exist.
+ *
+ * @param key
+ * the stream key of the pack.
+ * @param position
+ * the position in the key. The default should be 0.
+ * @param loader
+ * the function to load the reference.
+ * @return the object reference.
+ * @throws IOException
+ * the reference was not in the cache and could not be loaded.
+ */
+ <T> DfsBlockCache.Ref<T> getOrLoadRef(DfsStreamKey key, long position,
+ DfsBlockCache.RefLoader<T> loader) throws IOException;
+
+ /**
+ * Put a block in the block cache.
+ *
+ * @param v
+ * the block to put in the cache.
+ */
+ void put(DfsBlock v);
+
+ /**
+ * Put a block in the block cache.
+ *
+ * @param key
+ * the stream key of the pack.
+ * @param pos
+ * the position in the key.
+ * @param size
+ * the size of the object.
+ * @param v
+ * the object to put in the block cache.
+ * @return the object reference.
+ */
+ <T> DfsBlockCache.Ref<T> put(DfsStreamKey key, long pos, long size, T v);
+
+ /**
+ * Put an object in the block cache.
+ *
+ * @param key
+ * the stream key of the pack.
+ * @param size
+ * the size of the object.
+ * @param v
+ * the object to put in the block cache.
+ * @return the object reference.
+ */
+ <T> DfsBlockCache.Ref<T> putRef(DfsStreamKey key, long size, T v);
+
+ /**
+ * Check if the block cache contains an object identified by (key,
+ * position).
+ *
+ * @param key
+ * the stream key of the pack.
+ * @param position
+ * the position in the key.
+ * @return if the block cache contains the object identified by (key,
+ * position).
+ */
+ boolean contains(DfsStreamKey key, long position);
+
+ /**
+ * Get the object identified by (key, position) from the block cache.
+ *
+ * @param key
+ * the stream key of the pack.
+ * @param position
+ * the position in the key.
+ * @return the object identified by (key, position).
+ */
+ <T> T get(DfsStreamKey key, long position);
+
+ /**
+ * Get the DfsBlockCacheStats object for this block cache table's
+ * statistics.
+ *
+ * @return the DfsBlockCacheStats tracking this block cache table's
+ * statistics.
+ */
+ DfsBlockCacheStats getDfsBlockCacheStats();
+
+ /**
+ * Keeps track of stats for a block cache table.
+ */
+ class DfsBlockCacheStats {
+ /**
+ * Number of times a block was found in the cache, per pack file
+ * extension.
+ */
+ private final AtomicReference<AtomicLong[]> statHit;
+
+ /**
+ * Number of times a block was not found, and had to be loaded, per pack
+ * file extension.
+ */
+ private final AtomicReference<AtomicLong[]> statMiss;
+
+ /**
+ * Number of blocks evicted due to cache being full, per pack file
+ * extension.
+ */
+ private final AtomicReference<AtomicLong[]> statEvict;
+
+ /**
+ * Number of bytes currently loaded in the cache, per pack file
+ * extension.
+ */
+ private final AtomicReference<AtomicLong[]> liveBytes;
+
+ DfsBlockCacheStats() {
+ statHit = new AtomicReference<>(newCounters());
+ statMiss = new AtomicReference<>(newCounters());
+ statEvict = new AtomicReference<>(newCounters());
+ liveBytes = new AtomicReference<>(newCounters());
+ }
+
+ /**
+ * Increment the {@code statHit} count.
+ *
+ * @param key
+ * key identifying which statHit entry to update.
+ */
+ void incrementHit(DfsStreamKey key) {
+ getStat(statHit, key).incrementAndGet();
+ }
+
+ /**
+ * Increment the {@code statMiss} count.
+ *
+ * @param key
+ * key identifying which statMiss entry to update.
+ */
+ void incrementMiss(DfsStreamKey key) {
+ getStat(statMiss, key).incrementAndGet();
+ }
+
+ /**
+ * Increment the {@code statEvict} count.
+ *
+ * @param key
+ * key identifying which statEvict entry to update.
+ */
+ void incrementEvict(DfsStreamKey key) {
+ getStat(statEvict, key).incrementAndGet();
+ }
+
+ /**
+ * Add {@code size} to the {@code liveBytes} count.
+ *
+ * @param key
+ * key identifying which liveBytes entry to update.
+ * @param size
+ * amount to increment the count by.
+ */
+ void addToLiveBytes(DfsStreamKey key, long size) {
+ getStat(liveBytes, key).addAndGet(size);
+ }
+
+ /**
+ * Get total number of bytes in the cache, per pack file extension.
+ *
+ * @return total number of bytes in the cache, per pack file extension.
+ */
+ long[] getCurrentSize() {
+ return getStatVals(liveBytes);
+ }
+
+ /**
+ * Get number of requests for items in the cache, per pack file
+ * extension.
+ *
+ * @return the number of requests for items in the cache, per pack file
+ * extension.
+ */
+ long[] getHitCount() {
+ return getStatVals(statHit);
+ }
+
+ /**
+ * Get number of requests for items not in the cache, per pack file
+ * extension.
+ *
+ * @return the number of requests for items not in the cache, per pack
+ * file extension.
+ */
+ long[] getMissCount() {
+ return getStatVals(statMiss);
+ }
+
+ /**
+ * Get total number of requests (hit + miss), per pack file extension.
+ *
+ * @return total number of requests (hit + miss), per pack file
+ * extension.
+ */
+ long[] getTotalRequestCount() {
+ AtomicLong[] hit = statHit.get();
+ AtomicLong[] miss = statMiss.get();
+ long[] cnt = new long[Math.max(hit.length, miss.length)];
+ for (int i = 0; i < hit.length; i++) {
+ cnt[i] += hit[i].get();
+ }
+ for (int i = 0; i < miss.length; i++) {
+ cnt[i] += miss[i].get();
+ }
+ return cnt;
+ }
+
+ /**
+ * Get hit ratios, as integer percentages per pack file extension.
+ *
+ * @return hit ratios, as integer percentages per pack file extension.
+ */
+ long[] getHitRatio() {
+ AtomicLong[] hit = statHit.get();
+ AtomicLong[] miss = statMiss.get();
+ long[] ratio = new long[Math.max(hit.length, miss.length)];
+ for (int i = 0; i < ratio.length; i++) {
+ if (i >= hit.length) {
+ ratio[i] = 0;
+ } else if (i >= miss.length) {
+ ratio[i] = 100;
+ } else {
+ long hitVal = hit[i].get();
+ long missVal = miss[i].get();
+ long total = hitVal + missVal;
+ ratio[i] = total == 0 ? 0 : hitVal * 100 / total;
+ }
+ }
+ return ratio;
+ }
+
+ /**
+ * Get number of evictions performed due to cache being full, per pack
+ * file extension.
+ *
+ * @return the number of evictions performed due to cache being full,
+ * per pack file extension.
+ */
+ long[] getEvictions() {
+ return getStatVals(statEvict);
+ }
+
+ private static AtomicLong[] newCounters() {
+ AtomicLong[] ret = new AtomicLong[PackExt.values().length];
+ for (int i = 0; i < ret.length; i++) {
+ ret[i] = new AtomicLong();
+ }
+ return ret;
+ }
+
+ private static long[] getStatVals(AtomicReference<AtomicLong[]> stat) {
+ AtomicLong[] stats = stat.get();
+ long[] cnt = new long[stats.length];
+ for (int i = 0; i < stats.length; i++) {
+ cnt[i] = stats[i].get();
+ }
+ return cnt;
+ }
+
+ private static AtomicLong getStat(AtomicReference<AtomicLong[]> stats,
+ DfsStreamKey key) {
+ int pos = key.packExtPos;
+ while (true) {
+ AtomicLong[] vals = stats.get();
+ if (pos < vals.length) {
+ return vals[pos];
+ }
+ AtomicLong[] expect = vals;
+ vals = new AtomicLong[Math.max(pos + 1,
+ PackExt.values().length)];
+ System.arraycopy(expect, 0, vals, 0, expect.length);
+ for (int i = expect.length; i < vals.length; i++) {
+ vals[i] = new AtomicLong();
+ }
+ if (stats.compareAndSet(expect, vals)) {
+ return vals[pos];
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
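The nested DfsBlockCacheStats groups what used to be loose AtomicLong arrays into one object any table implementation can own. A hedged sketch of the intended call pattern from inside a table implementation (same package, since the increment methods are package-private; lookup() is an illustrative stand-in for storage-specific code, not JGit API):

    // Inside a hypothetical DfsBlockCacheTable implementation:
    private final DfsBlockCacheStats cacheStats = new DfsBlockCacheStats();

    <T> T recordedGet(DfsStreamKey key, long position) {
        T val = lookup(key, position); // illustrative storage lookup
        if (val == null) {
            cacheStats.incrementMiss(key);
        } else {
            cacheStats.incrementHit(key);
        }
        return val;
    }

The counter arrays are indexed by DfsStreamKey.packExtPos, so getHitRatio() and friends report one slot per PackExt, mirroring the counters removed from DfsBlockCache above.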
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java
index 62b55d4734..a177669788 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java
@@ -17,7 +17,6 @@ import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.IN
import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.RECEIVE;
import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.UNREACHABLE_GARBAGE;
import static org.eclipse.jgit.internal.storage.dfs.DfsPackCompactor.configureReftable;
-import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
import static org.eclipse.jgit.internal.storage.pack.PackExt.COMMIT_GRAPH;
import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
import static org.eclipse.jgit.internal.storage.pack.PackExt.OBJECT_SIZE_INDEX;
@@ -709,13 +708,7 @@ public class DfsGarbageCollector {
}
if (pw.prepareBitmapIndex(pm)) {
- try (DfsOutputStream out = objdb.writeFile(pack, BITMAP_INDEX)) {
- CountingOutputStream cnt = new CountingOutputStream(out);
- pw.writeBitmapIndex(cnt);
- pack.addFileExt(BITMAP_INDEX);
- pack.setFileSize(BITMAP_INDEX, cnt.getCount());
- pack.setBlockSize(BITMAP_INDEX, out.blockSize());
- }
+ pw.writeBitmapIndex(objdb.getPackBitmapIndexWriter(pack));
}
PackStatistics stats = pw.getStatistics();
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
index 9f6eb10256..616563ffdd 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
@@ -11,6 +11,7 @@
package org.eclipse.jgit.internal.storage.dfs;
import static java.util.stream.Collectors.joining;
+import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -26,11 +27,14 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
+import org.eclipse.jgit.internal.storage.file.PackBitmapIndexWriterV1;
+import org.eclipse.jgit.internal.storage.pack.PackBitmapIndexWriter;
import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectDatabase;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectReader;
+import org.eclipse.jgit.util.io.CountingOutputStream;
/**
* Manages objects stored in
@@ -743,4 +747,28 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
dirty = true;
}
}
+
+ /**
+ * Returns a writer to store the bitmap index in this object database.
+ *
+ * @param pack
+ * Pack file to which the bitmaps are associated.
+ * @return a writer to store bitmaps associated with the pack
+ * @throws IOException
+ * when some I/O problem occurs while creating or writing to
+ * output stream
+ */
+ public PackBitmapIndexWriter getPackBitmapIndexWriter(
+ DfsPackDescription pack) throws IOException {
+ return (bitmaps, packDataChecksum) -> {
+ try (DfsOutputStream out = writeFile(pack, BITMAP_INDEX)) {
+ CountingOutputStream cnt = new CountingOutputStream(out);
+ PackBitmapIndexWriterV1 iw = new PackBitmapIndexWriterV1(cnt);
+ iw.write(bitmaps, packDataChecksum);
+ pack.addFileExt(BITMAP_INDEX);
+ pack.setFileSize(BITMAP_INDEX, cnt.getCount());
+ pack.setBlockSize(BITMAP_INDEX, out.blockSize());
+ }
+ };
+ }
}
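Because getPackBitmapIndexWriter is public and returns a functional interface, a DFS backend can override it to route bitmaps somewhere other than a BITMAP_INDEX stream. A hedged sketch (storeBitmaps is a hypothetical helper, not JGit API):

    @Override
    public PackBitmapIndexWriter getPackBitmapIndexWriter(
            DfsPackDescription pack) throws IOException {
        return (bitmaps, packDataChecksum) -> {
            // Hypothetical: persist the bitmaps in a side store keyed by
            // the pack description instead of writing a pack-ext file.
            storeBitmaps(pack, bitmaps, packDataChecksum);
        };
    }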
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
index 42b1d235bf..5cc2a57aba 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
@@ -52,10 +52,12 @@ import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
import org.eclipse.jgit.internal.storage.pack.StoredObjectRepresentation;
import org.eclipse.jgit.lib.AbbreviatedObjectId;
import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.util.LongList;
/**
@@ -68,7 +70,10 @@ public final class DfsPackFile extends BlockBasedFile {
private static final long REF_POSITION = 0;
- private static final PackBitmapIndexLoader DEFAULT_BITMAP_LOADER = new StreamPackBitmapIndexLoader();
+ /**
+ * Loader for the default file-based {@link PackBitmapIndex} implementation.
+ */
+ public static final PackBitmapIndexLoader DEFAULT_BITMAP_LOADER = new StreamPackBitmapIndexLoader();
/** Index mapping {@link ObjectId} to position within the pack stream. */
private volatile PackIndex index;
@@ -113,7 +118,18 @@ public final class DfsPackFile extends BlockBasedFile {
this(cache, desc, DEFAULT_BITMAP_LOADER);
}
- DfsPackFile(DfsBlockCache cache, DfsPackDescription desc, PackBitmapIndexLoader bitmapLoader) {
+ /**
+ * Create an instance of DfsPackFile with a custom bitmap loader
+ *
+ * @param cache
+ * cache that owns the pack data
+ * @param desc
+ * description of the pack within the DFS
+ * @param bitmapLoader
+ * loader to get the bitmaps of this pack (if any)
+ */
+ public DfsPackFile(DfsBlockCache cache, DfsPackDescription desc,
+ PackBitmapIndexLoader bitmapLoader) {
super(cache, desc, PACK);
int bs = desc.getBlockSize(PACK);
@@ -1283,11 +1299,16 @@ public final class DfsPackFile extends BlockBasedFile {
DfsStreamKey cgkey) throws IOException {
ctx.stats.readCommitGraph++;
long start = System.nanoTime();
+ StoredConfig repoConfig = ctx.db.getRepository().getConfig();
+ boolean readChangedPathFilters = repoConfig.getBoolean(
+ ConfigConstants.CONFIG_COMMIT_GRAPH_SECTION,
+ ConfigConstants.CONFIG_KEY_READ_CHANGED_PATHS, false);
try (ReadableChannel rc = ctx.db.openFile(desc, COMMIT_GRAPH)) {
long size;
CommitGraph cg;
try {
- cg = CommitGraphLoader.read(alignTo8kBlocks(rc));
+ cg = CommitGraphLoader.read(alignTo8kBlocks(rc),
+ readChangedPathFilters);
} finally {
size = rc.position();
ctx.stats.readCommitGraphBytes += size;
@@ -1364,12 +1385,22 @@ public final class DfsPackFile extends BlockBasedFile {
* The bytes can be 0, if the implementation doesn't do any initial
* loading.
*/
- class LoadResult {
- final PackBitmapIndex bitmapIndex;
-
- final long bytesRead;
-
- LoadResult(PackBitmapIndex packBitmapIndex, long bytesRead) {
+ public class LoadResult {
+ /** The loaded {@link PackBitmapIndex}. */
+ public final PackBitmapIndex bitmapIndex;
+
+ /** The bytes read upon initial load (may be 0). */
+ public final long bytesRead;
+
+ /**
+ * Constructs the LoadResult.
+ *
+ * @param packBitmapIndex
+ * the loaded index.
+ * @param bytesRead
+ * the bytes read upon loading.
+ */
+ public LoadResult(PackBitmapIndex packBitmapIndex, long bytesRead) {
this.bitmapIndex = packBitmapIndex;
this.bytesRead = bytesRead;
}
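With the constructor and DEFAULT_BITMAP_LOADER now public, callers outside the package can open a pack whose bitmaps come from elsewhere. A hedged fragment (desc and MyBitmapLoader are illustrative; the loader type is the PackBitmapIndexLoader referenced above):

    // Open a DFS pack using a custom bitmap source instead of the
    // default stream-based loader.
    DfsPackFile pack = new DfsPackFile(DfsBlockCache.getInstance(), desc,
            new MyBitmapLoader()); // hypothetical PackBitmapIndexLoader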
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
index a342796cbe..9cfcbaa5f7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
@@ -21,7 +21,6 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
-import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
@@ -308,7 +307,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
private <T extends ObjectId> Iterable<FoundObject<T>> findAll(
Iterable<T> objectIds) throws IOException {
- Collection<T> pending = new LinkedList<>();
+ HashSet<T> pending = new HashSet<>();
for (T id : objectIds) {
pending.add(id);
}
@@ -328,22 +327,21 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
}
private <T extends ObjectId> void findAllImpl(PackList packList,
- Collection<T> pending, List<FoundObject<T>> r) {
+ HashSet<T> pending, List<FoundObject<T>> r) {
DfsPackFile[] packs = packList.packs;
if (packs.length == 0) {
return;
}
int lastIdx = 0;
DfsPackFile lastPack = packs[lastIdx];
-
- OBJECT_SCAN: for (Iterator<T> it = pending.iterator(); it.hasNext();) {
- T t = it.next();
+ HashSet<T> toRemove = new HashSet<>();
+ OBJECT_SCAN: for (T t : pending) {
if (!skipGarbagePack(lastPack)) {
try {
long p = lastPack.findOffset(this, t);
if (0 < p) {
r.add(new FoundObject<>(t, lastIdx, lastPack, p));
- it.remove();
+ toRemove.add(t);
continue;
}
} catch (IOException e) {
@@ -361,7 +359,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
long p = pack.findOffset(this, t);
if (0 < p) {
r.add(new FoundObject<>(t, i, pack, p));
- it.remove();
+ toRemove.add(t);
lastIdx = i;
lastPack = pack;
continue OBJECT_SCAN;
@@ -371,6 +369,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
}
}
}
+ pending.removeAll(toRemove);
last = lastPack;
}
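The loop rewrite also changes how matches leave the pending set: removing from a HashSet inside a for-each loop would throw ConcurrentModificationException, so matches are batched in toRemove and dropped in a single removeAll pass. The pattern in isolation (plain Java, no JGit types):

    import java.util.HashSet;
    import java.util.Set;

    class CollectThenRemove {
        public static void main(String[] args) {
            Set<String> pending = new HashSet<>(Set.of("a", "b", "c"));
            Set<String> found = new HashSet<>();
            for (String id : pending) {
                if (!id.equals("c")) { // stand-in for "offset found in a pack"
                    found.add(id); // pending.remove(id) here would throw
                }
            }
            pending.removeAll(found); // bulk removal after the scan
            System.out.println(pending); // prints [c]
        }
    }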
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/BasePackBitmapIndex.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/BasePackBitmapIndex.java
index c2b3926309..5f979b0daa 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/BasePackBitmapIndex.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/BasePackBitmapIndex.java
@@ -18,7 +18,7 @@ import com.googlecode.javaewah.EWAHCompressedBitmap;
/**
* Base implementation of the PackBitmapIndex.
*/
-abstract class BasePackBitmapIndex extends PackBitmapIndex {
+abstract class BasePackBitmapIndex implements PackBitmapIndex {
private final ObjectIdOwnerMap<StoredBitmap> bitmaps;
BasePackBitmapIndex(ObjectIdOwnerMap<StoredBitmap> bitmaps) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
index fc058abcca..cf26f8d284 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
@@ -49,7 +49,6 @@ import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -866,7 +865,7 @@ public class GC {
}
}
- List<ObjectIdSet> excluded = new LinkedList<>();
+ List<ObjectIdSet> excluded = new ArrayList<>();
for (Pack p : repo.getObjectDatabase().getPacks()) {
checkCancelled();
if (!shouldPackKeptObjects() && p.shouldBeKept()) {
@@ -1399,7 +1398,7 @@ public class GC {
FileChannel idxChannel = fos.getChannel();
OutputStream idxStream = Channels
.newOutputStream(idxChannel)) {
- pw.writeBitmapIndex(idxStream);
+ pw.writeBitmapIndex(new PackBitmapIndexWriterV1(idxStream));
idxChannel.force(true);
}
}
@@ -1897,7 +1896,7 @@ public class GC {
}
private String getProcDesc() {
- StringBuffer s = new StringBuffer(Long.toString(getPID()));
+ StringBuilder s = new StringBuilder(Long.toString(getPID()));
s.append(' ');
s.append(getHostName());
return s.toString();
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/Pack.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/Pack.java
index 212dbb20aa..be457644d9 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/Pack.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/Pack.java
@@ -187,18 +187,18 @@ public class Pack implements Iterable<PackIndex.MutableEntry> {
}
if (packChecksum == null) {
- packChecksum = idx.packChecksum;
+ packChecksum = idx.getChecksum();
fileSnapshot.setChecksum(
ObjectId.fromRaw(packChecksum));
} else if (!Arrays.equals(packChecksum,
- idx.packChecksum)) {
+ idx.getChecksum())) {
throw new PackMismatchException(MessageFormat
.format(JGitText.get().packChecksumMismatch,
packFile.getPath(),
PackExt.PACK.getExtension(),
Hex.toHexString(packChecksum),
PackExt.INDEX.getExtension(),
- Hex.toHexString(idx.packChecksum)));
+ Hex.toHexString(idx.getChecksum())));
}
loadedIdx = optionally(idx);
return idx;
@@ -791,7 +791,7 @@ public class Pack implements Iterable<PackIndex.MutableEntry> {
MessageFormat.format(JGitText.get().packChecksumMismatch,
getPackFile(), PackExt.PACK.getExtension(),
Hex.toHexString(buf), PackExt.INDEX.getExtension(),
- Hex.toHexString(idx.packChecksum)));
+ Hex.toHexString(idx.getChecksum())));
}
}
@@ -1154,7 +1154,7 @@ public class Pack implements Iterable<PackIndex.MutableEntry> {
PackBitmapIndex idx = PackBitmapIndex.open(bitmapIdxFile, idx(),
getReverseIdx());
// At this point, idx() will have set packChecksum.
- if (Arrays.equals(packChecksum, idx.packChecksum)) {
+ if (Arrays.equals(packChecksum, idx.getPackChecksum())) {
bitmapIdx = optionally(idx);
return idx;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndex.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndex.java
index def4f3dc11..cbda8fc77c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndex.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndex.java
@@ -35,7 +35,7 @@ import com.googlecode.javaewah.EWAHCompressedBitmap;
* {@link #findPosition(AnyObjectId)} can be used to build other bitmaps that are
* compatible with the encoded bitmaps available from the index.
*/
-public abstract class PackBitmapIndex {
+public interface PackBitmapIndex {
/** Flag bit denoting the bitmap should be reused during index creation. */
public static final int FLAG_REUSE = 1;
@@ -132,8 +132,14 @@ public abstract class PackBitmapIndex {
reverseIndexSupplier, loadParallelRevIndex);
}
- /** Footer checksum applied on the bottom of the pack file. */
- byte[] packChecksum;
+ /**
+ * Footer checksum applied on the bottom of the pack file.
+ *
+ * @return checksum as a byte array, or null if the implementation does
+ * not provide one
+ */
+ default byte[] getPackChecksum() {
+ return null;
+ }
/**
* Finds the position in the bitmap of the object.
@@ -148,7 +154,9 @@ public abstract class PackBitmapIndex {
* Get the object at the bitmap position.
*
* @param position
- * the id for which the object will be found.
+ * the offset in the bitmap which corresponds to an object of
+ * interest. This position is the same as the order of the object
+ * in the {@link PackFile}.
* @return the ObjectId.
* @throws java.lang.IllegalArgumentException
* when the item is not found.
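Since getPackChecksum() now has a null-returning default, callers comparing checksums should tolerate null from implementations that never set one; the Pack.java hunk above already does, via Arrays.equals, which handles a null argument. A hedged fragment of the caller-side pattern:

    // bitmapIndex may be any PackBitmapIndex implementation.
    byte[] indexChecksum = bitmapIndex.getPackChecksum();
    if (indexChecksum != null
            && Arrays.equals(packChecksum, indexChecksum)) {
        // checksums agree: the bitmap index matches this pack
    }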
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexBuilder.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexBuilder.java
index a47f2dfcc8..08d2b7178f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexBuilder.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexBuilder.java
@@ -11,9 +11,9 @@
package org.eclipse.jgit.internal.storage.file;
import java.text.MessageFormat;
+import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
-import java.util.LinkedList;
import java.util.List;
import org.eclipse.jgit.internal.JGitText;
@@ -41,8 +41,7 @@ public class PackBitmapIndexBuilder extends BasePackBitmapIndex {
private final EWAHCompressedBitmap tags;
private final BlockList<PositionEntry> byOffset;
- private final LinkedList<StoredBitmap>
- bitmapsToWriteXorBuffer = new LinkedList<>();
+ private final ArrayDeque<StoredBitmap> bitmapsToWriteXorBuffer = new ArrayDeque<>();
private List<StoredEntry> bitmapsToWrite = new ArrayList<>();
@@ -106,7 +105,7 @@ public class PackBitmapIndexBuilder extends BasePackBitmapIndex {
.signum(a.getOffset() - b.getOffset()));
for (int i = 0; i < entries.size(); i++) {
PositionEntry e = positionEntries.get(entries.get(i));
- e.offsetPosition = i;
+ e.ridxPosition = i;
byOffset.add(e);
}
}
@@ -191,8 +190,8 @@ public class PackBitmapIndexBuilder extends BasePackBitmapIndex {
throw new IllegalStateException();
}
bestBitmap.trim();
- StoredEntry result = new StoredEntry(entry.namePosition, bestBitmap,
- bestXorOffset, bitmapToWrite.getFlags());
+ StoredEntry result = new StoredEntry(entry, entry.idxPosition,
+ bestBitmap, bestXorOffset, bitmapToWrite.getFlags());
return result;
}
@@ -235,7 +234,7 @@ public class PackBitmapIndexBuilder extends BasePackBitmapIndex {
PositionEntry entry = positionEntries.get(objectId);
if (entry == null)
return -1;
- return entry.offsetPosition;
+ return entry.ridxPosition;
}
@Override
@@ -323,20 +322,44 @@ public class PackBitmapIndexBuilder extends BasePackBitmapIndex {
generateStoredEntry(bitmapsToWriteXorBuffer.pollFirst()));
}
- Collections.reverse(bitmapsToWrite);
- return bitmapsToWrite;
+ List<StoredEntry> bitmapsToReturn = new ArrayList<>(bitmapsToWrite);
+ Collections.reverse(bitmapsToReturn);
+ return bitmapsToReturn;
}
/** Data object for the on disk representation of a bitmap entry. */
public static final class StoredEntry {
- private final long objectId;
+ private final ObjectId objectId;
+
+ private final long idxPosition;
+
private final EWAHCompressedBitmap bitmap;
+
private final int xorOffset;
+
private final int flags;
- StoredEntry(long objectId, EWAHCompressedBitmap bitmap,
- int xorOffset, int flags) {
+ /**
+ * Create a StoredEntry
+ *
+ * @param objectId
+ * objectId of the object associated with the bitmap
+ * @param idxPosition
+ * position of this object into the pack index (i.e. sorted
+ * by sha1)
+ * @param bitmap
+ * bitmap associated with this object
+ * @param xorOffset
+ * offset of the bitmap against which this bitmap is
+ * xor-compressed. If 0, then this bitmap is not
+ * xor-compressed against any other bitmap
+ * @param flags
+ * flags for this bitmap
+ */
+ public StoredEntry(ObjectId objectId, long idxPosition,
+ EWAHCompressedBitmap bitmap, int xorOffset, int flags) {
this.objectId = objectId;
+ this.idxPosition = idxPosition;
this.bitmap = bitmap;
this.xorOffset = xorOffset;
this.flags = flags;
@@ -370,23 +393,29 @@ public class PackBitmapIndexBuilder extends BasePackBitmapIndex {
}
/**
- * Get the ObjectId
- *
- * @return the ObjectId
+ * @return the position of the object with this bitmap in the primary
+ * index (i.e. ordered by sha1)
+ */
+ public long getIdxPosition() {
+ return idxPosition;
+ }
+
+ /**
+ * @return the objectId of the object associated with this bitmap
*/
- public long getObjectId() {
+ public ObjectId getObjectId() {
return objectId;
}
}
private static final class PositionEntry extends ObjectIdOwnerMap.Entry {
- final int namePosition;
+ final int idxPosition;
- int offsetPosition;
+ int ridxPosition;
- PositionEntry(AnyObjectId objectId, int namePosition) {
+ PositionEntry(AnyObjectId objectId, int idxPosition) {
super(objectId);
- this.namePosition = namePosition;
+ this.idxPosition = idxPosition;
}
}
}
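StoredEntry now records both the ObjectId and the object's position in the sha1-sorted primary index, which is what the V1 writer serializes further below. A hedged construction fragment (all values illustrative):

    // xorOffset 0 means the bitmap is stored without xor-compression.
    StoredEntry entry = new StoredEntry(objectId, idxPosition, bitmap,
            0, PackBitmapIndex.FLAG_REUSE);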
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexRemapper.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexRemapper.java
index bb7cfd0464..ffbc0737ac 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexRemapper.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexRemapper.java
@@ -28,8 +28,8 @@ import com.googlecode.javaewah.IntIterator;
* implementations this implementation is not thread safe, as it is intended to
* be used with a PackBitmapIndexBuilder, which is also not thread safe.
*/
-public class PackBitmapIndexRemapper extends PackBitmapIndex
- implements Iterable<PackBitmapIndexRemapper.Entry> {
+public class PackBitmapIndexRemapper
+ implements PackBitmapIndex, Iterable<PackBitmapIndexRemapper.Entry> {
private final BasePackBitmapIndex oldPackIndex;
final PackBitmapIndex newPackIndex;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexV1.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexV1.java
index f2f24b39cb..19608c1ce5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexV1.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexV1.java
@@ -46,6 +46,8 @@ class PackBitmapIndexV1 extends BasePackBitmapIndex {
private static final int MAX_XOR_OFFSET = 126;
+ private byte[] packChecksum;
+
private static final ExecutorService executor = Executors
.newCachedThreadPool(new ThreadFactory() {
private final ThreadFactory baseFactory = Executors
@@ -269,6 +271,11 @@ class PackBitmapIndexV1 extends BasePackBitmapIndex {
return getPackIndex().hashCode();
}
+ @Override
+ public byte[] getPackChecksum() {
+ return this.packChecksum;
+ }
+
PackIndex getPackIndex() {
return packIndex;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexWriterV1.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexWriterV1.java
index a5c8423dfd..38d7c90894 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexWriterV1.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackBitmapIndexWriterV1.java
@@ -19,6 +19,7 @@ import java.text.MessageFormat;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.file.PackBitmapIndexBuilder.StoredEntry;
+import org.eclipse.jgit.internal.storage.pack.PackBitmapIndexWriter;
import org.eclipse.jgit.lib.Constants;
import com.googlecode.javaewah.EWAHCompressedBitmap;
@@ -28,7 +29,7 @@ import com.googlecode.javaewah.EWAHCompressedBitmap;
*
* @see PackBitmapIndexV1
*/
-public class PackBitmapIndexWriterV1 {
+public class PackBitmapIndexWriterV1 implements PackBitmapIndexWriter {
private final DigestOutputStream out;
private final DataOutput dataOutput;
@@ -60,6 +61,7 @@ public class PackBitmapIndexWriterV1 {
* an error occurred while writing to the output stream, or this
* index format cannot store the object data supplied.
*/
+ @Override
public void write(PackBitmapIndexBuilder bitmaps, byte[] packDataChecksum)
throws IOException {
if (bitmaps == null || packDataChecksum.length != 20)
@@ -113,7 +115,7 @@ public class PackBitmapIndexWriterV1 {
private void writeBitmapEntry(StoredEntry entry) throws IOException {
// Write object, XOR offset, and bitmap
- dataOutput.writeInt((int) entry.getObjectId());
+ dataOutput.writeInt((int) entry.getIdxPosition());
out.write(entry.getXorOffset());
out.write(entry.getFlags());
writeBitmap(entry.getBitmap());
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndex.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndex.java
index c42d1c8866..c0540d5a4c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndex.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndex.java
@@ -42,8 +42,8 @@ import org.eclipse.jgit.util.io.SilentFileInputStream;
* by ObjectId.
* </p>
*/
-public abstract class PackIndex
- implements Iterable<PackIndex.MutableEntry>, ObjectIdSet {
+public interface PackIndex
+ extends Iterable<PackIndex.MutableEntry>, ObjectIdSet {
/**
* Open an existing pack <code>.idx</code> file for reading.
* <p>
@@ -61,7 +61,7 @@ public abstract class PackIndex
* the file exists but could not be read due to security errors,
* unrecognized data version, or unexpected data corruption.
*/
- public static PackIndex open(File idxFile) throws IOException {
+ static PackIndex open(File idxFile) throws IOException {
try (SilentFileInputStream fd = new SilentFileInputStream(
idxFile)) {
return read(fd);
@@ -92,7 +92,7 @@ public abstract class PackIndex
* @throws org.eclipse.jgit.errors.CorruptObjectException
* the stream does not contain a valid pack index.
*/
- public static PackIndex read(InputStream fd) throws IOException,
+ static PackIndex read(InputStream fd) throws IOException,
CorruptObjectException {
final byte[] hdr = new byte[8];
IO.readFully(fd, hdr, 0, hdr.length);
@@ -116,9 +116,6 @@ public abstract class PackIndex
return true;
}
- /** Footer checksum applied on the bottom of the pack file. */
- protected byte[] packChecksum;
-
/**
* Determine if an object is contained within the pack file.
*
@@ -126,12 +123,12 @@ public abstract class PackIndex
* the object to look for. Must not be null.
* @return true if the object is listed in this index; false otherwise.
*/
- public boolean hasObject(AnyObjectId id) {
+ default boolean hasObject(AnyObjectId id) {
return findOffset(id) != -1;
}
@Override
- public boolean contains(AnyObjectId id) {
+ default boolean contains(AnyObjectId id) {
return findOffset(id) != -1;
}
@@ -147,7 +144,7 @@ public abstract class PackIndex
* </p>
*/
@Override
- public abstract Iterator<MutableEntry> iterator();
+ Iterator<MutableEntry> iterator();
/**
* Obtain the total number of objects described by this index.
@@ -155,7 +152,7 @@ public abstract class PackIndex
* @return number of objects in this index, and likewise in the associated
* pack that this index was generated from.
*/
- public abstract long getObjectCount();
+ long getObjectCount();
/**
* Obtain the total number of objects needing 64 bit offsets.
@@ -163,7 +160,7 @@ public abstract class PackIndex
* @return number of objects in this index using a 64 bit offset; that is an
* object positioned after the 2 GB position within the file.
*/
- public abstract long getOffset64Count();
+ long getOffset64Count();
/**
* Get ObjectId for the n-th object entry returned by {@link #iterator()}.
@@ -185,7 +182,7 @@ public abstract class PackIndex
* is 0, the second is 1, etc.
* @return the ObjectId for the corresponding entry.
*/
- public abstract ObjectId getObjectId(long nthPosition);
+ ObjectId getObjectId(long nthPosition);
/**
* Get ObjectId for the n-th object entry returned by {@link #iterator()}.
@@ -209,7 +206,7 @@ public abstract class PackIndex
* negative, but still valid.
* @return the ObjectId for the corresponding entry.
*/
- public final ObjectId getObjectId(int nthPosition) {
+ default ObjectId getObjectId(int nthPosition) {
if (nthPosition >= 0)
return getObjectId((long) nthPosition);
final int u31 = nthPosition >>> 1;
@@ -228,7 +225,7 @@ public abstract class PackIndex
* etc. Positions past 2**31-1 are negative, but still valid.
* @return the offset in a pack for the corresponding entry.
*/
- abstract long getOffset(long nthPosition);
+ long getOffset(long nthPosition);
/**
* Locate the file offset position for the requested object.
@@ -239,7 +236,7 @@ public abstract class PackIndex
* object does not exist in this index and is thus not stored in the
* associated pack.
*/
- public abstract long findOffset(AnyObjectId objId);
+ long findOffset(AnyObjectId objId);
/**
* Locate the position of this id in the list of object-ids in the index
@@ -250,7 +247,7 @@ public abstract class PackIndex
* of ids stored in this index; -1 if the object does not exist in
* this index and is thus not stored in the associated pack.
*/
- public abstract int findPosition(AnyObjectId objId);
+ int findPosition(AnyObjectId objId);
/**
* Retrieve stored CRC32 checksum of the requested object raw-data
@@ -264,7 +261,7 @@ public abstract class PackIndex
* @throws java.lang.UnsupportedOperationException
* when this index doesn't support CRC32 checksum
*/
- public abstract long findCRC32(AnyObjectId objId)
+ long findCRC32(AnyObjectId objId)
throws MissingObjectException, UnsupportedOperationException;
/**
@@ -272,7 +269,7 @@ public abstract class PackIndex
*
* @return true if CRC32 is stored, false otherwise
*/
- public abstract boolean hasCRC32Support();
+ boolean hasCRC32Support();
/**
* Find objects matching the prefix abbreviation.
@@ -288,7 +285,7 @@ public abstract class PackIndex
* @throws java.io.IOException
* the index cannot be read.
*/
- public abstract void resolve(Set<ObjectId> matches, AbbreviatedObjectId id,
+ void resolve(Set<ObjectId> matches, AbbreviatedObjectId id,
int matchLimit) throws IOException;
/**
@@ -297,16 +294,14 @@ public abstract class PackIndex
* @return the checksum of the pack; caller must not modify it
* @since 5.5
*/
- public byte[] getChecksum() {
- return packChecksum;
- }
+ byte[] getChecksum();
/**
* Represent mutable entry of pack index consisting of object id and offset
* in pack (both mutable).
*
*/
- public static class MutableEntry {
+ class MutableEntry {
final MutableObjectId idBuffer = new MutableObjectId();
long offset;
@@ -358,16 +353,25 @@ public abstract class PackIndex
}
}
+ /**
+ * Base implementation of the iterator over index entries.
+ */
abstract class EntriesIterator implements Iterator<MutableEntry> {
protected final MutableEntry entry = initEntry();
+ private final long objectCount;
+
+ protected EntriesIterator(long objectCount) {
+ this.objectCount = objectCount;
+ }
+
protected long returnedNumber = 0;
protected abstract MutableEntry initEntry();
@Override
public boolean hasNext() {
- return returnedNumber < getObjectCount();
+ return returnedNumber < objectCount;
}
/**
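The constructor parameter lets hasNext() compare against a value captured once instead of re-invoking getObjectCount() on every element. The same snapshot-the-bound pattern outside JGit, as a self-contained sketch:

    import java.util.Iterator;

    // Bound captured at construction; hasNext() is a plain field compare.
    abstract class BoundedIterator<E> implements Iterator<E> {
        private final long count;

        protected long returned = 0;

        protected BoundedIterator(long count) {
            this.count = count;
        }

        @Override
        public boolean hasNext() {
            return returned < count;
        }
    }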
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndexV1.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndexV1.java
index 4563c9039c..d7c83785d8 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndexV1.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndexV1.java
@@ -29,13 +29,16 @@ import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.NB;
-class PackIndexV1 extends PackIndex {
+class PackIndexV1 implements PackIndex {
private static final int IDX_HDR_LEN = 256 * 4;
private static final int RECORD_SIZE = 4 + Constants.OBJECT_ID_LENGTH;
private final long[] idxHeader;
+ /** Footer checksum applied on the bottom of the pack file. */
+ protected byte[] packChecksum;
+
byte[][] idxdata;
private long objectCnt;
@@ -118,7 +121,7 @@ class PackIndexV1 extends PackIndex {
}
@Override
- long getOffset(long nthPosition) {
+ public long getOffset(long nthPosition) {
final int levelOne = findLevelOne(nthPosition);
final int levelTwo = getLevelTwo(nthPosition, levelOne);
final int p = (4 + Constants.OBJECT_ID_LENGTH) * levelTwo;
@@ -200,7 +203,7 @@ class PackIndexV1 extends PackIndex {
@Override
public Iterator<MutableEntry> iterator() {
- return new IndexV1Iterator();
+ return new IndexV1Iterator(objectCnt);
}
@Override
@@ -238,11 +241,20 @@ class PackIndexV1 extends PackIndex {
return (RECORD_SIZE * mid) + 4;
}
+ @Override
+ public byte[] getChecksum() {
+ return packChecksum;
+ }
+
private class IndexV1Iterator extends EntriesIterator {
int levelOne;
int levelTwo;
+ IndexV1Iterator(long objectCount) {
+ super(objectCount);
+ }
+
@Override
protected MutableEntry initEntry() {
return new MutableEntry() {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndexV2.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndexV2.java
index 751b62dc40..caf8b71180 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndexV2.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndexV2.java
@@ -28,7 +28,7 @@ import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.NB;
/** Support for the pack index v2 format. */
-class PackIndexV2 extends PackIndex {
+class PackIndexV2 implements PackIndex {
private static final long IS_O64 = 1L << 31;
private static final int FANOUT = 256;
@@ -37,6 +37,9 @@ class PackIndexV2 extends PackIndex {
private static final byte[] NO_BYTES = {};
+ /** Footer checksum applied on the bottom of the pack file. */
+ protected byte[] packChecksum;
+
private long objectCnt;
private final long[] fanoutTable;
@@ -221,7 +224,7 @@ class PackIndexV2 extends PackIndex {
@Override
public Iterator<MutableEntry> iterator() {
- return new EntriesIteratorV2();
+ return new EntriesIteratorV2(objectCnt);
}
@Override
@@ -281,11 +284,20 @@ class PackIndexV2 extends PackIndex {
return -1;
}
+ @Override
+ public byte[] getChecksum() {
+ return packChecksum;
+ }
+
private class EntriesIteratorV2 extends EntriesIterator {
int levelOne;
int levelTwo;
+ EntriesIteratorV2(long objectCount) {
+ super(objectCount);
+ }
+
@Override
protected MutableEntry initEntry() {
return new MutableEntry() {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
index 169dce1cc0..8e57bf9f2f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
@@ -41,11 +41,11 @@ import java.nio.file.Paths;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.text.MessageFormat;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
@@ -64,8 +64,8 @@ import org.eclipse.jgit.events.RefsChangedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.CoreConfig.TrustPackedRefsStat;
import org.eclipse.jgit.lib.CoreConfig.TrustLooseRefStat;
+import org.eclipse.jgit.lib.CoreConfig.TrustPackedRefsStat;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdRef;
import org.eclipse.jgit.lib.Ref;
@@ -413,7 +413,7 @@ public class RefDirectory extends RefDatabase {
@Override
public List<Ref> getAdditionalRefs() throws IOException {
- List<Ref> ret = new LinkedList<>();
+ List<Ref> ret = new ArrayList<>();
for (String name : additionalRefsNames) {
Ref r = exactRef(name);
if (r != null)
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SnapshottingRefDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SnapshottingRefDirectory.java
index 46607f60d9..1dc5776e06 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SnapshottingRefDirectory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SnapshottingRefDirectory.java
@@ -16,15 +16,21 @@ import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.revwalk.RevWalk;
+import java.io.File;
import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
/**
* Snapshotting write-through cache of a {@link RefDirectory}.
* <p>
* This is intended to be short-term write-through snapshot based cache used in
- * a request scope to avoid re-reading packed-refs on each read. A future
- * improvement could also snapshot loose refs.
+ * a request scope to avoid re-reading packed-refs on each read and to avoid
+ * refreshing paths to a loose ref that has already been refreshed.
* <p>
* Only use this class when concurrent writes from other requests (not using the
* same instance of SnapshottingRefDirectory) generally need not be visible to
@@ -34,6 +40,7 @@ import java.util.List;
*/
class SnapshottingRefDirectory extends RefDirectory {
final RefDirectory refDb;
+ private final Set<File> refreshedLooseRefDirs = ConcurrentHashMap.newKeySet();
private volatile boolean isValid;
@@ -67,6 +74,22 @@ class SnapshottingRefDirectory extends RefDirectory {
}
@Override
+ void refreshPathToLooseRef(Path refPath) {
+ for (int i = 1; i < refPath.getNameCount(); i++) {
+ File dir = fileFor(refPath.subpath(0, i).toString());
+ if (!refreshedLooseRefDirs.contains(dir)) {
+ try (InputStream stream = Files.newInputStream(dir.toPath())) {
+ // open the dir to refresh attributes (on some NFS clients)
+ } catch (IOException e) {
+ break; // loose ref may not exist
+ } finally {
+ refreshedLooseRefDirs.add(dir);
+ }
+ }
+ }
+ }
+
+ @Override
void delete(RefDirectoryUpdate update) throws IOException {
refreshSnapshot();
super.delete(update);
@@ -107,6 +130,7 @@ class SnapshottingRefDirectory extends RefDirectory {
}
synchronized void invalidateSnapshot() {
+ refreshedLooseRefDirs.clear();
isValid = false;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/memory/TernarySearchTree.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/memory/TernarySearchTree.java
index acc1c830d5..72434dbffe 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/memory/TernarySearchTree.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/memory/TernarySearchTree.java
@@ -9,9 +9,9 @@
*/
package org.eclipse.jgit.internal.storage.memory;
+import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@@ -340,7 +340,7 @@ public final class TernarySearchTree<Value> {
* @return all keys
*/
public Iterable<String> getKeys() {
- Queue<String> queue = new LinkedList<>();
+ Queue<String> queue = new ArrayDeque<>();
lock.readLock().lock();
try {
findKeysWithPrefix(root, new StringBuilder(), queue);
@@ -358,7 +358,7 @@ public final class TernarySearchTree<Value> {
* @return keys starting with given prefix
*/
public Iterable<String> getKeysWithPrefix(String prefix) {
- Queue<String> keys = new LinkedList<>();
+ Queue<String> keys = new ArrayDeque<>();
if (prefix == null) {
return keys;
}
@@ -486,7 +486,7 @@ public final class TernarySearchTree<Value> {
* @return keys matching given pattern.
*/
public Iterable<String> getKeysMatching(String pattern) {
- Queue<String> keys = new LinkedList<>();
+ Queue<String> keys = new ArrayDeque<>();
lock.readLock().lock();
try {
findKeysWithPrefix(root, new StringBuilder(), 0, pattern, keys);
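Several files in this commit swap LinkedList for ArrayDeque (or ArrayList) where only queue or list behavior is needed; ArrayDeque backs the same Queue API with a growable array instead of per-node allocation, though unlike LinkedList it rejects null elements. A standalone illustration:

    import java.util.ArrayDeque;
    import java.util.Queue;

    class QueueSwap {
        public static void main(String[] args) {
            Queue<String> keys = new ArrayDeque<>(); // was: new LinkedList<>()
            keys.add("refs/heads/master");
            keys.add("refs/tags/v1.0");
            System.out.println(keys.poll()); // FIFO order: refs/heads/master
            // Note: keys.add(null) would throw NullPointerException here.
        }
    }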
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaTask.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaTask.java
index 9a3f4b07ee..03d6f16733 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaTask.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaTask.java
@@ -11,10 +11,10 @@
package org.eclipse.jgit.internal.storage.pack;
import java.io.IOException;
+import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
-import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Callable;
@@ -232,14 +232,15 @@ final class DeltaTask implements Callable<Object> {
}
private final Block block;
- final LinkedList<Slice> slices;
+
+ final ArrayDeque<Slice> slices;
private ObjectReader or;
private DeltaWindow dw;
DeltaTask(Block b) {
this.block = b;
- this.slices = new LinkedList<>();
+ this.slices = new ArrayDeque<>();
}
void add(Slice s) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackBitmapIndexWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackBitmapIndexWriter.java
new file mode 100644
index 0000000000..9cf8c7f2b5
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackBitmapIndexWriter.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2024, Google Inc.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * https://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+package org.eclipse.jgit.internal.storage.pack;
+
+import java.io.IOException;
+
+import org.eclipse.jgit.internal.storage.file.PackBitmapIndexBuilder;
+
+/**
+ * Represents a function that accepts a collection of bitmaps and writes them
+ * into storage.
+ */
+@FunctionalInterface
+public interface PackBitmapIndexWriter {
+ /**
+ * @param bitmaps
+ * list of bitmaps to be written to a bitmap index
+ * @param packChecksum
+ * checksum of the pack that the bitmap index refers to
+ * @throws IOException
+ * thrown in case of IO errors while writing the bitmap index
+ */
+ public void write(PackBitmapIndexBuilder bitmaps, byte[] packChecksum)
+ throws IOException;
+}
\ No newline at end of file
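As a @FunctionalInterface with a single write method, an implementation can be a lambda; the DfsObjDatabase change above does exactly that. A hedged stand-alone fragment (out is any OutputStream the caller owns and closes):

    // Adapt the V1 on-disk format to the new functional interface.
    PackBitmapIndexWriter writer = (bitmaps, packDataChecksum) ->
            new PackBitmapIndexWriterV1(out).write(bitmaps, packDataChecksum);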
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java
index 9e95231253..4350f97915 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriter.java
@@ -58,11 +58,10 @@ import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.errors.SearchForReuseTimeout;
import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
import org.eclipse.jgit.internal.JGitText;
-import org.eclipse.jgit.internal.storage.file.PackBitmapIndexBuilder;
-import org.eclipse.jgit.internal.storage.file.PackBitmapIndexWriterV1;
import org.eclipse.jgit.internal.storage.file.PackIndexWriter;
import org.eclipse.jgit.internal.storage.file.PackObjectSizeIndexWriter;
import org.eclipse.jgit.internal.storage.file.PackReverseIndexWriter;
+import org.eclipse.jgit.internal.storage.file.PackBitmapIndexBuilder;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.AsyncObjectSizeQueue;
import org.eclipse.jgit.lib.BatchingProgressMonitor;
@@ -121,7 +120,7 @@ import org.eclipse.jgit.util.TemporaryBuffer;
* pack is being stored as a file the matching index can be written out after
* writing the pack by {@link #writeIndex(OutputStream)}. An optional bitmap
* index can be made by calling {@link #prepareBitmapIndex(ProgressMonitor)}
- * followed by {@link #writeBitmapIndex(OutputStream)}.
+ * followed by {@link #writeBitmapIndex(PackBitmapIndexWriter)}.
* </p>
* <p>
* Class provide set of configurable options and
@@ -1130,7 +1129,7 @@ public class PackWriter implements AutoCloseable {
* Called after
* {@link #writePack(ProgressMonitor, ProgressMonitor, OutputStream)} that
* populates the list of objects to pack and before
- * {@link #writeBitmapIndex(OutputStream)} that destroys it.
+ * {@link #writeBitmapIndex(PackBitmapIndexWriter)} that destroys it.
* <p>
* Writing this index is only required for local pack storage. Packs sent on
* the network do not need to create an object size index.
@@ -1204,20 +1203,18 @@ public class PackWriter implements AutoCloseable {
* <p>
* Called after {@link #prepareBitmapIndex(ProgressMonitor)}.
*
- * @param bitmapIndexStream
- * output for the bitmap index data. Caller is responsible for
- * closing this stream.
+ * @param bitmapIndexWriter
+ * a writer to store the bitmap index in this object database
* @throws java.io.IOException
- * the index data could not be written to the supplied stream.
+ * the index data could not be written using the supplied writer
*/
- public void writeBitmapIndex(OutputStream bitmapIndexStream)
+ public void writeBitmapIndex(PackBitmapIndexWriter bitmapIndexWriter)
throws IOException {
if (writeBitmaps == null)
throw new IOException(JGitText.get().bitmapsMustBePrepared);
long writeStart = System.currentTimeMillis();
- final PackBitmapIndexWriterV1 iw = new PackBitmapIndexWriterV1(bitmapIndexStream);
- iw.write(writeBitmaps, packcsum);
+ bitmapIndexWriter.write(writeBitmaps, packcsum);
stats.timeWriting += System.currentTimeMillis() - writeStart;
}
@@ -2468,7 +2465,8 @@ public class PackWriter implements AutoCloseable {
* <p>
* To reduce memory internal state is cleared during this method, rendering
* the PackWriter instance useless for anything further than a call to write
- * out the new bitmaps with {@link #writeBitmapIndex(OutputStream)}.
+ * out the new bitmaps with
+ * {@link #writeBitmapIndex(PackBitmapIndexWriter)}.
*
* @param pm
* progress monitor to report bitmap building work.
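Putting the pieces together, the bitmap workflow is now prepare-then-write with the writer injected. A sketch of the call order under assumed caller state (pw is a configured PackWriter, pm a ProgressMonitor, packOut/indexOut are caller-managed streams, writer the lambda sketched above):

    pw.writePack(pm, pm, packOut);
    pw.writeIndex(indexOut);
    if (pw.prepareBitmapIndex(pm)) {
        pw.writeBitmapIndex(writer); // formerly writeBitmapIndex(OutputStream)
    }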
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/transport/ssh/OpenSshConfigFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/transport/ssh/OpenSshConfigFile.java
index 29a2922136..3e75a9dde3 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/transport/ssh/OpenSshConfigFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/transport/ssh/OpenSshConfigFile.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -100,7 +99,7 @@ public class OpenSshConfigFile implements SshConfigStore {
* fully resolved entries created from that.
*/
private static class State {
- List<HostEntry> entries = new LinkedList<>();
+ List<HostEntry> entries = new ArrayList<>();
// Previous lookups, keyed by user@hostname:port
Map<String, HostEntry> hosts = new HashMap<>();
@@ -218,7 +217,7 @@ public class OpenSshConfigFile implements SshConfigStore {
private List<HostEntry> parse(BufferedReader reader)
throws IOException {
- final List<HostEntry> entries = new LinkedList<>();
+ final List<HostEntry> entries = new ArrayList<>();
// The man page doesn't say so, but the openssh parser (readconf.c)
// starts out in active mode and thus always applies any lines that
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/util/CleanupService.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/util/CleanupService.java
new file mode 100644
index 0000000000..29ed7564d3
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/util/CleanupService.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2024, Thomas Wolf <twolf@apache.org> and others
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * https://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+package org.eclipse.jgit.internal.util;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A class that is registered as an OSGi service via the manifest. If JGit runs
+ * in OSGi, OSGi will instantiate a singleton as soon as the bundle is activated
+ * since this class is an immediate OSGi component with no dependencies. OSGi
+ * will then call its {@link #start()} method. If JGit is not running in OSGi,
+ * {@link #getInstance()} will lazily create an instance.
+ * <p>
+ * An OSGi-created {@link CleanupService} will run the registered cleanup when
+ * the {@code org.eclipse.jgit} bundle is deactivated. A lazily created instance
+ * will register the cleanup as a JVM shutdown hook.
+ * </p>
+ */
+public final class CleanupService {
+
+ private static final Logger LOG = LoggerFactory
+ .getLogger(CleanupService.class);
+
+ private static final Object LOCK = new Object();
+
+ private static CleanupService INSTANCE;
+
+ private final boolean isOsgi;
+
+ private JGitText jgitText;
+
+ private Runnable cleanup;
+
+ /**
+ * Public component constructor for OSGi DS. Do <em>not</em> call this
+ * explicitly! (Unfortunately this constructor must be public because of
+ * OSGi requirements.)
+ */
+ public CleanupService() {
+ this.isOsgi = true;
+ setInstance(this);
+ }
+
+ private CleanupService(boolean isOsgi) {
+ this.isOsgi = isOsgi;
+ }
+
+ private static void setInstance(CleanupService service) {
+ synchronized (LOCK) {
+ INSTANCE = service;
+ }
+ }
+
+ /**
+ * Obtains the singleton instance of the {@link CleanupService} that knows
+ * whether or not it is running on OSGi.
+ *
+ * @return the {@link CleanupService} singleton instance
+ */
+ public static CleanupService getInstance() {
+ synchronized (LOCK) {
+ if (INSTANCE == null) {
+ INSTANCE = new CleanupService(false);
+ }
+ return INSTANCE;
+ }
+ }
+
+ void start() {
+ // Nothing to do
+ }
+
+ void register(Runnable cleanUp) {
+ if (isOsgi) {
+ cleanup = cleanUp;
+ } else {
+ // Ensure the JGitText class is loaded. Depending on the framework
+ // JGit runs in, it may no longer be possible to load classes when
+ // the hook runs. For instance, when run in a Maven plug-in, the
+ // Plexus class world that loaded JGit may already have been
+ // disposed by the time the JVM shutdown hook runs as the whole
+ // Maven build terminates.
+ jgitText = JGitText.get();
+ assert jgitText != null;
+ try {
+ Runtime.getRuntime().addShutdownHook(new Thread(() -> {
+ try {
+ cleanUp.run();
+ // Don't catch exceptions; let the JVM do the problem
+ // reporting.
+ } finally {
+ jgitText = null;
+ }
+ }));
+ } catch (IllegalStateException e) {
+ // Ignore -- the JVM is already shutting down.
+ }
+ }
+ }
+
+ void shutDown() {
+ if (isOsgi && cleanup != null) {
+ Runnable r = cleanup;
+ cleanup = null;
+ try {
+ r.run();
+ } catch (RuntimeException e) {
+ LOG.error(JGitText.get().shutdownCleanupFailed, e);
+ }
+ }
+ }
+}
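Both lifecycles funnel into the same package-private hooks; a sketch of the non-OSGi path (register is package-private, so this mirrors what ShutdownHook below does from inside the same package; releaseResources() is hypothetical):

    CleanupService service = CleanupService.getInstance(); // lazy, isOsgi == false
    service.register(() -> releaseResources()); // installed as a JVM shutdown hook

Under OSGi, the DS-created instance instead stores the Runnable and runs it from shutDown() when the org.eclipse.jgit bundle is deactivated.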
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/util/ShutdownHook.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/util/ShutdownHook.java
index f52025fd6b..f6b4723489 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/util/ShutdownHook.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/util/ShutdownHook.java
@@ -15,17 +15,21 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicBoolean;
import org.eclipse.jgit.internal.JGitText;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
- * A hook registered as a JVM shutdown hook managing a set of objects needing
- * cleanup during JVM shutdown. See {@link Runtime#addShutdownHook}.
+ * The singleton {@link ShutdownHook} provides a means to register
+ * {@link Listener}s that are run when JGit is uninstalled, either
+ * <ul>
+ * <li>in an OSGi framework when this bundle is deactivated, or</li>
+ * <li>otherwise, when the JVM as a whole shuts down.</li>
+ * </ul>
*/
@SuppressWarnings("ImmutableEnumChecker")
public enum ShutdownHook {
@@ -35,11 +39,11 @@ public enum ShutdownHook {
INSTANCE;
/**
- * Object that needs to cleanup on JVM shutdown.
+ * Object that needs to clean up on shutdown.
*/
public interface Listener {
/**
- * Cleanup resources when JVM shuts down, called from JVM shutdown hook.
+ * Clean up resources when JGit is shut down.
* <p>
* Implementations should be coded defensively
* <ul>
@@ -62,31 +66,27 @@ public enum ShutdownHook {
private final Set<Listener> listeners = ConcurrentHashMap.newKeySet();
- private volatile boolean shutdownInProgress;
+ private final AtomicBoolean shutdownInProgress = new AtomicBoolean();
private ShutdownHook() {
- try {
- Runtime.getRuntime().addShutdownHook(new Thread(this::cleanup));
- } catch (IllegalStateException e) {
- // ignore - the VM is already shutting down
- }
+ CleanupService.getInstance().register(this::cleanup);
}
private void cleanup() {
- shutdownInProgress = true;
- ExecutorService runner = Executors.newWorkStealingPool();
- try {
- runner.submit(() -> {
- this.doCleanup();
- return null;
- }).get(30L, TimeUnit.SECONDS);
- } catch (RejectedExecutionException | InterruptedException
- | ExecutionException | TimeoutException e) {
- // message isn't localized since during shutdown there's no
- // guarantee which classes are still loaded
- LOG.error("Cleanup during JVM shutdown failed", e); //$NON-NLS-1$
+ if (!shutdownInProgress.getAndSet(true)) {
+ ExecutorService runner = Executors.newWorkStealingPool();
+ try {
+ runner.submit(() -> {
+ this.doCleanup();
+ return null;
+ }).get(30L, TimeUnit.SECONDS);
+ } catch (InterruptedException | ExecutionException
+ | TimeoutException e) {
+ throw new RuntimeException(e.getMessage(), e);
+ } finally {
+ runner.shutdownNow();
+ }
}
- runner.shutdownNow();
}
private void doCleanup() {
@@ -104,17 +104,17 @@ public enum ShutdownHook {
}
/**
- * Register object that needs cleanup during JVM shutdown if it is not
- * already registered. Registration is disabled when JVM shutdown is already
- * in progress.
+ * Register object that needs cleanup during JGit shutdown if it is not
+ * already registered. Registration is disabled when JGit shutdown is
+ * already in progress.
*
* @param l
- * the object to call {@link Listener#onShutdown} on when JVM
+ * the object to call {@link Listener#onShutdown} on when JGit
* shuts down
* @return {@code true} if this object has been registered
*/
public boolean register(Listener l) {
- if (shutdownInProgress) {
+ if (shutdownInProgress.get()) {
return listeners.contains(l);
}
LOG.debug("register {} with shutdown hook", l); //$NON-NLS-1$
@@ -123,8 +123,8 @@ public enum ShutdownHook {
}
/**
- * Unregister object that no longer needs cleanup during JVM shutdown if it
- * is still registered. Unregistration is disabled when JVM shutdown is
+ * Unregister object that no longer needs cleanup during JGit shutdown if it
+ * is still registered. Unregistration is disabled when JGit shutdown is
* already in progress.
*
* @param l
@@ -133,7 +133,7 @@ public enum ShutdownHook {
* @return {@code true} if this object is no longer registered
*/
public boolean unregister(Listener l) {
- if (shutdownInProgress) {
+ if (shutdownInProgress.get()) {
return !listeners.contains(l);
}
LOG.debug("unregister {} from shutdown hook", l); //$NON-NLS-1$
@@ -142,11 +142,11 @@ public enum ShutdownHook {
}
/**
- * Whether a JVM shutdown is in progress
+ * Whether a JGit shutdown is in progress
*
- * @return {@code true} if a JVM shutdown is in progress
+ * @return {@code true} if a JGit shutdown is in progress
*/
public boolean isShutdownInProgress() {
- return shutdownInProgress;
+ return shutdownInProgress.get();
}
}
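A minimal sketch of the Listener contract against the reworked singleton (the lambda body is hypothetical; Listener's single onShutdown() method makes it lambda-friendly):

    ShutdownHook.Listener cleaner = () -> {
        // free files/sockets defensively; keep it fast and exception-free
    };
    boolean registered = ShutdownHook.INSTANCE.register(cleaner);
    // if the resource is released early, drop the listener again
    if (registered) {
        ShutdownHook.INSTANCE.unregister(cleaner);
    }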
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RebaseTodoFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RebaseTodoFile.java
index c716f464d3..b036a0b6a1 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RebaseTodoFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RebaseTodoFile.java
@@ -17,7 +17,7 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
-import java.util.LinkedList;
+import java.util.ArrayList;
import java.util.List;
import org.eclipse.jgit.lib.RebaseTodoLine.Action;
@@ -62,7 +62,7 @@ public class RebaseTodoFile {
byte[] buf = IO.readFully(new File(repo.getDirectory(), path));
int ptr = 0;
int tokenBegin = 0;
- List<RebaseTodoLine> r = new LinkedList<>();
+ List<RebaseTodoLine> r = new ArrayList<>();
while (ptr < buf.length) {
tokenBegin = ptr;
ptr = RawParseUtils.nextLF(buf, ptr);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeAlgorithm.java b/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeAlgorithm.java
index b902492366..5734a25276 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeAlgorithm.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeAlgorithm.java
@@ -218,13 +218,18 @@ public final class MergeAlgorithm {
// set some initial values for the ranges in A and B which we
// want to handle
+ int oursBeginA = oursEdit.getBeginA();
+ int theirsBeginA = theirsEdit.getBeginA();
int oursBeginB = oursEdit.getBeginB();
int theirsBeginB = theirsEdit.getBeginB();
// harmonize the start of the ranges in A and B
if (oursEdit.getBeginA() < theirsEdit.getBeginA()) {
+ theirsBeginA -= theirsEdit.getBeginA()
+ - oursEdit.getBeginA();
theirsBeginB -= theirsEdit.getBeginA()
- oursEdit.getBeginA();
} else {
+ oursBeginA -= oursEdit.getBeginA() - theirsEdit.getBeginA();
oursBeginB -= oursEdit.getBeginA() - theirsEdit.getBeginA();
}
@@ -270,11 +275,15 @@ public final class MergeAlgorithm {
}
// harmonize the end of the ranges in A and B
+ int oursEndA = oursEdit.getEndA();
+ int theirsEndA = theirsEdit.getEndA();
int oursEndB = oursEdit.getEndB();
int theirsEndB = theirsEdit.getEndB();
if (oursEdit.getEndA() < theirsEdit.getEndA()) {
+ oursEndA += theirsEdit.getEndA() - oursEdit.getEndA();
oursEndB += theirsEdit.getEndA() - oursEdit.getEndA();
} else {
+ theirsEndA += oursEdit.getEndA() - theirsEdit.getEndA();
theirsEndB += oursEdit.getEndA() - theirsEdit.getEndA();
}
@@ -329,10 +338,10 @@ public final class MergeAlgorithm {
oursEndB - commonSuffix,
ConflictState.FIRST_CONFLICTING_RANGE);
- int baseBegin = Math.min(oursBeginB, theirsBeginB)
+ int baseBegin = Math.min(oursBeginA, theirsBeginA)
+ commonPrefix;
int baseEnd = Math.min(base.size(),
- Math.max(oursEndB, theirsEndB)) - commonSuffix;
+ Math.max(oursEndA, theirsEndA)) - commonSuffix;
result.add(0, baseBegin, baseEnd,
ConflictState.BASE_CONFLICTING_RANGE);
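A worked example (hypothetical edit ranges) of why the base range must use the A side: if ours rewrites base lines [2,4) and theirs rewrites base lines [3,5), the harmonized base conflicting region is [min(2,3), max(4,5)) = [2,5) in base coordinates. The old code took the B-side begins/ends, which agree with base line numbers only while no preceding edit has changed line counts; the new oursBeginA/theirsBeginA and oursEndA/theirsEndA track the base side directly.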
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/merge/ResolveMerger.java b/org.eclipse.jgit/src/org/eclipse/jgit/merge/ResolveMerger.java
index 13cccee16b..1ad41be423 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/merge/ResolveMerger.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/merge/ResolveMerger.java
@@ -32,7 +32,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -106,9 +105,9 @@ public class ResolveMerger extends ThreeWayMerger {
*/
public static class Result {
- private final List<String> modifiedFiles = new LinkedList<>();
+ private final List<String> modifiedFiles = new ArrayList<>();
- private final List<String> failedToDelete = new LinkedList<>();
+ private final List<String> failedToDelete = new ArrayList<>();
private ObjectId treeId = null;
@@ -1274,10 +1273,15 @@ public class ResolveMerger extends ThreeWayMerger {
default:
break;
}
+ // add the conflicting path to merge result
+ String currentPath = tw.getPathString();
+ MergeResult<RawText> result = new MergeResult<>(
+ Collections.emptyList());
+ result.setContainsConflicts(true);
+ mergeResults.put(currentPath, result);
addConflict(base, ours, theirs);
-
// attribute merge issues are conflicts but not failures
- unmergedPaths.add(tw.getPathString());
+ unmergedPaths.add(currentPath);
return true;
}
@@ -1289,38 +1293,48 @@ public class ResolveMerger extends ThreeWayMerger {
MergeResult<RawText> result = null;
boolean hasSymlink = FileMode.SYMLINK.equals(modeO)
|| FileMode.SYMLINK.equals(modeT);
+
+ String currentPath = tw.getPathString();
+ // if the path is not a symlink in ours and theirs
if (!hasSymlink) {
try {
result = contentMerge(base, ours, theirs, attributes,
getContentMergeStrategy());
- } catch (BinaryBlobException e) {
- // result == null
- }
- }
- if (result == null) {
- switch (getContentMergeStrategy()) {
- case OURS:
- keep(ourDce);
- return true;
- case THEIRS:
- DirCacheEntry e = add(tw.getRawPath(), theirs,
- DirCacheEntry.STAGE_0, EPOCH, 0);
- if (e != null) {
- addToCheckout(tw.getPathString(), e, attributes);
+ if (result.containsConflicts() && !ignoreConflicts) {
+ result.setContainsConflicts(true);
+ unmergedPaths.add(currentPath);
+ } else if (ignoreConflicts) {
+ result.setContainsConflicts(false);
}
+ updateIndex(base, ours, theirs, result, attributes[T_OURS]);
+ workTreeUpdater.markAsModified(currentPath);
+ // Entry is null - only add the metadata
+ addToCheckout(currentPath, null, attributes);
return true;
- default:
- result = new MergeResult<>(Collections.emptyList());
- result.setContainsConflicts(true);
- break;
+ } catch (BinaryBlobException e) {
+ // if the file is binary in either OURS, THEIRS or BASE
+ // here, we don't have an option to ignore conflicts
}
}
- if (ignoreConflicts) {
- result.setContainsConflicts(false);
+ switch (getContentMergeStrategy()) {
+ case OURS:
+ keep(ourDce);
+ return true;
+ case THEIRS:
+ DirCacheEntry e = add(tw.getRawPath(), theirs,
+ DirCacheEntry.STAGE_0, EPOCH, 0);
+ if (e != null) {
+ addToCheckout(currentPath, e, attributes);
+ }
+ return true;
+ default:
+ result = new MergeResult<>(Collections.emptyList());
+ result.setContainsConflicts(true);
+ break;
}
- String currentPath = tw.getPathString();
if (hasSymlink) {
if (ignoreConflicts) {
+ result.setContainsConflicts(false);
if (((modeT & FileMode.TYPE_MASK) == FileMode.TYPE_FILE)) {
DirCacheEntry e = add(tw.getRawPath(), theirs,
DirCacheEntry.STAGE_0, EPOCH, 0);
@@ -1329,9 +1343,9 @@ public class ResolveMerger extends ThreeWayMerger {
keep(ourDce);
}
} else {
- // Record the conflict
DirCacheEntry e = addConflict(base, ours, theirs);
mergeResults.put(currentPath, result);
+ unmergedPaths.add(currentPath);
// If theirs is a file, check it out. In link/file
// conflicts, C git prefers the file.
if (((modeT & FileMode.TYPE_MASK) == FileMode.TYPE_FILE)
@@ -1340,14 +1354,12 @@ public class ResolveMerger extends ThreeWayMerger {
}
}
} else {
- updateIndex(base, ours, theirs, result, attributes[T_OURS]);
- }
- if (result.containsConflicts() && !ignoreConflicts) {
+ result.setContainsConflicts(true);
+ addConflict(base, ours, theirs);
unmergedPaths.add(currentPath);
+ mergeResults.put(currentPath, result);
}
- workTreeUpdater.markAsModified(currentPath);
- // Entry is null - only adds the metadata.
- addToCheckout(currentPath, null, attributes);
+ return true;
} else if (modeO != modeT) {
// OURS or THEIRS has been deleted
if (((modeO != 0 && !tw.idEqual(T_BASE, T_OURS)) || (modeT != 0 && !tw
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/patch/PatchApplier.java b/org.eclipse.jgit/src/org/eclipse/jgit/patch/PatchApplier.java
index a327095c81..cb6cc6efa7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/patch/PatchApplier.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/patch/PatchApplier.java
@@ -23,6 +23,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
@@ -33,9 +34,11 @@ import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.zip.InflaterInputStream;
+
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.api.errors.FilterFailedException;
import org.eclipse.jgit.api.errors.PatchFormatException;
@@ -101,11 +104,12 @@ import org.eclipse.jgit.util.sha1.SHA1;
* @since 6.4
*/
public class PatchApplier {
-
private static final byte[] NO_EOL = "\\ No newline at end of file" //$NON-NLS-1$
.getBytes(StandardCharsets.US_ASCII);
- /** The tree before applying the patch. Only non-null for inCore operation. */
+ /**
+ * The tree before applying the patch. Only non-null for inCore operation.
+ */
@Nullable
private final RevTree beforeTree;
@@ -115,10 +119,14 @@ public class PatchApplier {
private final ObjectReader reader;
+ private final Charset charset;
+
private WorkingTreeOptions workingTreeOptions;
private int inCoreSizeLimit;
+ private boolean allowConflicts;
+
/**
* @param repo
* repository to apply the patch in
@@ -128,7 +136,8 @@ public class PatchApplier {
inserter = repo.newObjectInserter();
reader = inserter.newReader();
beforeTree = null;
-
+ allowConflicts = false;
+ charset = StandardCharsets.UTF_8;
Config config = repo.getConfig();
workingTreeOptions = config.get(WorkingTreeOptions.KEY);
inCoreSizeLimit = config.getInt(ConfigConstants.CONFIG_MERGE_SECTION,
@@ -143,11 +152,14 @@ public class PatchApplier {
* @param oi
* to be used for modifying objects
*/
- public PatchApplier(Repository repo, RevTree beforeTree, ObjectInserter oi) {
+ public PatchApplier(Repository repo, RevTree beforeTree,
+ ObjectInserter oi) {
this.repo = repo;
this.beforeTree = beforeTree;
inserter = oi;
reader = oi.newReader();
+ allowConflicts = false;
+ charset = StandardCharsets.UTF_8;
}
/**
@@ -157,7 +169,6 @@ public class PatchApplier {
* @since 6.3
*/
public static class Result {
-
/**
* A wrapper for a patch applying error that affects a given file.
*
@@ -166,28 +177,68 @@ public class PatchApplier {
// TODO(ms): rename this class in next major release
@SuppressWarnings("JavaLangClash")
public static class Error {
+ final String msg;
+
+ final String oldFileName;
+
+ @Nullable
+ final HunkHeader hh;
- private String msg;
- private String oldFileName;
- private @Nullable HunkHeader hh;
+ final boolean isGitConflict;
- private Error(String msg, String oldFileName,
- @Nullable HunkHeader hh) {
+ Error(String msg, String oldFileName, @Nullable HunkHeader hh,
+ boolean isGitConflict) {
this.msg = msg;
this.oldFileName = oldFileName;
this.hh = hh;
+ this.isGitConflict = isGitConflict;
+ }
+
+ /**
+ * Signals whether conflict markers were added to the file as part
+ * of encountering this error.
+ *
+ * @return {@code true} if conflict markers were added for this
+ * error.
+ *
+ * @since 6.10
+ */
+ public boolean isGitConflict() {
+ return isGitConflict;
}
@Override
public String toString() {
if (hh != null) {
- return MessageFormat.format(JGitText.get().patchApplyErrorWithHunk,
- oldFileName, hh, msg);
+ return MessageFormat.format(
+ JGitText.get().patchApplyErrorWithHunk, oldFileName,
+ hh, msg);
+ }
+ return MessageFormat.format(
+ JGitText.get().patchApplyErrorWithoutHunk, oldFileName,
+ msg);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || !(o instanceof Error)) {
+ return false;
}
- return MessageFormat.format(JGitText.get().patchApplyErrorWithoutHunk,
- oldFileName, msg);
+ Error error = (Error) o;
+ return Objects.equals(msg, error.msg)
+ && Objects.equals(oldFileName, error.oldFileName)
+ && Objects.equals(hh, error.hh)
+ && isGitConflict == error.isGitConflict;
}
+ @Override
+ public int hashCode() {
+ return Objects.hash(msg, oldFileName, hh,
+ Boolean.valueOf(isGitConflict));
+ }
}
private ObjectId treeId;
@@ -225,8 +276,14 @@ public class PatchApplier {
return errors;
}
- private void addError(String msg,String oldFileName, @Nullable HunkHeader hh) {
- errors.add(new Error(msg, oldFileName, hh));
+ private void addError(String msg, String oldFileName,
+ @Nullable HunkHeader hh) {
+ errors.add(new Error(msg, oldFileName, hh, false));
+ }
+
+ private void addErrorWithGitConflict(String msg, String oldFileName,
+ @Nullable HunkHeader hh) {
+ errors.add(new Error(msg, oldFileName, hh, true));
}
}
@@ -357,6 +414,17 @@ public class PatchApplier {
return result;
}
+ /**
+ * Sets up the {@link PatchApplier} to apply patches even if they conflict.
+ *
+ * @return this {@link PatchApplier} instance, for call chaining
+ * @since 6.10
+ */
+ public PatchApplier allowConflicts() {
+ allowConflicts = true;
+ return this;
+ }
+
private File getFile(String path) {
return inCore() ? null : new File(repo.getWorkTree(), path);
}
@@ -439,6 +507,7 @@ public class PatchApplier {
return false;
}
}
+
private static final int FILE_TREE_INDEX = 1;
/**
@@ -539,7 +608,9 @@ public class PatchApplier {
convertCrLf);
resultStreamLoader = applyText(raw, fh, result);
}
- if (resultStreamLoader == null || !result.getErrors().isEmpty()) {
+ if (resultStreamLoader == null
+ || (!result.getErrors().isEmpty() && result.getErrors().stream()
+ .anyMatch(e -> !e.msg.equals("cannot apply hunk")))) { //$NON-NLS-1$
return;
}
@@ -961,9 +1032,51 @@ public class PatchApplier {
}
}
if (!applies) {
- result.addError(JGitText.get().applyTextPatchCannotApplyHunk,
- fh.getOldPath(), hh);
- return null;
+ if (!allowConflicts) {
+ result.addError(
+ JGitText.get().applyTextPatchCannotApplyHunk,
+ fh.getOldPath(), hh);
+ return null;
+ }
+ // Insert conflict markers. This is a best guess because the
+ // file might have changed completely. But at least we give
+ // the user a graceful state that they can resolve manually.
+ // An alternative would be the 3-way merger, but that only
+ // works if the pre-image SHA is contained in the repo; in
+ // that case, cherry-picking the original commit would be
+ // preferable to applying a patch.
+ result.addErrorWithGitConflict("cannot apply hunk", fh.getOldPath(), hh); //$NON-NLS-1$
+ newLines.add(Math.min(applyAt++, newLines.size()),
+ asBytes("<<<<<<< HEAD")); //$NON-NLS-1$
+ applyAt += hh.getOldImage().lineCount;
+ newLines.add(Math.min(applyAt++, newLines.size()),
+ asBytes("=======")); //$NON-NLS-1$
+
+ int sz = hunkLines.size();
+ for (int j = 1; j < sz; j++) {
+ ByteBuffer hunkLine = hunkLines.get(j);
+ if (!hunkLine.hasRemaining()) {
+ // Completely empty line; accept as empty context
+ // line
+ applyAt++;
+ lastWasRemoval = false;
+ continue;
+ }
+ switch (hunkLine.array()[hunkLine.position()]) {
+ case ' ':
+ case '+':
+ newLines.add(Math.min(applyAt++, newLines.size()),
+ slice(hunkLine, 1));
+ break;
+ case '-':
+ case '\\':
+ default:
+ break;
+ }
+ }
+ newLines.add(Math.min(applyAt++, newLines.size()),
+ asBytes(">>>>>>> PATCH")); //$NON-NLS-1$
+ continue;
}
// Hunk applies at applyAt. Apply it, and update afterLastHunk and
// lineNumberShift
@@ -1010,7 +1123,11 @@ public class PatchApplier {
} else if (!rt.isMissingNewlineAtEnd()) {
newLines.add(null);
}
+ return toContentStreamLoader(newLines);
+ }
+ private static ContentStreamLoader toContentStreamLoader(
+ List<ByteBuffer> newLines) throws IOException {
// We could check if old == new, but the short-circuiting complicates
// logic for inCore patching, so just write the new thing regardless.
TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
@@ -1034,6 +1151,10 @@ public class PatchApplier {
}
}
+ private ByteBuffer asBytes(String str) {
+ return ByteBuffer.wrap(str.getBytes(charset));
+ }
+
@SuppressWarnings("ByteBufferBackingArray")
private boolean canApplyAt(List<ByteBuffer> hunkLines,
List<ByteBuffer> newLines, int line) {
@@ -1123,4 +1244,4 @@ public class PatchApplier {
in.close();
}
}
-}
+}
\ No newline at end of file
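A usage sketch of the new opt-in (patchStream is a hypothetical patch InputStream; the applyPatch call is assumed caller code):

    PatchApplier.Result result = new PatchApplier(repo)
            .allowConflicts()
            .applyPatch(patchStream);
    for (PatchApplier.Result.Error e : result.getErrors()) {
        if (e.isGitConflict()) {
            // the hunk did not apply cleanly; the file now carries
            // "<<<<<<< HEAD" / ">>>>>>> PATCH" markers for manual resolution
        }
    }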
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/MergeBaseGenerator.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/MergeBaseGenerator.java
index a213dd47c6..be29dc3138 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/MergeBaseGenerator.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/MergeBaseGenerator.java
@@ -12,7 +12,7 @@ package org.eclipse.jgit.revwalk;
import java.io.IOException;
import java.text.MessageFormat;
-import java.util.LinkedList;
+import java.util.ArrayDeque;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
@@ -47,7 +47,8 @@ class MergeBaseGenerator extends Generator {
private int recarryTest;
private int recarryMask;
private int mergeBaseAncestor = -1;
- private LinkedList<RevCommit> ret = new LinkedList<>();
+
+ private ArrayDeque<RevCommit> ret = new ArrayDeque<>();
private CarryStack stack;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java
index 43571a6868..99943b78e6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java
@@ -139,11 +139,8 @@ public class TreeRevFilter extends RevFilter {
.getPathsBestEffort();
if (paths.isPresent()) {
changedPathFilterUsed = true;
- for (byte[] path : paths.get()) {
- if (!cpf.maybeContains(path)) {
- mustCalculateChgs = false;
- break;
- }
+ if (paths.get().stream().noneMatch(cpf::maybeContains)) {
+ mustCalculateChgs = false;
}
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/Transport.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/Transport.java
index 4a02d6d452..b335675da5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/Transport.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/Transport.java
@@ -33,10 +33,8 @@ import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.LinkedHashSet;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
-import java.util.Vector;
import java.util.concurrent.CopyOnWriteArrayList;
import org.eclipse.jgit.annotations.NonNull;
@@ -109,7 +107,7 @@ public abstract class Transport implements AutoCloseable {
String name = prefix + Transport.class.getName();
return ldr.getResources(name);
} catch (IOException err) {
- return new Vector<URL>().elements();
+ return Collections.emptyEnumeration();
}
}
@@ -595,7 +593,7 @@ public abstract class Transport implements AutoCloseable {
Collection<RefSpec> fetchSpecs) throws IOException {
if (fetchSpecs == null)
fetchSpecs = Collections.emptyList();
- final List<RemoteRefUpdate> result = new LinkedList<>();
+ final List<RemoteRefUpdate> result = new ArrayList<>();
final Collection<RefSpec> procRefs = expandPushWildcardsFor(db, specs);
for (RefSpec spec : procRefs) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java
index 3162b89908..a65d0b756c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java
@@ -66,7 +66,6 @@ import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.LinkedHashSet;
-import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -763,7 +762,7 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
void processResponseCookies(HttpConnection conn) {
if (cookieFile != null && http.getSaveCookies()) {
- List<HttpCookie> foundCookies = new LinkedList<>();
+ List<HttpCookie> foundCookies = new ArrayList<>();
List<String> cookieHeaderValues = conn
.getHeaderFields(HDR_SET_COOKIE);
@@ -795,7 +794,7 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
private List<HttpCookie> extractCookies(String headerKey,
List<String> headerValues) {
- List<HttpCookie> foundCookies = new LinkedList<>();
+ List<HttpCookie> foundCookies = new ArrayList<>();
for (String headerValue : headerValues) {
foundCookies
.addAll(HttpCookie.parse(headerKey + ':' + headerValue));
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkFetchConnection.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkFetchConnection.java
index 8d89107c2b..b7bb0cbce3 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkFetchConnection.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkFetchConnection.java
@@ -16,13 +16,16 @@ import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.MessageFormat;
+import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
+import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
import java.util.Set;
import org.eclipse.jgit.errors.CompoundException;
@@ -112,16 +115,16 @@ class WalkFetchConnection extends BaseFetchConnection {
private final DateRevQueue localCommitQueue;
/** Objects we need to copy from the remote repository. */
- private LinkedList<ObjectId> workQueue;
+ private Deque<ObjectId> workQueue;
/** Databases we have not yet obtained the list of packs from. */
- private final LinkedList<WalkRemoteObjectDatabase> noPacksYet;
+ private final Deque<WalkRemoteObjectDatabase> noPacksYet;
/** Databases we have not yet obtained the alternates from. */
- private final LinkedList<WalkRemoteObjectDatabase> noAlternatesYet;
+ private final Deque<WalkRemoteObjectDatabase> noAlternatesYet;
/** Packs we have discovered, but have not yet fetched locally. */
- private final LinkedList<RemotePack> unfetchedPacks;
+ private final Map<String, RemotePack> unfetchedPacks;
/**
* Packs whose indexes we have looked at in {@link #unfetchedPacks}.
@@ -163,13 +166,13 @@ class WalkFetchConnection extends BaseFetchConnection {
remotes = new ArrayList<>();
remotes.add(w);
- unfetchedPacks = new LinkedList<>();
+ unfetchedPacks = new LinkedHashMap<>();
packsConsidered = new HashSet<>();
- noPacksYet = new LinkedList<>();
+ noPacksYet = new ArrayDeque<>();
noPacksYet.add(w);
- noAlternatesYet = new LinkedList<>();
+ noAlternatesYet = new ArrayDeque<>();
noAlternatesYet.add(w);
fetchErrors = new HashMap<>();
@@ -183,7 +186,7 @@ class WalkFetchConnection extends BaseFetchConnection {
LOCALLY_SEEN = revWalk.newFlag("LOCALLY_SEEN"); //$NON-NLS-1$
localCommitQueue = new DateRevQueue();
- workQueue = new LinkedList<>();
+ workQueue = new ArrayDeque<>();
}
@Override
@@ -226,7 +229,7 @@ class WalkFetchConnection extends BaseFetchConnection {
public void close() {
inserter.close();
reader.close();
- for (RemotePack p : unfetchedPacks) {
+ for (RemotePack p : unfetchedPacks.values()) {
if (p.tmpIdx != null)
p.tmpIdx.delete();
}
@@ -421,8 +424,9 @@ class WalkFetchConnection extends BaseFetchConnection {
if (packNameList == null || packNameList.isEmpty())
continue;
for (String packName : packNameList) {
- if (packsConsidered.add(packName))
- unfetchedPacks.add(new RemotePack(wrr, packName));
+ if (packsConsidered.add(packName)) {
+ unfetchedPacks.put(packName, new RemotePack(wrr, packName));
+ }
}
if (downloadPackedObject(pm, id))
return;
@@ -465,15 +469,27 @@ class WalkFetchConnection extends BaseFetchConnection {
}
}
+ private boolean downloadPackedObject(ProgressMonitor monitor,
+ AnyObjectId id) throws TransportException {
+ Set<String> brokenPacks = new HashSet<>();
+ try {
+ return downloadPackedObject(monitor, id, brokenPacks);
+ } finally {
+ brokenPacks.forEach(unfetchedPacks::remove);
+ }
+ }
+
@SuppressWarnings("Finally")
private boolean downloadPackedObject(final ProgressMonitor monitor,
- final AnyObjectId id) throws TransportException {
+ final AnyObjectId id, Set<String> brokenPacks) throws TransportException {
// Search for the object in a remote pack whose index we have,
// but whose pack we do not yet have.
//
- final Iterator<RemotePack> packItr = unfetchedPacks.iterator();
- while (packItr.hasNext() && !monitor.isCancelled()) {
- final RemotePack pack = packItr.next();
+ for (Entry<String, RemotePack> entry : unfetchedPacks.entrySet()) {
+ if (monitor.isCancelled()) {
+ break;
+ }
+ final RemotePack pack = entry.getValue();
try {
pack.openIndex(monitor);
} catch (IOException err) {
@@ -483,7 +499,7 @@ class WalkFetchConnection extends BaseFetchConnection {
// another source, so don't consider it a failure.
//
recordError(id, err);
- packItr.remove();
+ brokenPacks.add(entry.getKey());
continue;
}
@@ -534,7 +550,7 @@ class WalkFetchConnection extends BaseFetchConnection {
}
throw new TransportException(e.getMessage(), e);
}
- packItr.remove();
+ brokenPacks.add(entry.getKey());
}
if (!alreadyHave(id)) {
@@ -549,11 +565,9 @@ class WalkFetchConnection extends BaseFetchConnection {
// Complete any other objects that we can.
//
- final Iterator<ObjectId> pending = swapFetchQueue();
- while (pending.hasNext()) {
- final ObjectId p = pending.next();
+ final Deque<ObjectId> pending = swapFetchQueue();
+ for (ObjectId p : pending) {
if (pack.index.hasObject(p)) {
- pending.remove();
process(p);
} else {
workQueue.add(p);
@@ -565,9 +579,9 @@ class WalkFetchConnection extends BaseFetchConnection {
return false;
}
- private Iterator<ObjectId> swapFetchQueue() {
- final Iterator<ObjectId> r = workQueue.iterator();
- workQueue = new LinkedList<>();
+ private Deque<ObjectId> swapFetchQueue() {
+ final Deque<ObjectId> r = workQueue;
+ workQueue = new ArrayDeque<>();
return r;
}
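The new wrapper exists because the loop now iterates unfetchedPacks.entrySet() with an enhanced for loop, which cannot remove entries mid-iteration; broken packs are therefore collected and evicted in the finally block. The pattern in isolation (isBroken is hypothetical):

    Set<String> broken = new HashSet<>();
    for (Map.Entry<String, RemotePack> e : unfetchedPacks.entrySet()) {
        if (isBroken(e.getValue())) {
            broken.add(e.getKey()); // defer removal; iterator stays valid
        }
    }
    broken.forEach(unfetchedPacks::remove);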
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/JDKHttpConnection.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/JDKHttpConnection.java
index 8e9be1dde8..e20acadc4a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/JDKHttpConnection.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/JDKHttpConnection.java
@@ -20,7 +20,7 @@ import java.net.URL;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
-import java.util.LinkedList;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -152,7 +152,7 @@ public class JDKHttpConnection implements HttpConnection {
private static List<String> mapValuesToListIgnoreCase(String keyName,
Map<String, List<String>> m) {
- List<String> fields = new LinkedList<>();
+ List<String> fields = new ArrayList<>();
m.entrySet().stream().filter(e -> keyName.equalsIgnoreCase(e.getKey()))
.filter(e -> e.getValue() != null)
.forEach(e -> fields.addAll(e.getValue()));
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ByteArraySet.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ByteArraySet.java
index c94160144e..bcf79a285d 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ByteArraySet.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ByteArraySet.java
@@ -15,6 +15,10 @@ package org.eclipse.jgit.treewalk.filter;
import org.eclipse.jgit.util.RawParseUtils;
+import java.util.Arrays;
+import java.util.Set;
+import java.util.stream.Collectors;
+
/**
* Specialized set for byte arrays, interpreted as strings for use in
* {@link PathFilterGroup.Group}. Most methods assume the hash is already known
@@ -291,4 +295,8 @@ class ByteArraySet {
return ret;
}
+ Set<byte[]> toSet() {
+ return Arrays.stream(toArray()).collect(Collectors.toSet());
+ }
+
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/IndexDiffFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/IndexDiffFilter.java
index 699ff6b68b..cfdc4dd358 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/IndexDiffFilter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/IndexDiffFilter.java
@@ -10,8 +10,9 @@
package org.eclipse.jgit.treewalk.filter;
import java.io.IOException;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
import java.util.HashSet;
-import java.util.LinkedList;
import java.util.List;
import java.util.Set;
@@ -62,9 +63,9 @@ public class IndexDiffFilter extends TreeFilter {
private final Set<String> ignoredPaths = new HashSet<>();
- private final LinkedList<String> untrackedParentFolders = new LinkedList<>();
+ private final ArrayDeque<String> untrackedParentFolders = new ArrayDeque<>();
- private final LinkedList<String> untrackedFolders = new LinkedList<>();
+ private final ArrayDeque<String> untrackedFolders = new ArrayDeque<>();
/**
* Creates a new instance of this filter. Do not use an instance of this
@@ -272,12 +273,14 @@ public class IndexDiffFilter extends TreeFilter {
* empty list will be returned.
*/
public List<String> getUntrackedFolders() {
- LinkedList<String> ret = new LinkedList<>(untrackedFolders);
+ ArrayList<String> ret = new ArrayList<>(untrackedFolders);
if (!untrackedParentFolders.isEmpty()) {
String toBeAdded = untrackedParentFolders.getLast();
- while (!ret.isEmpty() && ret.getLast().startsWith(toBeAdded))
- ret.removeLast();
- ret.addLast(toBeAdded);
+ while (!ret.isEmpty()
+ && ret.get(ret.size() - 1).startsWith(toBeAdded)) {
+ ret.remove(ret.size() - 1);
+ }
+ ret.add(toBeAdded);
}
return ret;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/PathFilterGroup.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/PathFilterGroup.java
index 59855572f2..4c0604ad56 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/PathFilterGroup.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/PathFilterGroup.java
@@ -12,6 +12,8 @@
package org.eclipse.jgit.treewalk.filter;
import java.util.Collection;
+import java.util.Optional;
+import java.util.Set;
import org.eclipse.jgit.errors.StopWalkException;
import org.eclipse.jgit.internal.JGitText;
@@ -232,6 +234,15 @@ public class PathFilterGroup {
}
@Override
+ public Optional<Set<byte[]>> getPathsBestEffort() {
+ Set<byte[]> result = fullpaths.toSet();
+ if (result.isEmpty()) {
+ return Optional.empty();
+ }
+ return Optional.of(result);
+ }
+
+ @Override
public TreeFilter clone() {
return this;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/TreeFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/TreeFilter.java
index 22d430bc27..a9066dc8f8 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/TreeFilter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/TreeFilter.java
@@ -210,7 +210,7 @@ public abstract class TreeFilter {
public abstract boolean shouldBeRecursive();
/**
- * If this filter checks that a specific set of paths have all been
+ * If this filter checks that at least one of the paths in a set has been
* modified, returns that set of paths to be checked against a changed path
* filter. Otherwise, returns empty.
*
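The corrected wording matches the consumer side in TreeRevFilter above: a commit may be skipped only when none of the returned paths can be present in its changed-path filter. A sketch of that contract (filter is any TreeFilter, cpf a changed-path Bloom filter as in TreeRevFilter):

    Optional<Set<byte[]>> paths = filter.getPathsBestEffort();
    if (paths.isPresent()
            && paths.get().stream().noneMatch(cpf::maybeContains)) {
        // no candidate path can be in this commit: skip the tree diff
    }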
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS_POSIX.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS_POSIX.java
index d91f37b51a..e73095f5a8 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS_POSIX.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS_POSIX.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2010, Robin Rosenberg and others
+ * Copyright (C) 2010, 2024, Robin Rosenberg and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@@ -203,7 +203,16 @@ public class FS_POSIX extends FS {
/** {@inheritDoc} */
@Override
public boolean canExecute(File f) {
- return FileUtils.canExecute(f);
+ if (!isFile(f)) {
+ return false;
+ }
+ try {
+ Path path = FileUtils.toPath(f);
+ Set<PosixFilePermission> pset = Files.getPosixFilePermissions(path);
+ return pset.contains(PosixFilePermission.OWNER_EXECUTE);
+ } catch (IOException ex) {
+ return false;
+ }
}
/** {@inheritDoc} */
@@ -250,8 +259,12 @@ public class FS_POSIX extends FS {
/** {@inheritDoc} */
@Override
public ProcessBuilder runInShell(String cmd, String[] args) {
- List<String> argv = new ArrayList<>(4 + args.length);
+ List<String> argv = new ArrayList<>(5 + args.length);
argv.add("sh"); //$NON-NLS-1$
+ if (SystemReader.getInstance().isMacOS()) {
+ // Use a login shell to get the full normal $PATH
+ argv.add("-l"); //$NON-NLS-1$
+ }
argv.add("-c"); //$NON-NLS-1$
argv.add(cmd + " \"$@\""); //$NON-NLS-1$
argv.add(cmd);
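Standalone, the canExecute change above reduces to the following pattern (path is hypothetical); note it tests only OWNER_EXECUTE, not the group/other bits:

    Path path = Paths.get("/usr/local/bin/tool"); // hypothetical file
    boolean canExec;
    try {
        canExec = Files.getPosixFilePermissions(path)
                .contains(PosixFilePermission.OWNER_EXECUTE);
    } catch (IOException e) {
        canExec = false; // unreadable or vanished: treat as not executable
    }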
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/UnionInputStream.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/UnionInputStream.java
index b37f28b161..7e950f6529 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/UnionInputStream.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/UnionInputStream.java
@@ -12,8 +12,8 @@ package org.eclipse.jgit.util.io;
import java.io.IOException;
import java.io.InputStream;
-import java.util.Iterator;
-import java.util.LinkedList;
+import java.util.ArrayDeque;
+import java.util.Deque;
/**
* An InputStream which reads from one or more InputStreams.
@@ -34,7 +34,7 @@ public class UnionInputStream extends InputStream {
}
};
- private final LinkedList<InputStream> streams = new LinkedList<>();
+ private final Deque<InputStream> streams = new ArrayDeque<>();
/**
* Create an empty InputStream that is currently at EOF state.
@@ -163,14 +163,14 @@ public class UnionInputStream extends InputStream {
public void close() throws IOException {
IOException err = null;
- for (Iterator<InputStream> i = streams.iterator(); i.hasNext();) {
+ for (InputStream stream : streams) {
try {
- i.next().close();
+ stream.close();
} catch (IOException closeError) {
err = closeError;
}
- i.remove();
}
+ streams.clear();
if (err != null)
throw err;