Diffstat (limited to 'org.eclipse.jgit/src'): 83 files changed, 4486 insertions, 765 deletions
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/AddCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/AddCommand.java index c895dc9aaa..b4d1cab513 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/AddCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/AddCommand.java @@ -1,6 +1,6 @@ /* * Copyright (C) 2010, Christian Halstrick <christian.halstrick@sap.com> - * Copyright (C) 2010, Stefan Lay <stefan.lay@sap.com> and others + * Copyright (C) 2010, 2025 Stefan Lay <stefan.lay@sap.com> and others * * This program and the accompanying materials are made available under the * terms of the Eclipse Distribution License v. 1.0 which is available at @@ -17,6 +17,7 @@ import static org.eclipse.jgit.lib.FileMode.TYPE_TREE; import java.io.IOException; import java.io.InputStream; +import java.text.MessageFormat; import java.time.Instant; import java.util.ArrayList; import java.util.List; @@ -59,8 +60,15 @@ public class AddCommand extends GitCommand<DirCache> { private WorkingTreeIterator workingTreeIterator; + // Update only known index entries, don't add new ones. If there's no file + // for an index entry, remove it: stage deletions. private boolean update = false; + // If TRUE, also stage deletions, otherwise only update and add index + // entries. + // If not set explicitly + private Boolean all; + // This defaults to true because it's what JGit has been doing // traditionally. The C git default would be false. private boolean renormalize = true; @@ -82,6 +90,17 @@ public class AddCommand extends GitCommand<DirCache> { * A directory name (e.g. <code>dir</code> to add <code>dir/file1</code> and * <code>dir/file2</code>) can also be given to add all files in the * directory, recursively. Fileglobs (e.g. *.c) are not yet supported. + * </p> + * <p> + * If a pattern {@code "."} is added, all changes in the git repository's + * working tree will be added. + * </p> + * <p> + * File patterns are required unless {@code isUpdate() == true} or + * {@link #setAll(boolean)} is called. If so and no file patterns are given, + * all changes will be added (i.e., a file pattern of {@code "."} is + * implied). + * </p> * * @param filepattern * repository-relative path of file/directory to add (with @@ -113,15 +132,41 @@ public class AddCommand extends GitCommand<DirCache> { * Executes the {@code Add} command. Each instance of this class should only * be used for one invocation of the command. Don't call this method twice * on an instance. 
+ * </p> + * + * @throws JGitInternalException + * on errors, but also if {@code isUpdate() == true} _and_ + * {@link #setAll(boolean)} had been called + * @throws NoFilepatternException + * if no file patterns are given if {@code isUpdate() == false} + * and {@link #setAll(boolean)} was not called */ @Override public DirCache call() throws GitAPIException, NoFilepatternException { - - if (filepatterns.isEmpty()) - throw new NoFilepatternException(JGitText.get().atLeastOnePatternIsRequired); checkCallable(); + + if (update && all != null) { + throw new JGitInternalException(MessageFormat.format( + JGitText.get().illegalCombinationOfArguments, + "--update", "--all/--no-all")); //$NON-NLS-1$ //$NON-NLS-2$ + } + boolean addAll; + if (filepatterns.isEmpty()) { + if (update || all != null) { + addAll = true; + } else { + throw new NoFilepatternException( + JGitText.get().atLeastOnePatternIsRequired); + } + } else { + addAll = filepatterns.contains("."); //$NON-NLS-1$ + if (all == null && !update) { + all = Boolean.TRUE; + } + } + boolean stageDeletions = update || (all != null && all.booleanValue()); + DirCache dc = null; - boolean addAll = filepatterns.contains("."); //$NON-NLS-1$ try (ObjectInserter inserter = repo.newObjectInserter(); NameConflictTreeWalk tw = new NameConflictTreeWalk(repo)) { @@ -181,7 +226,8 @@ public class AddCommand extends GitCommand<DirCache> { if (f == null) { // working tree file does not exist if (entry != null - && (!update || GITLINK == entry.getFileMode())) { + && (!stageDeletions + || GITLINK == entry.getFileMode())) { builder.add(entry); } continue; @@ -252,7 +298,8 @@ public class AddCommand extends GitCommand<DirCache> { } /** - * Set whether to only match against already tracked files + * Set whether to only match against already tracked files. If + * {@code update == true}, re-sets a previous {@link #setAll(boolean)}. * * @param update * If set to true, the command only matches {@code filepattern} @@ -314,4 +361,32 @@ public class AddCommand extends GitCommand<DirCache> { public boolean isRenormalize() { return renormalize; } + + /** + * Defines whether the command will use '--all' mode: update existing index + * entries, add new entries, and remove index entries for which there is no + * file. (In other words: also stage deletions.) + * <p> + * The setting is independent of {@link #setUpdate(boolean)}. + * </p> + * + * @param all + * whether to enable '--all' mode + * @return {@code this} + * @since 7.2 + */ + public AddCommand setAll(boolean all) { + this.all = Boolean.valueOf(all); + return this; + } + + /** + * Tells whether '--all' has been set for this command. 
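For illustration only (not part of this commit): a minimal sketch of staging all changes, including deletions, through the new setAll API introduced above. The Git/AddCommand calls are the ones shown in this diff; the repository path is hypothetical.

import java.io.File;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.dircache.DirCache;

class AddAllSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical repository location.
        try (Git git = Git.open(new File("/tmp/example-repo"))) {
            // Like "git add --all": no file pattern is needed ("." is implied),
            // and deletions are staged as well.
            DirCache index = git.add().setAll(true).call();
            System.out.println("index entries: " + index.getEntryCount());
        }
    }
}

Combining setAll(...) with setUpdate(true) is rejected with a JGitInternalException, per the argument check added to call().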
+ * + * @return {@code true} if it was set; {@code false} otherwise + * @since 7.2 + */ + public boolean isAll() { + return all != null && all.booleanValue(); + } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/DescribeCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/DescribeCommand.java index 805a886392..d2526287f9 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/DescribeCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/DescribeCommand.java @@ -15,11 +15,11 @@ import static org.eclipse.jgit.lib.TypedConfigGetter.UNSET_INT; import java.io.IOException; import java.text.MessageFormat; +import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; -import java.util.Date; import java.util.List; import java.util.Map; import java.util.Optional; @@ -76,6 +76,11 @@ public class DescribeCommand extends GitCommand<String> { private List<FileNameMatcher> matchers = new ArrayList<>(); /** + * Pattern matchers to be applied to tags for exclusion. + */ + private List<FileNameMatcher> excludeMatchers = new ArrayList<>(); + + /** * Whether to use all refs in the refs/ namespace */ private boolean useAll; @@ -263,6 +268,27 @@ public class DescribeCommand extends GitCommand<String> { return this; } + /** + * Sets one or more {@code glob(7)} patterns that tags must not match to be + * considered. If multiple patterns are provided, they will all be applied. + * + * @param patterns + * the {@code glob(7)} pattern or patterns + * @return {@code this} + * @throws org.eclipse.jgit.errors.InvalidPatternException + * if the pattern passed in was invalid. + * @see <a href= + * "https://www.kernel.org/pub/software/scm/git/docs/git-describe.html" + * >Git documentation about describe</a> + * @since 7.2 + */ + public DescribeCommand setExclude(String... 
patterns) throws InvalidPatternException { + for (String p : patterns) { + excludeMatchers.add(new FileNameMatcher(p, null)); + } + return this; + } + private final Comparator<Ref> TAG_TIE_BREAKER = new Comparator<>() { @Override @@ -274,25 +300,28 @@ public class DescribeCommand extends GitCommand<String> { } } - private Date tagDate(Ref tag) throws IOException { + private Instant tagDate(Ref tag) throws IOException { RevTag t = w.parseTag(tag.getObjectId()); w.parseBody(t); - return t.getTaggerIdent().getWhen(); + return t.getTaggerIdent().getWhenAsInstant(); } }; private Optional<Ref> getBestMatch(List<Ref> tags) { if (tags == null || tags.isEmpty()) { return Optional.empty(); - } else if (matchers.isEmpty()) { + } else if (matchers.isEmpty() && excludeMatchers.isEmpty()) { Collections.sort(tags, TAG_TIE_BREAKER); return Optional.of(tags.get(0)); - } else { + } + + Stream<Ref> matchingTags; + if (!matchers.isEmpty()) { // Find the first tag that matches in the stream of all tags // filtered by matchers ordered by tie break order - Stream<Ref> matchingTags = Stream.empty(); + matchingTags = Stream.empty(); for (FileNameMatcher matcher : matchers) { - Stream<Ref> m = tags.stream().filter( + Stream<Ref> m = tags.stream().filter( // tag -> { matcher.append(formatRefName(tag.getName())); boolean result = matcher.isMatch(); @@ -301,8 +330,22 @@ public class DescribeCommand extends GitCommand<String> { }); matchingTags = Stream.of(matchingTags, m).flatMap(i -> i); } - return matchingTags.sorted(TAG_TIE_BREAKER).findFirst(); + } else { + // If there are no matchers, there are only excluders + // Assume all tags match for now before applying excluders + matchingTags = tags.stream(); + } + + for (FileNameMatcher matcher : excludeMatchers) { + matchingTags = matchingTags.filter( // + tag -> { + matcher.append(formatRefName(tag.getName())); + boolean result = matcher.isMatch(); + matcher.reset(); + return !result; + }); } + return matchingTags.sorted(TAG_TIE_BREAKER).findFirst(); } private ObjectId getObjectIdFromRef(Ref r) throws JGitInternalException { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/FetchCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/FetchCommand.java index 0713c38931..f24127bd51 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/FetchCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/FetchCommand.java @@ -124,7 +124,7 @@ public class FetchCommand extends TransportCommand<FetchCommand, FetchResult> { FetchRecurseSubmodulesMode mode = repo.getConfig().getEnum( FetchRecurseSubmodulesMode.values(), ConfigConstants.CONFIG_SUBMODULE_SECTION, path, - ConfigConstants.CONFIG_KEY_FETCH_RECURSE_SUBMODULES, null); + ConfigConstants.CONFIG_KEY_FETCH_RECURSE_SUBMODULES); if (mode != null) { return mode; } @@ -132,7 +132,7 @@ public class FetchCommand extends TransportCommand<FetchCommand, FetchResult> { // Fall back to fetch.recurseSubmodules, if set mode = repo.getConfig().getEnum(FetchRecurseSubmodulesMode.values(), ConfigConstants.CONFIG_FETCH_SECTION, null, - ConfigConstants.CONFIG_KEY_RECURSE_SUBMODULES, null); + ConfigConstants.CONFIG_KEY_RECURSE_SUBMODULES); if (mode != null) { return mode; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/GarbageCollectCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/GarbageCollectCommand.java index 88d7e91860..f6935e1c67 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/GarbageCollectCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/GarbageCollectCommand.java @@ -12,6 +12,7 @@ package 
org.eclipse.jgit.api; import java.io.IOException; import java.text.MessageFormat; import java.text.ParseException; +import java.time.Instant; import java.util.Date; import java.util.Properties; import java.util.concurrent.ExecutionException; @@ -59,7 +60,7 @@ public class GarbageCollectCommand extends GitCommand<Properties> { private ProgressMonitor monitor; - private Date expire; + private Instant expire; private PackConfig pconfig; @@ -98,8 +99,29 @@ public class GarbageCollectCommand extends GitCommand<Properties> { * @param expire * minimal age of objects to be pruned. * @return this instance + * @deprecated use {@link #setExpire(Instant)} instead */ + @Deprecated(since = "7.2") public GarbageCollectCommand setExpire(Date expire) { + if (expire != null) { + this.expire = expire.toInstant(); + } + return this; + } + + /** + * During gc() or prune() each unreferenced, loose object which has been + * created or modified after <code>expire</code> will not be pruned. Only + * older objects may be pruned. If set to null then every object is a + * candidate for pruning. Use {@link org.eclipse.jgit.util.GitTimeParser} to + * parse time formats used by git gc. + * + * @param expire + * minimal age of objects to be pruned. + * @return this instance + * @since 7.2 + */ + public GarbageCollectCommand setExpire(Instant expire) { this.expire = expire; return this; } @@ -108,8 +130,8 @@ public class GarbageCollectCommand extends GitCommand<Properties> { * Whether to use aggressive mode or not. If set to true JGit behaves more * similar to native git's "git gc --aggressive". If set to * <code>true</code> compressed objects found in old packs are not reused - * but every object is compressed again. Configuration variables - * pack.window and pack.depth are set to 250 for this GC. + * but every object is compressed again. Configuration variables pack.window + * and pack.depth are set to 250 for this GC. 
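For illustration only (not part of this commit): a sketch of the new Instant-based expiry setter that replaces the deprecated Date overload. The repository path and the two-week cutoff are arbitrary choices for the example.

import java.io.File;
import java.time.Duration;
import java.time.Instant;
import java.util.Properties;
import org.eclipse.jgit.api.Git;

class GcExpireSketch {
    public static void main(String[] args) throws Exception {
        try (Git git = Git.open(new File("/tmp/example-repo"))) {
            // Loose, unreferenced objects newer than this cutoff are not pruned.
            Instant cutoff = Instant.now().minus(Duration.ofDays(14));
            Properties statistics = git.gc().setExpire(cutoff).call();
            System.out.println(statistics);
        }
    }
}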
* * @since 3.6 * @param aggressive diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/LogCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/LogCommand.java index 555e351d32..2a8d34ed68 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/LogCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/LogCommand.java @@ -110,10 +110,10 @@ public class LogCommand extends GitCommand<Iterable<RevCommit>> { } if (!filters.isEmpty()) { if (filters.size() == 1) { - filters.add(TreeFilter.ANY_DIFF); + walk.setTreeFilter(filters.get(0)); + } else { + walk.setTreeFilter(AndTreeFilter.create(filters)); } - walk.setTreeFilter(AndTreeFilter.create(filters)); - } if (skip > -1 && maxCount > -1) walk.setRevFilter(AndRevFilter.create(SkipRevFilter.create(skip), diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/PullCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/PullCommand.java index 83ae0fc9d4..4b2cee45c2 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/PullCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/PullCommand.java @@ -533,9 +533,9 @@ public class PullCommand extends TransportCommand<PullCommand, PullResult> { Config config) { BranchRebaseMode mode = config.getEnum(BranchRebaseMode.values(), ConfigConstants.CONFIG_BRANCH_SECTION, - branchName, ConfigConstants.CONFIG_KEY_REBASE, null); + branchName, ConfigConstants.CONFIG_KEY_REBASE); if (mode == null) { - mode = config.getEnum(BranchRebaseMode.values(), + mode = config.getEnum( ConfigConstants.CONFIG_PULL_SECTION, null, ConfigConstants.CONFIG_KEY_REBASE, BranchRebaseMode.NONE); } @@ -549,7 +549,7 @@ public class PullCommand extends TransportCommand<PullCommand, PullResult> { Config config = repo.getConfig(); Merge ffMode = config.getEnum(Merge.values(), ConfigConstants.CONFIG_PULL_SECTION, null, - ConfigConstants.CONFIG_KEY_FF, null); + ConfigConstants.CONFIG_KEY_FF); return ffMode != null ? 
FastForwardMode.valueOf(ffMode) : null; } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java index 858bd961cd..3ae7a6c81e 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java @@ -18,6 +18,8 @@ import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.text.MessageFormat; +import java.time.Instant; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -1835,23 +1837,26 @@ public class RebaseCommand extends GitCommand<RebaseResult> { // the time is saved as <seconds since 1970> <timezone offset> int timeStart = 0; - if (time.startsWith("@")) //$NON-NLS-1$ + if (time.startsWith("@")) { //$NON-NLS-1$ timeStart = 1; - else + } else { timeStart = 0; - long when = Long - .parseLong(time.substring(timeStart, time.indexOf(' '))) * 1000; + } + Instant when = Instant.ofEpochSecond( + Long.parseLong(time.substring(timeStart, time.indexOf(' ')))); String tzOffsetString = time.substring(time.indexOf(' ') + 1); int multiplier = -1; - if (tzOffsetString.charAt(0) == '+') + if (tzOffsetString.charAt(0) == '+') { multiplier = 1; + } int hours = Integer.parseInt(tzOffsetString.substring(1, 3)); int minutes = Integer.parseInt(tzOffsetString.substring(3, 5)); // this is in format (+/-)HHMM (hours and minutes) - // we need to convert into minutes - int tz = (hours * 60 + minutes) * multiplier; - if (name != null && email != null) + ZoneOffset tz = ZoneOffset.ofHoursMinutes(hours * multiplier, + minutes * multiplier); + if (name != null && email != null) { return new PersonIdent(name, email, when, tz); + } return null; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java index dead2749b7..a149649004 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java @@ -68,7 +68,7 @@ public class ReflogCommand extends GitCommand<Collection<ReflogEntry>> { checkCallable(); try { - ReflogReader reader = repo.getReflogReader(ref); + ReflogReader reader = repo.getRefDatabase().getReflogReader(ref); if (reader == null) throw new RefNotFoundException(MessageFormat.format( JGitText.get().refNotResolved, ref)); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashDropCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashDropCommand.java index 23fbe0197f..2dba0ef0f2 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashDropCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashDropCommand.java @@ -165,7 +165,8 @@ public class StashDropCommand extends GitCommand<ObjectId> { List<ReflogEntry> entries; try { - ReflogReader reader = repo.getReflogReader(R_STASH); + ReflogReader reader = repo.getRefDatabase() + .getReflogReader(R_STASH); if (reader == null) { throw new RefNotFoundException(MessageFormat .format(JGitText.get().refNotResolved, stashRef)); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java index 3524984347..5e4b2ee0b7 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java @@ -28,6 +28,7 @@ import 
org.eclipse.jgit.api.errors.RefNotFoundException; import org.eclipse.jgit.api.errors.WrongRepositoryStateException; import org.eclipse.jgit.dircache.DirCacheCheckout; import org.eclipse.jgit.errors.ConfigInvalidException; +import org.eclipse.jgit.internal.storage.file.LockFile; import org.eclipse.jgit.lib.ConfigConstants; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.NullProgressMonitor; @@ -39,6 +40,7 @@ import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.submodule.SubmoduleWalk; import org.eclipse.jgit.treewalk.filter.PathFilterGroup; +import org.eclipse.jgit.util.FileUtils; /** * A class used to execute a submodule update command. @@ -62,6 +64,8 @@ public class SubmoduleUpdateCommand extends private boolean fetch = false; + private boolean clonedRestored; + /** * <p> * Constructor for SubmoduleUpdateCommand. @@ -116,26 +120,77 @@ public class SubmoduleUpdateCommand extends return this; } + private static boolean submoduleExists(File gitDir) { + if (gitDir != null && gitDir.isDirectory()) { + File[] files = gitDir.listFiles(); + return files != null && files.length != 0; + } + return false; + } + + private static void restoreSubmodule(File gitDir, File workingTree) + throws IOException { + LockFile dotGitLock = new LockFile( + new File(workingTree, Constants.DOT_GIT)); + if (dotGitLock.lock()) { + String content = Constants.GITDIR + + getRelativePath(gitDir, workingTree); + dotGitLock.write(Constants.encode(content)); + dotGitLock.commit(); + } + } + + private static String getRelativePath(File gitDir, File workingTree) { + File relPath; + try { + relPath = workingTree.toPath().relativize(gitDir.toPath()) + .toFile(); + } catch (IllegalArgumentException e) { + relPath = gitDir; + } + return FileUtils.pathToString(relPath); + } + + private String determineUpdateMode(String mode) { + if (clonedRestored) { + return ConfigConstants.CONFIG_KEY_CHECKOUT; + } + return mode; + } + private Repository getOrCloneSubmodule(SubmoduleWalk generator, String url) throws IOException, GitAPIException { Repository repository = generator.getRepository(); + boolean restored = false; + boolean cloned = false; if (repository == null) { - if (callback != null) { - callback.cloningSubmodule(generator.getPath()); - } - CloneCommand clone = Git.cloneRepository(); - configure(clone); - clone.setURI(url); - clone.setDirectory(generator.getDirectory()); - clone.setGitDir(new File( + File gitDir = new File( new File(repo.getCommonDirectory(), Constants.MODULES), - generator.getPath())); - clone.setRelativePaths(true); - if (monitor != null) { - clone.setProgressMonitor(monitor); + generator.getPath()); + if (submoduleExists(gitDir)) { + restoreSubmodule(gitDir, generator.getDirectory()); + restored = true; + clonedRestored = true; + repository = generator.getRepository(); + } else { + if (callback != null) { + callback.cloningSubmodule(generator.getPath()); + } + CloneCommand clone = Git.cloneRepository(); + configure(clone); + clone.setURI(url); + clone.setDirectory(generator.getDirectory()); + clone.setGitDir(gitDir); + clone.setRelativePaths(true); + if (monitor != null) { + clone.setProgressMonitor(monitor); + } + repository = clone.call().getRepository(); + cloned = true; + clonedRestored = true; } - repository = clone.call().getRepository(); - } else if (this.fetch) { + } + if ((this.fetch || restored) && !cloned) { if (fetchCallback != null) { fetchCallback.fetchingSubmodule(generator.getPath()); } @@ -172,15 +227,17 @@ public class 
SubmoduleUpdateCommand extends continue; // Skip submodules not registered in parent repository's config String url = generator.getConfigUrl(); - if (url == null) + if (url == null) { continue; - + } + clonedRestored = false; try (Repository submoduleRepo = getOrCloneSubmodule(generator, url); RevWalk walk = new RevWalk(submoduleRepo)) { RevCommit commit = walk .parseCommit(generator.getObjectId()); - String update = generator.getConfigUpdate(); + String update = determineUpdateMode( + generator.getConfigUpdate()); if (ConfigConstants.CONFIG_KEY_MERGE.equals(update)) { MergeCommand merge = new MergeCommand(submoduleRepo); merge.include(commit); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/blame/BlameGenerator.java b/org.eclipse.jgit/src/org/eclipse/jgit/blame/BlameGenerator.java index 77967df2e5..979c8cef88 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/blame/BlameGenerator.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/blame/BlameGenerator.java @@ -28,6 +28,8 @@ import org.eclipse.jgit.blame.Candidate.BlobCandidate; import org.eclipse.jgit.blame.Candidate.HeadCandidate; import org.eclipse.jgit.blame.Candidate.ReverseCandidate; import org.eclipse.jgit.blame.ReverseWalk.ReverseCommit; +import org.eclipse.jgit.blame.cache.BlameCache; +import org.eclipse.jgit.blame.cache.CacheRegion; import org.eclipse.jgit.diff.DiffAlgorithm; import org.eclipse.jgit.diff.DiffEntry; import org.eclipse.jgit.diff.DiffEntry.ChangeType; @@ -129,8 +131,19 @@ public class BlameGenerator implements AutoCloseable { /** Blame is currently assigned to this source. */ private Candidate outCandidate; + private Region outRegion; + private final BlameCache blameCache; + + /** + * Blame in reverse order needs the source lines, but we don't have them in + * the cache. We need to ignore the cache in that case. + */ + private boolean useCache = true; + + private final Stats stats = new Stats(); + /** * Create a blame generator for the repository and path (relative to * repository) @@ -142,6 +155,25 @@ public class BlameGenerator implements AutoCloseable { * repository). */ public BlameGenerator(Repository repository, String path) { + this(repository, path, null); + } + + /** + * Create a blame generator for the repository and path (relative to + * repository) + * + * @param repository + * repository to access revision data from. + * @param path + * initial path of the file to start scanning (relative to the + * repository). + * @param blameCache + * previously calculated blames. This generator will *not* + * populate it, just consume it. 
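For illustration only (not part of this commit): a sketch of feeding a previously computed blame into the generator through the new cache-aware constructor. The file path is hypothetical, and the BlameCache instance stands in for whatever implementation the caller provides; the generator only reads from it.

import org.eclipse.jgit.blame.BlameGenerator;
import org.eclipse.jgit.blame.cache.BlameCache;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;

class CachedBlameSketch {
    static void printBlame(Repository repo, BlameCache cache) throws Exception {
        try (BlameGenerator gen = new BlameGenerator(repo, "README.md", cache)) {
            ObjectId head = repo.resolve("HEAD"); // assumes HEAD resolves
            gen.push(null, head);
            while (gen.next()) {
                System.out.printf("%s %d..%d%n", gen.getSourceCommit().name(),
                        gen.getResultStart(), gen.getResultEnd());
            }
            System.out.println("cache hit: " + gen.getStats().isCacheHit());
        }
    }
}

If reliable source line numbers (getSourceStart/getSourceEnd) are needed, the cache can be bypassed with setUseCache(false), as described further below.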
+ * @since 7.2 + */ + public BlameGenerator(Repository repository, String path, + @Nullable BlameCache blameCache) { this.repository = repository; this.resultPath = PathFilter.create(path); @@ -150,6 +182,7 @@ public class BlameGenerator implements AutoCloseable { initRevPool(false); remaining = -1; + this.blameCache = blameCache; } private void initRevPool(boolean reverse) { @@ -159,10 +192,12 @@ public class BlameGenerator implements AutoCloseable { if (revPool != null) revPool.close(); - if (reverse) + if (reverse) { + useCache = false; revPool = new ReverseWalk(getRepository()); - else + } else { revPool = new RevWalk(getRepository()); + } SEEN = revPool.newFlag("SEEN"); //$NON-NLS-1$ reader = revPool.getObjectReader(); @@ -245,6 +280,31 @@ public class BlameGenerator implements AutoCloseable { } /** + * Stats about this generator + * + * @return the stats of this generator + * @since 7.2 + */ + public Stats getStats() { + return stats; + } + + /** + * Enable/disable the use of cache (if present). Enabled by default. + * <p> + * If caller need source line numbers, the generator cannot use the cache + * (source lines are not there). Use this method to disable the cache in + * that case. + * + * @param useCache + * should this generator use the cache. + * @since 7.2 + */ + public void setUseCache(boolean useCache) { + this.useCache = useCache; + } + + /** * Push a candidate blob onto the generator's traversal stack. * <p> * Candidates should be pushed in history order from oldest-to-newest. @@ -591,6 +651,7 @@ public class BlameGenerator implements AutoCloseable { Candidate n = pop(); if (n == null) return done(); + stats.candidatesVisited += 1; int pCnt = n.getParentCount(); if (pCnt == 1) { @@ -605,7 +666,7 @@ public class BlameGenerator implements AutoCloseable { // Do not generate a tip of a reverse. The region // survives and should not appear to be deleted. - } else /* if (pCnt == 0) */{ + } else /* if (pCnt == 0) */ { // Root commit, with at least one surviving region. // Assign the remaining blame here. return result(n); @@ -695,6 +756,27 @@ public class BlameGenerator implements AutoCloseable { } } + @Nullable + private Candidate blameFromCache(Candidate n) throws IOException { + if (blameCache == null || !useCache) { + return null; + } + + List<CacheRegion> cachedBlame = blameCache.get(repository, + n.sourceCommit, n.sourcePath.getPath()); + if (cachedBlame == null) { + return null; + } + BlameRegionMerger rb = new BlameRegionMerger(repository, revPool, + cachedBlame); + Candidate fullyBlamed = rb.mergeCandidate(n); + if (fullyBlamed == null) { + return null; + } + stats.cacheHit = true; + return fullyBlamed; + } + private boolean processOne(Candidate n) throws IOException { RevCommit parent = n.getParent(0); if (parent == null) @@ -717,12 +799,17 @@ public class BlameGenerator implements AutoCloseable { if (0 == r.getOldId().prefixCompare(n.sourceBlob)) { // A 100% rename without any content change can also // skip directly to the parent. 
+ Candidate cached = blameFromCache(n); + if (cached != null) { + return result(cached); + } n.sourceCommit = parent; n.sourcePath = PathFilter.create(r.getOldPath()); push(n); return false; } + Candidate next = n.create(getRepository(), parent, PathFilter.create(r.getOldPath())); next.sourceBlob = r.getOldId().toObjectId(); @@ -759,6 +846,11 @@ public class BlameGenerator implements AutoCloseable { return false; } + Candidate cached = blameFromCache(source); + if (cached != null) { + return result(cached); + } + parent.takeBlame(editList, source); if (parent.regionList != null) push(parent); @@ -846,8 +938,8 @@ public class BlameGenerator implements AutoCloseable { editList = new EditList(0); } else { p.loadText(reader); - editList = diffAlgorithm.diff(textComparator, - p.sourceText, n.sourceText); + editList = diffAlgorithm.diff(textComparator, p.sourceText, + n.sourceText); } if (editList.isEmpty()) { @@ -981,6 +1073,10 @@ public class BlameGenerator implements AutoCloseable { /** * Get first line of the source data that has been blamed for the current * region + * <p> + * This value is not reliable when the generator is reusing cached values. + * Cache doesn't keep the source lines, the returned value is based on the + * result and can be off if the region moved in previous commits. * * @return first line of the source data that has been blamed for the * current region. This is line number of where the region was added @@ -994,6 +1090,10 @@ public class BlameGenerator implements AutoCloseable { /** * Get one past the range of the source data that has been blamed for the * current region + * <p> + * This value is not reliable when the generator is reusing cached values. + * Cache doesn't keep the source lines, the returned value is based on the + * result and can be off if the region moved in previous commits. * * @return one past the range of the source data that has been blamed for * the current region. This is line number of where the region was @@ -1124,4 +1224,39 @@ public class BlameGenerator implements AutoCloseable { return ent.getChangeType() == ChangeType.RENAME || ent.getChangeType() == ChangeType.COPY; } + + /** + * Stats about the work done by the generator + * + * @since 7.2 + */ + public static class Stats { + + /** Candidates taken from the queue */ + private int candidatesVisited; + + private boolean cacheHit; + + /** + * Number of candidates taken from the queue + * <p> + * The generator could signal it's done without exhausting all + * candidates if there is no more remaining lines or the last visited + * candidate is found in the cache. + * + * @return number of candidates taken from the queue + */ + public int getCandidatesVisited() { + return candidatesVisited; + } + + /** + * The generator found a blamed version in the cache + * + * @return true if we used results from the cache + */ + public boolean isCacheHit() { + return cacheHit; + } + } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/blame/BlameRegionMerger.java b/org.eclipse.jgit/src/org/eclipse/jgit/blame/BlameRegionMerger.java new file mode 100644 index 0000000000..67bc6fb789 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/blame/BlameRegionMerger.java @@ -0,0 +1,158 @@ +/* + * Copyright (C) 2025, Google LLC. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.blame; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.eclipse.jgit.blame.cache.CacheRegion; +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.Repository; +import org.eclipse.jgit.revwalk.RevCommit; +import org.eclipse.jgit.revwalk.RevWalk; +import org.eclipse.jgit.treewalk.filter.PathFilter; + +/** + * Translates an unblamed region into one or more blamed regions, using the + * fully blamed data from cache. + * <p> + * Blamed and unblamed regions are not symmetrical: An unblamed region is just a + * range of lines over the file. A blamed region is a Candidate (with the commit + * info) with a region inside (the range blamed). + */ +class BlameRegionMerger { + private final Repository repo; + + private final List<CacheRegion> cachedRegions; + + private final RevWalk rw; + + BlameRegionMerger(Repository repo, RevWalk rw, + List<CacheRegion> cachedRegions) { + this.repo = repo; + List<CacheRegion> sorted = new ArrayList<>(cachedRegions); + Collections.sort(sorted); + this.cachedRegions = sorted; + this.rw = rw; + } + + /** + * Return one or more candidates blaming all the regions of the "unblamed" + * incoming candidate. + * + * @param candidate + * a candidate with a list of unblamed regions + * @return A linked list of Candidates with their blamed regions, null if + * there was any error. + */ + Candidate mergeCandidate(Candidate candidate) { + List<Candidate> newCandidates = new ArrayList<>(); + Region r = candidate.regionList; + while (r != null) { + try { + newCandidates.addAll(mergeOneRegion(r)); + } catch (IOException e) { + return null; + } + r = r.next; + } + return asLinkedCandidate(newCandidates); + } + + // Visible for testing + List<Candidate> mergeOneRegion(Region region) throws IOException { + List<CacheRegion> overlaps = findOverlaps(region); + if (overlaps.isEmpty()) { + throw new IOException( + "Cached blame should cover all lines"); + } + /* + * Cached regions cover the whole file. We find first which ones overlap + * with our unblamed region. Then we take the overlapping portions with + * the corresponding blame. + */ + List<Candidate> candidates = new ArrayList<>(); + for (CacheRegion overlap : overlaps) { + Region blamedRegions = intersectRegions(region, overlap); + Candidate c = new Candidate(repo, parse(overlap.getSourceCommit()), + PathFilter.create(overlap.getSourcePath())); + c.regionList = blamedRegions; + candidates.add(c); + } + return candidates; + } + + // Visible for testing + List<CacheRegion> findOverlaps(Region unblamed) { + int unblamedStart = unblamed.sourceStart; + int unblamedEnd = unblamedStart + unblamed.length; + List<CacheRegion> overlapping = new ArrayList<>(); + for (CacheRegion blamed : cachedRegions) { + // End is not included + if (blamed.getEnd() <= unblamedStart) { + // Blamed region is completely before + continue; + } + + if (blamed.getStart() >= unblamedEnd) { + // Blamed region is completely after + // Blamed regions are sorted by start position, nothing will + // match anymore + break; + } + overlapping.add(blamed); + } + return overlapping; + } + + // Visible for testing + /** + * Calculate the intersection between a Region and a CacheRegion, adjusting + * the start if needed. + * <p> + * This should be called only if there is an overlap (filtering the cached + * regions with {@link #findOverlaps(Region)}), otherwise the result is + * meaningless. 
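For illustration only (not part of this commit): the overlap arithmetic of intersectRegions, worked through with made-up line numbers. Region and the static helper are package-private, so the sketch just mirrors the computation with plain ints.

class IntersectSketch {
    public static void main(String[] args) {
        // Unblamed region: result lines 10..25 map to source lines 40..55.
        int resultStart = 10, sourceStart = 40, length = 15;
        // Cached region blaming source lines 50..70 (end exclusive).
        int cachedStart = 50, cachedEnd = 70;

        int blamedStart = Math.max(cachedStart, sourceStart);           // 50
        int blamedEnd = Math.min(cachedEnd, sourceStart + length);      // 55
        int overlapLength = blamedEnd - blamedStart;                    // 5
        // The result start moves by the same delta as the source start.
        int newResultStart = resultStart + (blamedStart - sourceStart); // 20
        System.out.printf("Region(result=%d, source=%d, length=%d)%n",
                newResultStart, blamedStart, overlapLength);
    }
}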
+ * + * @param unblamed + * a region from the blame generator + * @param cached + * a cached region + * @return a new region with the intersection. + */ + static Region intersectRegions(Region unblamed, CacheRegion cached) { + int blamedStart = Math.max(cached.getStart(), unblamed.sourceStart); + int blamedEnd = Math.min(cached.getEnd(), + unblamed.sourceStart + unblamed.length); + int length = blamedEnd - blamedStart; + + // result start and source start should move together + int blameStartDelta = blamedStart - unblamed.sourceStart; + return new Region(unblamed.resultStart + blameStartDelta, blamedStart, + length); + } + + // Tests can override this, so they don't need a real repo, commit and walk + protected RevCommit parse(ObjectId oid) throws IOException { + return rw.parseCommit(oid); + } + + private static Candidate asLinkedCandidate(List<Candidate> c) { + Candidate head = c.get(0); + Candidate tail = head; + for (int i = 1; i < c.size(); i++) { + tail.queueNext = c.get(i); + tail = tail.queueNext; + } + return head; + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/blame/cache/BlameCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/blame/cache/BlameCache.java new file mode 100644 index 0000000000..d44fb5f62b --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/blame/cache/BlameCache.java @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2025, Google LLC. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.blame.cache; + +import java.io.IOException; +import java.util.List; + +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.Repository; + +/** + * Keeps the blame information for a path at certain commit. + * <p> + * If there is a result, it covers the whole file at that revision + * + * @since 7.2 + */ +public interface BlameCache { + /** + * Gets the blame of a path at a given commit if available. + * <p> + * Since this cache is used in blame calculation, this get() method should + * only retrieve the cache value, and not re-trigger blame calculation. In + * other words, this acts as "getIfPresent", and not "computeIfAbsent". + * + * @param repo + * repository containing the commit + * @param commitId + * we are looking at the file in this revision + * @param path + * path a file in the repo + * + * @return the blame of a path at a given commit or null if not in cache + * @throws IOException + * error retrieving/parsing values from storage + */ + List<CacheRegion> get(Repository repo, ObjectId commitId, String path) + throws IOException; +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/blame/cache/CacheRegion.java b/org.eclipse.jgit/src/org/eclipse/jgit/blame/cache/CacheRegion.java new file mode 100644 index 0000000000..cf3f978044 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/blame/cache/CacheRegion.java @@ -0,0 +1,125 @@ +/* + * Copyright (C) 2025, Google LLC. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.blame.cache; + +import java.text.MessageFormat; + +import org.eclipse.jgit.internal.JGitText; +import org.eclipse.jgit.lib.ObjectId; + +/** + * Region of the blame of a file. 
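For illustration only (not part of this commit): a minimal in-memory BlameCache sketch honoring the getIfPresent contract described above. The map-backed store and the commit-plus-path key format are assumptions made for the example, not JGit API.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.jgit.blame.cache.BlameCache;
import org.eclipse.jgit.blame.cache.CacheRegion;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;

class InMemoryBlameCache implements BlameCache {
    private final Map<String, List<CacheRegion>> store = new HashMap<>();

    @Override
    public List<CacheRegion> get(Repository repo, ObjectId commitId, String path) {
        // Pure lookup ("getIfPresent"): never triggers a blame computation.
        return store.get(commitId.name() + ":" + path);
    }

    void put(ObjectId commitId, String path, List<CacheRegion> fullBlame) {
        // fullBlame must cover the whole file at that commit, e.g. a list of
        // CacheRegion(path, commit, start, end) entries with exclusive ends.
        store.put(commitId.name() + ":" + path, fullBlame);
    }
}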
+ * <p> + * Usually all parameters are non-null, except when the Region was created + * to fill an unblamed gap (to cover for bugs in the calculation). In that + * case, path, commit and author will be null. + * + * @since 7.2 + **/ +public class CacheRegion implements Comparable<CacheRegion> { + private final String sourcePath; + + private final ObjectId sourceCommit; + + private final int end; + + private final int start; + + /** + * A blamed portion of a file + * + * @param path + * location of the file + * @param commit + * commit that is modifying this region + * @param start + * first line of this region (inclusive) + * @param end + * last line of this region (non-inclusive!) + */ + public CacheRegion(String path, ObjectId commit, + int start, int end) { + allOrNoneNull(path, commit); + this.sourcePath = path; + this.sourceCommit = commit; + this.start = start; + this.end = end; + } + + /** + * First line of this region. Starting by 0, inclusive + * + * @return first line of this region. + */ + public int getStart() { + return start; + } + + /** + * One after last line in this region (or: last line non-inclusive) + * + * @return one after last line in this region. + */ + public int getEnd() { + return end; + } + + + /** + * Path of the file this region belongs to + * + * @return path in the repo/commit + */ + public String getSourcePath() { + return sourcePath; + } + + /** + * Commit this region belongs to + * + * @return commit for this region + */ + public ObjectId getSourceCommit() { + return sourceCommit; + } + + @Override + public int compareTo(CacheRegion o) { + return start - o.start; + } + + @SuppressWarnings("nls") + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + if (sourceCommit != null) { + sb.append(sourceCommit.name(), 0, 7).append(' ') + .append(" (") + .append(sourcePath).append(')'); + } else { + sb.append("<unblamed region>"); + } + sb.append(' ').append("start=").append(start).append(", count=") + .append(end - start); + return sb.toString(); + } + + private static void allOrNoneNull(String path, ObjectId commit) { + if (path != null && commit != null) { + return; + } + + if (path == null && commit == null) { + return; + } + throw new IllegalArgumentException(MessageFormat + .format(JGitText.get().cacheRegionAllOrNoneNull, path, commit)); + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/Checkout.java b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/Checkout.java index accf732dc7..de02aecdb9 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/Checkout.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/Checkout.java @@ -217,10 +217,18 @@ public class Checkout { } } try { - if (recursiveDelete && Files.isDirectory(f.toPath(), - LinkOption.NOFOLLOW_LINKS)) { + boolean isDir = Files.isDirectory(f.toPath(), + LinkOption.NOFOLLOW_LINKS); + if (recursiveDelete && isDir) { FileUtils.delete(f, FileUtils.RECURSIVE); } + if (cache.getRepository().isWorkTreeCaseInsensitive() && !isDir) { + // We cannot rely on rename via Files.move() to work correctly + // if the target exists in a case variant. For instance with JDK + // 17 on Mac OS, the existing case-variant name is kept. On + // Windows 11 it would work and use the name given in 'f'. 
+ FileUtils.delete(f, FileUtils.SKIP_MISSING); + } FileUtils.rename(tmpFile, f, StandardCopyOption.ATOMIC_MOVE); cachedParent.remove(f.getName()); } catch (IOException e) { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCache.java index 34dba0b5be..c650d6e8e7 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCache.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCache.java @@ -1037,7 +1037,12 @@ public class DirCache { } } - enum DirCacheVersion implements ConfigEnum { + /** + * DirCache versions + * + * @since 7.2 + */ + public enum DirCacheVersion implements ConfigEnum { /** Minimum index version on-disk format that we support. */ DIRC_VERSION_MINIMUM(2), @@ -1060,6 +1065,9 @@ public class DirCache { this.version = versionCode; } + /** + * @return the version code for this version + */ public int getVersionCode() { return version; } @@ -1078,6 +1086,13 @@ public class DirCache { } } + /** + * Create DirCacheVersion from integer value of the version code. + * + * @param val + * integer value of the version code. + * @return the DirCacheVersion instance of the version code. + */ public static DirCacheVersion fromInt(int val) { for (DirCacheVersion v : DirCacheVersion.values()) { if (val == v.getVersionCode()) { @@ -1098,9 +1113,8 @@ public class DirCache { boolean manyFiles = cfg.getBoolean( ConfigConstants.CONFIG_FEATURE_SECTION, ConfigConstants.CONFIG_KEY_MANYFILES, false); - indexVersion = cfg.getEnum(DirCacheVersion.values(), - ConfigConstants.CONFIG_INDEX_SECTION, null, - ConfigConstants.CONFIG_KEY_VERSION, + indexVersion = cfg.getEnum(ConfigConstants.CONFIG_INDEX_SECTION, + null, ConfigConstants.CONFIG_KEY_VERSION, manyFiles ? DirCacheVersion.DIRC_VERSION_PATHCOMPRESS : DirCacheVersion.DIRC_VERSION_EXTENDED); skipHash = cfg.getBoolean(ConfigConstants.CONFIG_INDEX_SECTION, diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java index 4f78404f48..18d77482e0 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java @@ -5,7 +5,7 @@ * Copyright (C) 2006, Shawn O. Pearce <spearce@spearce.org> * Copyright (C) 2010, Chrisian Halstrick <christian.halstrick@sap.com> * Copyright (C) 2019, 2020, Andre Bossert <andre.bossert@siemens.com> - * Copyright (C) 2017, 2023, Thomas Wolf <twolf@apache.org> and others + * Copyright (C) 2017, 2025, Thomas Wolf <twolf@apache.org> and others * * This program and the accompanying materials are made available under the * terms of the Eclipse Distribution License v. 
1.0 which is available at @@ -31,6 +31,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeSet; import org.eclipse.jgit.api.errors.CanceledException; import org.eclipse.jgit.api.errors.FilterFailedException; @@ -66,7 +67,6 @@ import org.eclipse.jgit.treewalk.WorkingTreeOptions; import org.eclipse.jgit.treewalk.filter.PathFilter; import org.eclipse.jgit.util.FS; import org.eclipse.jgit.util.FS.ExecutionResult; -import org.eclipse.jgit.util.IntList; import org.eclipse.jgit.util.SystemReader; import org.eclipse.jgit.util.io.EolStreamTypeUtil; import org.slf4j.Logger; @@ -113,9 +113,11 @@ public class DirCacheCheckout { private Map<String, CheckoutMetadata> updated = new LinkedHashMap<>(); + private Set<String> existing; + private ArrayList<String> conflicts = new ArrayList<>(); - private ArrayList<String> removed = new ArrayList<>(); + private TreeSet<String> removed; private ArrayList<String> kept = new ArrayList<>(); @@ -185,7 +187,7 @@ public class DirCacheCheckout { * @return a list of all files removed by this checkout */ public List<String> getRemoved() { - return removed; + return new ArrayList<>(removed); } /** @@ -214,6 +216,14 @@ public class DirCacheCheckout { this.mergeCommitTree = mergeCommitTree; this.workingTree = workingTree; this.initialCheckout = !repo.isBare() && !repo.getIndexFile().exists(); + boolean caseInsensitive = !repo.isBare() + && repo.isWorkTreeCaseInsensitive(); + this.removed = caseInsensitive + ? new TreeSet<>(String::compareToIgnoreCase) + : new TreeSet<>(); + this.existing = caseInsensitive + ? new TreeSet<>(String::compareToIgnoreCase) + : null; } /** @@ -400,9 +410,11 @@ public class DirCacheCheckout { // content to be checked out. update(m); } - } else + } else { update(m); - } else if (f == null || !m.idEqual(i)) { + } + } else if (f == null || !m.idEqual(i) + || m.getEntryRawMode() != i.getEntryRawMode()) { // The working tree file is missing or the merge content differs // from index content update(m); @@ -410,11 +422,11 @@ public class DirCacheCheckout { // The index contains a file (and not a folder) if (f.isModified(i.getDirCacheEntry(), true, this.walk.getObjectReader()) - || i.getDirCacheEntry().getStage() != 0) + || i.getDirCacheEntry().getStage() != 0) { // The working tree file is dirty or the index contains a // conflict update(m); - else { + } else { // update the timestamp of the index with the one from the // file if not set, as we are sure to be in sync here. DirCacheEntry entry = i.getDirCacheEntry(); @@ -424,9 +436,10 @@ public class DirCacheCheckout { } keep(i.getEntryPathString(), entry, f); } - } else + } else { // The index contains a folder keep(i.getEntryPathString(), i.getDirCacheEntry(), f); + } } else { // There is no entry in the merge commit. Means: we want to delete // what's currently in the index and working tree @@ -521,6 +534,13 @@ public class DirCacheCheckout { // update our index builder.finish(); + // On case-insensitive file systems we may have a case variant kept + // and another one removed. In that case, don't remove it. + if (existing != null) { + removed.removeAll(existing); + existing.clear(); + } + // init progress reporting int numTotal = removed.size() + updated.size() + conflicts.size(); monitor.beginTask(JGitText.get().checkingOutFiles, numTotal); @@ -531,9 +551,9 @@ public class DirCacheCheckout { // when deleting files process them in the opposite order as they have // been reported. 
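For illustration only (not part of this commit): why the case-insensitive TreeSet matters. With String::compareToIgnoreCase as comparator, a path kept or checked out under one case spelling also cancels the pending removal of its case variant. The file names are hypothetical.

import java.util.Set;
import java.util.TreeSet;

class CaseInsensitiveRemovalSketch {
    public static void main(String[] args) {
        Set<String> removed = new TreeSet<>(String::compareToIgnoreCase);
        Set<String> existing = new TreeSet<>(String::compareToIgnoreCase);

        removed.add("docs/ReadMe.md");   // scheduled for deletion under the old case
        existing.add("docs/readme.md");  // kept in the index under the new case

        // Case variants compare equal, so the kept path cancels the removal.
        removed.removeAll(existing);
        System.out.println(removed.isEmpty()); // prints "true"
    }
}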
This ensures the files are deleted before we delete // their parent folders - IntList nonDeleted = new IntList(); - for (int i = removed.size() - 1; i >= 0; i--) { - String r = removed.get(i); + Iterator<String> iter = removed.descendingIterator(); + while (iter.hasNext()) { + String r = iter.next(); file = new File(repo.getWorkTree(), r); if (!file.delete() && repo.getFS().exists(file)) { // The list of stuff to delete comes from the index @@ -542,7 +562,7 @@ public class DirCacheCheckout { // to delete it. A submodule is not empty, so it // is safe to check this after a failed delete. if (!repo.getFS().isDirectory(file)) { - nonDeleted.add(i); + iter.remove(); toBeDeleted.add(r); } } else { @@ -560,8 +580,6 @@ public class DirCacheCheckout { if (file != null) { removeEmptyParents(file); } - removed = filterOut(removed, nonDeleted); - nonDeleted = null; Iterator<Map.Entry<String, CheckoutMetadata>> toUpdate = updated .entrySet().iterator(); Map.Entry<String, CheckoutMetadata> e = null; @@ -633,36 +651,6 @@ public class DirCacheCheckout { return toBeDeleted.isEmpty(); } - private static ArrayList<String> filterOut(ArrayList<String> strings, - IntList indicesToRemove) { - int n = indicesToRemove.size(); - if (n == strings.size()) { - return new ArrayList<>(0); - } - switch (n) { - case 0: - return strings; - case 1: - strings.remove(indicesToRemove.get(0)); - return strings; - default: - int length = strings.size(); - ArrayList<String> result = new ArrayList<>(length - n); - // Process indicesToRemove from the back; we know that it - // contains indices in descending order. - int j = n - 1; - int idx = indicesToRemove.get(j); - for (int i = 0; i < length; i++) { - if (i == idx) { - idx = (--j >= 0) ? indicesToRemove.get(j) : -1; - } else { - result.add(strings.get(i)); - } - } - return result; - } - } - private static boolean isSamePrefix(String a, String b) { int as = a.lastIndexOf('/'); int bs = b.lastIndexOf('/'); @@ -1233,6 +1221,9 @@ public class DirCacheCheckout { if (!FileMode.TREE.equals(e.getFileMode())) { builder.add(e); } + if (existing != null) { + existing.add(path); + } if (force) { if (f == null || f.isModified(e, true, walk.getObjectReader())) { kept.add(path); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java index b033177e05..58b4d3dc56 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/ManifestParser.java @@ -142,7 +142,17 @@ public class ManifestParser extends DefaultHandler { xmlInRead++; final XMLReader xr; try { - xr = SAXParserFactory.newInstance().newSAXParser().getXMLReader(); + SAXParserFactory spf = SAXParserFactory.newInstance(); + spf.setFeature( + "http://xml.org/sax/features/external-general-entities", //$NON-NLS-1$ + false); + spf.setFeature( + "http://xml.org/sax/features/external-parameter-entities", //$NON-NLS-1$ + false); + spf.setFeature( + "http://apache.org/xml/features/disallow-doctype-decl", //$NON-NLS-1$ + true); + xr = spf.newSAXParser().getXMLReader(); } catch (SAXException | ParserConfigurationException e) { throw new IOException(JGitText.get().noXMLParserAvailable, e); } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java index 2d9d2c527c..8928f47290 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java +++ 
b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java @@ -94,8 +94,6 @@ public class JGitText extends TranslationBundle { /***/ public String binaryHunkInvalidLength; /***/ public String binaryHunkLineTooShort; /***/ public String binaryHunkMissingNewline; - /***/ public String bitmapAccessErrorForPackfile; - /***/ public String bitmapFailedToGet; /***/ public String bitmapMissingObject; /***/ public String bitmapsMustBePrepared; /***/ public String bitmapUseNoopNoListener; @@ -108,6 +106,7 @@ public class JGitText extends TranslationBundle { /***/ public String buildingBitmaps; /***/ public String cachedPacksPreventsIndexCreation; /***/ public String cachedPacksPreventsListingObjects; + /***/ public String cacheRegionAllOrNoneNull; /***/ public String cannotAccessLastModifiedForSafeDeletion; /***/ public String cannotBeCombined; /***/ public String cannotBeRecursiveWhenTreesAreIncluded; @@ -295,6 +294,7 @@ public class JGitText extends TranslationBundle { /***/ public String deleteTagUnexpectedResult; /***/ public String deletingBranches; /***/ public String deletingNotSupported; + /***/ public String deprecatedTrustFolderStat; /***/ public String depthMustBeAt1; /***/ public String depthWithUnshallow; /***/ public String destinationIsNotAWildcard; @@ -493,6 +493,7 @@ public class JGitText extends TranslationBundle { /***/ public String invalidTimeUnitValue2; /***/ public String invalidTimeUnitValue3; /***/ public String invalidTreeZeroLengthName; + /***/ public String invalidTrustStat; /***/ public String invalidURL; /***/ public String invalidWildcards; /***/ public String invalidRefSpec; @@ -525,6 +526,8 @@ public class JGitText extends TranslationBundle { /***/ public String logLargerFiletimeDiff; /***/ public String logSmallerFiletime; /***/ public String logXDGConfigHomeInvalid; + + /***/ public String logXDGCacheHomeInvalid; /***/ public String looseObjectHandleIsStale; /***/ public String maxCountMustBeNonNegative; /***/ public String mergeConflictOnNonNoteEntries; @@ -537,6 +540,9 @@ public class JGitText extends TranslationBundle { /***/ public String mergeToolNotGivenError; /***/ public String mergeToolNullError; /***/ public String messageAndTaggerNotAllowedInUnannotatedTags; + /***/ public String midxChunkNeeded; + /***/ public String midxChunkRepeated; + /***/ public String midxChunkUnknown; /***/ public String minutesAgo; /***/ public String mismatchOffset; /***/ public String mismatchCRC; @@ -557,6 +563,10 @@ public class JGitText extends TranslationBundle { /***/ public String month; /***/ public String months; /***/ public String monthsAgo; + /***/ public String multiPackIndexFileIsTooLargeForJgit; + /***/ public String multiPackIndexPackCountMismatch; + /***/ public String multiPackIndexUnexpectedSize; + /***/ public String multiPackIndexWritingCancelled; /***/ public String multipleMergeBasesFor; /***/ public String nameMustNotBeNullOrEmpty; /***/ public String need2Arguments; @@ -583,6 +593,7 @@ public class JGitText extends TranslationBundle { /***/ public String notACommitGraph; /***/ public String notADIRCFile; /***/ public String notAGitDirectory; + /***/ public String notAMIDX; /***/ public String notAPACKFile; /***/ public String notARef; /***/ public String notASCIIString; @@ -645,6 +656,7 @@ public class JGitText extends TranslationBundle { /***/ public String personIdentEmailNonNull; /***/ public String personIdentNameNonNull; /***/ public String postCommitHookFailed; + /***/ public String precedenceTrustConfig; /***/ public String prefixRemote; 
/***/ public String problemWithResolvingPushRefSpecsLocally; /***/ public String progressMonUploading; @@ -877,6 +889,7 @@ public class JGitText extends TranslationBundle { /***/ public String unmergedPaths; /***/ public String unpackException; /***/ public String unreadableCommitGraph; + /***/ public String unreadableMIDX; /***/ public String unreadableObjectSizeIndex; /***/ public String unreadablePackIndex; /***/ public String unrecognizedPackExtension; @@ -890,6 +903,7 @@ public class JGitText extends TranslationBundle { /***/ public String unsupportedEncryptionVersion; /***/ public String unsupportedGC; /***/ public String unsupportedMark; + /***/ public String unsupportedMIDXVersion; /***/ public String unsupportedObjectIdVersion; /***/ public String unsupportedObjectSizeIndexVersion; /***/ public String unsupportedOperationNotAddAtEnd; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java index e6068a15ec..199481cf33 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java @@ -90,7 +90,7 @@ public class DfsGarbageCollector { private long coalesceGarbageLimit = 50 << 20; private long garbageTtlMillis = TimeUnit.DAYS.toMillis(1); - private long startTimeMillis; + private Instant startTime; private List<DfsPackFile> packsBefore; private List<DfsReftable> reftablesBefore; private List<DfsPackFile> expiredGarbagePacks; @@ -352,7 +352,7 @@ public class DfsGarbageCollector { throw new IllegalStateException( JGitText.get().supportOnlyPackIndexVersion2); - startTimeMillis = SystemReader.getInstance().getCurrentTime(); + startTime = SystemReader.getInstance().now(); ctx = objdb.newReader(); try { refdb.refresh(); @@ -435,7 +435,7 @@ public class DfsGarbageCollector { packsBefore = new ArrayList<>(packs.length); expiredGarbagePacks = new ArrayList<>(packs.length); - long now = SystemReader.getInstance().getCurrentTime(); + long now = SystemReader.getInstance().now().toEpochMilli(); for (DfsPackFile p : packs) { DfsPackDescription d = p.getPackDescription(); if (d.getPackSource() != UNREACHABLE_GARBAGE) { @@ -723,7 +723,7 @@ public class DfsGarbageCollector { PackStatistics stats = pw.getStatistics(); pack.setPackStats(stats); - pack.setLastModified(startTimeMillis); + pack.setLastModified(startTime.toEpochMilli()); newPackDesc.add(pack); newPackStats.add(stats); newPackObj.add(pw.getObjectSet()); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java index 16315bf4f2..dd9e4b96a4 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java @@ -83,7 +83,6 @@ public class DfsInserter extends ObjectInserter { DfsPackDescription packDsc; PackStream packOut; private boolean rollback; - private boolean checkExisting = true; /** * Initialize a new inserter. @@ -98,18 +97,6 @@ public class DfsInserter extends ObjectInserter { ConfigConstants.CONFIG_KEY_MIN_BYTES_OBJ_SIZE_INDEX, -1); } - /** - * Check existence - * - * @param check - * if {@code false}, will write out possibly-duplicate objects - * without first checking whether they exist in the repo; default - * is true. 
- */ - public void checkExisting(boolean check) { - checkExisting = check; - } - void setCompressionLevel(int compression) { this.compression = compression; } @@ -130,8 +117,9 @@ public class DfsInserter extends ObjectInserter { if (objectMap != null && objectMap.contains(id)) return id; // Ignore unreachable (garbage) objects here. - if (checkExisting && db.has(id, true)) + if (db.has(id, true)) { return id; + } long offset = beginObject(type, len); packOut.compress.write(data, off, len); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java index efd666ff27..1a873d1204 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java @@ -52,16 +52,6 @@ public abstract class DfsObjDatabase extends ObjectDatabase { boolean dirty() { return true; } - - @Override - void clearDirty() { - // Always dirty. - } - - @Override - public void markDirty() { - // Always dirty. - } }; /** @@ -534,7 +524,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase { DfsPackFile[] packs = new DfsPackFile[1 + o.packs.length]; packs[0] = newPack; System.arraycopy(o.packs, 0, packs, 1, o.packs.length); - n = new PackListImpl(packs, o.reftables); + n = new PackList(packs, o.reftables); } while (!packList.compareAndSet(o, n)); } @@ -559,7 +549,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase { } } tables.add(new DfsReftable(add)); - n = new PackListImpl(o.packs, tables.toArray(new DfsReftable[0])); + n = new PackList(o.packs, tables.toArray(new DfsReftable[0])); } while (!packList.compareAndSet(o, n)); } @@ -613,13 +603,12 @@ public abstract class DfsObjDatabase extends ObjectDatabase { } if (newPacks.isEmpty() && newReftables.isEmpty()) - return new PackListImpl(NO_PACKS.packs, NO_PACKS.reftables); + return new PackList(NO_PACKS.packs, NO_PACKS.reftables); if (!foundNew) { - old.clearDirty(); return old; } Collections.sort(newReftables, reftableComparator()); - return new PackListImpl( + return new PackList( newPacks.toArray(new DfsPackFile[0]), newReftables.toArray(new DfsReftable[0])); } @@ -685,7 +674,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase { } /** Snapshot of packs scanned in a single pass. */ - public abstract static class PackList { + public static class PackList { /** All known packs, sorted. */ public final DfsPackFile[] packs; @@ -715,39 +704,8 @@ public abstract class DfsObjDatabase extends ObjectDatabase { return lastModified; } - abstract boolean dirty(); - abstract void clearDirty(); - - /** - * Mark pack list as dirty. - * <p> - * Used when the caller knows that new data might have been written to the - * repository that could invalidate open readers depending on this pack list, - * for example if refs are newly scanned. 
- */ - public abstract void markDirty(); - } - - private static final class PackListImpl extends PackList { - private volatile boolean dirty; - - PackListImpl(DfsPackFile[] packs, DfsReftable[] reftables) { - super(packs, reftables); - } - - @Override boolean dirty() { - return dirty; - } - - @Override - void clearDirty() { - dirty = false; - } - - @Override - public void markDirty() { - dirty = true; + return false; } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java index f9c01b9d6e..6339b0326a 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java @@ -405,7 +405,7 @@ public class DfsPackCompactor { pw.addObject(obj); obj.add(added); - src.representation(rep, id.offset, ctx, rev); + src.fillRepresentation(rep, id.offset, ctx, rev); if (rep.getFormat() != PACK_DELTA) continue; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java index 48ed47a77c..05b63eaca1 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java @@ -27,6 +27,9 @@ import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.text.MessageFormat; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -49,6 +52,7 @@ import org.eclipse.jgit.internal.storage.file.PackObjectSizeIndexLoader; import org.eclipse.jgit.internal.storage.file.PackReverseIndex; import org.eclipse.jgit.internal.storage.file.PackReverseIndexFactory; import org.eclipse.jgit.internal.storage.pack.BinaryDelta; +import org.eclipse.jgit.internal.storage.pack.ObjectToPack; import org.eclipse.jgit.internal.storage.pack.PackOutputStream; import org.eclipse.jgit.internal.storage.pack.StoredObjectRepresentation; import org.eclipse.jgit.lib.AbbreviatedObjectId; @@ -59,6 +63,7 @@ import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.lib.StoredConfig; +import org.eclipse.jgit.util.BlockList; import org.eclipse.jgit.util.LongList; /** @@ -71,6 +76,10 @@ public final class DfsPackFile extends BlockBasedFile { private static final long REF_POSITION = 0; + private static final Comparator<DfsObjectToPack> OFFSET_SORT = ( + DfsObjectToPack a, + DfsObjectToPack b) -> Long.signum(a.getOffset() - b.getOffset()); + /** * Loader for the default file-based {@link PackBitmapIndex} implementation. 
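The DfsPackFile diff adds the OFFSET_SORT comparator shown above and, a little further down, a findAllFromPack() that resolves each candidate object against the pack's primary index, skips misses and corrupt offsets, and sorts the hits by pack offset so the later copy phase reads the file roughly sequentially. A condensed sketch of that select-and-sort step, with a hypothetical Candidate type standing in for DfsObjectToPack and a hypothetical Index interface standing in for the primary index:

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    class FindInPackSketch {
        // Hypothetical stand-in for DfsObjectToPack: an id plus a mutable offset.
        static final class Candidate {
            final String id;
            long offset;
            Candidate(String id) { this.id = id; }
        }

        // Hypothetical index lookup: a value <= 0 means "not in this pack".
        interface Index {
            long findOffset(String id);
        }

        static List<Candidate> select(Index idx, Iterable<Candidate> candidates) {
            List<Candidate> hits = new ArrayList<>();
            for (Candidate c : candidates) {
                long p = idx.findOffset(c.id);
                if (p <= 0) {
                    continue; // not stored in this pack
                }
                c.offset = p;
                hits.add(c);
            }
            // Sort by the long offset; the real comparator uses Long.signum on
            // the difference, which likewise avoids int overflow for large packs.
            hits.sort(Comparator.comparingLong(c -> c.offset));
            return hits;
        }
    }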
*/ @@ -139,11 +148,15 @@ public final class DfsPackFile extends BlockBasedFile { * * @param ctx * reader to find the raw bytes + * @param idx + * primary index for this reverse index (probably loaded + * via #index(DfsReader)) * @return the reverse index of the pack * @throws IOException * a problem finding/parsing the reverse index */ - PackReverseIndex reverseIndex(DfsReader ctx) throws IOException; + PackReverseIndex reverseIndex(DfsReader ctx, PackIndex idx) + throws IOException; } /** @@ -364,7 +377,7 @@ public final class DfsPackFile extends BlockBasedFile { return reverseIndex; } - reverseIndex = indexFactory.getPackIndexes().reverseIndex(ctx); + reverseIndex = indexFactory.getPackIndexes().reverseIndex(ctx, idx(ctx)); if (reverseIndex == null) { throw new IOException( "Couldn't get a reference to the reverse index"); //$NON-NLS-1$ @@ -429,6 +442,10 @@ public final class DfsPackFile extends BlockBasedFile { return 0 < offset && !isCorrupt(offset); } + int findIdxPosition(DfsReader ctx, AnyObjectId id) throws IOException { + return idx(ctx).findPosition(id); + } + /** * Get an object from this pack. * @@ -451,23 +468,43 @@ public final class DfsPackFile extends BlockBasedFile { return idx(ctx).findOffset(id); } - void resolve(DfsReader ctx, Set<ObjectId> matches, AbbreviatedObjectId id, - int matchLimit) throws IOException { - idx(ctx).resolve(matches, id, matchLimit); - } - /** - * Obtain the total number of objects available in this pack. This method - * relies on pack index, giving number of effectively available objects. + * Return objects in the list available in this pack, sorted in (pack, + * offset) order. * * @param ctx - * current reader for the calling thread. - * @return number of objects in index of this pack, likewise in this pack + * a reader + * @param objects + * objects we are looking for + * @param skipFound + * ignore objects already found. + * @return list of objects with pack and offset set. * @throws IOException - * the index file cannot be loaded into memory. + * an error occurred */ - long getObjectCount(DfsReader ctx) throws IOException { - return idx(ctx).getObjectCount(); + List<DfsObjectToPack> findAllFromPack(DfsReader ctx, + Iterable<ObjectToPack> objects, boolean skipFound) + throws IOException { + List<DfsObjectToPack> tmp = new BlockList<>(); + for (ObjectToPack obj : objects) { + DfsObjectToPack otp = (DfsObjectToPack) obj; + if (skipFound && otp.isFound()) { + continue; + } + long p = idx(ctx).findOffset(otp); + if (p <= 0 || isCorrupt(p)) { + continue; + } + otp.setOffset(p); + tmp.add(otp); + } + Collections.sort(tmp, OFFSET_SORT); + return tmp; + } + + void resolve(DfsReader ctx, Set<ObjectId> matches, AbbreviatedObjectId id, + int matchLimit) throws IOException { + idx(ctx).resolve(matches, id, matchLimit); } private byte[] decompress(long position, int sz, DfsReader ctx) @@ -1131,31 +1168,29 @@ public final class DfsPackFile extends BlockBasedFile { /** * Return the size of the object from the object-size index. The object * should be a blob. Any other type is not indexed and returns -1. - * - * Caller MUST be sure that the object is in the pack (e.g. with - * {@link #hasObject(DfsReader, AnyObjectId)}) and the pack has object size - * index (e.g. with {@link #hasObjectSizeIndex(DfsReader)}) before asking - * the indexed size. + * <p> + * Caller MUST pass a valid index position, as returned by + * {@link #findIdxPosition(DfsReader, AnyObjectId)} and verify the pack has + * object size index (e.g. 
with {@link #hasObjectSizeIndex(DfsReader)}) + * before asking the indexed size. * * @param ctx * reader context to support reading from the backing store if * the object size index is not already loaded in memory. - * @param id - * object id of an object in the pack + * @param idxPosition + * position in the primary index of the object we are looking + * for, as returned by findIdxPosition * @return size of the object from the index. Negative if object is not in * the index (below threshold or not a blob) * @throws IOException * could not read the object size index. IO problem or the pack * doesn't have it. */ - long getIndexedObjectSize(DfsReader ctx, AnyObjectId id) + long getIndexedObjectSize(DfsReader ctx, int idxPosition) throws IOException { - int idxPosition = idx(ctx).findPosition(id); if (idxPosition < 0) { - throw new IllegalArgumentException( - "Cannot get size from index since object is not in pack"); //$NON-NLS-1$ + throw new IllegalArgumentException("Invalid index position"); //$NON-NLS-1$ } - PackObjectSizeIndex sizeIdx = getObjectSizeIndex(ctx); if (sizeIdx == null) { throw new IllegalStateException( @@ -1165,12 +1200,47 @@ public final class DfsPackFile extends BlockBasedFile { return sizeIdx.getSize(idxPosition); } - void representation(DfsObjectRepresentation r, final long pos, + /** + * Populates the representation object with the details of how the object at + * "pos" is stored in this pack (e.g. whole or deltified, its packed + * length). + * + * @param r + * represention object to carry data + * @param offset + * offset in this pack of the object + * @param ctx + * a reader + * @throws IOException + * an error reading the object from disk + */ + void fillRepresentation(DfsObjectRepresentation r, long offset, + DfsReader ctx) throws IOException { + fillRepresentation(r, offset, ctx, getReverseIdx(ctx)); + } + + /** + * Populates the representation object with the details of how the object at + * "pos" is stored in this pack (e.g. whole or deltified, its packed + * length). 
+ * + * @param r + * represention object to carry data + * @param offset + * offset in this pack of the object + * @param ctx + * a reader + * @param rev + * reverse index of this pack + * @throws IOException + * an error reading the object from disk + */ + void fillRepresentation(DfsObjectRepresentation r, long offset, DfsReader ctx, PackReverseIndex rev) throws IOException { - r.offset = pos; + r.offset = offset; final byte[] ib = ctx.tempId; - readFully(pos, ib, 0, 20, ctx); + readFully(offset, ib, 0, 20, ctx); int c = ib[0] & 0xff; int p = 1; final int typeCode = (c >> 4) & 7; @@ -1178,7 +1248,7 @@ public final class DfsPackFile extends BlockBasedFile { c = ib[p++] & 0xff; } - long len = rev.findNextOffset(pos, length - 20) - pos; + long len = rev.findNextOffset(offset, length - 20) - offset; switch (typeCode) { case Constants.OBJ_COMMIT: case Constants.OBJ_TREE: @@ -1199,13 +1269,13 @@ public final class DfsPackFile extends BlockBasedFile { ofs += (c & 127); } r.format = StoredObjectRepresentation.PACK_DELTA; - r.baseId = rev.findObject(pos - ofs); + r.baseId = rev.findObject(offset - ofs); r.length = len - p; return; } case Constants.OBJ_REF_DELTA: { - readFully(pos + p, ib, 0, 20, ctx); + readFully(offset + p, ib, 0, 20, ctx); r.format = StoredObjectRepresentation.PACK_DELTA; r.baseId = ObjectId.fromRaw(ib); r.length = len - p - 20; @@ -1540,8 +1610,8 @@ public final class DfsPackFile extends BlockBasedFile { } @Override - public PackReverseIndex reverseIndex(DfsReader ctx) throws IOException { - PackIndex idx = index(ctx); + public PackReverseIndex reverseIndex(DfsReader ctx, PackIndex idx) + throws IOException { DfsStreamKey revKey = desc.getStreamKey(REVERSE_INDEX); // Keep the value parsed in the loader, in case the Ref<> is // nullified in ClockBlockCacheTable#reserveSpace diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java index 62f6753e5d..f50cd597e5 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java @@ -38,8 +38,6 @@ import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource; import org.eclipse.jgit.internal.storage.dfs.DfsReader.PackLoadListener.DfsBlockData; import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl; import org.eclipse.jgit.internal.storage.file.PackBitmapIndex; -import org.eclipse.jgit.internal.storage.file.PackIndex; -import org.eclipse.jgit.internal.storage.file.PackReverseIndex; import org.eclipse.jgit.internal.storage.pack.CachedPack; import org.eclipse.jgit.internal.storage.pack.ObjectReuseAsIs; import org.eclipse.jgit.internal.storage.pack.ObjectToPack; @@ -58,7 +56,6 @@ import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.ObjectReader; import org.eclipse.jgit.lib.ProgressMonitor; -import org.eclipse.jgit.util.BlockList; /** * Reader to access repository content through. 
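The DfsReader hunk below replaces the boolean has()/hasImpl() pair with findPack()/findInPackList(), which return the object's position in the pack's primary index and leave the reader's `last` field pointing at the pack that produced the hit, so the size lookups further down can reuse both the pack and the index position. A minimal sketch of that last-pack caching pattern, using hypothetical types rather than the JGit API:

    import java.io.IOException;
    import java.util.List;

    class LastPackCacheSketch {
        // Hypothetical pack handle: returns the index position of an object id,
        // or -1 if this pack does not contain it.
        interface PackHandle {
            int findIdxPosition(String objectId) throws IOException;
        }

        private PackHandle last; // pack that served the previous lookup

        int find(List<PackHandle> packs, String objectId) throws IOException {
            if (last != null) {
                int pos = last.findIdxPosition(objectId);
                if (pos >= 0) {
                    return pos; // cheap hit in the cached pack
                }
            }
            for (PackHandle pack : packs) {
                if (pack == last) {
                    continue; // already tried above
                }
                int pos = pack.findIdxPosition(objectId);
                if (pos >= 0) {
                    last = pack; // remember for the next lookup
                    return pos;
                }
            }
            return -1; // object is in none of the packs
        }
    }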
@@ -190,31 +187,44 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs { @Override public boolean has(AnyObjectId objectId) throws IOException { + return findPack(objectId) >= 0; + } + + private int findPack(AnyObjectId objectId) throws IOException { if (last != null - && !skipGarbagePack(last) - && last.hasObject(this, objectId)) - return true; + && !skipGarbagePack(last)) { + int idxPos = last.findIdxPosition(this, objectId); + if (idxPos >= 0) { + return idxPos; + } + } + PackList packList = db.getPackList(); - if (hasImpl(packList, objectId)) { - return true; + int idxPos = findInPackList(packList, objectId); + if (idxPos >= 0) { + return idxPos; } else if (packList.dirty()) { stats.scanPacks++; - return hasImpl(db.scanPacks(packList), objectId); + idxPos = findInPackList(db.scanPacks(packList), objectId); + return idxPos; } - return false; + return -1; } - private boolean hasImpl(PackList packList, AnyObjectId objectId) + // Leave "last" pointing to the pack and return the idx position of the + // object (-1 if not found) + private int findInPackList(PackList packList, AnyObjectId objectId) throws IOException { for (DfsPackFile pack : packList.packs) { if (pack == last || skipGarbagePack(pack)) continue; - if (pack.hasObject(this, objectId)) { + int idxPos = pack.findIdxPosition(this, objectId); + if (idxPos >= 0) { last = pack; - return true; + return idxPos; } } - return false; + return -1; } @Override @@ -502,8 +512,8 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs { public long getObjectSize(AnyObjectId objectId, int typeHint) throws MissingObjectException, IncorrectObjectTypeException, IOException { - DfsPackFile pack = findPackWithObject(objectId); - if (pack == null) { + int idxPos = findPack(objectId); + if (idxPos < 0) { if (typeHint == OBJ_ANY) { throw new MissingObjectException(objectId.copy(), JGitText.get().unknownObjectType2); @@ -511,16 +521,15 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs { throw new MissingObjectException(objectId.copy(), typeHint); } - if (typeHint != Constants.OBJ_BLOB || !safeHasObjectSizeIndex(pack)) { - return pack.getObjectSize(this, objectId); + if (typeHint != Constants.OBJ_BLOB || !safeHasObjectSizeIndex(last)) { + return last.getObjectSize(this, objectId); } - Optional<Long> maybeSz = safeGetIndexedObjectSize(pack, objectId); - long sz = maybeSz.orElse(-1L); + long sz = safeGetIndexedObjectSize(last, idxPos); if (sz >= 0) { return sz; } - return pack.getObjectSize(this, objectId); + return last.getObjectSize(this, objectId); } @@ -528,8 +537,8 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs { public boolean isNotLargerThan(AnyObjectId objectId, int typeHint, long limit) throws MissingObjectException, IncorrectObjectTypeException, IOException { - DfsPackFile pack = findPackWithObject(objectId); - if (pack == null) { + int idxPos = findPack(objectId); + if (idxPos < 0) { if (typeHint == OBJ_ANY) { throw new MissingObjectException(objectId.copy(), JGitText.get().unknownObjectType2); @@ -538,28 +547,22 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs { } stats.isNotLargerThanCallCount += 1; - if (typeHint != Constants.OBJ_BLOB || !safeHasObjectSizeIndex(pack)) { - return pack.getObjectSize(this, objectId) <= limit; + if (typeHint != Constants.OBJ_BLOB || !safeHasObjectSizeIndex(last)) { + return last.getObjectSize(this, objectId) <= limit; } - Optional<Long> maybeSz = safeGetIndexedObjectSize(pack, objectId); - if 
(maybeSz.isEmpty()) { - // Exception in object size index - return pack.getObjectSize(this, objectId) <= limit; - } - - long sz = maybeSz.get(); + long sz = safeGetIndexedObjectSize(last, idxPos); if (sz >= 0) { return sz <= limit; } - if (isLimitInsideIndexThreshold(pack, limit)) { + if (isLimitInsideIndexThreshold(last, limit)) { // With threshold T, not-found means object < T // If limit L > T, then object < T < L return true; } - return pack.getObjectSize(this, objectId) <= limit; + return last.getObjectSize(this, objectId) <= limit; } private boolean safeHasObjectSizeIndex(DfsPackFile pack) { @@ -570,21 +573,22 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs { } } - private Optional<Long> safeGetIndexedObjectSize(DfsPackFile pack, - AnyObjectId objectId) { + private long safeGetIndexedObjectSize(DfsPackFile pack, + int idxPos) { long sz; try { - sz = pack.getIndexedObjectSize(this, objectId); + sz = pack.getIndexedObjectSize(this, idxPos); } catch (IOException e) { - // Do not count the exception as an index miss - return Optional.empty(); + // If there is any error in the index, we should have seen it + // on hasObjectSizeIndex. + throw new IllegalStateException(e); } if (sz < 0) { stats.objectSizeIndexMiss += 1; } else { stats.objectSizeIndexHit += 1; } - return Optional.of(sz); + return sz; } private boolean isLimitInsideIndexThreshold(DfsPackFile pack, long limit) { @@ -595,34 +599,11 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs { } } - private DfsPackFile findPackWithObject(AnyObjectId objectId) - throws IOException { - if (last != null && !skipGarbagePack(last) - && last.hasObject(this, objectId)) { - return last; - } - PackList packList = db.getPackList(); - // hasImpl doesn't check "last", but leaves "last" pointing to the pack - // with the object - if (hasImpl(packList, objectId)) { - return last; - } else if (packList.dirty()) { - if (hasImpl(db.getPackList(), objectId)) { - return last; - } - } - return null; - } - @Override public DfsObjectToPack newObjectToPack(AnyObjectId objectId, int type) { return new DfsObjectToPack(objectId, type); } - private static final Comparator<DfsObjectToPack> OFFSET_SORT = ( - DfsObjectToPack a, - DfsObjectToPack b) -> Long.signum(a.getOffset() - b.getOffset()); - @Override public void selectObjectRepresentation(PackWriter packer, ProgressMonitor monitor, Iterable<ObjectToPack> objects) @@ -642,16 +623,15 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs { ProgressMonitor monitor, Iterable<ObjectToPack> objects, List<DfsPackFile> packs, boolean skipFound) throws IOException { for (DfsPackFile pack : packs) { - List<DfsObjectToPack> tmp = findAllFromPack(pack, objects, skipFound); - if (tmp.isEmpty()) + List<DfsObjectToPack> inPack = pack.findAllFromPack(this, objects, skipFound); + if (inPack.isEmpty()) continue; - Collections.sort(tmp, OFFSET_SORT); - PackReverseIndex rev = pack.getReverseIdx(this); DfsObjectRepresentation rep = new DfsObjectRepresentation(pack); - for (DfsObjectToPack otp : tmp) { - pack.representation(rep, otp.getOffset(), this, rev); + for (DfsObjectToPack otp : inPack) { + // Populate rep.{offset,length} from the pack + pack.fillRepresentation(rep, otp.getOffset(), this); otp.setOffset(0); - packer.select(otp, rep); + packer.select(otp, rep); // Set otp.offset from rep if (!otp.isFound()) { otp.setFound(); monitor.update(1); @@ -698,24 +678,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs { return false; } - 
private List<DfsObjectToPack> findAllFromPack(DfsPackFile pack, - Iterable<ObjectToPack> objects, boolean skipFound) - throws IOException { - List<DfsObjectToPack> tmp = new BlockList<>(); - PackIndex idx = pack.getPackIndex(this); - for (ObjectToPack obj : objects) { - DfsObjectToPack otp = (DfsObjectToPack) obj; - if (skipFound && otp.isFound()) { - continue; - } - long p = idx.findOffset(otp); - if (0 < p && !pack.isCorrupt(p)) { - otp.setOffset(p); - tmp.add(otp); - } - } - return tmp; - } + @Override public void copyObjectAsIs(PackOutputStream out, ObjectToPack otp, diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftableDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftableDatabase.java index 3ba74b26fc..2751cd2969 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftableDatabase.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftableDatabase.java @@ -28,6 +28,7 @@ import org.eclipse.jgit.lib.BatchRefUpdate; import org.eclipse.jgit.lib.NullProgressMonitor; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.Ref; +import org.eclipse.jgit.lib.ReflogReader; import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.transport.ReceiveCommand; import org.eclipse.jgit.util.RefList; @@ -177,6 +178,11 @@ public class DfsReftableDatabase extends DfsRefDatabase { } @Override + public ReflogReader getReflogReader(Ref ref) throws IOException { + return reftableDatabase.getReflogReader(ref.getName()); + } + + @Override public Set<Ref> getTipsWithSha1(ObjectId id) throws IOException { if (!getReftableConfig().isIndexObjects()) { return super.getTipsWithSha1(id); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java index 25b7583b95..559d5a4339 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java @@ -16,6 +16,7 @@ import static org.eclipse.jgit.lib.Ref.Storage.PACKED; import java.io.File; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -37,6 +38,7 @@ import org.eclipse.jgit.internal.storage.reftable.ReftableBatchRefUpdate; import org.eclipse.jgit.internal.storage.reftable.ReftableDatabase; import org.eclipse.jgit.internal.storage.reftable.ReftableWriter; import org.eclipse.jgit.lib.BatchRefUpdate; +import org.eclipse.jgit.lib.ConfigConstants; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectIdRef; @@ -70,14 +72,14 @@ public class FileReftableDatabase extends RefDatabase { private final FileReftableStack reftableStack; - FileReftableDatabase(FileRepository repo) throws IOException { - this(repo, new File(new File(repo.getCommonDirectory(), Constants.REFTABLE), - Constants.TABLES_LIST)); - } + private volatile boolean autoRefresh; - FileReftableDatabase(FileRepository repo, File refstackName) throws IOException { + FileReftableDatabase(FileRepository repo) throws IOException { this.fileRepository = repo; - this.reftableStack = new FileReftableStack(refstackName, + this.autoRefresh = repo.getConfig().getBoolean( + ConfigConstants.CONFIG_REFTABLE_SECTION, + ConfigConstants.CONFIG_KEY_AUTOREFRESH, false); + this.reftableStack = new 
FileReftableStack( new File(fileRepository.getCommonDirectory(), Constants.REFTABLE), () -> fileRepository.fireEvent(new RefsChangedEvent()), () -> fileRepository.getConfig()); @@ -90,7 +92,13 @@ public class FileReftableDatabase extends RefDatabase { }; } - ReflogReader getReflogReader(String refname) throws IOException { + @Override + public ReflogReader getReflogReader(Ref ref) throws IOException { + return reftableDatabase.getReflogReader(ref.getName()); + } + + @Override + public ReflogReader getReflogReader(String refname) throws IOException { return reftableDatabase.getReflogReader(refname); } @@ -177,6 +185,7 @@ public class FileReftableDatabase extends RefDatabase { @Override public Ref exactRef(String name) throws IOException { + autoRefresh(); return reftableDatabase.exactRef(name); } @@ -187,6 +196,7 @@ public class FileReftableDatabase extends RefDatabase { @Override public Map<String, Ref> getRefs(String prefix) throws IOException { + autoRefresh(); List<Ref> refs = reftableDatabase.getRefsByPrefix(prefix); RefList.Builder<Ref> builder = new RefList.Builder<>(refs.size()); for (Ref r : refs) { @@ -199,6 +209,7 @@ public class FileReftableDatabase extends RefDatabase { @Override public List<Ref> getRefsByPrefixWithExclusions(String include, Set<String> excludes) throws IOException { + autoRefresh(); return reftableDatabase.getRefsByPrefixWithExclusions(include, excludes); } @@ -217,6 +228,50 @@ public class FileReftableDatabase extends RefDatabase { } + /** + * Whether to auto-refresh the reftable stack if it is out of date. + * + * @param autoRefresh + * whether to auto-refresh the reftable stack if it is out of + * date. + */ + public void setAutoRefresh(boolean autoRefresh) { + this.autoRefresh = autoRefresh; + } + + /** + * Whether the reftable stack is auto-refreshed if it is out of date. + * + * @return whether the reftable stack is auto-refreshed if it is out of + * date. + */ + public boolean isAutoRefresh() { + return autoRefresh; + } + + private void autoRefresh() { + if (autoRefresh) { + refresh(); + } + } + + /** + * Check if the reftable stack is up to date, and if not, reload it. 
+ * <p> + * {@inheritDoc} + */ + @Override + public void refresh() { + try { + if (!reftableStack.isUpToDate()) { + reftableDatabase.clearCache(); + reftableStack.reload(); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + private Ref doPeel(Ref leaf) throws IOException { try (RevWalk rw = new RevWalk(fileRepository)) { RevObject obj = rw.parseAny(leaf.getObjectId()); @@ -557,9 +612,10 @@ public class FileReftableDatabase extends RefDatabase { boolean writeLogs) throws IOException { int size = 0; List<Ref> refs = repo.getRefDatabase().getRefs(); + RefDatabase refDb = repo.getRefDatabase(); if (writeLogs) { for (Ref r : refs) { - ReflogReader rlr = repo.getReflogReader(r.getName()); + ReflogReader rlr = refDb.getReflogReader(r); if (rlr != null) { size = Math.max(rlr.getReverseEntries().size(), size); } @@ -582,10 +638,7 @@ public class FileReftableDatabase extends RefDatabase { if (writeLogs) { for (Ref r : refs) { long idx = size; - ReflogReader reader = repo.getReflogReader(r.getName()); - if (reader == null) { - continue; - } + ReflogReader reader = refDb.getReflogReader(r); for (ReflogEntry e : reader.getReverseEntries()) { w.writeLog(r.getName(), idx, e.getWho(), e.getOldId(), e.getNewId(), e.getComment()); @@ -625,32 +678,20 @@ public class FileReftableDatabase extends RefDatabase { * the repository * @param writeLogs * whether to write reflogs - * @return a reftable based RefDB from an existing repository. * @throws IOException * on IO error */ - public static FileReftableDatabase convertFrom(FileRepository repo, - boolean writeLogs) throws IOException { - FileReftableDatabase newDb = null; - File reftableList = null; - try { - File reftableDir = new File(repo.getCommonDirectory(), - Constants.REFTABLE); - reftableList = new File(reftableDir, Constants.TABLES_LIST); - if (!reftableDir.isDirectory()) { - reftableDir.mkdir(); - } + public static void convertFrom(FileRepository repo, boolean writeLogs) + throws IOException { + File reftableDir = new File(repo.getCommonDirectory(), + Constants.REFTABLE); + if (!reftableDir.isDirectory()) { + reftableDir.mkdir(); + } - try (FileReftableStack stack = new FileReftableStack(reftableList, - reftableDir, null, () -> repo.getConfig())) { - stack.addReftable(rw -> writeConvertTable(repo, rw, writeLogs)); - } - reftableList = null; - } finally { - if (reftableList != null) { - reftableList.delete(); - } + try (FileReftableStack stack = new FileReftableStack(reftableDir, null, + () -> repo.getConfig())) { + stack.addReftable(rw -> writeConvertTable(repo, rw, writeLogs)); } - return newDb; } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableStack.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableStack.java index 0f5ff0f9f7..6658575fc5 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableStack.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableStack.java @@ -18,8 +18,10 @@ import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; import java.nio.file.Files; +import java.nio.file.NoSuchFileException; import java.nio.file.StandardCopyOption; import java.security.SecureRandom; import java.util.ArrayList; @@ -27,6 +29,7 @@ import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Optional; +import 
java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -39,6 +42,9 @@ import org.eclipse.jgit.internal.storage.reftable.ReftableConfig; import org.eclipse.jgit.internal.storage.reftable.ReftableReader; import org.eclipse.jgit.internal.storage.reftable.ReftableWriter; import org.eclipse.jgit.lib.Config; +import org.eclipse.jgit.lib.Constants; +import org.eclipse.jgit.lib.CoreConfig; +import org.eclipse.jgit.lib.CoreConfig.TrustStat; import org.eclipse.jgit.util.FileUtils; import org.eclipse.jgit.util.SystemReader; @@ -59,9 +65,12 @@ public class FileReftableStack implements AutoCloseable { private List<StackEntry> stack; + private AtomicReference<FileSnapshot> snapshot = new AtomicReference<>( + FileSnapshot.DIRTY); + private long lastNextUpdateIndex; - private final File stackPath; + private final File tablesListFile; private final File reftableDir; @@ -98,11 +107,11 @@ public class FileReftableStack implements AutoCloseable { private final CompactionStats stats; + private final TrustStat trustTablesListStat; + /** * Creates a stack corresponding to the list of reftables in the argument * - * @param stackPath - * the filename for the stack. * @param reftableDir * the dir holding the tables. * @param onChange @@ -112,10 +121,10 @@ public class FileReftableStack implements AutoCloseable { * @throws IOException * on I/O problems */ - public FileReftableStack(File stackPath, File reftableDir, + public FileReftableStack(File reftableDir, @Nullable Runnable onChange, Supplier<Config> configSupplier) throws IOException { - this.stackPath = stackPath; + this.tablesListFile = new File(reftableDir, Constants.TABLES_LIST); this.reftableDir = reftableDir; this.stack = new ArrayList<>(); this.configSupplier = configSupplier; @@ -126,6 +135,8 @@ public class FileReftableStack implements AutoCloseable { reload(); stats = new CompactionStats(); + trustTablesListStat = configSupplier.get().get(CoreConfig.KEY) + .getTrustTablesListStat(); } CompactionStats getStats() { @@ -232,7 +243,7 @@ public class FileReftableStack implements AutoCloseable { } if (!success) { - throw new LockFailedException(stackPath); + throw new LockFailedException(tablesListFile); } mergedReftable = new MergedReftable(stack.stream() @@ -272,18 +283,21 @@ public class FileReftableStack implements AutoCloseable { } private List<String> readTableNames() throws IOException { + FileSnapshot old; List<String> names = new ArrayList<>(stack.size() + 1); - + old = snapshot.get(); try (BufferedReader br = new BufferedReader( - new InputStreamReader(new FileInputStream(stackPath), UTF_8))) { + new InputStreamReader(new FileInputStream(tablesListFile), UTF_8))) { String line; while ((line = br.readLine()) != null) { if (!line.isEmpty()) { names.add(line); } } + snapshot.compareAndSet(old, FileSnapshot.save(tablesListFile)); } catch (FileNotFoundException e) { // file isn't there: empty repository. + snapshot.compareAndSet(old, FileSnapshot.MISSING_FILE); } return names; } @@ -294,9 +308,29 @@ public class FileReftableStack implements AutoCloseable { * on IO problem */ boolean isUpToDate() throws IOException { - // We could use FileSnapshot to avoid reading the file, but the file is - // small so it's probably a minor optimization. 
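The replacement body that follows switches on the new trustTablesListStat setting instead of unconditionally re-reading tables.list. Its AFTER_OPEN branch uses the same idiom that LooseObjects and PackDirectory adopt later in this diff: open the containing directory before trusting cached file attributes, which prompts some NFS clients to revalidate them. A condensed, hedged sketch of that idiom as a free-standing helper (not a JGit method):

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.NoSuchFileException;
    import java.nio.file.Path;

    class NfsRefreshSketch {
        // Open and immediately close a stream on the directory; the side effect
        // we want is the attribute refresh some NFS clients perform on open.
        static void refreshAttributes(Path dir) {
            try (InputStream in = Files.newInputStream(dir)) {
                // nothing to read; opening is the point
            } catch (NoSuchFileException e) {
                // directory is gone: nothing to refresh
            } catch (IOException e) {
                // best effort only
            }
        }
    }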
try { + switch (trustTablesListStat) { + case NEVER: + break; + case AFTER_OPEN: + try (InputStream stream = Files + .newInputStream(reftableDir.toPath())) { + // open the refs/reftable/ directory to refresh attributes + // of reftable files and the tables.list file listing their + // names (on some NFS clients) + } catch (FileNotFoundException | NoSuchFileException e) { + // ignore + } + //$FALL-THROUGH$ + case ALWAYS: + if (!snapshot.get().isModified(tablesListFile)) { + return true; + } + break; + case INHERIT: + // only used in CoreConfig internally + throw new IllegalStateException(); + } List<String> names = readTableNames(); if (names.size() != stack.size()) { return false; @@ -353,7 +387,7 @@ public class FileReftableStack implements AutoCloseable { */ @SuppressWarnings("nls") public boolean addReftable(Writer w) throws IOException { - LockFile lock = new LockFile(stackPath); + LockFile lock = new LockFile(tablesListFile); try { if (!lock.lockForAppend()) { return false; @@ -364,8 +398,7 @@ public class FileReftableStack implements AutoCloseable { String fn = filename(nextUpdateIndex(), nextUpdateIndex()); - File tmpTable = File.createTempFile(fn + "_", ".ref", - stackPath.getParentFile()); + File tmpTable = File.createTempFile(fn + "_", ".ref", reftableDir); ReftableWriter.Stats s; try (FileOutputStream fos = new FileOutputStream(tmpTable)) { @@ -419,7 +452,7 @@ public class FileReftableStack implements AutoCloseable { String fn = filename(first, last); File tmpTable = File.createTempFile(fn + "_", ".ref", //$NON-NLS-1$//$NON-NLS-2$ - stackPath.getParentFile()); + reftableDir); try (FileOutputStream fos = new FileOutputStream(tmpTable)) { ReftableCompactor c = new ReftableCompactor(fos) .setConfig(reftableConfig()) @@ -463,7 +496,7 @@ public class FileReftableStack implements AutoCloseable { if (first >= last) { return true; } - LockFile lock = new LockFile(stackPath); + LockFile lock = new LockFile(tablesListFile); File tmpTable = null; List<LockFile> subtableLocks = new ArrayList<>(); @@ -492,7 +525,7 @@ public class FileReftableStack implements AutoCloseable { tmpTable = compactLocked(first, last); - lock = new LockFile(stackPath); + lock = new LockFile(tablesListFile); if (!lock.lock()) { return false; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java index c5c36565d9..bcf9f1efdf 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java @@ -31,7 +31,6 @@ import java.util.Locale; import java.util.Objects; import java.util.Set; -import org.eclipse.jgit.annotations.NonNull; import org.eclipse.jgit.annotations.Nullable; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.api.errors.JGitInternalException; @@ -543,29 +542,6 @@ public class FileRepository extends Repository { } @Override - public ReflogReader getReflogReader(String refName) throws IOException { - if (refs instanceof FileReftableDatabase) { - // Cannot use findRef: reftable stores log data for deleted or renamed - // branches. - return ((FileReftableDatabase)refs).getReflogReader(refName); - } - - // TODO: use exactRef here, which offers more predictable and therefore preferable - // behavior. 
- Ref ref = findRef(refName); - if (ref == null) { - return null; - } - return new ReflogReaderImpl(this, ref.getName()); - } - - @Override - public @NonNull ReflogReader getReflogReader(@NonNull Ref ref) - throws IOException { - return new ReflogReaderImpl(this, ref.getName()); - } - - @Override public AttributesNodeProvider createAttributesNodeProvider() { return new AttributesNodeProviderImpl(this); } @@ -697,8 +673,8 @@ public class FileRepository extends Repository { } if (writeLogs) { - List<ReflogEntry> logs = oldDb.getReflogReader(r.getName()) - .getReverseEntries(); + ReflogReader reflogReader = oldDb.getReflogReader(r); + List<ReflogEntry> logs = reflogReader.getReverseEntries(); Collections.reverse(logs); for (ReflogEntry e : logs) { logWriter.log(r.getName(), e); @@ -743,6 +719,8 @@ public class FileRepository extends Repository { } repoConfig.unset(ConfigConstants.CONFIG_EXTENSIONS_SECTION, null, ConfigConstants.CONFIG_KEY_REF_STORAGE); + repoConfig.setLong(ConfigConstants.CONFIG_CORE_SECTION, null, + ConfigConstants.CONFIG_KEY_REPO_FORMAT_VERSION, 0); repoConfig.save(); } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java index 7f3369364b..c08a92e5a7 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java @@ -102,7 +102,7 @@ import org.eclipse.jgit.treewalk.filter.TreeFilter; import org.eclipse.jgit.util.FS; import org.eclipse.jgit.util.FS.LockToken; import org.eclipse.jgit.util.FileUtils; -import org.eclipse.jgit.util.GitDateParser; +import org.eclipse.jgit.util.GitTimeParser; import org.eclipse.jgit.util.StringUtils; import org.eclipse.jgit.util.SystemReader; import org.slf4j.Logger; @@ -160,11 +160,11 @@ public class GC { private long expireAgeMillis = -1; - private Date expire; + private Instant expire; private long packExpireAgeMillis = -1; - private Date packExpire; + private Instant packExpire; private Boolean packKeptObjects; @@ -183,7 +183,7 @@ public class GC { * prune() to inspect only those reflog entries which have been added since * last repack(). 
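GC's bookkeeping in the hunks that follow moves to java.time: lastRepackTime becomes an Instant, expiry strings are parsed with GitTimeParser, and listRefLogObjects() obtains its ReflogReader from the RefDatabase and compares entries via PersonIdent#getWhenAsInstant() instead of Date arithmetic. A hedged sketch of the same newest-first reflog filtering outside GC, assuming a Repository and a Ref are already at hand:

    import java.io.IOException;
    import java.time.Instant;
    import java.util.ArrayList;
    import java.util.List;

    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.lib.Ref;
    import org.eclipse.jgit.lib.ReflogEntry;
    import org.eclipse.jgit.lib.ReflogReader;
    import org.eclipse.jgit.lib.Repository;

    class ReflogSinceSketch {
        static List<ObjectId> newIdsSince(Repository repo, Ref ref, Instant minTime)
                throws IOException {
            List<ObjectId> ids = new ArrayList<>();
            ReflogReader reader = repo.getRefDatabase().getReflogReader(ref);
            if (reader == null) {
                return ids; // defensive: no reflog available for this ref
            }
            for (ReflogEntry e : reader.getReverseEntries()) {
                if (e.getWho().getWhenAsInstant().isBefore(minTime)) {
                    break; // entries are newest-first; everything older follows
                }
                ObjectId newId = e.getNewId();
                if (newId != null && !ObjectId.zeroId().equals(newId)) {
                    ids.add(newId);
                }
            }
            return ids;
        }
    }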
*/ - private long lastRepackTime; + private Instant lastRepackTime; /** * Whether gc should do automatic housekeeping @@ -698,16 +698,18 @@ public class GC { if (expire == null && expireAgeMillis == -1) { String pruneExpireStr = getPruneExpireStr(); - if (pruneExpireStr == null) + if (pruneExpireStr == null) { pruneExpireStr = PRUNE_EXPIRE_DEFAULT; - expire = GitDateParser.parse(pruneExpireStr, null, SystemReader - .getInstance().getLocale()); + } + expire = GitTimeParser.parseInstant(pruneExpireStr); expireAgeMillis = -1; } - if (expire != null) - expireDate = expire.getTime(); - if (expireAgeMillis != -1) + if (expire != null) { + expireDate = expire.toEpochMilli(); + } + if (expireAgeMillis != -1) { expireDate = System.currentTimeMillis() - expireAgeMillis; + } return expireDate; } @@ -724,16 +726,18 @@ public class GC { String prunePackExpireStr = repo.getConfig().getString( ConfigConstants.CONFIG_GC_SECTION, null, ConfigConstants.CONFIG_KEY_PRUNEPACKEXPIRE); - if (prunePackExpireStr == null) + if (prunePackExpireStr == null) { prunePackExpireStr = PRUNE_PACK_EXPIRE_DEFAULT; - packExpire = GitDateParser.parse(prunePackExpireStr, null, - SystemReader.getInstance().getLocale()); + } + packExpire = GitTimeParser.parseInstant(prunePackExpireStr); packExpireAgeMillis = -1; } - if (packExpire != null) - packExpireDate = packExpire.getTime(); - if (packExpireAgeMillis != -1) + if (packExpire != null) { + packExpireDate = packExpire.toEpochMilli(); + } + if (packExpireAgeMillis != -1) { packExpireDate = System.currentTimeMillis() - packExpireAgeMillis; + } return packExpireDate; } @@ -802,7 +806,7 @@ public class GC { public Collection<Pack> repack() throws IOException { Collection<Pack> toBeDeleted = repo.getObjectDatabase().getPacks(); - long time = System.currentTimeMillis(); + Instant time = SystemReader.getInstance().now(); Collection<Ref> refsBefore = getAllRefs(); Set<ObjectId> allHeadsAndTags = new HashSet<>(); @@ -818,7 +822,7 @@ public class GC { for (Ref ref : refsBefore) { checkCancelled(); - nonHeads.addAll(listRefLogObjects(ref, 0)); + nonHeads.addAll(listRefLogObjects(ref, Instant.EPOCH)); if (ref.isSymbolic() || ref.getObjectId() == null) { continue; } @@ -1148,21 +1152,23 @@ public class GC { * @param ref * the ref which log should be inspected * @param minTime - * only reflog entries not older then this time are processed + * only reflog entries equal or younger than this time are + * processed * @return the {@link ObjectId}s contained in the reflog * @throws IOException * if an IO error occurred */ - private Set<ObjectId> listRefLogObjects(Ref ref, long minTime) throws IOException { - ReflogReader reflogReader = repo.getReflogReader(ref); + private Set<ObjectId> listRefLogObjects(Ref ref, Instant minTime) throws IOException { + ReflogReader reflogReader = repo.getRefDatabase().getReflogReader(ref); List<ReflogEntry> rlEntries = reflogReader .getReverseEntries(); if (rlEntries == null || rlEntries.isEmpty()) return Collections.emptySet(); Set<ObjectId> ret = new HashSet<>(); for (ReflogEntry e : rlEntries) { - if (e.getWho().getWhen().getTime() < minTime) + if (e.getWho().getWhenAsInstant().isBefore(minTime)) { break; + } ObjectId newId = e.getNewId(); if (newId != null && !ObjectId.zeroId().equals(newId)) ret.add(newId); @@ -1548,7 +1554,7 @@ public class GC { public RepoStatistics getStatistics() throws IOException { RepoStatistics ret = new RepoStatistics(); Collection<Pack> packs = repo.getObjectDatabase().getPacks(); - long latestBitmapTime = Long.MIN_VALUE; + long 
latestBitmapTime = 0L; for (Pack p : packs) { long packedObjects = p.getIndex().getObjectCount(); ret.numberOfPackedObjects += packedObjects; @@ -1556,9 +1562,11 @@ public class GC { ret.sizeOfPackedObjects += p.getPackFile().length(); if (p.getBitmapIndex() != null) { ret.numberOfBitmaps += p.getBitmapIndex().getBitmapCount(); - latestBitmapTime = p.getFileSnapshot().lastModifiedInstant() - .toEpochMilli(); - } else { + if (latestBitmapTime == 0L) { + latestBitmapTime = p.getFileSnapshot().lastModifiedInstant().toEpochMilli(); + } + } + else if (latestBitmapTime == 0L) { ret.numberOfPackFilesSinceBitmap++; ret.numberOfObjectsSinceBitmap += packedObjects; } @@ -1655,12 +1663,31 @@ public class GC { * candidate for pruning. * * @param expire - * instant in time which defines object expiration - * objects with modification time before this instant are expired - * objects with modification time newer or equal to this instant - * are not expired + * instant in time which defines object expiration objects with + * modification time before this instant are expired objects with + * modification time newer or equal to this instant are not + * expired + * @deprecated use {@link #setExpire(Instant)} instead */ + @Deprecated(since = "7.2") public void setExpire(Date expire) { + this.expire = expire.toInstant(); + expireAgeMillis = -1; + } + + /** + * During gc() or prune() each unreferenced, loose object which has been + * created or modified after or at <code>expire</code> will not be pruned. + * Only older objects may be pruned. If set to null then every object is a + * candidate for pruning. + * + * @param expire + * instant in time which defines object expiration objects with + * modification time before this instant are expired objects with + * modification time newer or equal to this instant are not + * expired + */ + public void setExpire(Instant expire) { this.expire = expire; expireAgeMillis = -1; } @@ -1673,8 +1700,24 @@ public class GC { * * @param packExpire * instant in time which defines packfile expiration + * @deprecated use {@link #setPackExpire(Instant)} instead */ + @Deprecated(since = "7.2") public void setPackExpire(Date packExpire) { + this.packExpire = packExpire.toInstant(); + packExpireAgeMillis = -1; + } + + /** + * During gc() or prune() packfiles which are created or modified after or + * at <code>packExpire</code> will not be deleted. Only older packfiles may + * be deleted. If set to null then every packfile is a candidate for + * deletion. + * + * @param packExpire + * instant in time which defines packfile expiration + */ + public void setPackExpire(Instant packExpire) { this.packExpire = packExpire; packExpireAgeMillis = -1; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java index 8647b3e664..862aaab0ee 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java @@ -23,8 +23,7 @@ import java.time.Instant; import org.eclipse.jgit.api.errors.JGitInternalException; import org.eclipse.jgit.lib.ConfigConstants; import org.eclipse.jgit.util.FileUtils; -import org.eclipse.jgit.util.GitDateParser; -import org.eclipse.jgit.util.SystemReader; +import org.eclipse.jgit.util.GitTimeParser; /** * This class manages the gc.log file for a {@link FileRepository}. 
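Alongside the parsing change, GC deprecates the Date-based setExpire()/setPackExpire() in favor of Instant overloads, and GcLog below switches its gc.log expiry to GitTimeParser as well. A hedged usage sketch of the migration (GC sits in an internal package, so most applications would keep driving collection through the porcelain GarbageCollectCommand; the direct calls here only illustrate the new signatures):

    import java.time.Instant;
    import java.time.temporal.ChronoUnit;

    import org.eclipse.jgit.internal.storage.file.FileRepository;
    import org.eclipse.jgit.internal.storage.file.GC;

    class GcExpireMigrationSketch {
        static void configure(FileRepository repo) {
            GC gc = new GC(repo);
            // Previously: gc.setExpire(Date.from(cutoff)); the Date overloads
            // are now deprecated in favor of the Instant ones below.
            Instant looseCutoff = Instant.now().minus(14, ChronoUnit.DAYS);
            gc.setExpire(looseCutoff); // loose objects modified at or after the cutoff survive
            gc.setPackExpire(Instant.now().minus(1, ChronoUnit.HOURS));
        }
    }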
@@ -62,8 +61,7 @@ class GcLog { if (logExpiryStr == null) { logExpiryStr = LOG_EXPIRY_DEFAULT; } - gcLogExpire = GitDateParser.parse(logExpiryStr, null, - SystemReader.getInstance().getLocale()).toInstant(); + gcLogExpire = GitTimeParser.parseInstant(logExpiryStr); } return gcLogExpire; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LooseObjects.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LooseObjects.java index b4bb2a9293..909b3e3082 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LooseObjects.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LooseObjects.java @@ -26,8 +26,9 @@ import org.eclipse.jgit.internal.storage.file.FileObjectDatabase.InsertLooseObje import org.eclipse.jgit.lib.AbbreviatedObjectId; import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.Config; -import org.eclipse.jgit.lib.ConfigConstants; import org.eclipse.jgit.lib.Constants; +import org.eclipse.jgit.lib.CoreConfig; +import org.eclipse.jgit.lib.CoreConfig.TrustStat; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.util.FileUtils; @@ -49,13 +50,13 @@ class LooseObjects { * Maximum number of attempts to read a loose object for which a stale file * handle exception is thrown */ - private final static int MAX_LOOSE_OBJECT_STALE_READ_ATTEMPTS = 5; + private final static int MAX_STALE_READ_RETRIES = 5; private final File directory; private final UnpackedObjectCache unpackedObjectCache; - private final boolean trustFolderStat; + private final TrustStat trustLooseObjectStat; /** * Initialize a reference to an on-disk object directory. @@ -68,9 +69,8 @@ class LooseObjects { LooseObjects(Config config, File dir) { directory = dir; unpackedObjectCache = new UnpackedObjectCache(); - trustFolderStat = config.getBoolean( - ConfigConstants.CONFIG_CORE_SECTION, - ConfigConstants.CONFIG_KEY_TRUSTFOLDERSTAT, true); + trustLooseObjectStat = config.get(CoreConfig.KEY) + .getTrustLooseObjectStat(); } /** @@ -108,7 +108,8 @@ class LooseObjects { */ boolean has(AnyObjectId objectId) { boolean exists = hasWithoutRefresh(objectId); - if (trustFolderStat || exists) { + if (trustLooseObjectStat == TrustStat.ALWAYS + || exists) { return exists; } try (InputStream stream = Files.newInputStream(directory.toPath())) { @@ -163,13 +164,31 @@ class LooseObjects { } ObjectLoader open(WindowCursor curs, AnyObjectId id) throws IOException { - int readAttempts = 0; - while (readAttempts < MAX_LOOSE_OBJECT_STALE_READ_ATTEMPTS) { - readAttempts++; - File path = fileFor(id); - if (trustFolderStat && !path.exists()) { + File path = fileFor(id); + for (int retries = 0; retries < MAX_STALE_READ_RETRIES; retries++) { + boolean reload = true; + switch (trustLooseObjectStat) { + case NEVER: break; + case AFTER_OPEN: + try (InputStream stream = Files + .newInputStream(path.getParentFile().toPath())) { + // open the loose object's fanout directory to refresh + // attributes (on some NFS clients) + } catch (FileNotFoundException | NoSuchFileException e) { + // ignore + } + //$FALL-THROUGH$ + case ALWAYS: + if (!path.exists()) { + reload = false; + } + break; + case INHERIT: + // only used in CoreConfig internally + throw new IllegalStateException(); } + if (reload) { try { return getObjectLoader(curs, path, id); } catch (FileNotFoundException noFile) { @@ -183,9 +202,10 @@ class LooseObjects { } if (LOG.isDebugEnabled()) { LOG.debug(MessageFormat.format( - JGitText.get().looseObjectHandleIsStale, 
id.name(), - Integer.valueOf(readAttempts), Integer.valueOf( - MAX_LOOSE_OBJECT_STALE_READ_ATTEMPTS))); + JGitText.get().looseObjectHandleIsStale, + id.name(), Integer.valueOf(retries), + Integer.valueOf(MAX_STALE_READ_RETRIES))); + } } } } @@ -211,7 +231,7 @@ class LooseObjects { try { return getObjectLoaderWithoutRefresh(curs, path, id); } catch (FileNotFoundException e) { - if (trustFolderStat) { + if (trustLooseObjectStat == TrustStat.ALWAYS) { throw e; } try (InputStream stream = Files @@ -248,7 +268,7 @@ class LooseObjects { return getSizeWithoutRefresh(curs, id); } catch (FileNotFoundException noFile) { try { - if (trustFolderStat) { + if (trustLooseObjectStat == TrustStat.ALWAYS) { throw noFile; } try (InputStream stream = Files diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java index 9f21481a13..3a6de4e8e2 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java @@ -200,6 +200,7 @@ public class ObjectDirectory extends FileObjectDatabase { loose.close(); packed.close(); + preserved.close(); // Fully close all loaded alternates and clear the alternate list. AlternateHandle[] alt = alternates.get(); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/Pack.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/Pack.java index 8d2a86386f..5813d39e9a 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/Pack.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/Pack.java @@ -95,6 +95,9 @@ public class Pack implements Iterable<PackIndex.MutableEntry> { private RandomAccessFile fd; + /** For managing open/close accounting of {@link #fd}. */ + private final Object activeLock = new Object(); + /** Serializes reads performed against {@link #fd}. 
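Pack now guards its open/close reference counting with the dedicated activeLock introduced just above, instead of synchronizing whole methods on the Pack instance, so threads that only adjust the window and copy-raw-data counters no longer serialize against readers holding readLock. The counted open/close pattern in the hunks below has this general shape (a generic sketch with a hypothetical resource, not JGit code):

    import java.io.IOException;

    class RefCountedResourceSketch {
        private final Object activeLock = new Object(); // guards the counter below
        private int activeUsers;

        void acquire() throws IOException {
            synchronized (activeLock) {
                if (++activeUsers == 1) {
                    open(); // first user actually opens the underlying file
                }
            }
        }

        void release() {
            synchronized (activeLock) {
                if (--activeUsers == 0) {
                    close(); // last user closes it again
                }
            }
        }

        private void open() throws IOException { /* open the file descriptor */ }

        private void close() { /* close the file descriptor */ }
    }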
*/ private final Object readLock = new Object(); @@ -113,7 +116,7 @@ public class Pack implements Iterable<PackIndex.MutableEntry> { private volatile Exception invalidatingCause; @Nullable - private PackFile bitmapIdxFile; + private volatile PackFile bitmapIdxFile; private AtomicInteger transientErrorCount = new AtomicInteger(); @@ -645,42 +648,53 @@ public class Pack implements Iterable<PackIndex.MutableEntry> { throw new EOFException(); } - private synchronized void beginCopyAsIs() + private void beginCopyAsIs() throws StoredObjectRepresentationNotAvailableException { - if (++activeCopyRawData == 1 && activeWindows == 0) { - try { - doOpen(); - } catch (IOException thisPackNotValid) { - throw new StoredObjectRepresentationNotAvailableException( - thisPackNotValid); + synchronized (activeLock) { + if (++activeCopyRawData == 1 && activeWindows == 0) { + try { + doOpen(); + } catch (IOException thisPackNotValid) { + throw new StoredObjectRepresentationNotAvailableException( + thisPackNotValid); + } } } } - private synchronized void endCopyAsIs() { - if (--activeCopyRawData == 0 && activeWindows == 0) - doClose(); + private void endCopyAsIs() { + synchronized (activeLock) { + if (--activeCopyRawData == 0 && activeWindows == 0) { + doClose(); + } + } } - synchronized boolean beginWindowCache() throws IOException { - if (++activeWindows == 1) { - if (activeCopyRawData == 0) - doOpen(); - return true; + boolean beginWindowCache() throws IOException { + synchronized (activeLock) { + if (++activeWindows == 1) { + if (activeCopyRawData == 0) { + doOpen(); + } + return true; + } + return false; } - return false; } - synchronized boolean endWindowCache() { - final boolean r = --activeWindows == 0; - if (r && activeCopyRawData == 0) - doClose(); - return r; + boolean endWindowCache() { + synchronized (activeLock) { + boolean r = --activeWindows == 0; + if (r && activeCopyRawData == 0) { + doClose(); + } + return r; + } } private void doOpen() throws IOException { if (invalid) { - openFail(true, invalidatingCause); + openFail(invalidatingCause); throw new PackInvalidException(packFile, invalidatingCause); } try { @@ -691,39 +705,41 @@ public class Pack implements Iterable<PackIndex.MutableEntry> { } } catch (InterruptedIOException e) { // don't invalidate the pack, we are interrupted from another thread - openFail(false, e); + openFail(e); throw e; } catch (FileNotFoundException fn) { - // don't invalidate the pack if opening an existing file failed - // since it may be related to a temporary lack of resources (e.g. - // max open files) - openFail(!packFile.exists(), fn); + if (!packFile.exists()) { + // Failure to open an existing file may be related to a temporary lack of resources + // (e.g. 
max open files) + invalid = true; + } + openFail(fn); throw fn; } catch (EOFException | AccessDeniedException | NoSuchFileException | CorruptObjectException | NoPackSignatureException | PackMismatchException | UnpackException | UnsupportedPackIndexVersionException | UnsupportedPackVersionException pe) { - // exceptions signaling permanent problems with a pack - openFail(true, pe); + invalid = true; // exceptions signaling permanent problems with a pack + openFail(pe); throw pe; } catch (IOException ioe) { - // mark this packfile as invalid when NFS stale file handle error - // occur - openFail(FileUtils.isStaleFileHandleInCausalChain(ioe), ioe); + if (FileUtils.isStaleFileHandleInCausalChain(ioe)) { + invalid = true; + } + openFail(ioe); throw ioe; } catch (RuntimeException ge) { // generic exceptions could be transient so we should not mark the // pack invalid to avoid false MissingObjectExceptions - openFail(false, ge); + openFail(ge); throw ge; } } - private void openFail(boolean invalidate, Exception cause) { + private void openFail(Exception cause) { activeWindows = 0; activeCopyRawData = 0; - invalid = invalidate; invalidatingCause = cause; doClose(); } @@ -1197,17 +1213,8 @@ public class Pack implements Iterable<PackIndex.MutableEntry> { return null; } - synchronized void refreshBitmapIndex(PackFile bitmapIndexFile) { - this.bitmapIdx = Optionally.empty(); - this.invalid = false; + void setBitmapIndexFile(PackFile bitmapIndexFile) { this.bitmapIdxFile = bitmapIndexFile; - try { - getBitmapIndex(); - } catch (IOException e) { - LOG.warn(JGitText.get().bitmapFailedToGet, bitmapIdxFile, e); - this.bitmapIdx = Optionally.empty(); - this.bitmapIdxFile = null; - } } private synchronized PackReverseIndex getReverseIdx() throws IOException { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackDirectory.java index e31126f027..f50c17eafa 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackDirectory.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackDirectory.java @@ -17,6 +17,8 @@ import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; @@ -42,7 +44,8 @@ import org.eclipse.jgit.internal.storage.pack.PackWriter; import org.eclipse.jgit.lib.AbbreviatedObjectId; import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.Config; -import org.eclipse.jgit.lib.ConfigConstants; +import org.eclipse.jgit.lib.CoreConfig; +import org.eclipse.jgit.lib.CoreConfig.TrustStat; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.util.FileUtils; @@ -72,7 +75,7 @@ class PackDirectory { private final AtomicReference<PackList> packList; - private final boolean trustFolderStat; + private final TrustStat trustPackStat; /** * Initialize a reference to an on-disk 'pack' directory. @@ -86,14 +89,7 @@ class PackDirectory { this.config = config; this.directory = directory; packList = new AtomicReference<>(NO_PACKS); - - // Whether to trust the pack folder's modification time. If set to false - // we will always scan the .git/objects/pack folder to check for new - // pack files. 
If set to true (default) we use the folder's size, - // modification time, and key (inode) and assume that no new pack files - // can be in this folder if these attributes have not changed. - trustFolderStat = config.getBoolean(ConfigConstants.CONFIG_CORE_SECTION, - ConfigConstants.CONFIG_KEY_TRUSTFOLDERSTAT, true); + trustPackStat = config.get(CoreConfig.KEY).getTrustPackStat(); } /** @@ -313,38 +309,42 @@ class PackDirectory { } private void handlePackError(IOException e, Pack p) { - String warnTmpl = null; + String warnTemplate = null; + String debugTemplate = null; int transientErrorCount = 0; - String errTmpl = JGitText.get().exceptionWhileReadingPack; + String errorTemplate = JGitText.get().exceptionWhileReadingPack; if ((e instanceof CorruptObjectException) || (e instanceof PackInvalidException)) { - warnTmpl = JGitText.get().corruptPack; - LOG.warn(MessageFormat.format(warnTmpl, + warnTemplate = JGitText.get().corruptPack; + LOG.warn(MessageFormat.format(warnTemplate, p.getPackFile().getAbsolutePath()), e); // Assume the pack is corrupted, and remove it from the list. remove(p); } else if (e instanceof FileNotFoundException) { if (p.getPackFile().exists()) { - errTmpl = JGitText.get().packInaccessible; + errorTemplate = JGitText.get().packInaccessible; transientErrorCount = p.incrementTransientErrorCount(); } else { - warnTmpl = JGitText.get().packWasDeleted; + debugTemplate = JGitText.get().packWasDeleted; remove(p); } } else if (FileUtils.isStaleFileHandleInCausalChain(e)) { - warnTmpl = JGitText.get().packHandleIsStale; + warnTemplate = JGitText.get().packHandleIsStale; remove(p); } else { transientErrorCount = p.incrementTransientErrorCount(); } - if (warnTmpl != null) { - LOG.warn(MessageFormat.format(warnTmpl, + if (warnTemplate != null) { + LOG.warn(MessageFormat.format(warnTemplate, p.getPackFile().getAbsolutePath()), e); + } else if (debugTemplate != null) { + LOG.debug(MessageFormat.format(debugTemplate, + p.getPackFile().getAbsolutePath()), e); } else { if (doLogExponentialBackoff(transientErrorCount)) { // Don't remove the pack from the list, as the error may be // transient. 
- LOG.error(MessageFormat.format(errTmpl, + LOG.error(MessageFormat.format(errorTemplate, p.getPackFile().getAbsolutePath(), Integer.valueOf(transientErrorCount)), e); } @@ -361,8 +361,26 @@ class PackDirectory { } boolean searchPacksAgain(PackList old) { - return (!trustFolderStat || old.snapshot.isModified(directory)) - && old != scanPacks(old); + switch (trustPackStat) { + case NEVER: + break; + case AFTER_OPEN: + try (InputStream stream = Files + .newInputStream(directory.toPath())) { + // open the pack directory to refresh attributes (on some NFS clients) + } catch (IOException e) { + // ignore + } + //$FALL-THROUGH$ + case ALWAYS: + if (!old.snapshot.isModified(directory)) { + return false; + } + break; + case INHERIT: + // only used in CoreConfig internally + } + return old != scanPacks(old); } void insert(Pack pack) { @@ -459,12 +477,9 @@ class PackDirectory { && !oldPack.getFileSnapshot().isModified(packFile)) { forReuse.remove(packFile.getName()); list.add(oldPack); - try { - if(oldPack.getBitmapIndex() == null) { - oldPack.refreshBitmapIndex(packFilesByExt.get(BITMAP_INDEX)); - } - } catch (IOException e) { - LOG.warn(JGitText.get().bitmapAccessErrorForPackfile, oldPack.getPackName(), e); + PackFile bitMaps = packFilesByExt.get(BITMAP_INDEX); + if (bitMaps != null) { + oldPack.setBitmapIndexFile(bitMaps); } continue; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java index c9b05ad025..5f2015b834 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java @@ -185,7 +185,11 @@ public class PackFile extends File { private static PackExt getPackExt(String endsWithExtension) { for (PackExt ext : PackExt.values()) { - if (endsWithExtension.endsWith(ext.getExtension())) { + if (endsWithExtension.equals(ext.getExtension())) { + return ext; + } + + if (endsWithExtension.equals("old-" + ext.getExtension())) { return ext; } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndex.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndex.java index 7189ce20a6..b3e4efb4fc 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndex.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackIndex.java @@ -286,7 +286,7 @@ public interface PackIndex * the index cannot be read. */ void resolve(Set<ObjectId> matches, AbbreviatedObjectId id, - int matchLimit) throws IOException; + int matchLimit) throws IOException; /** * Get pack checksum @@ -304,6 +304,7 @@ public interface PackIndex class MutableEntry { /** Buffer of the ObjectId visited by the EntriesIterator. */ final MutableObjectId idBuffer = new MutableObjectId(); + /** Offset into the packfile of the current object. */ long offset; @@ -345,6 +346,34 @@ public interface PackIndex r.offset = offset; return r; } + + /** + * Similar to {@link Comparable#compareTo(Object)}, using only the + * object id in the entry. + * + * @param other + * Another mutable entry (probably from another index) + * + * @return a negative integer, zero, or a positive integer as this + * object is less than, equal to, or greater than the specified + * object. 
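[Editor's note] The two MutableEntry helpers documented here (compareBySha1To and copyOidTo) are aimed at merge-style consumers such as the multi-pack index code further down in this change. A minimal sketch of the intended call pattern, assuming `left` and `right` are current entries from two pack index iterators (placeholder names, not part of the change):

    // Pick the entry with the smaller object id, copying the id into a
    // reusable buffer instead of allocating a new ObjectId per entry.
    static void copySmaller(PackIndex.MutableEntry left,
            PackIndex.MutableEntry right, MutableObjectId dest) {
        if (left.compareBySha1To(right) <= 0) {
            left.copyOidTo(dest); // cheaper than toObjectId() in a tight loop
        } else {
            right.copyOidTo(dest);
        }
    }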
+ */ + public int compareBySha1To(MutableEntry other) { + return idBuffer.compareTo(other.idBuffer); + } + + /** + * Copy the current ObjectId to dest + * <p> + * Like {@link #toObjectId()}, but reusing the destination instead of + * creating a new ObjectId instance. + * + * @param dest + * destination for the object id + */ + public void copyOidTo(MutableObjectId dest) { + dest.fromObjectId(idBuffer); + } } /** @@ -368,7 +397,6 @@ public interface PackIndex this.objectCount = objectCount; } - @Override public boolean hasNext() { return returnedNumber < objectCount; @@ -393,7 +421,6 @@ public interface PackIndex */ protected abstract void readNext(); - /** * Copies to the entry an {@link ObjectId} from the int buffer and * position idx @@ -423,7 +450,8 @@ public interface PackIndex /** * Sets the {@code offset} to the entry * - * @param offset the offset in the pack file + * @param offset + * the offset in the pack file */ protected void setOffset(long offset) { entry.offset = offset; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java index 6aa1157e37..319a9ed710 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java @@ -64,10 +64,9 @@ import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.ObjectWritingException; import org.eclipse.jgit.events.RefsChangedEvent; import org.eclipse.jgit.internal.JGitText; -import org.eclipse.jgit.lib.ConfigConstants; import org.eclipse.jgit.lib.Constants; -import org.eclipse.jgit.lib.CoreConfig.TrustLooseRefStat; -import org.eclipse.jgit.lib.CoreConfig.TrustPackedRefsStat; +import org.eclipse.jgit.lib.CoreConfig; +import org.eclipse.jgit.lib.CoreConfig.TrustStat; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectIdRef; import org.eclipse.jgit.lib.ProgressMonitor; @@ -76,6 +75,7 @@ import org.eclipse.jgit.lib.RefComparator; import org.eclipse.jgit.lib.RefDatabase; import org.eclipse.jgit.lib.RefUpdate; import org.eclipse.jgit.lib.RefWriter; +import org.eclipse.jgit.lib.ReflogReader; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.lib.SymbolicRef; import org.eclipse.jgit.revwalk.RevObject; @@ -184,11 +184,7 @@ public class RefDirectory extends RefDatabase { private List<Integer> retrySleepMs = RETRY_SLEEP_MS; - private final boolean trustFolderStat; - - private final TrustPackedRefsStat trustPackedRefsStat; - - private final TrustLooseRefStat trustLooseRefStat; + private final CoreConfig coreConfig; RefDirectory(RefDirectory refDb) { parent = refDb.parent; @@ -200,9 +196,7 @@ public class RefDirectory extends RefDatabase { packedRefsFile = refDb.packedRefsFile; looseRefs.set(refDb.looseRefs.get()); packedRefs.set(refDb.packedRefs.get()); - trustFolderStat = refDb.trustFolderStat; - trustPackedRefsStat = refDb.trustPackedRefsStat; - trustLooseRefStat = refDb.trustLooseRefStat; + coreConfig = refDb.coreConfig; inProcessPackedRefsLock = refDb.inProcessPackedRefsLock; } @@ -218,17 +212,7 @@ public class RefDirectory extends RefDatabase { looseRefs.set(RefList.<LooseRef> emptyList()); packedRefs.set(NO_PACKED_REFS); - trustFolderStat = db.getConfig() - .getBoolean(ConfigConstants.CONFIG_CORE_SECTION, - ConfigConstants.CONFIG_KEY_TRUSTFOLDERSTAT, true); - trustPackedRefsStat = db.getConfig() - .getEnum(ConfigConstants.CONFIG_CORE_SECTION, null, - 
ConfigConstants.CONFIG_KEY_TRUST_PACKED_REFS_STAT, - TrustPackedRefsStat.UNSET); - trustLooseRefStat = db.getConfig() - .getEnum(ConfigConstants.CONFIG_CORE_SECTION, null, - ConfigConstants.CONFIG_KEY_TRUST_LOOSE_REF_STAT, - TrustLooseRefStat.ALWAYS); + coreConfig = db.getConfig().get(CoreConfig.KEY); inProcessPackedRefsLock = new ReentrantLock(true); } @@ -456,6 +440,11 @@ public class RefDirectory extends RefDatabase { return ret; } + @Override + public ReflogReader getReflogReader(Ref ref) throws IOException { + return new ReflogReaderImpl(getRepository(), ref.getName()); + } + @SuppressWarnings("unchecked") private RefList<Ref> upcast(RefList<? extends Ref> loose) { return (RefList<Ref>) loose; @@ -712,41 +701,47 @@ public class RefDirectory extends RefDatabase { } String name = dst.getName(); - // Write the packed-refs file using an atomic update. We might - // wind up reading it twice, before and after the lock, to ensure - // we don't miss an edit made externally. - PackedRefList packed = getPackedRefs(); - if (packed.contains(name)) { - inProcessPackedRefsLock.lock(); + // Get and keep the packed-refs lock while updating packed-refs and + // removing any loose ref + inProcessPackedRefsLock.lock(); + try { + LockFile lck = lockPackedRefsOrThrow(); try { - LockFile lck = lockPackedRefsOrThrow(); - try { + // Write the packed-refs file using an atomic update. We might + // wind up reading it twice, before and after checking if the + // ref to delete is included or not, to ensure + // we don't rely on a PackedRefList that is a result of in-memory + // or NFS caching. + PackedRefList packed = getPackedRefs(); + if (packed.contains(name)) { + // Force update our packed-refs snapshot before writing packed = refreshPackedRefs(); int idx = packed.find(name); if (0 <= idx) { commitPackedRefs(lck, packed.remove(idx), packed, true); } - } finally { - lck.unlock(); } - } finally { - inProcessPackedRefsLock.unlock(); - } - } - RefList<LooseRef> curLoose, newLoose; - do { - curLoose = looseRefs.get(); - int idx = curLoose.find(name); - if (idx < 0) - break; - newLoose = curLoose.remove(idx); - } while (!looseRefs.compareAndSet(curLoose, newLoose)); + RefList<LooseRef> curLoose, newLoose; + do { + curLoose = looseRefs.get(); + int idx = curLoose.find(name); + if (idx < 0) { + break; + } + newLoose = curLoose.remove(idx); + } while (!looseRefs.compareAndSet(curLoose, newLoose)); - int levels = levelsIn(name) - 2; - delete(logFor(name), levels); - if (dst.getStorage().isLoose()) { - deleteAndUnlock(fileFor(name), levels, update); + int levels = levelsIn(name) - 2; + delete(logFor(name), levels); + if (dst.getStorage().isLoose()) { + deleteAndUnlock(fileFor(name), levels, update); + } + } finally { + lck.unlock(); + } + } finally { + inProcessPackedRefsLock.unlock(); } modCnt.incrementAndGet(); @@ -973,7 +968,7 @@ public class RefDirectory extends RefDatabase { PackedRefList getPackedRefs() throws IOException { final PackedRefList curList = packedRefs.get(); - switch (trustPackedRefsStat) { + switch (coreConfig.getTrustPackedRefsStat()) { case NEVER: break; case AFTER_OPEN: @@ -989,12 +984,8 @@ public class RefDirectory extends RefDatabase { return curList; } break; - case UNSET: - if (trustFolderStat - && !curList.snapshot.isModified(packedRefsFile)) { - return curList; - } - break; + case INHERIT: + // only used in CoreConfig internally } return refreshPackedRefs(curList); @@ -1180,7 +1171,7 @@ public class RefDirectory extends RefDatabase { LooseRef scanRef(LooseRef ref, String name) throws 
IOException { final File path = fileFor(name); - if (trustLooseRefStat.equals(TrustLooseRefStat.AFTER_OPEN)) { + if (coreConfig.getTrustLooseRefStat() == TrustStat.AFTER_OPEN) { refreshPathToLooseRef(Paths.get(name)); } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCache.java index fd80faf4ed..15c125c684 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCache.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCache.java @@ -790,7 +790,9 @@ public class WindowCache { } numRemovers++; } - for (int numRemoved = 0; removeNextBlock(numRemoved); numRemoved++); + for (int numRemoved = 0; removeNextBlock(numRemoved); numRemoved++) { + // empty + } synchronized (this) { if (numRemovers > 0) { numRemovers--; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndex.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndex.java new file mode 100644 index 0000000000..15b52391b8 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndex.java @@ -0,0 +1,103 @@ +/* + * Copyright (C) 2024, GerritForge Inc. and others + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ + +package org.eclipse.jgit.internal.storage.midx; + +import java.util.Set; + +import org.eclipse.jgit.lib.AbbreviatedObjectId; +import org.eclipse.jgit.lib.AnyObjectId; +import org.eclipse.jgit.lib.ObjectId; + +/** + * An index over multiple packs + */ +public interface MultiPackIndex { + + /** + * Obtain the array of packfiles in the MultiPackIndex. + * <p> + * The pack ids correspond to positions in this list. + * + * @return array of packnames refered in this multipak index + */ + String[] getPackNames(); + + /** + * Does this index contains the object + * + * @param oid + * object id + * @return true of the index knows this the object + */ + boolean hasObject(AnyObjectId oid); + + /** + * Obtain the location of the object. + * <p> + * The returned object can be reused by the implementations. Callers + * must create a #copy() if they want to keep a reference. + * + * @param objectId + * objectId to read. + * @return mutable instance with the location or null if not found. + */ + PackOffset find(AnyObjectId objectId); + + /** + * Find objects matching the prefix abbreviation. + * + * @param matches + * set to add any located ObjectIds to. This is an output + * parameter. + * @param id + * prefix to search for. + * @param matchLimit + * maximum number of results to return. At most this many + * ObjectIds should be added to matches before returning. 
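[Editor's note] For orientation, a hedged sketch of how a reader might combine resolve() with find() and getPackNames() on this interface; the `midx` variable and the abbreviation literal are placeholders:

    Set<ObjectId> matches = new HashSet<>();
    midx.resolve(matches, AbbreviatedObjectId.fromString("1234abcd"), 10);
    for (ObjectId oid : matches) {
        MultiPackIndex.PackOffset loc = midx.find(oid);
        if (loc != null) {
            String pack = midx.getPackNames()[loc.getPackId()];
            long offset = loc.getOffset();
            // oid lives at 'offset' inside 'pack'; copy() the PackOffset
            // before the next find() if it must be kept.
        }
    }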
+ */ + void resolve(Set<ObjectId> matches, AbbreviatedObjectId id, int matchLimit); + + /** + * Memory size of this multipack index + * + * @return size of this multipack index in memory, in bytes + */ + long getMemorySize(); + + /** + * (packId, offset) coordinates of an object + */ + class PackOffset { + + int packId; + + long offset; + + protected PackOffset setValues(int packId, long offset) { + this.packId = packId; + this.offset = offset; + return this; + } + + public int getPackId() { + return packId; + } + + public long getOffset() { + return offset; + } + + public PackOffset copy() { + PackOffset copy = new PackOffset(); + return copy.setValues(this.packId, this.offset); + } + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexConstants.java new file mode 100644 index 0000000000..5d86f44baf --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexConstants.java @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2025, Google LLC + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.internal.storage.midx; + +class MultiPackIndexConstants { + static final int MIDX_SIGNATURE = 0x4d494458; /* MIDX */ + + static final byte MIDX_VERSION = 1; + + /** + * We infer the length of object IDs (OIDs) from this value: + * + * <pre> + * 1 => SHA-1 + * 2 => SHA-256 + * </pre> + */ + static final byte OID_HASH_VERSION = 1; + + static final int MULTIPACK_INDEX_FANOUT_SIZE = 4 * 256; + + /** + * First 4 bytes describe the chunk id. Value 0 is a terminating label. + * Other 8 bytes provide the byte-offset in current file for chunk to start. + */ + static final int CHUNK_LOOKUP_WIDTH = 12; + + /** "PNAM" chunk */ + static final int MIDX_CHUNKID_PACKNAMES = 0x504e414d; + + /** "OIDF" chunk */ + static final int MIDX_CHUNKID_OIDFANOUT = 0x4f494446; + + /** "OIDL" chunk */ + static final int MIDX_CHUNKID_OIDLOOKUP = 0x4f49444c; + + /** "OOFF" chunk */ + static final int MIDX_CHUNKID_OBJECTOFFSETS = 0x4f4f4646; + + /** "LOFF" chunk */ + static final int MIDX_CHUNKID_LARGEOFFSETS = 0x4c4f4646; + + /** "RIDX" chunk */ + static final int MIDX_CHUNKID_REVINDEX = 0x52494458; + + /** "BTMP" chunk */ + static final int MIDX_CHUNKID_BITMAPPEDPACKS = 0x42544D50; + + private MultiPackIndexConstants() { + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexLoader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexLoader.java new file mode 100644 index 0000000000..61caddc221 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexLoader.java @@ -0,0 +1,350 @@ +/* + * Copyright (C) 2024, GerritForge Inc. and others + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + */ + +package org.eclipse.jgit.internal.storage.midx; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.CHUNK_LOOKUP_WIDTH; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_BITMAPPEDPACKS; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_LARGEOFFSETS; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_OBJECTOFFSETS; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_OIDFANOUT; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_OIDLOOKUP; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_PACKNAMES; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_REVINDEX; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_SIGNATURE; +import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.List; + +import org.eclipse.jgit.internal.JGitText; +import org.eclipse.jgit.util.IO; +import org.eclipse.jgit.util.NB; +import org.eclipse.jgit.util.io.SilentFileInputStream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The loader returns the representation of the MultiPackIndex file content. + */ +public class MultiPackIndexLoader { + private final static Logger LOG = LoggerFactory + .getLogger(MultiPackIndexLoader.class); + + /** + * Open an existing MultiPackIndex file for reading. + * <p> + * The format of the file will be automatically detected and a proper access + * implementation for that format will be constructed and returned to the + * caller. The file may or may not be held open by the returned instance. + * + * @param midxFile + * existing multi-pack-index to read. + * @return a copy of the multi-pack-index file in memory + * @throws FileNotFoundException + * the file does not exist. + * @throws MultiPackIndexFormatException + * MultiPackIndex file's format is different from we expected. + * @throws java.io.IOException + * the file exists but could not be read due to security errors + * or unexpected data corruption. + */ + public static MultiPackIndex open(File midxFile) + throws FileNotFoundException, MultiPackIndexFormatException, + IOException { + try (SilentFileInputStream fd = new SilentFileInputStream(midxFile)) { + try { + return read(fd); + } catch (MultiPackIndexFormatException fe) { + throw fe; + } catch (IOException ioe) { + throw new IOException( + MessageFormat.format(JGitText.get().unreadableMIDX, + midxFile.getAbsolutePath()), + ioe); + } + } + } + + /** + * Read an existing MultiPackIndex file from a buffered stream. + * <p> + * The format of the file will be automatically detected and a proper access + * implementation for that format will be constructed and returned to the + * caller. The file may or may not be held open by the returned instance. + * + * @param fd + * stream to read the multipack-index file from. The stream must be + * buffered as some small IOs are performed against the stream. + * The caller is responsible for closing the stream. 
+ * @return a copy of the MultiPackIndex file in memory + * @throws MultiPackIndexFormatException + * the MultiPackIndex file's format is different from we + * expected. + * @throws java.io.IOException + * the stream cannot be read. + */ + public static MultiPackIndex read(InputStream fd) + throws MultiPackIndexFormatException, IOException { + byte[] hdr = new byte[12]; + IO.readFully(fd, hdr, 0, hdr.length); + + int magic = NB.decodeInt32(hdr, 0); + + if (magic != MIDX_SIGNATURE) { + throw new MultiPackIndexFormatException(JGitText.get().notAMIDX); + } + + // Check MultiPackIndex version + int v = hdr[4]; + if (v != 1) { + throw new MultiPackIndexFormatException(MessageFormat + .format(JGitText.get().unsupportedMIDXVersion, v)); + } + + // Read the object Id version (1 byte) + // 1 => SHA-1 + // 2 => SHA-256 + // TODO: If the hash type does not match the repository's hash + // algorithm, + // the multi-pack-index file should be ignored with a warning + // presented to the user. + int commitIdVersion = hdr[5]; + if (commitIdVersion != 1) { + throw new MultiPackIndexFormatException( + JGitText.get().incorrectOBJECT_ID_LENGTH); + } + + // Read the number of "chunkOffsets" (1 byte) + int chunkCount = hdr[6]; + + // Read the number of multi-pack-index files (1 byte) + // This value is currently always zero. + // TODO populate this + // int numberOfMultiPackIndexFiles = hdr[7]; + + // Number of packfiles (4 bytes) + int packCount = NB.decodeInt32(hdr, 8); + + byte[] lookupBuffer = new byte[CHUNK_LOOKUP_WIDTH * (chunkCount + 1)]; + + IO.readFully(fd, lookupBuffer, 0, lookupBuffer.length); + + List<ChunkSegment> chunks = new ArrayList<>(chunkCount + 1); + for (int i = 0; i <= chunkCount; i++) { + // chunks[chunkCount] is just a marker, in order to record the + // length of the last chunk. 
+ int id = NB.decodeInt32(lookupBuffer, i * 12); + long offset = NB.decodeInt64(lookupBuffer, i * 12 + 4); + chunks.add(new ChunkSegment(id, offset)); + } + + MultiPackIndexBuilder builder = MultiPackIndexBuilder.builder(); + builder.setPackCount(packCount); + for (int i = 0; i < chunkCount; i++) { + long chunkOffset = chunks.get(i).offset; + int chunkId = chunks.get(i).id; + long len = chunks.get(i + 1).offset - chunkOffset; + + if (len > Integer.MAX_VALUE - 8) { // http://stackoverflow.com/a/8381338 + throw new MultiPackIndexFormatException( + JGitText.get().multiPackIndexFileIsTooLargeForJgit); + } + + byte[] buffer = new byte[(int) len]; + IO.readFully(fd, buffer, 0, buffer.length); + + switch (chunkId) { + case MIDX_CHUNKID_OIDFANOUT: + builder.addOidFanout(buffer); + break; + case MIDX_CHUNKID_OIDLOOKUP: + builder.addOidLookUp(buffer); + break; + case MIDX_CHUNKID_PACKNAMES: + builder.addPackNames(buffer); + break; + case MIDX_CHUNKID_BITMAPPEDPACKS: + builder.addBitmappedPacks(buffer); + break; + case MIDX_CHUNKID_OBJECTOFFSETS: + builder.addObjectOffsets(buffer); + break; + case MIDX_CHUNKID_LARGEOFFSETS: + builder.addObjectLargeOffsets(buffer); + break; + default: + LOG.warn(MessageFormat.format(JGitText.get().midxChunkUnknown, + Integer.toHexString(chunkId))); + } + } + return builder.build(); + } + + private record ChunkSegment(int id, long offset) {} + + /** + * Accumulate byte[] of the different chunks, to build a multipack index + */ + // Visible for testing + static class MultiPackIndexBuilder { + + private final int hashLength; + + private int packCount; + + private byte[] oidFanout; + + private byte[] oidLookup; + + private String[] packNames; + + private byte[] bitmappedPackfiles; + + private byte[] objectOffsets; + + // Optional + private byte[] largeObjectOffsets; + + // Optional + private byte[] bitmapPackOrder; + + private MultiPackIndexBuilder(int hashLength) { + this.hashLength = hashLength; + } + + /** + * Create builder + * + * @return A builder of {@link MultiPackIndex}. 
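[Editor's note] A rough illustration of the builder's flow; the chunk payload arrays are placeholders for bytes already sliced out of the file, and the real caller is the loader's read() loop above:

    MultiPackIndex midx = MultiPackIndexBuilder.builder()
            .setPackCount(2)
            .addOidFanout(oidFanoutChunk)         // OIDF payload
            .addOidLookUp(oidLookupChunk)         // OIDL payload
            .addPackNames(packNamesChunk)         // PNAM payload
            .addObjectOffsets(objectOffsetsChunk) // OOFF payload
            .build(); // throws if a required chunk is missing or repeated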
+ */ + static MultiPackIndexBuilder builder() { + return new MultiPackIndexBuilder(OBJECT_ID_LENGTH); + } + + MultiPackIndexBuilder setPackCount(int packCount) { + this.packCount = packCount; + return this; + } + + MultiPackIndexBuilder addOidFanout(byte[] buffer) + throws MultiPackIndexFormatException { + assertChunkNotSeenYet(oidFanout, MIDX_CHUNKID_OIDFANOUT); + oidFanout = buffer; + return this; + } + + MultiPackIndexBuilder addOidLookUp(byte[] buffer) + throws MultiPackIndexFormatException { + assertChunkNotSeenYet(oidLookup, MIDX_CHUNKID_OIDLOOKUP); + oidLookup = buffer; + return this; + } + + MultiPackIndexBuilder addPackNames(byte[] buffer) + throws MultiPackIndexFormatException { + assertChunkNotSeenYet(packNames, MIDX_CHUNKID_PACKNAMES); + packNames = new String(buffer, UTF_8).split("\u0000"); //$NON-NLS-1$ + return this; + } + + MultiPackIndexBuilder addBitmappedPacks(byte[] buffer) + throws MultiPackIndexFormatException { + assertChunkNotSeenYet(bitmappedPackfiles, + MIDX_CHUNKID_BITMAPPEDPACKS); + bitmappedPackfiles = buffer; + return this; + } + + MultiPackIndexBuilder addObjectOffsets(byte[] buffer) + throws MultiPackIndexFormatException { + assertChunkNotSeenYet(objectOffsets, MIDX_CHUNKID_OBJECTOFFSETS); + objectOffsets = buffer; + return this; + } + + MultiPackIndexBuilder addObjectLargeOffsets(byte[] buffer) + throws MultiPackIndexFormatException { + assertChunkNotSeenYet(largeObjectOffsets, + MIDX_CHUNKID_LARGEOFFSETS); + largeObjectOffsets = buffer; + return this; + } + + MultiPackIndexBuilder addReverseIndex(byte[] buffer) + throws MultiPackIndexFormatException { + assertChunkNotSeenYet(bitmapPackOrder, MIDX_CHUNKID_REVINDEX); + bitmapPackOrder = buffer; + return this; + } + + MultiPackIndex build() throws MultiPackIndexFormatException { + assertChunkNotNull(oidFanout, MIDX_CHUNKID_OIDFANOUT); + assertChunkNotNull(oidLookup, MIDX_CHUNKID_OIDLOOKUP); + assertChunkNotNull(packNames, MIDX_CHUNKID_PACKNAMES); + assertChunkNotNull(objectOffsets, MIDX_CHUNKID_OBJECTOFFSETS); + + assertPackCounts(packCount, packNames.length); + return new MultiPackIndexV1(hashLength, oidFanout, oidLookup, + packNames, bitmappedPackfiles, objectOffsets, largeObjectOffsets); + } + + private static void assertChunkNotNull(Object object, int chunkId) + throws MultiPackIndexFormatException { + if (object == null) { + throw new MultiPackIndexFormatException( + MessageFormat.format(JGitText.get().midxChunkNeeded, + Integer.toHexString(chunkId))); + } + } + + private static void assertChunkNotSeenYet(Object object, int chunkId) + throws MultiPackIndexFormatException { + if (object != null) { + throw new MultiPackIndexFormatException( + MessageFormat.format(JGitText.get().midxChunkRepeated, + Integer.toHexString(chunkId))); + } + } + + private static void assertPackCounts(int headerCount, + int packfileNamesCount) throws MultiPackIndexFormatException { + if (headerCount != packfileNamesCount) { + throw new MultiPackIndexFormatException(MessageFormat.format( + JGitText.get().multiPackIndexPackCountMismatch, + headerCount, packfileNamesCount)); + } + } + } + + /** + * Thrown when a MultiPackIndex file's format is different from we expected + */ + public static class MultiPackIndexFormatException extends IOException { + + private static final long serialVersionUID = 1L; + + /** + * Construct an exception. + * + * @param why + * description of the type of error. 
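[Editor's note] A usage sketch for the loader, showing where this exception surfaces; the directory variable and path are illustrative only:

    File midxFile = new File(repoDir, "objects/pack/multi-pack-index"); // repoDir: placeholder
    try {
        MultiPackIndex midx = MultiPackIndexLoader.open(midxFile);
        // use midx ...
    } catch (MultiPackIndexLoader.MultiPackIndexFormatException e) {
        // not a multi-pack-index, or unsupported version/hash: skip the file
    } catch (IOException e) {
        // I/O error while reading an otherwise expected file
    }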
+ */ + MultiPackIndexFormatException(String why) { + super(why); + } + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexPrettyPrinter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexPrettyPrinter.java new file mode 100644 index 0000000000..948b7bc174 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexPrettyPrinter.java @@ -0,0 +1,156 @@ +/* + * Copyright (C) 2025, Google LLC + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.internal.storage.midx; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.CHUNK_LOOKUP_WIDTH; + +import java.io.PrintWriter; +import java.util.ArrayList; +import java.util.List; + +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.util.NB; + +/** + * Prints a multipack index file in a human-readable format. + * + * @since 7.2 + */ +@SuppressWarnings({ "boxing", "nls" }) +public class MultiPackIndexPrettyPrinter { + + /** + * Writes to out, in human-readable format, the multipack index in rawMidx + * + * @param rawMidx the bytes of a multipack index + * @param out a writer + */ + public static void prettyPrint(byte[] rawMidx, PrintWriter out) { + // Header (12 bytes) + out.println("[ 0] Magic: " + new String(rawMidx, 0, 4, UTF_8)); + out.println("[ 4] Version number: " + (int) rawMidx[4]); + out.println("[ 5] OID version: " + (int) rawMidx[5]); + int chunkCount = rawMidx[6]; + out.println("[ 6] # of chunks: " + chunkCount); + out.println("[ 7] # of bases: " + (int) rawMidx[7]); + int numberOfPacks = NB.decodeInt32(rawMidx, 8); + out.println("[ 8] # of packs: " + numberOfPacks); + + // Chunk lookup table + List<ChunkSegment> chunkSegments = new ArrayList<>(); + int current = printChunkLookup(out, rawMidx, chunkCount, chunkSegments); + + for (int i = 0; i < chunkSegments.size() - 1; i++) { + ChunkSegment segment = chunkSegments.get(i); + if (current != segment.startOffset()) { + throw new IllegalStateException(String.format( + "We are at byte %d, but segment should start at %d", + current, segment.startOffset())); + } + out.printf("Starting chunk: %s @ %d%n", segment.chunkName(), + segment.startOffset()); + switch (segment.chunkName()) { + case "OIDF" -> current = printOIDF(out, rawMidx, current); + case "OIDL" -> current = printOIDL(out, rawMidx, current, + chunkSegments.get(i + 1).startOffset); + case "OOFF" -> current = printOOFF(out, rawMidx, current, + chunkSegments.get(i + 1).startOffset); + case "PNAM" -> current = printPNAM(out, rawMidx, current, + chunkSegments.get(i + 1).startOffset); + case "RIDX" -> current = printRIDX(out, rawMidx, current, + chunkSegments.get(i + 1).startOffset); + default -> { + out.printf( + "Skipping %s (don't know how to print it yet)%n", + segment.chunkName()); + current = (int) chunkSegments.get(i + 1).startOffset(); + } + } + } + // Checksum is a SHA-1, use ObjectId to parse it + out.printf("[ %d] Checksum %s%n", current, + ObjectId.fromRaw(rawMidx, current).name()); + out.printf("Total size: " + (current + 20)); + } + + private static int printChunkLookup(PrintWriter out, byte[] rawMidx, int chunkCount, + List<ChunkSegment> chunkSegments) { + out.println("Starting chunk lookup @ 12"); + 
int current = 12; + for (int i = 0; i < chunkCount; i++) { + String chunkName = new String(rawMidx, current, 4, UTF_8); + long offset = NB.decodeInt64(rawMidx, current + 4); + out.printf("[ %d] |%8s|%8d|%n", current, chunkName, offset); + current += CHUNK_LOOKUP_WIDTH; + chunkSegments.add(new ChunkSegment(chunkName, offset)); + } + String chunkName = "0000"; + long offset = NB.decodeInt64(rawMidx, current + 4); + out.printf("[ %d] |%8s|%8d|%n", current, chunkName, offset); + current += CHUNK_LOOKUP_WIDTH; + chunkSegments.add(new ChunkSegment(chunkName, offset)); + return current; + } + + private static int printOIDF(PrintWriter out, byte[] rawMidx, int start) { + int current = start; + for (short i = 0; i < 256; i++) { + out.printf("[ %d] (%02X) %d%n", current, i, + NB.decodeInt32(rawMidx, current)); + current += 4; + } + return current; + } + + private static int printOIDL(PrintWriter out, byte[] rawMidx, int start, long end) { + int i = start; + while (i < end) { + out.printf("[ %d] %s%n", i, + ObjectId.fromRaw(rawMidx, i).name()); + i += 20; + } + return i; + } + + private static int printOOFF(PrintWriter out, byte[] rawMidx, int start, long end) { + int i = start; + while (i < end) { + out.printf("[ %d] %d %d%n", i, NB.decodeInt32(rawMidx, i), + NB.decodeInt32(rawMidx, i + 4)); + i += 8; + } + return i; + } + + private static int printRIDX(PrintWriter out, byte[] rawMidx, int start, long end) { + int i = start; + while (i < end) { + out.printf("[ %d] %d%n", i, NB.decodeInt32(rawMidx, i)); + i += 4; + } + return (int) end; + } + + private static int printPNAM(PrintWriter out, byte[] rawMidx, int start, long end) { + int nameStart = start; + for (int i = start; i < end; i++) { + if (rawMidx[i] == 0) { + out + .println(new String(rawMidx, nameStart, i - nameStart)); + nameStart = i + 1; + } + } + return (int) end; + } + + private record ChunkSegment(String chunkName, long startOffset) { + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexV1.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexV1.java new file mode 100644 index 0000000000..be752cc4b5 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexV1.java @@ -0,0 +1,289 @@ +/* + * Copyright (C) 2024, GerritForge Inc. and others + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ + +package org.eclipse.jgit.internal.storage.midx; + +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Set; + +import org.eclipse.jgit.annotations.NonNull; +import org.eclipse.jgit.annotations.Nullable; +import org.eclipse.jgit.internal.JGitText; +import org.eclipse.jgit.internal.storage.midx.MultiPackIndexLoader.MultiPackIndexFormatException; +import org.eclipse.jgit.lib.AbbreviatedObjectId; +import org.eclipse.jgit.lib.AnyObjectId; +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.util.NB; + +/** + * Support for the MultiPackIndex v1 format. 
+ * + * @see MultiPackIndex + */ +class MultiPackIndexV1 implements MultiPackIndex { + + private final OidLookup idx; + + private final String[] packNames; + + private final byte[] bitmappedPackfiles; + + private final OffsetLookup offsets; + + private final PackOffset result = new PackOffset(); + + MultiPackIndexV1(int hashLength, @NonNull byte[] oidFanout, + @NonNull byte[] oidLookup, String[] packNames, + byte[] bitmappedPackfiles, byte[] objectOffsets, + byte[] largeObjectOffsets) throws MultiPackIndexFormatException { + this.bitmappedPackfiles = bitmappedPackfiles; + this.idx = new OidLookup(hashLength, oidFanout, oidLookup); + this.offsets = new OffsetLookup(objectOffsets, largeObjectOffsets); + this.packNames = packNames; + } + + @Override + public String[] getPackNames() { + return packNames; + } + + @Override + public boolean hasObject(AnyObjectId oid) { + return idx.findMultiPackIndexPosition(oid) != -1; + } + + @Override + @Nullable + public PackOffset find(AnyObjectId objectId) { + int position = idx.findMultiPackIndexPosition(objectId); + if (position == -1) { + return null; + } + offsets.getObjectOffset(position, result); + return result; + } + + @Override + public void resolve(Set<ObjectId> matches, AbbreviatedObjectId id, + int matchLimit) { + idx.resolve(matches, id, matchLimit); + } + + @Override + public long getMemorySize() { + int packNamesSize = Arrays.stream(packNames) + .mapToInt(s -> s.getBytes(StandardCharsets.UTF_8).length).sum(); + return packNamesSize + byteArrayLengh(bitmappedPackfiles) + + idx.getMemorySize() + offsets.getMemorySize(); + } + + @Override + public String toString() { + return "MultiPackIndexV1 {idx=" + idx + ", packfileNames=" //$NON-NLS-1$ //$NON-NLS-2$ + + Arrays.toString(packNames) + ", bitmappedPackfiles=" //$NON-NLS-1$ + + byteArrayToString(bitmappedPackfiles) + ", objectOffsets=" //$NON-NLS-1$ + + offsets + '}'; + } + + private static String byteArrayToString(byte[] array) { + return array == null ? "null" : new String(array); //$NON-NLS-1$ + } + + private static int byteArrayLengh(byte[] array) { + return array == null ? 0 : array.length; + } + + /** + * Wraps the small and large offset chunks (if exists), to lookup offsets. + */ + private static class OffsetLookup { + private static final int OBJECT_OFFSETS_DATA_WIDTH = 8; + + private static final int BIT_31_ON = 0x80000000; + + private static final int TOGGLE_BIT_31 = 0x7fff_ffff; + + private final byte[] offsets; + + private final byte[] largeOffsets; + + /** + * Initialize the ObjectOffsets. + * + * @param offsets + * content of ObjectOffset Chunk. + * @param largeOffsets + * content of largo offsets chunks (can be null). + */ + OffsetLookup(@NonNull byte[] offsets, byte[] largeOffsets) { + this.offsets = offsets; + this.largeOffsets = largeOffsets; + } + + /** + * Get the metadata of a commit。 + * + * @param position + * the position in the multi-pack-index of the object. + * @param result + * an instance of PackOffset to populate with the result. 
+ */ + void getObjectOffset(int position, PackOffset result) { + int offsetInChunk = position * OBJECT_OFFSETS_DATA_WIDTH; + int packId = NB.decodeInt32(offsets, offsetInChunk); + int offset = NB.decodeInt32(offsets, offsetInChunk + 4); + if ((offset & BIT_31_ON) != 0) { + long bigOffset; + if (largeOffsets == null) { + bigOffset = NB.decodeUInt32(offsets, offsetInChunk + 4); + } else { + int bigOffsetPos = (offset & TOGGLE_BIT_31); + bigOffset = NB.decodeInt64(largeOffsets, bigOffsetPos * 8); + } + result.setValues(packId, bigOffset); + return; + } + result.setValues(packId, offset); + } + + long getMemorySize() { + return (long) byteArrayLengh(offsets) + + byteArrayLengh(largeOffsets); + } + } + + /** + * Combines the fanout and oid list chunks, to lookup Oids with an efficient + * binary search + */ + private static class OidLookup { + + private static final int FANOUT = 256; + + private final int hashLength; + + private final int[] fanoutTable; + + private final byte[] oidLookup; + + /** + * Initialize the MultiPackIndexIndex. + * + * @param hashLength + * length of object hash. + * @param oidFanout + * content of OID Fanout Chunk. + * @param oidLookup + * content of OID Lookup Chunk. + * @throws MultiPackIndexFormatException + * MultiPackIndex file's format is different from we + * expected. + */ + OidLookup(int hashLength, @NonNull byte[] oidFanout, + @NonNull byte[] oidLookup) + throws MultiPackIndexFormatException { + this.hashLength = hashLength; + this.oidLookup = oidLookup; + + int[] table = new int[FANOUT]; + long uint32; + for (int k = 0; k < table.length; k++) { + uint32 = NB.decodeUInt32(oidFanout, k * 4); + if (uint32 > Integer.MAX_VALUE) { + throw new MultiPackIndexFormatException( + JGitText.get().multiPackIndexFileIsTooLargeForJgit); + } + table[k] = (int) uint32; + } + this.fanoutTable = table; + } + + /** + * Find the position in the MultiPackIndex file of the specified id. + * + * @param id + * the id for which the multi-pack-index position will be + * found. + * @return the MultiPackIndex position or -1 if the object was not + * found. + */ + int findMultiPackIndexPosition(AnyObjectId id) { + int levelOne = id.getFirstByte(); + int high = fanoutTable[levelOne]; + int low = 0; + if (levelOne > 0) { + low = fanoutTable[levelOne - 1]; + } + while (low < high) { + int mid = (low + high) >>> 1; + int cmp = id.compareTo(oidLookup, hashLength * mid); + if (cmp < 0) { + high = mid; + } else if (cmp == 0) { + return mid; + } else { + low = mid + 1; + } + } + return -1; + } + + void resolve(Set<ObjectId> matches, AbbreviatedObjectId id, + int matchLimit) { + if (matches.size() >= matchLimit) { + return; + } + + if (oidLookup.length == 0) { + return; + } + + int high = fanoutTable[id.getFirstByte()]; + int low = id.getFirstByte() == 0 ? 0 + : fanoutTable[id.getFirstByte() - 1]; + do { + int p = (low + high) >>> 1; + int cmp = id.prefixCompare(oidLookup, idOffset(p)); + if (cmp < 0) { + high = p; + continue; + } + + if (cmp > 0) { + low = p + 1; + continue; + } + + // Got a match. + // We may have landed in the middle of the matches. Move + // backwards to the start of matches, then walk forwards. 
+ while (0 < p + && id.prefixCompare(oidLookup, idOffset(p - 1)) == 0) { + p--; + } + while (p < high && id.prefixCompare(oidLookup, idOffset(p)) == 0 + && matches.size() < matchLimit) { + matches.add(ObjectId.fromRaw(oidLookup, idOffset(p))); + p++; + } + return; + } while (low < high); + } + + private int idOffset(int position) { + return position * hashLength; + } + + long getMemorySize() { + return 4L + byteArrayLengh(oidLookup) + (FANOUT * 4); + } + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexWriter.java new file mode 100644 index 0000000000..b42c821a44 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/MultiPackIndexWriter.java @@ -0,0 +1,428 @@ +/* + * Copyright (C) 2025, Google LLC + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.internal.storage.midx; + +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.CHUNK_LOOKUP_WIDTH; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_LARGEOFFSETS; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_OBJECTOFFSETS; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_OIDFANOUT; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_OIDLOOKUP; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_PACKNAMES; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_CHUNKID_REVINDEX; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_SIGNATURE; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MIDX_VERSION; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.MULTIPACK_INDEX_FANOUT_SIZE; +import static org.eclipse.jgit.internal.storage.midx.MultiPackIndexConstants.OID_HASH_VERSION; +import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH; + +import java.io.IOException; +import java.io.InterruptedIOException; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.eclipse.jgit.internal.JGitText; +import org.eclipse.jgit.internal.storage.file.PackIndex; +import org.eclipse.jgit.internal.storage.io.CancellableDigestOutputStream; +import org.eclipse.jgit.internal.storage.midx.PackIndexMerger.MidxMutableEntry; +import org.eclipse.jgit.lib.ProgressMonitor; +import org.eclipse.jgit.util.NB; + +/** + * Writes a collection of indexes as a multipack index. + * <p> + * See <a href= + * "https://git-scm.com/docs/pack-format#_multi_pack_index_midx_files_have_the_following_format">multipack + * index format spec</a> + * + * @since 7.2 + */ +public class MultiPackIndexWriter { + + private static final int LIMIT_31_BITS = (1 << 31) - 1; + + private static final int MIDX_HEADER_SIZE = 12; + + /** + * Writes the inputs in the multipack index format in the outputStream. 
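[Editor's note] Roughly, a caller is expected to wire the writer up along these lines; the index map contents, monitor, and target path are placeholders, and loading the individual .idx files is outside this class:

    Map<String, PackIndex> indexes = new TreeMap<>();
    indexes.put("pack-1", idx1); // idx1/idx2: PackIndex instances loaded elsewhere
    indexes.put("pack-2", idx2);
    try (OutputStream out = Files.newOutputStream(midxPath)) { // midxPath: placeholder
        long bytesWritten = new MultiPackIndexWriter()
                .write(NullProgressMonitor.INSTANCE, out, indexes);
    }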
+ * + * @param monitor + * progress monitor + * @param outputStream + * stream to write the multipack index file + * @param inputs + * pairs of name and index for each pack to include in the + * multipack index. + * @return bytes written into the stream + * @throws IOException + * Error writing to the stream + */ + public long write(ProgressMonitor monitor, OutputStream outputStream, + Map<String, PackIndex> inputs) throws IOException { + PackIndexMerger data = new PackIndexMerger(inputs); + + // List of chunks in the order they need to be written + List<ChunkHeader> chunkHeaders = createChunkHeaders(data); + long expectedSize = calculateExpectedSize(chunkHeaders); + try (CancellableDigestOutputStream out = new CancellableDigestOutputStream( + monitor, outputStream)) { + writeHeader(out, chunkHeaders.size(), data.getPackCount()); + writeChunkLookup(out, chunkHeaders); + + WriteContext ctx = new WriteContext(out, data); + for (ChunkHeader chunk : chunkHeaders) { + chunk.writerFn.write(ctx); + } + writeCheckSum(out); + if (expectedSize != out.length()) { + throw new IllegalStateException(String.format( + JGitText.get().multiPackIndexUnexpectedSize, + Long.valueOf(expectedSize), + Long.valueOf(out.length()))); + } + return expectedSize; + } catch (InterruptedIOException e) { + throw new IOException(JGitText.get().multiPackIndexWritingCancelled, + e); + } + } + + private static long calculateExpectedSize(List<ChunkHeader> chunks) { + int chunkLookup = (chunks.size() + 1) * CHUNK_LOOKUP_WIDTH; + long chunkContent = chunks.stream().mapToLong(c -> c.size).sum(); + return /* header */ 12 + chunkLookup + chunkContent + /* CRC */ 20; + } + + private List<ChunkHeader> createChunkHeaders(PackIndexMerger data) { + List<ChunkHeader> chunkHeaders = new ArrayList<>(); + chunkHeaders.add(new ChunkHeader(MIDX_CHUNKID_OIDFANOUT, + MULTIPACK_INDEX_FANOUT_SIZE, this::writeFanoutTable)); + chunkHeaders.add(new ChunkHeader(MIDX_CHUNKID_OIDLOOKUP, + (long) data.getUniqueObjectCount() * OBJECT_ID_LENGTH, + this::writeOidLookUp)); + chunkHeaders.add(new ChunkHeader(MIDX_CHUNKID_OBJECTOFFSETS, + 8L * data.getUniqueObjectCount(), this::writeObjectOffsets)); + if (data.needsLargeOffsetsChunk()) { + chunkHeaders.add(new ChunkHeader(MIDX_CHUNKID_LARGEOFFSETS, + 8L * data.getOffsetsOver31BitsCount(), + this::writeObjectLargeOffsets)); + } + chunkHeaders.add(new ChunkHeader(MIDX_CHUNKID_REVINDEX, + 4L * data.getUniqueObjectCount(), this::writeRidx)); + + int packNamesSize = data.getPackNames().stream() + .mapToInt(String::length).map(i -> i + 1 /* null at the end */) + .sum(); + chunkHeaders.add(new ChunkHeader(MIDX_CHUNKID_PACKNAMES, packNamesSize, + this::writePackfileNames)); + return chunkHeaders; + } + + /** + * Write the first 12 bytes of the multipack index. + * <p> + * These bytes include things like magic number, version, number of + * chunks... 
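[Editor's note] For concreteness, the twelve header bytes produced here for, say, a six-chunk index covering three packs would look like this (values illustrative, mirroring the writeHeader logic below):

    byte[] header = new byte[12];
    NB.encodeInt32(header, 0, 0x4d494458); // "MIDX" signature
    header[4] = 1;                         // MIDX_VERSION
    header[5] = 1;                         // OID_HASH_VERSION (SHA-1)
    header[6] = 6;                         // number of chunks
    header[7] = 0;                         // number of base multi-pack-index files
    NB.encodeInt32(header, 8, 3);          // number of packs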
+ * + * @param out + * output stream to write + * @param numChunks + * number of chunks this multipack index is going to have + * @param packCount + * number of packs covered by this multipack index + * @throws IOException + * error writing to the output stream + */ + private void writeHeader(CancellableDigestOutputStream out, int numChunks, + int packCount) throws IOException { + byte[] headerBuffer = new byte[MIDX_HEADER_SIZE]; + NB.encodeInt32(headerBuffer, 0, MIDX_SIGNATURE); + byte[] buff = { MIDX_VERSION, OID_HASH_VERSION, (byte) numChunks, + (byte) 0 }; + System.arraycopy(buff, 0, headerBuffer, 4, 4); + NB.encodeInt32(headerBuffer, 8, packCount); + out.write(headerBuffer, 0, headerBuffer.length); + out.flush(); + } + + /** + * Write a table of "chunkId, start-offset", with a special value "0, + * end-of-previous_chunk", to mark the end. + * + * @param out + * output stream to write + * @param chunkHeaders + * list of chunks in the order they are expected to be written + * @throws IOException + * error writing to the output stream + */ + private void writeChunkLookup(CancellableDigestOutputStream out, + List<ChunkHeader> chunkHeaders) throws IOException { + + // first chunk will start at header + this lookup block + long chunkStart = MIDX_HEADER_SIZE + + (long) (chunkHeaders.size() + 1) * CHUNK_LOOKUP_WIDTH; + byte[] chunkEntry = new byte[CHUNK_LOOKUP_WIDTH]; + for (ChunkHeader chunkHeader : chunkHeaders) { + NB.encodeInt32(chunkEntry, 0, chunkHeader.chunkId); + NB.encodeInt64(chunkEntry, 4, chunkStart); + out.write(chunkEntry); + chunkStart += chunkHeader.size; + } + // Terminating label for the block + // (chunkid 0, offset where the next block would start) + NB.encodeInt32(chunkEntry, 0, 0); + NB.encodeInt64(chunkEntry, 4, chunkStart); + out.write(chunkEntry); + } + + /** + * Write the fanout table for the object ids + * <p> + * Table with 256 entries (one byte), where the ith entry, F[i], stores the + * number of OIDs with first byte at most i. Thus, F[255] stores the total + * number of objects. + * + * @param ctx + * write context + * @throws IOException + * error writing to the output stream + */ + + private void writeFanoutTable(WriteContext ctx) throws IOException { + byte[] tmp = new byte[4]; + int[] fanout = new int[256]; + Iterator<MidxMutableEntry> iterator = ctx.data.bySha1Iterator(); + while (iterator.hasNext()) { + MidxMutableEntry e = iterator.next(); + fanout[e.getObjectId().getFirstByte() & 0xff]++; + } + for (int i = 1; i < fanout.length; i++) { + fanout[i] += fanout[i - 1]; + } + for (int n : fanout) { + NB.encodeInt32(tmp, 0, n); + ctx.out.write(tmp, 0, 4); + } + } + + /** + * Write the OID lookup chunk + * <p> + * A list of OIDs in sha1 order. + * + * @param ctx + * write context + * @throws IOException + * error writing to the output stream + */ + private void writeOidLookUp(WriteContext ctx) throws IOException { + byte[] tmp = new byte[OBJECT_ID_LENGTH]; + + Iterator<MidxMutableEntry> iterator = ctx.data.bySha1Iterator(); + while (iterator.hasNext()) { + MidxMutableEntry e = iterator.next(); + e.getObjectId().copyRawTo(tmp, 0); + ctx.out.write(tmp, 0, OBJECT_ID_LENGTH); + } + } + + /** + * Write the object offsets chunk + * <p> + * A list of offsets, parallel to the list of OIDs. If the offset is too + * large (see {@link #fitsIn31bits(long)}), this contains the position in + * the large offsets list (marked with a 1 in the most significant bit). 
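[Editor's note] A worked example of the encoding described above, assuming the offset is the first entry appended to the large-offsets (LOFF) chunk:

    long packOffset = 0x1_2345_6789L;          // needs more than 31 bits
    int positionInLoff = 0;                    // first entry in the LOFF chunk
    int encoded = positionInLoff | (1 << 31);  // 0x80000000: bit 31 flags the indirection
    // The OOFF entry stores 'encoded'; the LOFF chunk stores the full
    // 64-bit packOffset at position 0.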
+ * + * @param ctx + * write context + * @throws IOException + * error writing to the output stream + */ + private void writeObjectOffsets(WriteContext ctx) throws IOException { + byte[] entry = new byte[8]; + Iterator<MidxMutableEntry> iterator = ctx.data.bySha1Iterator(); + while (iterator.hasNext()) { + MidxMutableEntry e = iterator.next(); + NB.encodeInt32(entry, 0, e.getPackId()); + if (!ctx.data.needsLargeOffsetsChunk() + || fitsIn31bits(e.getOffset())) { + NB.encodeInt32(entry, 4, (int) e.getOffset()); + } else { + int offloadedPosition = ctx.largeOffsets.append(e.getOffset()); + NB.encodeInt32(entry, 4, offloadedPosition | (1 << 31)); + } + ctx.out.write(entry); + } + } + + /** + * Writes the reverse index chunk + * <p> + * This stores the position of the objects in the main index, ordered first + * by pack and then by offset + * + * @param ctx + * write context + * @throws IOException + * erorr writing to the output stream + */ + private void writeRidx(WriteContext ctx) throws IOException { + Map<Integer, List<OffsetPosition>> packOffsets = new HashMap<>( + ctx.data.getPackCount()); + // TODO(ifrade): Brute force solution loading all offsets/packs in + // memory. We could also iterate reverse indexes looking up + // their position in the midx (and discarding if the pack doesn't + // match). + Iterator<MidxMutableEntry> iterator = ctx.data.bySha1Iterator(); + int midxPosition = 0; + while (iterator.hasNext()) { + MidxMutableEntry e = iterator.next(); + OffsetPosition op = new OffsetPosition(e.getOffset(), midxPosition); + midxPosition++; + packOffsets.computeIfAbsent(Integer.valueOf(e.getPackId()), + k -> new ArrayList<>()).add(op); + } + + for (int i = 0; i < ctx.data.getPackCount(); i++) { + List<OffsetPosition> offsetsForPack = packOffsets + .get(Integer.valueOf(i)); + if (offsetsForPack == null) { + continue; + } + offsetsForPack.sort(Comparator.comparing(OffsetPosition::offset)); + byte[] ridxForPack = new byte[4 * offsetsForPack.size()]; + for (int j = 0; j < offsetsForPack.size(); j++) { + NB.encodeInt32(ridxForPack, j * 4, + offsetsForPack.get(j).position); + } + ctx.out.write(ridxForPack); + } + } + + /** + * Write the large offset chunk + * <p> + * A list of large offsets (long). The regular offset chunk will point to a + * position here. + * + * @param ctx + * writer context + * @throws IOException + * error writing to the output stream + */ + private void writeObjectLargeOffsets(WriteContext ctx) throws IOException { + ctx.out.write(ctx.largeOffsets.offsets, 0, + ctx.largeOffsets.bytePosition); + } + + /** + * Write the list of packfiles chunk + * <p> + * List of packfiles (in lexicographical order) with an \0 at the end + * + * @param ctx + * writer context + * @throws IOException + * error writing to the output stream + */ + private void writePackfileNames(WriteContext ctx) throws IOException { + for (String packName : ctx.data.getPackNames()) { + // Spec doesn't talk about encoding. 
+ ctx.out.write(packName.getBytes(StandardCharsets.UTF_8)); + ctx.out.write(0); + } + } + + /** + * Write final checksum of the data written to the stream + * + * @param out + * output stream used to write + * @throws IOException + * error writing to the output stream + */ + private void writeCheckSum(CancellableDigestOutputStream out) + throws IOException { + out.write(out.getDigest()); + out.flush(); + } + + private record OffsetPosition(long offset, int position) { + } + + /** + * If there is at least one offset value larger than 2^32-1, then the large + * offset chunk must exist, and offsets larger than 2^31-1 must be stored in + * it instead + * + * @param offset + * object offset + * + * @return true if the offset fits in 31 bits + */ + private static boolean fitsIn31bits(long offset) { + return offset <= LIMIT_31_BITS; + } + + private static class LargeOffsets { + private final byte[] offsets; + + private int bytePosition; + + LargeOffsets(int largeOffsetsCount) { + offsets = new byte[largeOffsetsCount * 8]; + bytePosition = 0; + } + + /** + * Add an offset to the large offset chunk + * + * @param largeOffset + * a large offset + * @return the position of the just inserted offset (as in number of + * offsets, NOT in bytes) + */ + int append(long largeOffset) { + int at = bytePosition; + NB.encodeInt64(offsets, at, largeOffset); + bytePosition += 8; + return at / 8; + } + } + + private record ChunkHeader(int chunkId, long size, ChunkWriter writerFn) { + } + + @FunctionalInterface + private interface ChunkWriter { + void write(WriteContext ctx) throws IOException; + } + + private static class WriteContext { + final CancellableDigestOutputStream out; + + final PackIndexMerger data; + + final LargeOffsets largeOffsets; + + WriteContext(CancellableDigestOutputStream out, PackIndexMerger data) { + this.out = out; + this.data = data; + this.largeOffsets = new LargeOffsets( + data.getOffsetsOver31BitsCount()); + } + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/PackIndexMerger.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/PackIndexMerger.java new file mode 100644 index 0000000000..f23665849e --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/midx/PackIndexMerger.java @@ -0,0 +1,337 @@ +/* + * Copyright (C) 2025, Google LLC + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.internal.storage.midx; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.stream.Collectors; + +import org.eclipse.jgit.internal.storage.file.PackIndex; +import org.eclipse.jgit.lib.AnyObjectId; +import org.eclipse.jgit.lib.MutableObjectId; + +/** + * Collect the stats and offers an iterator over the union of n-pack indexes. + * <p> + * The multipack index is a list of (sha1, packid, offset) ordered by sha1. We + * can build it from the individual pack indexes (sha1, offset) ordered by sha1, + * with a simple merge ignoring duplicates. + * <p> + * This class encapsulates the merging logic and precalculates the stats that + * the index needs (like total count of objects). To limit memory consumption, + * it does the merge as it goes during the iteration and iterators use mutable + * entries. 
+ * The stats of the combined index are calculated in an iteration at + * construction time. + */ +class PackIndexMerger { + + private static final int LIMIT_31_BITS = (1 << 31) - 1; + + private static final long LIMIT_32_BITS = (1L << 32) - 1; + + /** + * Object returned by the iterator. + * <p> + * The iterator returns (on each next()) the same instance with different + * values, to avoid allocating many short-lived objects. Callers should not + * keep a reference to that returned value. + */ + static class MidxMutableEntry { + // The object id + private final MutableObjectId oid = new MutableObjectId(); + + // Position of the pack in the ordered list of packs in this merger + private int packId; + + // Offset in its pack + private long offset; + + public AnyObjectId getObjectId() { + return oid; + } + + public int getPackId() { + return packId; + } + + public long getOffset() { + return offset; + } + + /** + * Copy values from another mutable entry + * + * @param packId + * packId + * @param other + * another mutable entry + */ + private void fill(int packId, PackIndex.MutableEntry other) { + other.copyOidTo(oid); + this.packId = packId; + this.offset = other.getOffset(); + } + } + + private final List<String> packNames; + + private final List<PackIndex> indexes; + + private final boolean needsLargeOffsetsChunk; + + private final int offsetsOver31BitsCount; + + private final int uniqueObjectCount; + + PackIndexMerger(Map<String, PackIndex> packs) { + this.packNames = packs.keySet().stream().sorted() + .collect(Collectors.toUnmodifiableList()); + + this.indexes = packNames.stream().map(packs::get) + .collect(Collectors.toUnmodifiableList()); + + // Iterate once, skipping duplicates, to collect the stats + int objectCount = 0; + boolean hasLargeOffsets = false; + int over31bits = 0; + MutableObjectId lastSeen = new MutableObjectId(); + MultiIndexIterator it = new MultiIndexIterator(indexes); + while (it.hasNext()) { + MidxMutableEntry entry = it.next(); + if (lastSeen.equals(entry.oid)) { + continue; + } + // If there is at least one offset value larger than 2^32-1, then + // the large offset chunk must exist, and offsets larger than + // 2^31-1 must be stored in it instead + if (entry.offset > LIMIT_32_BITS) { + hasLargeOffsets = true; + } + if (entry.offset > LIMIT_31_BITS) { + over31bits++; + } + + lastSeen.fromObjectId(entry.oid); + objectCount++; + } + uniqueObjectCount = objectCount; + offsetsOver31BitsCount = over31bits; + needsLargeOffsetsChunk = hasLargeOffsets; + } + + /** + * Object count of the merged index (i.e. without duplicates) + * + * @return object count of the merged index + */ + int getUniqueObjectCount() { + return uniqueObjectCount; + } + + /** + * Whether any object in any of the indexes has an offset over 2^32-1 + * + * @return true if there is any object with offset > 2^32-1 + */ + boolean needsLargeOffsetsChunk() { + return needsLargeOffsetsChunk; + } + + /** + * How many objects have offsets over 2^31-1 + * <p> + * Per the multipack index spec, if the large offset chunk exists, all these + * offsets must be stored in it. + * + * @return number of objects with offsets over 2^31-1 + */ + int getOffsetsOver31BitsCount() { + return offsetsOver31BitsCount; + } + + /** + * List of pack names in alphabetical order. + * <p> + * Order matters: this is the order used to prioritize duplicates during the + * merge, so in case of duplicates the multipack index points to the first + * pack in this list that contains the object. + * + * @return List of pack names, in the order used by the merge. 
+ */ + List<String> getPackNames() { + return packNames; + } + + /** + * How many packs are being merged + * + * @return count of packs merged + */ + int getPackCount() { + return packNames.size(); + } + + /** + * Iterator over the merged indexes in sha1 order without duplicates + * <p> + * The returned entry in the iterator is mutable; callers should NOT keep a + * reference to it. + * + * @return an iterator in sha1 order without duplicates. + */ + Iterator<MidxMutableEntry> bySha1Iterator() { + return new DedupMultiIndexIterator(new MultiIndexIterator(indexes), + getUniqueObjectCount()); + } + + /** + * For testing. Iterate all entries, not skipping duplicates (stable order) + * + * @return an iterator of all objects in sha1 order, including duplicates. + */ + Iterator<MidxMutableEntry> rawIterator() { + return new MultiIndexIterator(indexes); + } + + /** + * Iterator over n-indexes in ObjectId order. + * <p> + * It returns duplicates if the same object id is in different indexes. Wrap + * it with {@link DedupMultiIndexIterator} to avoid + * duplicates. + */ + private static final class MultiIndexIterator + implements Iterator<MidxMutableEntry> { + + private final List<PackIndexPeekIterator> indexIterators; + + private final MidxMutableEntry mutableEntry = new MidxMutableEntry(); + + MultiIndexIterator(List<PackIndex> indexes) { + this.indexIterators = new ArrayList<>(indexes.size()); + for (int i = 0; i < indexes.size(); i++) { + PackIndexPeekIterator it = new PackIndexPeekIterator(i, + indexes.get(i)); + // Position the iterator on its first element + if (it.next() != null) { + indexIterators.add(it); + } + } + } + + @Override + public boolean hasNext() { + return !indexIterators.isEmpty(); + } + + @Override + public MidxMutableEntry next() { + PackIndexPeekIterator winner = null; + for (int index = 0; index < indexIterators.size(); index++) { + PackIndexPeekIterator current = indexIterators.get(index); + if (winner == null + || current.peek().compareBySha1To(winner.peek()) < 0) { + winner = current; + } + } + + if (winner == null) { + throw new NoSuchElementException(); + } + + mutableEntry.fill(winner.getPackId(), winner.peek()); + if (winner.next() == null) { + indexIterators.remove(winner); + } + return mutableEntry; + } + } + + private static class DedupMultiIndexIterator + implements Iterator<MidxMutableEntry> { + private final MultiIndexIterator src; + + private int remaining; + + private final MutableObjectId lastOid = new MutableObjectId(); + + DedupMultiIndexIterator(MultiIndexIterator src, int totalCount) { + this.src = src; + this.remaining = totalCount; + } + + @Override + public boolean hasNext() { + return remaining > 0; + } + + @Override + public MidxMutableEntry next() { + MidxMutableEntry next = src.next(); + while (next != null && lastOid.equals(next.oid)) { + next = src.next(); + } + + if (next == null) { + throw new NoSuchElementException(); + } + + lastOid.fromObjectId(next.oid); + remaining--; + return next; + } + } + + /** + * Convenience around the PackIndex iterator to read the current value + * multiple times without consuming it. + * <p> + * This is used to merge indexes in the multipack index, where we need to + * compare the current value between indexes multiple times to find the + * next. + * <p> + * We could also implement this keeping the position (int) and + * MutableEntry#getObjectId, but that would create an ObjectId per entry. + * This implementation reuses the MutableEntry and avoids instantiations. 
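+ * <p>
+ * Sketch of the peek/next protocol (caller code is illustrative only):
+ *
+ * <pre>
+ * PackIndexPeekIterator it = new PackIndexPeekIterator(0, index);
+ * while (it.next() != null) {
+ * 	PackIndex.MutableEntry current = it.peek(); // same value until next()
+ * 	// compare "current" against the peek() of the other pack iterators
+ * }
+ * </pre>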
+ */ + // Visible for testing + static class PackIndexPeekIterator { + private final Iterator<PackIndex.MutableEntry> it; + + private final int packId; + + PackIndex.MutableEntry current; + + PackIndexPeekIterator(int packId, PackIndex index) { + it = index.iterator(); + this.packId = packId; + } + + PackIndex.MutableEntry next() { + if (it.hasNext()) { + current = it.next(); + } else { + current = null; + } + return current; + } + + PackIndex.MutableEntry peek() { + return current; + } + + int getPackId() { + return packId; + } + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java index e6daaeaca9..d5bb5f2e2f 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java @@ -36,7 +36,10 @@ public enum PackExt { COMMIT_GRAPH("graph"), //$NON-NLS-1$ /** An object size index. */ - OBJECT_SIZE_INDEX("objsize"); //$NON-NLS-1$ + OBJECT_SIZE_INDEX("objsize"), //$NON-NLS-1$ + + /** Multi pack index */ + MULTI_PACK_INDEX("midx"); //$NON-NLS-1$ private final String ext; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java index dabc1f0c5f..bf87c4c9d6 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackWriterBitmapPreparer.java @@ -14,6 +14,7 @@ import static org.eclipse.jgit.internal.storage.file.PackBitmapIndex.FLAG_REUSE; import static org.eclipse.jgit.revwalk.RevFlag.SEEN; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -28,16 +29,16 @@ import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.internal.JGitText; import org.eclipse.jgit.internal.revwalk.AddUnseenToBitmapFilter; import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl; +import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl.CompressedBitmap; import org.eclipse.jgit.internal.storage.file.PackBitmapIndex; import org.eclipse.jgit.internal.storage.file.PackBitmapIndexBuilder; import org.eclipse.jgit.internal.storage.file.PackBitmapIndexRemapper; -import org.eclipse.jgit.internal.storage.file.BitmapIndexImpl.CompressedBitmap; import org.eclipse.jgit.lib.AnyObjectId; +import org.eclipse.jgit.lib.BitmapIndex.BitmapBuilder; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectReader; import org.eclipse.jgit.lib.ProgressMonitor; -import org.eclipse.jgit.lib.BitmapIndex.BitmapBuilder; import org.eclipse.jgit.revwalk.BitmapWalker; import org.eclipse.jgit.revwalk.ObjectWalk; import org.eclipse.jgit.revwalk.RevCommit; @@ -99,10 +100,10 @@ class PackWriterBitmapPreparer { this.excessiveBranchCount = config.getBitmapExcessiveBranchCount(); this.excessiveBranchTipCount = Math.max(excessiveBranchCount, config.getBitmapExcessiveBranchTipCount()); - long now = SystemReader.getInstance().getCurrentTime(); + Instant now = SystemReader.getInstance().now(); long ageInSeconds = (long) config.getBitmapInactiveBranchAgeInDays() * DAY_IN_SECONDS; - this.inactiveBranchTimestamp = (now / 1000) - ageInSeconds; + this.inactiveBranchTimestamp = now.getEpochSecond() - ageInSeconds; } /** diff --git 
a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java index d07713db8e..e9ff02700d 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java @@ -32,6 +32,8 @@ import static org.eclipse.jgit.lib.Ref.Storage.PACKED; import java.io.IOException; import java.nio.ByteBuffer; +import java.time.Instant; +import java.time.ZoneOffset; import java.util.Arrays; import java.util.zip.DataFormatException; import java.util.zip.Inflater; @@ -245,9 +247,9 @@ class BlockReader { private PersonIdent readPersonIdent() { String name = readValueString(); String email = readValueString(); - long ms = readVarint64() * 1000; - int tz = readInt16(); - return new PersonIdent(name, email, ms, tz); + long epochSeconds = readVarint64(); + ZoneOffset tz = ZoneOffset.ofTotalSeconds(readInt16() * 60); + return new PersonIdent(name, email, Instant.ofEpochSecond(epochSeconds), tz); } void readBlock(BlockSource src, long pos, int fileBlockSize) diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java index c70f2e4914..0ddfa5798a 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java @@ -524,8 +524,8 @@ class BlockWriter { this.oldId = oldId; this.newId = newId; - this.timeSecs = who.getWhen().getTime() / 1000L; - this.tz = (short) who.getTimeZoneOffset(); + this.timeSecs = who.getWhenAsInstant().getEpochSecond(); + this.tz = (short) (who.getZoneOffset().getTotalSeconds() / 60); this.name = who.getName().getBytes(UTF_8); this.email = who.getEmailAddress().getBytes(UTF_8); this.msg = message.getBytes(UTF_8); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java index 3c4bc75792..7e5f4ebbd4 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java @@ -12,6 +12,7 @@ package org.eclipse.jgit.internal.storage.reftable; import java.io.IOException; import java.io.OutputStream; +import java.time.Instant; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.List; @@ -28,20 +29,26 @@ import org.eclipse.jgit.lib.ReflogEntry; * to shadow any lower reftable that may have the reference present. * <p> * By default all log entries within the range defined by - * {@link #setReflogExpireMinUpdateIndex(long)} and {@link #setReflogExpireMaxUpdateIndex(long)} are - * copied, even if no references in the output file match the log records. - * Callers may truncate the log to a more recent time horizon with - * {@link #setReflogExpireOldestReflogTimeMillis(long)}, or disable the log altogether with - * {@code setOldestReflogTimeMillis(Long.MAX_VALUE)}. + * {@link #setReflogExpireMinUpdateIndex(long)} and + * {@link #setReflogExpireMaxUpdateIndex(long)} are copied, even if no + * references in the output file match the log records. 
Callers may truncate the + * log to a more recent time horizon with + * {@link #setReflogExpireOlderThan(Instant)}, or disable the log + * altogether with {@code setReflogExpireOldestReflogTime(Instant.MAX)}. */ public class ReftableCompactor { private final ReftableWriter writer; + private final ArrayDeque<ReftableReader> tables = new ArrayDeque<>(); private boolean includeDeletes; + private long reflogExpireMinUpdateIndex = 0; + private long reflogExpireMaxUpdateIndex = Long.MAX_VALUE; - private long reflogExpireOldestReflogTimeMillis; + + private Instant reflogExpireOldestReflogTime = Instant.EPOCH; + private Stats stats; /** @@ -122,9 +129,29 @@ public class ReftableCompactor { * entries that predate {@code timeMillis} will be discarded. * Specified in Java standard milliseconds since the epoch. * @return {@code this} + * + * @deprecated Use {@link #setReflogExpireOlderThan(Instant)} instead + */ + @Deprecated(since="7.3") + public ReftableCompactor setReflogExpireOldestReflogTimeMillis( + long timeMillis) { + return setReflogExpireOlderThan(timeMillis == Long.MAX_VALUE + ? Instant.MAX + : Instant.ofEpochMilli(timeMillis)); + } + + /** + * Set oldest reflog time to preserve. + * + * @param cutTime + * oldest log time to preserve. Entries whose timestamps are + * {@code >= cutTime} will be copied into the output file. Log + * entries that predate {@code cutTime} will be discarded. + * @return {@code this} */ - public ReftableCompactor setReflogExpireOldestReflogTimeMillis(long timeMillis) { - reflogExpireOldestReflogTimeMillis = timeMillis; + public ReftableCompactor setReflogExpireOlderThan( + Instant cutTime) { + reflogExpireOldestReflogTime = cutTime; return this; } @@ -182,14 +209,15 @@ public class ReftableCompactor { } private void mergeLogs(MergedReftable mr) throws IOException { - if (reflogExpireOldestReflogTimeMillis == Long.MAX_VALUE) { + if (reflogExpireOldestReflogTime == Instant.MAX) { return; } try (LogCursor lc = mr.allLogs()) { while (lc.next()) { long updateIndex = lc.getUpdateIndex(); - if (updateIndex > reflogExpireMaxUpdateIndex || updateIndex < reflogExpireMinUpdateIndex) { + if (updateIndex > reflogExpireMaxUpdateIndex + || updateIndex < reflogExpireMinUpdateIndex) { continue; } @@ -203,14 +231,9 @@ public class ReftableCompactor { } PersonIdent who = log.getWho(); - if (who.getWhen().getTime() >= reflogExpireOldestReflogTimeMillis) { - writer.writeLog( - refName, - updateIndex, - who, - log.getOldId(), - log.getNewId(), - log.getComment()); + if (who.getWhenAsInstant().compareTo(reflogExpireOldestReflogTime) >= 0) { + writer.writeLog(refName, updateIndex, who, log.getOldId(), + log.getNewId(), log.getComment()); } } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BranchConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BranchConfig.java index e15c7af932..7921052aaa 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BranchConfig.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BranchConfig.java @@ -187,8 +187,7 @@ public class BranchConfig { * @since 4.5 */ public BranchRebaseMode getRebaseMode() { - return config.getEnum(BranchRebaseMode.values(), - ConfigConstants.CONFIG_BRANCH_SECTION, branchName, + return config.getEnum(ConfigConstants.CONFIG_BRANCH_SECTION, branchName, ConfigConstants.CONFIG_KEY_REBASE, BranchRebaseMode.NONE); } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CommitConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CommitConfig.java index f701a41d67..b1ba5dfa28 100644 --- 
a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CommitConfig.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CommitConfig.java @@ -119,7 +119,7 @@ public class CommitConfig { if (!StringUtils.isEmptyOrNull(comment)) { if ("auto".equalsIgnoreCase(comment)) { //$NON-NLS-1$ autoCommentChar = true; - } else { + } else if (comment != null) { char first = comment.charAt(0); if (first > ' ' && first < 127) { commentCharacter = first; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Config.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Config.java index 07c5fa4500..345cb22f80 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Config.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Config.java @@ -34,6 +34,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import org.eclipse.jgit.annotations.NonNull; +import org.eclipse.jgit.annotations.Nullable; import org.eclipse.jgit.errors.ConfigInvalidException; import org.eclipse.jgit.events.ConfigChangedEvent; import org.eclipse.jgit.events.ConfigChangedListener; @@ -254,9 +255,8 @@ public class Config { * default value to return if no value was present. * @return an integer value from the configuration, or defaultValue. */ - public int getInt(final String section, final String name, - final int defaultValue) { - return typedGetter.getInt(this, section, null, name, defaultValue); + public int getInt(String section, String name, int defaultValue) { + return getInt(section, null, name, defaultValue); } /** @@ -264,6 +264,23 @@ public class Config { * * @param section * section the key is grouped within. + * @param name + * name of the key to get. + * @return an integer value from the configuration, or {@code null} if not + * set. + * @since 7.2 + */ + @Nullable + public Integer getInt(String section, String name) { + return getInt(section, null, name); + } + + + /** + * Obtain an integer value from the configuration. + * + * @param section + * section the key is grouped within. * @param subsection * subsection name, such a remote or branch name. * @param name @@ -272,10 +289,30 @@ public class Config { * default value to return if no value was present. * @return an integer value from the configuration, or defaultValue. */ - public int getInt(final String section, String subsection, - final String name, final int defaultValue) { + public int getInt(String section, String subsection, String name, + int defaultValue) { + Integer v = typedGetter.getInt(this, section, subsection, name, + Integer.valueOf(defaultValue)); + return v == null ? defaultValue : v.intValue(); + } + + /** + * Obtain an integer value from the configuration. + * + * @param section + * section the key is grouped within. + * @param subsection + * subsection name, such a remote or branch name. + * @param name + * name of the key to get. + * @return an integer value from the configuration, or {@code null} if not + * set. 
+ * @since 7.2 + */ + @Nullable + public Integer getInt(String section, String subsection, String name) { return typedGetter.getInt(this, section, subsection, name, - defaultValue); + null); } /** @@ -297,8 +334,30 @@ public class Config { */ public int getIntInRange(String section, String name, int minValue, int maxValue, int defaultValue) { - return typedGetter.getIntInRange(this, section, null, name, minValue, - maxValue, defaultValue); + return getIntInRange(section, null, name, + minValue, maxValue, defaultValue); + } + + /** + * Obtain an integer value from the configuration which must be inside given + * range. + * + * @param section + * section the key is grouped within. + * @param name + * name of the key to get. + * @param minValue + * minimum value + * @param maxValue + * maximum value + * @return an integer value from the configuration, or {@code null} if not + * set. + * @since 7.2 + */ + @Nullable + public Integer getIntInRange(String section, String name, int minValue, + int maxValue) { + return getIntInRange(section, null, name, minValue, maxValue); } /** @@ -322,8 +381,34 @@ public class Config { */ public int getIntInRange(String section, String subsection, String name, int minValue, int maxValue, int defaultValue) { + Integer v = typedGetter.getIntInRange(this, section, subsection, name, + minValue, maxValue, Integer.valueOf(defaultValue)); + return v == null ? defaultValue : v.intValue(); + } + + /** + * Obtain an integer value from the configuration which must be inside given + * range. + * + * @param section + * section the key is grouped within. + * @param subsection + * subsection name, such a remote or branch name. + * @param name + * name of the key to get. + * @param minValue + * minimum value + * @param maxValue + * maximum value + * @return an integer value from the configuration, or {@code null} if not + * set. + * @since 7.2 + */ + @Nullable + public Integer getIntInRange(String section, String subsection, String name, + int minValue, int maxValue) { return typedGetter.getIntInRange(this, section, subsection, name, - minValue, maxValue, defaultValue); + minValue, maxValue, null); } /** @@ -338,7 +423,23 @@ public class Config { * @return an integer value from the configuration, or defaultValue. */ public long getLong(String section, String name, long defaultValue) { - return typedGetter.getLong(this, section, null, name, defaultValue); + return getLong(section, null, name, defaultValue); + } + + /** + * Obtain an integer value from the configuration. + * + * @param section + * section the key is grouped within. + * @param name + * name of the key to get. + * @return an integer value from the configuration, or {@code null} if not + * set. + * @since 7.2 + */ + @Nullable + public Long getLong(String section, String name) { + return getLong(section, null, name); } /** @@ -355,9 +456,28 @@ public class Config { * @return an integer value from the configuration, or defaultValue. */ public long getLong(final String section, String subsection, - final String name, final long defaultValue) { - return typedGetter.getLong(this, section, subsection, name, - defaultValue); + String name, long defaultValue) { + Long v = typedGetter.getLong(this, section, subsection, name, + Long.valueOf(defaultValue)); + return v == null ? defaultValue : v.longValue(); + } + + /** + * Obtain an integer value from the configuration. + * + * @param section + * section the key is grouped within. + * @param subsection + * subsection name, such a remote or branch name. 
+ * @param name + * name of the key to get. + * @return an integer value from the configuration, or {@code null} if not + * set. + * @since 7.2 + */ + @Nullable + public Long getLong(String section, String subsection, String name) { + return typedGetter.getLong(this, section, subsection, name, null); } /** @@ -372,9 +492,26 @@ public class Config { * @return true if any value or defaultValue is true, false for missing or * explicit false */ - public boolean getBoolean(final String section, final String name, - final boolean defaultValue) { - return typedGetter.getBoolean(this, section, null, name, defaultValue); + public boolean getBoolean(String section, String name, + boolean defaultValue) { + Boolean v = typedGetter.getBoolean(this, section, null, name, + Boolean.valueOf(defaultValue)); + return v == null ? defaultValue : v.booleanValue(); + } + + /** + * Get a boolean value from the git config + * + * @param section + * section the key is grouped within. + * @param name + * name of the key to get. + * @return configured boolean value, or {@code null} if not set. + * @since 7.2 + */ + @Nullable + public Boolean getBoolean(String section, String name) { + return getBoolean(section, null, name); } /** @@ -391,10 +528,28 @@ public class Config { * @return true if any value or defaultValue is true, false for missing or * explicit false */ - public boolean getBoolean(final String section, String subsection, - final String name, final boolean defaultValue) { - return typedGetter.getBoolean(this, section, subsection, name, - defaultValue); + public boolean getBoolean(String section, String subsection, String name, + boolean defaultValue) { + Boolean v = typedGetter.getBoolean(this, section, subsection, name, + Boolean.valueOf(defaultValue)); + return v == null ? defaultValue : v.booleanValue(); + } + + /** + * Get a boolean value from the git config + * + * @param section + * section the key is grouped within. + * @param subsection + * subsection name, such a remote or branch name. + * @param name + * name of the key to get. + * @return configured boolean value, or {@code null} if not set. + * @since 7.2 + */ + @Nullable + public Boolean getBoolean(String section, String subsection, String name) { + return typedGetter.getBoolean(this, section, subsection, name, null); } /** @@ -412,8 +567,8 @@ public class Config { * default value to return if no value was present. * @return the selected enumeration value, or {@code defaultValue}. */ - public <T extends Enum<?>> T getEnum(final String section, - final String subsection, final String name, final T defaultValue) { + public <T extends Enum<?>> T getEnum(String section, String subsection, + String name, @NonNull T defaultValue) { final T[] all = allValuesOf(defaultValue); return typedGetter.getEnum(this, all, section, subsection, name, defaultValue); @@ -448,14 +603,41 @@ public class Config { * @param defaultValue * default value to return if no value was present. * @return the selected enumeration value, or {@code defaultValue}. + * @deprecated use {@link #getEnum(String, String, String, Enum)} or + * {{@link #getEnum(Enum[], String, String, String)}} instead. 
*/ - public <T extends Enum<?>> T getEnum(final T[] all, final String section, - final String subsection, final String name, final T defaultValue) { + @Nullable + @Deprecated + public <T extends Enum<?>> T getEnum(T[] all, String section, + String subsection, String name, @Nullable T defaultValue) { return typedGetter.getEnum(this, all, section, subsection, name, defaultValue); } /** + * Parse an enumeration from the configuration. + * + * @param <T> + * type of the returned enum + * @param all + * all possible values in the enumeration which should be + * recognized. Typically {@code EnumType.values()}. + * @param section + * section the key is grouped within. + * @param subsection + * subsection name, such a remote or branch name. + * @param name + * name of the key to get. + * @return the selected enumeration value, or {@code null} if not set. + * @since 7.2 + */ + @Nullable + public <T extends Enum<?>> T getEnum(T[] all, String section, + String subsection, String name) { + return typedGetter.getEnum(this, all, section, subsection, name, null); + } + + /** * Get string value or null if not found. * * @param section @@ -466,8 +648,8 @@ public class Config { * the key name * @return a String value from the config, <code>null</code> if not found */ - public String getString(final String section, String subsection, - final String name) { + @Nullable + public String getString(String section, String subsection, String name) { return getRawString(section, subsection, name); } @@ -526,8 +708,34 @@ public class Config { */ public long getTimeUnit(String section, String subsection, String name, long defaultValue, TimeUnit wantUnit) { + Long v = typedGetter.getTimeUnit(this, section, subsection, name, + Long.valueOf(defaultValue), wantUnit); + return v == null ? defaultValue : v.longValue(); + + } + + /** + * Parse a numerical time unit, such as "1 minute", from the configuration. + * + * @param section + * section the key is in. + * @param subsection + * subsection the key is in, or null if not in a subsection. + * @param name + * the key name. + * @param wantUnit + * the units of {@code defaultValue} and the return value, as + * well as the units to assume if the value does not contain an + * indication of the units. + * @return the value, or {@code null} if not set, expressed in + * {@code units}. 
+ * @since 7.2 + */ + @Nullable + public Long getTimeUnit(String section, String subsection, String name, + TimeUnit wantUnit) { return typedGetter.getTimeUnit(this, section, subsection, name, - defaultValue, wantUnit); + null, wantUnit); } /** @@ -555,8 +763,9 @@ * @return the {@link Path}, or {@code defaultValue} if not set * @since 5.10 */ + @Nullable public Path getPath(String section, String subsection, String name, - @NonNull FS fs, File resolveAgainst, Path defaultValue) { + @NonNull FS fs, File resolveAgainst, @Nullable Path defaultValue) { return typedGetter.getPath(this, section, subsection, name, fs, resolveAgainst, defaultValue); } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java index a57f1b714a..c4550329d3 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java @@ -446,6 +446,13 @@ public final class ConfigConstants { /** The "rebase" key */ public static final String CONFIG_KEY_REBASE = "rebase"; + /** + * The "checkout" key + * + * @since 7.2 + */ + public static final String CONFIG_KEY_CHECKOUT = "checkout"; + /** The "url" key */ public static final String CONFIG_KEY_URL = "url"; @@ -593,11 +600,21 @@ /** * The "trustfolderstat" key in the "core" section + * * @since 3.6 + * @deprecated use {@link #CONFIG_KEY_TRUST_STAT} instead */ + @Deprecated(since = "7.2", forRemoval = true) public static final String CONFIG_KEY_TRUSTFOLDERSTAT = "trustfolderstat"; /** + * The "truststat" key in the "core" section + * + * @since 7.2 + */ + public static final String CONFIG_KEY_TRUST_STAT = "truststat"; + + /** * The "supportsAtomicFileCreation" key in the "core" section * * @since 4.5 @@ -1016,6 +1033,27 @@ public static final String CONFIG_KEY_TRUST_LOOSE_REF_STAT = "trustLooseRefStat"; /** + * The "trustPackStat" key + * + * @since 7.2 + */ + public static final String CONFIG_KEY_TRUST_PACK_STAT = "trustPackStat"; + + /** + * The "trustLooseObjectStat" key + * + * @since 7.2 + */ + public static final String CONFIG_KEY_TRUST_LOOSE_OBJECT_STAT = "trustLooseObjectStat"; + + /** + * The "trustTablesListStat" key + * + * @since 7.2 + */ + public static final String CONFIG_KEY_TRUST_TABLESLIST_STAT = "trustTablesListStat"; + + /** * The "pack.preserveOldPacks" key * * @since 5.13.2 @@ -1063,4 +1101,18 @@ * @since 7.1 */ public static final String CONFIG_KEY_LOAD_REV_INDEX_IN_PARALLEL = "loadRevIndexInParallel"; + + /** + * The "reftable" section + * + * @since 7.2 + */ + public static final String CONFIG_REFTABLE_SECTION = "reftable"; + + /** + * The "autorefresh" key + * + * @since 7.2 + */ + public static final String CONFIG_KEY_AUTOREFRESH = "autorefresh"; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java index 997f4ed314..9de8392690 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java @@ -345,6 +345,15 @@ public final class Constants { public static final String XDG_CONFIG_HOME = "XDG_CONFIG_HOME"; /** + * The key of the XDG_CACHE_HOME directory defined in the + * <a href="https://wiki.archlinux.org/index.php/XDG_Base_Directory"> + * XDG Base Directory specification</a>. 
+ * + * @since 7.3 + */ + public static final String XDG_CACHE_HOME = "XDG_CACHE_HOME"; + + /** * The environment variable that limits how close to the root of the file * systems JGit will traverse when looking for a repository root. */ diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java index 49602a75eb..0e27b2743c 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java @@ -17,12 +17,16 @@ package org.eclipse.jgit.lib; import static java.util.zip.Deflater.DEFAULT_COMPRESSION; +import org.eclipse.jgit.internal.JGitText; import org.eclipse.jgit.lib.Config.SectionParser; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class keeps git repository core parameters. */ public class CoreConfig { + private static final Logger LOG = LoggerFactory.getLogger(CoreConfig.class); /** Key for {@link Config#get(SectionParser)}. */ public static final Config.SectionParser<CoreConfig> KEY = CoreConfig::new; @@ -127,7 +131,9 @@ public class CoreConfig { * Permissible values for {@code core.trustPackedRefsStat}. * * @since 6.1.1 + * @deprecated use {@link TrustStat} instead */ + @Deprecated(since = "7.2", forRemoval = true) public enum TrustPackedRefsStat { /** Do not trust file attributes of the packed-refs file. */ NEVER, @@ -135,12 +141,15 @@ public class CoreConfig { /** Trust file attributes of the packed-refs file. */ ALWAYS, - /** Open and close the packed-refs file to refresh its file attributes - * and then trust it. */ + /** + * Open and close the packed-refs file to refresh its file attributes + * and then trust it. + */ AFTER_OPEN, - /** {@code core.trustPackedRefsStat} defaults to this when it is - * not set */ + /** + * {@code core.trustPackedRefsStat} defaults to this when it is not set + */ UNSET } @@ -148,17 +157,44 @@ public class CoreConfig { * Permissible values for {@code core.trustLooseRefStat}. * * @since 6.9 + * @deprecated use {@link TrustStat} instead */ + @Deprecated(since = "7.2", forRemoval = true) public enum TrustLooseRefStat { /** Trust file attributes of the loose ref. */ ALWAYS, - /** Open and close parent directories of the loose ref file until the - * repository root to refresh its file attributes and then trust it. */ + /** + * Open and close parent directories of the loose ref file until the + * repository root to refresh its file attributes and then trust it. + */ AFTER_OPEN, } + /** + * Values for {@code core.trustXXX} options. + * + * @since 7.2 + */ + public enum TrustStat { + /** Do not trust file attributes of a File. */ + NEVER, + + /** Always trust file attributes of a File. */ + ALWAYS, + + /** Open and close the File to refresh its file attributes + * and then trust it. */ + AFTER_OPEN, + + /** + * Used for specific options to inherit value from value set for + * core.trustStat. 
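+ * <p>
+ * Illustrative sketch (assumes a repository {@link Config} named
+ * {@code rc}): with {@code core.trustStat = always} and
+ * {@code core.trustPackStat} unset or set to {@code inherit}, the
+ * pack-specific value resolves to {@code ALWAYS}.
+ *
+ * <pre>
+ * CoreConfig cc = rc.get(CoreConfig.KEY);
+ * CoreConfig.TrustStat packTrust = cc.getTrustPackStat(); // ALWAYS
+ * </pre>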
+ */ + INHERIT + } + private final int compression; private final int packIndexVersion; @@ -169,6 +205,18 @@ public class CoreConfig { private final boolean commitGraph; + private final TrustStat trustStat; + + private final TrustStat trustPackedRefsStat; + + private final TrustStat trustLooseRefStat; + + private final TrustStat trustPackStat; + + private final TrustStat trustLooseObjectStat; + + private final TrustStat trustTablesListStat; + /** * Options for symlink handling * @@ -198,7 +246,13 @@ public class CoreConfig { DOTGITONLY } - private CoreConfig(Config rc) { + /** + * Create a new core configuration from the passed configuration. + * + * @param rc + * git configuration + */ + CoreConfig(Config rc) { compression = rc.getInt(ConfigConstants.CONFIG_CORE_SECTION, ConfigConstants.CONFIG_KEY_COMPRESSION, DEFAULT_COMPRESSION); packIndexVersion = rc.getInt(ConfigConstants.CONFIG_PACK_SECTION, @@ -210,6 +264,68 @@ public class CoreConfig { commitGraph = rc.getBoolean(ConfigConstants.CONFIG_CORE_SECTION, ConfigConstants.CONFIG_COMMIT_GRAPH, DEFAULT_COMMIT_GRAPH_ENABLE); + + trustStat = parseTrustStat(rc); + trustPackedRefsStat = parseTrustPackedRefsStat(rc); + trustLooseRefStat = parseTrustLooseRefStat(rc); + trustPackStat = parseTrustPackFileStat(rc); + trustLooseObjectStat = parseTrustLooseObjectFileStat(rc); + trustTablesListStat = parseTablesListStat(rc); + } + + private static TrustStat parseTrustStat(Config rc) { + Boolean tfs = rc.getBoolean(ConfigConstants.CONFIG_CORE_SECTION, + ConfigConstants.CONFIG_KEY_TRUSTFOLDERSTAT); + TrustStat ts = rc.getEnum(TrustStat.values(), + ConfigConstants.CONFIG_CORE_SECTION, null, + ConfigConstants.CONFIG_KEY_TRUST_STAT); + if (tfs != null) { + if (ts == null) { + LOG.warn(JGitText.get().deprecatedTrustFolderStat); + return tfs.booleanValue() ? TrustStat.ALWAYS : TrustStat.NEVER; + } + LOG.warn(JGitText.get().precedenceTrustConfig); + } + if (ts == null) { + ts = TrustStat.ALWAYS; + } else if (ts == TrustStat.INHERIT) { + LOG.warn(JGitText.get().invalidTrustStat); + ts = TrustStat.ALWAYS; + } + return ts; + } + + private TrustStat parseTrustPackedRefsStat(Config rc) { + return inheritParseTrustStat(rc, + ConfigConstants.CONFIG_KEY_TRUST_PACKED_REFS_STAT); + } + + private TrustStat parseTrustLooseRefStat(Config rc) { + return inheritParseTrustStat(rc, + ConfigConstants.CONFIG_KEY_TRUST_LOOSE_REF_STAT); + } + + private TrustStat parseTrustPackFileStat(Config rc) { + return inheritParseTrustStat(rc, + ConfigConstants.CONFIG_KEY_TRUST_PACK_STAT); + } + + private TrustStat parseTrustLooseObjectFileStat(Config rc) { + return inheritParseTrustStat(rc, + ConfigConstants.CONFIG_KEY_TRUST_LOOSE_OBJECT_STAT); + } + + private TrustStat inheritParseTrustStat(Config rc, String key) { + TrustStat t = rc.getEnum(ConfigConstants.CONFIG_CORE_SECTION, null, key, + TrustStat.INHERIT); + return t == TrustStat.INHERIT ? trustStat : t; + } + + private TrustStat parseTablesListStat(Config rc) { + TrustStat t = rc.getEnum(ConfigConstants.CONFIG_CORE_SECTION, null, + ConfigConstants.CONFIG_KEY_TRUST_TABLESLIST_STAT, + TrustStat.INHERIT); + return t == TrustStat.INHERIT ? trustStat : t; } /** @@ -260,4 +376,70 @@ public class CoreConfig { public boolean enableCommitGraph() { return commitGraph; } + + /** + * Get how far we can trust file attributes of packed-refs file which is + * used to store {@link org.eclipse.jgit.lib.Ref}s in + * {@link org.eclipse.jgit.internal.storage.file.RefDirectory}. + * + * @return how far we can trust file attributes of packed-refs file. 
+ * + * @since 7.2 + */ + public TrustStat getTrustPackedRefsStat() { + return trustPackedRefsStat; + } + + /** + * Get how far we can trust file attributes of loose ref files which are + * used to store {@link org.eclipse.jgit.lib.Ref}s in + * {@link org.eclipse.jgit.internal.storage.file.RefDirectory}. + * + * @return how far we can trust file attributes of loose ref files. + * + * @since 7.2 + */ + public TrustStat getTrustLooseRefStat() { + return trustLooseRefStat; + } + + /** + * Get how far we can trust file attributes of pack files which store + * objects in the repository's object database. + * + * @return how far we can trust file attributes of pack files. + * + * @since 7.2 + */ + public TrustStat getTrustPackStat() { + return trustPackStat; + } + + /** + * Get how far we can trust file attributes of loose object files in the + * repository's object database. + * + * @return how far we can trust file attributes of loose object files. + * + * @since 7.2 + */ + public TrustStat getTrustLooseObjectStat() { + return trustLooseObjectStat; + } + + /** + * Get how far we can trust file attributes of the "tables.list" file which + * is used to store the list of filenames of the files storing + * {@link org.eclipse.jgit.internal.storage.reftable.Reftable}s in + * {@link org.eclipse.jgit.internal.storage.file.FileReftableDatabase}. + * + * @return how far we can trust file attributes of the "tables.list" file. + * + * @since 7.2 + */ + public TrustStat getTrustTablesListStat() { + return trustTablesListStat; + } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/DefaultTypedConfigGetter.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/DefaultTypedConfigGetter.java index a71549c92e..3059f283fe 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/DefaultTypedConfigGetter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/DefaultTypedConfigGetter.java @@ -18,6 +18,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import org.eclipse.jgit.annotations.NonNull; +import org.eclipse.jgit.annotations.Nullable; import org.eclipse.jgit.internal.JGitText; import org.eclipse.jgit.lib.Config.ConfigEnum; import org.eclipse.jgit.transport.RefSpec; @@ -31,27 +32,37 @@ import org.eclipse.jgit.util.StringUtils; */ public class DefaultTypedConfigGetter implements TypedConfigGetter { + @SuppressWarnings("boxed") @Override public boolean getBoolean(Config config, String section, String subsection, String name, boolean defaultValue) { + return neverNull(getBoolean(config, section, subsection, name, + Boolean.valueOf(defaultValue))); + } + + @Nullable + @Override + public Boolean getBoolean(Config config, String section, String subsection, + String name, @Nullable Boolean defaultValue) { String n = config.getString(section, subsection, name); if (n == null) { return defaultValue; } if (Config.isMissing(n)) { - return true; + return Boolean.TRUE; } try { - return StringUtils.toBoolean(n); + return Boolean.valueOf(StringUtils.toBoolean(n)); } catch (IllegalArgumentException err) { throw new IllegalArgumentException(MessageFormat.format( JGitText.get().invalidBooleanValue, section, name, n), err); } } + @Nullable @Override public <T extends Enum<?>> T getEnum(Config config, T[] all, String section, - String subsection, String name, T defaultValue) { + String subsection, String name, @Nullable T defaultValue) { String value = 
config.getString(section, subsection, name); if (value == null) { return defaultValue; @@ -107,9 +118,27 @@ public class DefaultTypedConfigGetter implements TypedConfigGetter { @Override public int getInt(Config config, String section, String subsection, String name, int defaultValue) { - long val = config.getLong(section, subsection, name, defaultValue); + return neverNull(getInt(config, section, subsection, name, + Integer.valueOf(defaultValue))); + } + + @Nullable + @Override + @SuppressWarnings("boxing") + public Integer getInt(Config config, String section, String subsection, + String name, @Nullable Integer defaultValue) { + Long longDefault = defaultValue != null + ? Long.valueOf(defaultValue.longValue()) + : null; + Long val = config.getLong(section, subsection, name); + if (val == null) { + val = longDefault; + } + if (val == null) { + return null; + } if (Integer.MIN_VALUE <= val && val <= Integer.MAX_VALUE) { - return (int) val; + return Integer.valueOf(Math.toIntExact(val)); } throw new IllegalArgumentException(MessageFormat .format(JGitText.get().integerValueOutOfRange, section, name)); @@ -118,37 +147,56 @@ public class DefaultTypedConfigGetter implements TypedConfigGetter { @Override public int getIntInRange(Config config, String section, String subsection, String name, int minValue, int maxValue, int defaultValue) { - int val = getInt(config, section, subsection, name, defaultValue); + return neverNull(getIntInRange(config, section, subsection, name, + minValue, maxValue, Integer.valueOf(defaultValue))); + } + + @Override + @SuppressWarnings("boxing") + public Integer getIntInRange(Config config, String section, + String subsection, String name, int minValue, int maxValue, + Integer defaultValue) { + Integer val = getInt(config, section, subsection, name, defaultValue); + if (val == null) { + return null; + } if ((val >= minValue && val <= maxValue) || val == UNSET_INT) { return val; } if (subsection == null) { - throw new IllegalArgumentException(MessageFormat.format( - JGitText.get().integerValueNotInRange, section, name, - Integer.valueOf(val), Integer.valueOf(minValue), - Integer.valueOf(maxValue))); + throw new IllegalArgumentException( + MessageFormat.format(JGitText.get().integerValueNotInRange, + section, name, val, minValue, maxValue)); } throw new IllegalArgumentException(MessageFormat.format( JGitText.get().integerValueNotInRangeSubSection, section, - subsection, name, Integer.valueOf(val), - Integer.valueOf(minValue), Integer.valueOf(maxValue))); + subsection, name, val, minValue, maxValue)); } @Override public long getLong(Config config, String section, String subsection, String name, long defaultValue) { - final String str = config.getString(section, subsection, name); + return neverNull(getLong(config, section, subsection, name, + Long.valueOf(defaultValue))); + } + + @Nullable + @Override + public Long getLong(Config config, String section, String subsection, + String name, @Nullable Long defaultValue) { + String str = config.getString(section, subsection, name); if (str == null) { return defaultValue; } try { - return StringUtils.parseLongWithSuffix(str, false); + return Long.valueOf(StringUtils.parseLongWithSuffix(str, false)); } catch (StringIndexOutOfBoundsException e) { // Empty return defaultValue; } catch (NumberFormatException nfe) { - throw new IllegalArgumentException(MessageFormat.format( - JGitText.get().invalidIntegerValue, section, name, str), + throw new IllegalArgumentException( + MessageFormat.format(JGitText.get().invalidIntegerValue, + 
section, name, str), nfe); } } @@ -156,6 +204,13 @@ public class DefaultTypedConfigGetter implements TypedConfigGetter { @Override public long getTimeUnit(Config config, String section, String subsection, String name, long defaultValue, TimeUnit wantUnit) { + return neverNull(getTimeUnit(config, section, subsection, name, + Long.valueOf(defaultValue), wantUnit)); + } + + @Override + public Long getTimeUnit(Config config, String section, String subsection, + String name, @Nullable Long defaultValue, TimeUnit wantUnit) { String valueString = config.getString(section, subsection, name); if (valueString == null) { @@ -232,8 +287,8 @@ public class DefaultTypedConfigGetter implements TypedConfigGetter { } try { - return wantUnit.convert(Long.parseLong(digits) * inputMul, - inputUnit); + return Long.valueOf(wantUnit + .convert(Long.parseLong(digits) * inputMul, inputUnit)); } catch (NumberFormatException nfe) { IllegalArgumentException iae = notTimeUnit(section, subsection, unitName, valueString); @@ -274,4 +329,14 @@ public class DefaultTypedConfigGetter implements TypedConfigGetter { } return result; } + + // Trick for the checkers. When we use this, one is never null, but + // they don't know. + @NonNull + private static <T> T neverNull(T one) { + if (one == null) { + throw new IllegalArgumentException(); + } + return one; + } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java index 76ed36a6e5..23d16db39f 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java @@ -74,8 +74,7 @@ public class GpgConfig { * the config to read from */ public GpgConfig(Config config) { - keyFormat = config.getEnum(GpgFormat.values(), - ConfigConstants.CONFIG_GPG_SECTION, null, + keyFormat = config.getEnum(ConfigConstants.CONFIG_GPG_SECTION, null, ConfigConstants.CONFIG_KEY_FORMAT, GpgFormat.OPENPGP); signingKey = config.getString(ConfigConstants.CONFIG_USER_SECTION, null, ConfigConstants.CONFIG_KEY_SIGNINGKEY); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/PersonIdent.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/PersonIdent.java index f22642c4ce..50f4a83b93 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/PersonIdent.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/PersonIdent.java @@ -45,7 +45,9 @@ public class PersonIdent implements Serializable { * timezone offset as in {@link #getTimeZoneOffset()}. * @return time zone object for the given offset. * @since 4.1 + * @deprecated use {@link #getZoneId(int)} instead */ + @Deprecated(since = "7.2") public static TimeZone getTimeZone(int tzOffset) { StringBuilder tzId = new StringBuilder(8); tzId.append("GMT"); //$NON-NLS-1$ diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefDatabase.java index 09cb5a83dd..49d5224325 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefDatabase.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefDatabase.java @@ -356,6 +356,40 @@ public abstract class RefDatabase { } /** + * Get the reflog reader + * + * @param refName + * a {@link java.lang.String} object. + * @return a {@link org.eclipse.jgit.lib.ReflogReader} for the supplied + * refname, or {@code null} if the named ref does not exist. + * @throws java.io.IOException + * the ref could not be accessed. 
+ * @since 7.2 + */ + @Nullable + public ReflogReader getReflogReader(String refName) throws IOException { + Ref ref = exactRef(refName); + if (ref == null) { + return null; + } + return getReflogReader(ref); + } + + /** + * Get the reflog reader. + * + * @param ref + * a Ref + * @return a {@link org.eclipse.jgit.lib.ReflogReader} for the supplied ref. + * @throws IOException + * if an IO error occurred + * @since 7.2 + */ + @NonNull + public abstract ReflogReader getReflogReader(@NonNull Ref ref) + throws IOException; + + /** * Get a section of the reference namespace. * * @param prefix diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java index 0562840915..c9dc6da4ba 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java @@ -26,6 +26,8 @@ import java.io.IOException; import java.io.OutputStream; import java.io.UncheckedIOException; import java.net.URISyntaxException; +import java.nio.file.Files; +import java.nio.file.LinkOption; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; @@ -33,10 +35,12 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Pattern; import org.eclipse.jgit.annotations.NonNull; @@ -132,6 +136,8 @@ public abstract class Repository implements AutoCloseable { private final String initialBranch; + private final AtomicReference<Boolean> caseInsensitiveWorktree = new AtomicReference<>(); + /** * Initialize a new repository instance. * @@ -1577,6 +1583,40 @@ public abstract class Repository implements AutoCloseable { } /** + * Tells whether the work tree is on a case-insensitive file system. + * + * @return {@code true} if the work tree is case-insensitive; {@code false} + * otherwise + * @throws NoWorkTreeException + * if the repository is bare + * @since 7.2 + */ + public boolean isWorkTreeCaseInsensitive() throws NoWorkTreeException { + Boolean flag = caseInsensitiveWorktree.get(); + if (flag == null) { + File directory = getWorkTree(); + // See if we can find ".git" also as ".GIT". + File dotGit = new File(directory, Constants.DOT_GIT); + if (Files.exists(dotGit.toPath(), LinkOption.NOFOLLOW_LINKS)) { + dotGit = new File(directory, + Constants.DOT_GIT.toUpperCase(Locale.ROOT)); + flag = Boolean.valueOf(Files.exists(dotGit.toPath(), + LinkOption.NOFOLLOW_LINKS)); + } else { + // Fall back to a mostly sane default. On Mac, HFS+ and APFS + // partitions are case-insensitive by default but can be + // configured to be case-sensitive. + SystemReader system = SystemReader.getInstance(); + flag = Boolean.valueOf(system.isWindows() || system.isMacOS()); + } + if (!caseInsensitiveWorktree.compareAndSet(null, flag)) { + flag = caseInsensitiveWorktree.get(); + } + } + return flag.booleanValue(); + } + + /** * Force a scan for changed refs. Fires an IndexChangedEvent(false) if * changes are detected. * @@ -1692,10 +1732,13 @@ public abstract class Repository implements AutoCloseable { * @throws java.io.IOException * the ref could not be accessed. 
* @since 3.0 + * @deprecated use {@code #getRefDatabase().getReflogReader(String)} instead */ + @Deprecated(since = "7.2") @Nullable - public abstract ReflogReader getReflogReader(String refName) - throws IOException; + public ReflogReader getReflogReader(String refName) throws IOException { + return getRefDatabase().getReflogReader(refName); + } /** * Get the reflog reader. Subclasses should override this method and provide @@ -1703,15 +1746,17 @@ public abstract class Repository implements AutoCloseable { * * @param ref * a Ref - * @return a {@link org.eclipse.jgit.lib.ReflogReader} for the supplied ref, - * or {@code null} if the ref does not exist. + * @return a {@link org.eclipse.jgit.lib.ReflogReader} for the supplied ref. * @throws IOException * if an IO error occurred * @since 5.13.2 + * @deprecated use {@code #getRefDatabase().getReflogReader(Ref)} instead */ - public @Nullable ReflogReader getReflogReader(@NonNull Ref ref) + @Deprecated(since = "7.2") + @NonNull + public ReflogReader getReflogReader(@NonNull Ref ref) throws IOException { - return getReflogReader(ref.getName()); + return getRefDatabase().getReflogReader(ref); } /** diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/TypedConfigGetter.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/TypedConfigGetter.java index 0c03adcab8..3d4e0d1f3c 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/TypedConfigGetter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/TypedConfigGetter.java @@ -17,6 +17,7 @@ import java.util.List; import java.util.concurrent.TimeUnit; import org.eclipse.jgit.annotations.NonNull; +import org.eclipse.jgit.annotations.Nullable; import org.eclipse.jgit.transport.RefSpec; import org.eclipse.jgit.util.FS; @@ -50,11 +51,36 @@ public interface TypedConfigGetter { * default value to return if no value was present. * @return true if any value or defaultValue is true, false for missing or * explicit false + * @deprecated use + * {@link #getBoolean(Config, String, String, String, Boolean)} + * instead */ + @Deprecated boolean getBoolean(Config config, String section, String subsection, String name, boolean defaultValue); /** + * Get a boolean value from a git {@link Config}. + * + * @param config + * to get the value from + * @param section + * section the key is grouped within. + * @param subsection + * subsection name, such a remote or branch name. + * @param name + * name of the key to get. + * @param defaultValue + * default value to return if no value was present. + * @return true if any value or defaultValue is true, false for missing or + * explicit false + * @since 7.2 + */ + @Nullable + Boolean getBoolean(Config config, String section, String subsection, + String name, @Nullable Boolean defaultValue); + + /** * Parse an enumeration from a git {@link Config}. * * @param <T> @@ -74,8 +100,9 @@ public interface TypedConfigGetter { * default value to return if no value was present. * @return the selected enumeration value, or {@code defaultValue}. */ + @Nullable <T extends Enum<?>> T getEnum(Config config, T[] all, String section, - String subsection, String name, T defaultValue); + String subsection, String name, @Nullable T defaultValue); /** * Obtain an integer value from a git {@link Config}. @@ -91,11 +118,34 @@ public interface TypedConfigGetter { * @param defaultValue * default value to return if no value was present. * @return an integer value from the configuration, or defaultValue. 
+ * @deprecated use {@link #getInt(Config, String, String, String, Integer)} + * instead */ + @Deprecated int getInt(Config config, String section, String subsection, String name, int defaultValue); /** + * Obtain an integer value from a git {@link Config}. + * + * @param config + * to get the value from + * @param section + * section the key is grouped within. + * @param subsection + * subsection name, such a remote or branch name. + * @param name + * name of the key to get. + * @param defaultValue + * default value to return if no value was present. + * @return an integer value from the configuration, or defaultValue. + * @since 7.2 + */ + @Nullable + Integer getInt(Config config, String section, String subsection, + String name, @Nullable Integer defaultValue); + + /** * Obtain an integer value from a git {@link Config} which must be in given * range. * @@ -117,11 +167,43 @@ public interface TypedConfigGetter { * @return an integer value from the configuration, or defaultValue. * {@code #UNSET_INT} if unset. * @since 6.1 + * @deprecated use + * {@link #getIntInRange(Config, String, String, String, int, int, Integer)} + * instead */ + @Deprecated int getIntInRange(Config config, String section, String subsection, String name, int minValue, int maxValue, int defaultValue); /** + * Obtain an integer value from a git {@link Config} which must be in given + * range. + * + * @param config + * to get the value from + * @param section + * section the key is grouped within. + * @param subsection + * subsection name, such a remote or branch name. + * @param name + * name of the key to get. + * @param minValue + * minimal value + * @param maxValue + * maximum value + * @param defaultValue + * default value to return if no value was present. Use + * {@code #UNSET_INT} to set the default to unset. + * @return an integer value from the configuration, or defaultValue. + * {@code #UNSET_INT} if unset. + * @since 7.2 + */ + @Nullable + Integer getIntInRange(Config config, String section, String subsection, + String name, int minValue, int maxValue, + @Nullable Integer defaultValue); + + /** * Obtain a long value from a git {@link Config}. * * @param config @@ -135,11 +217,34 @@ public interface TypedConfigGetter { * @param defaultValue * default value to return if no value was present. * @return a long value from the configuration, or defaultValue. + * @deprecated use {@link #getLong(Config, String, String, String, Long)} + * instead */ + @Deprecated long getLong(Config config, String section, String subsection, String name, long defaultValue); /** + * Obtain a long value from a git {@link Config}. + * + * @param config + * to get the value from + * @param section + * section the key is grouped within. + * @param subsection + * subsection name, such a remote or branch name. + * @param name + * name of the key to get. + * @param defaultValue + * default value to return if no value was present. + * @return a long value from the configuration, or defaultValue. + * @since 7.2 + */ + @Nullable + Long getLong(Config config, String section, String subsection, String name, + @Nullable Long defaultValue); + + /** * Parse a numerical time unit, such as "1 minute", from a git * {@link Config}. * @@ -159,11 +264,41 @@ public interface TypedConfigGetter { * indication of the units. * @return the value, or {@code defaultValue} if not set, expressed in * {@code units}. 
+ * @deprecated use + * {@link #getTimeUnit(Config, String, String, String, Long, TimeUnit)} + * instead */ + @Deprecated long getTimeUnit(Config config, String section, String subsection, String name, long defaultValue, TimeUnit wantUnit); /** + * Parse a numerical time unit, such as "1 minute", from a git + * {@link Config}. + * + * @param config + * to get the value from + * @param section + * section the key is in. + * @param subsection + * subsection the key is in, or null if not in a subsection. + * @param name + * the key name. + * @param defaultValue + * default value to return if no value was present. + * @param wantUnit + * the units of {@code defaultValue} and the return value, as + * well as the units to assume if the value does not contain an + * indication of the units. + * @return the value, or {@code defaultValue} if not set, expressed in + * {@code units}. + * @since 7.2 + */ + @Nullable + Long getTimeUnit(Config config, String section, String subsection, + String name, @Nullable Long defaultValue, TimeUnit wantUnit); + + /** * Parse a string value from a git {@link Config} and treat it as a file * path, replacing a ~/ prefix by the user's home directory. * <p> @@ -189,9 +324,10 @@ public interface TypedConfigGetter { * @return the {@link Path}, or {@code defaultValue} if not set * @since 5.10 */ + @Nullable default Path getPath(Config config, String section, String subsection, String name, @NonNull FS fs, File resolveAgainst, - Path defaultValue) { + @Nullable Path defaultValue) { String value = config.getString(section, subsection, name); if (value == null) { return defaultValue; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revplot/PlotWalk.java b/org.eclipse.jgit/src/org/eclipse/jgit/revplot/PlotWalk.java index 0f6bd2d6cc..c8c454a228 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/revplot/PlotWalk.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/revplot/PlotWalk.java @@ -169,8 +169,9 @@ public class PlotWalk extends RevWalk { } long timeof(RevObject o) { - if (o instanceof RevCommit) - return ((RevCommit) o).getCommitTime(); + if (o instanceof RevCommit) { + return ((RevCommit) o).getCommitTime() * 1000L; + } if (o instanceof RevTag) { RevTag tag = (RevTag) o; try { @@ -179,7 +180,7 @@ public class PlotWalk extends RevWalk { return 0; } PersonIdent who = tag.getTaggerIdent(); - return who != null ? who.getWhen().getTime() : 0; + return who != null ? 
who.getWhenAsInstant().toEpochMilli() : 0; } return 0; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/FollowFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/FollowFilter.java index 35ef51f4fd..12e6c4ea98 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/FollowFilter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/FollowFilter.java @@ -18,7 +18,7 @@ import org.eclipse.jgit.diff.DiffConfig; import org.eclipse.jgit.errors.IncorrectObjectTypeException; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.treewalk.TreeWalk; -import org.eclipse.jgit.treewalk.filter.PathFilter; +import org.eclipse.jgit.treewalk.filter.ChangedPathTreeFilter; import org.eclipse.jgit.treewalk.filter.TreeFilter; /** @@ -56,39 +56,44 @@ public class FollowFilter extends TreeFilter { * @since 3.0 */ public static FollowFilter create(String path, DiffConfig cfg) { - return new FollowFilter(PathFilter.create(path), cfg); + return new FollowFilter(ChangedPathTreeFilter.create(path), cfg); } - private final PathFilter path; + private final ChangedPathTreeFilter path; final DiffConfig cfg; private RenameCallback renameCallback; - FollowFilter(PathFilter path, DiffConfig cfg) { + FollowFilter(ChangedPathTreeFilter path, DiffConfig cfg) { this.path = path; this.cfg = cfg; } - /** @return the path this filter matches. */ /** * Get the path this filter matches. * * @return the path this filter matches. */ public String getPath() { - return path.getPath(); + return path.getPaths().get(0); } @Override public boolean include(TreeWalk walker) throws MissingObjectException, IncorrectObjectTypeException, IOException { - return path.include(walker) && ANY_DIFF.include(walker); + return path.include(walker); + } + + @Override + public boolean shouldTreeWalk(RevCommit c, RevWalk rw, + MutableBoolean cpfUsed) { + return path.shouldTreeWalk(c, rw, cpfUsed); } @Override public boolean shouldBeRecursive() { - return path.shouldBeRecursive() || ANY_DIFF.shouldBeRecursive(); + return path.shouldBeRecursive(); } @Override @@ -105,9 +110,7 @@ public class FollowFilter extends TreeFilter { @SuppressWarnings("nls") @Override public String toString() { - return "(FOLLOW(" + path.toString() + ")" // - + " AND " // - + ANY_DIFF.toString() + ")"; + return "(FOLLOW(" + path.toString() + "))"; } /** diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/RevCommit.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/RevCommit.java index 55ddebf288..871545fca2 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/RevCommit.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/RevCommit.java @@ -401,13 +401,13 @@ public class RevCommit extends RevObject { * @since 5.1 */ public final byte[] getRawGpgSignature() { - final byte[] raw = buffer; - final byte[] header = { 'g', 'p', 'g', 's', 'i', 'g' }; - final int start = RawParseUtils.headerStart(header, raw, 0); + byte[] raw = buffer; + byte[] header = { 'g', 'p', 'g', 's', 'i', 'g' }; + int start = RawParseUtils.headerStart(header, raw, 0); if (start < 0) { return null; } - final int end = RawParseUtils.headerEnd(raw, start); + int end = RawParseUtils.nextLfSkippingSplitLines(raw, start); return RawParseUtils.headerValue(raw, start, end); } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/RevWalk.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/RevWalk.java index 9f0e28d0ce..41f98bad84 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/RevWalk.java +++ 
b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/RevWalk.java @@ -19,9 +19,14 @@ import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; -import java.util.Optional; +import java.util.Map; +import java.util. +Optional; +import java.util.Set; import org.eclipse.jgit.annotations.NonNull; import org.eclipse.jgit.annotations.Nullable; @@ -523,6 +528,27 @@ public class RevWalk implements Iterable<RevCommit>, AutoCloseable { } /** + * Determine if a <code>commit</code> is merged into any of the given + * <code>revs</code>. + * + * @param commit + * commit the caller thinks is reachable from <code>revs</code>. + * @param revs + * commits to start iteration from, and which is most likely a + * descendant (child) of <code>commit</code>. + * @return true if commit is merged into any of the revs; false otherwise. + * @throws java.io.IOException + * a pack file or loose object could not be read. + * @since 6.10.1 + */ + public boolean isMergedIntoAnyCommit(RevCommit commit, Collection<RevCommit> revs) + throws IOException { + return getCommitsMergedInto(commit, revs, + GetMergedIntoStrategy.RETURN_ON_FIRST_FOUND, + NullProgressMonitor.INSTANCE).size() > 0; + } + + /** * Determine if a <code>commit</code> is merged into all of the given * <code>refs</code>. * @@ -545,7 +571,26 @@ public class RevWalk implements Iterable<RevCommit>, AutoCloseable { private List<Ref> getMergedInto(RevCommit needle, Collection<Ref> haystacks, Enum returnStrategy, ProgressMonitor monitor) throws IOException { + Map<RevCommit, List<Ref>> refsByCommit = new HashMap<>(); + for (Ref r : haystacks) { + RevObject o = peel(parseAny(r.getObjectId())); + if (!(o instanceof RevCommit)) { + continue; + } + refsByCommit.computeIfAbsent((RevCommit) o, c -> new ArrayList<>()).add(r); + } + monitor.update(1); List<Ref> result = new ArrayList<>(); + for (RevCommit c : getCommitsMergedInto(needle, refsByCommit.keySet(), + returnStrategy, monitor)) { + result.addAll(refsByCommit.get(c)); + } + return result; + } + + private Set<RevCommit> getCommitsMergedInto(RevCommit needle, Collection<RevCommit> haystacks, + Enum returnStrategy, ProgressMonitor monitor) throws IOException { + Set<RevCommit> result = new HashSet<>(); List<RevCommit> uninteresting = new ArrayList<>(); List<RevCommit> marked = new ArrayList<>(); RevFilter oldRF = filter; @@ -561,16 +606,11 @@ public class RevWalk implements Iterable<RevCommit>, AutoCloseable { needle.parseHeaders(this); } int cutoff = needle.getGeneration(); - for (Ref r : haystacks) { + for (RevCommit c : haystacks) { if (monitor.isCancelled()) { return result; } monitor.update(1); - RevObject o = peel(parseAny(r.getObjectId())); - if (!(o instanceof RevCommit)) { - continue; - } - RevCommit c = (RevCommit) o; reset(UNINTERESTING | TEMP_MARK); markStart(c); boolean commitFound = false; @@ -582,7 +622,7 @@ public class RevWalk implements Iterable<RevCommit>, AutoCloseable { } if (References.isSameObject(next, needle) || (next.flags & TEMP_MARK) != 0) { - result.add(r); + result.add(c); if (returnStrategy == GetMergedIntoStrategy.RETURN_ON_FIRST_FOUND) { return result; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java index 99943b78e6..e9a3e72c7f 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java +++ 
b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java @@ -12,15 +12,11 @@ package org.eclipse.jgit.revwalk; import java.io.IOException; import java.util.List; -import java.util.Optional; -import java.util.Set; -import org.eclipse.jgit.internal.storage.commitgraph.ChangedPathFilter; import org.eclipse.jgit.diff.DiffConfig; import org.eclipse.jgit.diff.DiffEntry; import org.eclipse.jgit.diff.DiffEntry.ChangeType; import org.eclipse.jgit.diff.RenameDetector; -import org.eclipse.jgit.errors.CorruptObjectException; import org.eclipse.jgit.errors.IncorrectObjectTypeException; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.StopWalkException; @@ -28,6 +24,7 @@ import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.revwalk.filter.RevFilter; import org.eclipse.jgit.treewalk.TreeWalk; import org.eclipse.jgit.treewalk.filter.TreeFilter; +import org.eclipse.jgit.treewalk.filter.TreeFilter.MutableBoolean; /** * Filter applying a {@link org.eclipse.jgit.treewalk.filter.TreeFilter} against @@ -50,6 +47,8 @@ public class TreeRevFilter extends RevFilter { private final TreeWalk pathFilter; + private final MutableBoolean changedPathFilterUsed = new MutableBoolean(); + private long changedPathFilterTruePositive = 0; private long changedPathFilterFalsePositive = 0; @@ -126,24 +125,15 @@ public class TreeRevFilter extends RevFilter { } trees[nParents] = c.getTree(); tw.reset(trees); + changedPathFilterUsed.reset(); if (nParents == 1) { // We have exactly one parent. This is a very common case. // int chgs = 0, adds = 0; - boolean changedPathFilterUsed = false; - boolean mustCalculateChgs = true; - ChangedPathFilter cpf = c.getChangedPathFilter(walker); - if (cpf != null) { - Optional<Set<byte[]>> paths = pathFilter.getFilter() - .getPathsBestEffort(); - if (paths.isPresent()) { - changedPathFilterUsed = true; - if (paths.get().stream().noneMatch(cpf::maybeContains)) { - mustCalculateChgs = false; - } - } - } + TreeFilter tf = pathFilter.getFilter(); + boolean mustCalculateChgs = tf.shouldTreeWalk(c, walker, + changedPathFilterUsed); if (mustCalculateChgs) { while (tw.next()) { chgs++; @@ -153,7 +143,7 @@ public class TreeRevFilter extends RevFilter { break; // no point in looking at this further. 
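// Editorial sketch (not part of the patch): usage of the new public
// RevWalk#isMergedIntoAnyCommit API from the RevWalk hunk above. It reports
// whether a commit is reachable from at least one of the given tips. The walk
// and commit arguments are illustrative.
import java.io.IOException;
import java.util.Collection;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;

class MergedIntoSketch {
    static boolean containedInAnyTip(RevWalk walk, RevCommit commit,
            Collection<RevCommit> tips) throws IOException {
        return walk.isMergedIntoAnyCommit(commit, tips);
    }
}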
} } - if (changedPathFilterUsed) { + if (changedPathFilterUsed.get()) { if (chgs > 0) { changedPathFilterTruePositive++; } else { @@ -161,7 +151,7 @@ public class TreeRevFilter extends RevFilter { } } } else { - if (changedPathFilterUsed) { + if (changedPathFilterUsed.get()) { changedPathFilterNegative++; } } @@ -315,9 +305,7 @@ public class TreeRevFilter extends RevFilter { } private void updateFollowFilter(ObjectId[] trees, DiffConfig cfg, - RevCommit commit) - throws MissingObjectException, IncorrectObjectTypeException, - CorruptObjectException, IOException { + RevCommit commit) throws IOException { TreeWalk tw = pathFilter; FollowFilter oldFilter = (FollowFilter) tw.getFilter(); tw.setFilter(TreeFilter.ANY_DIFF); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/submodule/SubmoduleWalk.java b/org.eclipse.jgit/src/org/eclipse/jgit/submodule/SubmoduleWalk.java index becc8082ba..105cba7d28 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/submodule/SubmoduleWalk.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/submodule/SubmoduleWalk.java @@ -787,14 +787,14 @@ public class SubmoduleWalk implements AutoCloseable { IgnoreSubmoduleMode mode = repoConfig.getEnum( IgnoreSubmoduleMode.values(), ConfigConstants.CONFIG_SUBMODULE_SECTION, getModuleName(), - ConfigConstants.CONFIG_KEY_IGNORE, null); + ConfigConstants.CONFIG_KEY_IGNORE); if (mode != null) { return mode; } lazyLoadModulesConfig(); - return modulesConfig.getEnum(IgnoreSubmoduleMode.values(), - ConfigConstants.CONFIG_SUBMODULE_SECTION, getModuleName(), - ConfigConstants.CONFIG_KEY_IGNORE, IgnoreSubmoduleMode.NONE); + return modulesConfig.getEnum(ConfigConstants.CONFIG_SUBMODULE_SECTION, + getModuleName(), ConfigConstants.CONFIG_KEY_IGNORE, + IgnoreSubmoduleMode.NONE); } /** diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/AmazonS3.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/AmazonS3.java index aaf9f8a08a..9d9f5495fe 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/AmazonS3.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/AmazonS3.java @@ -760,6 +760,15 @@ public class AmazonS3 { SAXParserFactory saxParserFactory = SAXParserFactory .newInstance(); saxParserFactory.setNamespaceAware(true); + saxParserFactory.setFeature( + "http://xml.org/sax/features/external-general-entities", //$NON-NLS-1$ + false); + saxParserFactory.setFeature( + "http://xml.org/sax/features/external-parameter-entities", //$NON-NLS-1$ + false); + saxParserFactory.setFeature( + "http://apache.org/xml/features/disallow-doctype-decl", //$NON-NLS-1$ + true); xr = saxParserFactory.newSAXParser().getXMLReader(); } catch (SAXException | ParserConfigurationException e) { throw new IOException( diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/HttpConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/HttpConfig.java index 73eddb8e21..f10b7bf452 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/HttpConfig.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/HttpConfig.java @@ -302,8 +302,7 @@ public class HttpConfig { int postBufferSize = config.getInt(HTTP, POST_BUFFER_KEY, 1 * 1024 * 1024); boolean sslVerifyFlag = config.getBoolean(HTTP, SSL_VERIFY_KEY, true); - HttpRedirectMode followRedirectsMode = config.getEnum( - HttpRedirectMode.values(), HTTP, null, + HttpRedirectMode followRedirectsMode = config.getEnum(HTTP, null, FOLLOW_REDIRECTS_KEY, HttpRedirectMode.INITIAL); int redirectLimit = config.getInt(HTTP, MAX_REDIRECTS_KEY, MAX_REDIRECTS); @@ -335,8 +334,8 @@ public class HttpConfig { 
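// Editorial sketch (not part of the patch): the AmazonS3 hunk above disables
// external entities and DOCTYPE declarations before creating the XML reader,
// the usual JAXP defence against XXE. A standalone version of that setup,
// assuming a standard JAXP/SAX environment.
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;

class HardenedSaxSketch {
    static XMLReader newHardenedReader()
            throws ParserConfigurationException, SAXException {
        SAXParserFactory factory = SAXParserFactory.newInstance();
        factory.setNamespaceAware(true);
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        return factory.newSAXParser().getXMLReader();
    }
}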
postBufferSize); sslVerifyFlag = config.getBoolean(HTTP, match, SSL_VERIFY_KEY, sslVerifyFlag); - followRedirectsMode = config.getEnum(HttpRedirectMode.values(), - HTTP, match, FOLLOW_REDIRECTS_KEY, followRedirectsMode); + followRedirectsMode = config.getEnum(HTTP, match, + FOLLOW_REDIRECTS_KEY, followRedirectsMode); int newMaxRedirects = config.getInt(HTTP, match, MAX_REDIRECTS_KEY, redirectLimit); if (newMaxRedirects >= 0) { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/URIish.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/URIish.java index 4de6ff825f..7b5842b712 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/URIish.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/URIish.java @@ -82,7 +82,7 @@ public class URIish implements Serializable { * Part of a pattern which matches a relative path. Relative paths don't * start with slash or drive letters. Defines no capturing group. */ - private static final String RELATIVE_PATH_P = "(?:(?:[^\\\\/]+[\\\\/]+)*[^\\\\/]+[\\\\/]*)"; //$NON-NLS-1$ + private static final String RELATIVE_PATH_P = "(?:(?:[^\\\\/]+[\\\\/]+)*+[^\\\\/]*)"; //$NON-NLS-1$ /** * Part of a pattern which matches a relative or absolute path. Defines no @@ -120,7 +120,7 @@ public class URIish implements Serializable { * path (maybe even containing windows drive-letters) or a relative path. */ private static final Pattern LOCAL_FILE = Pattern.compile("^" // //$NON-NLS-1$ - + "([\\\\/]?" + PATH_P + ")" // //$NON-NLS-1$ //$NON-NLS-2$ + + "([\\\\/]?+" + PATH_P + ")" // //$NON-NLS-1$ //$NON-NLS-2$ + "$"); //$NON-NLS-1$ /** diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/AndTreeFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/AndTreeFilter.java index c6804da039..b35dbebd17 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/AndTreeFilter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/AndTreeFilter.java @@ -12,11 +12,14 @@ package org.eclipse.jgit.treewalk.filter; import java.io.IOException; +import java.util.Arrays; import java.util.Collection; import org.eclipse.jgit.errors.IncorrectObjectTypeException; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.internal.JGitText; +import org.eclipse.jgit.revwalk.RevCommit; +import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.treewalk.TreeWalk; /** @@ -100,6 +103,13 @@ public abstract class AndTreeFilter extends TreeFilter { } @Override + public boolean shouldTreeWalk(RevCommit c, RevWalk rw, + MutableBoolean cpfUsed) { + return a.shouldTreeWalk(c, rw, cpfUsed) + && b.shouldTreeWalk(c, rw, cpfUsed); + } + + @Override public int matchFilter(TreeWalk walker) throws MissingObjectException, IncorrectObjectTypeException, IOException { @@ -174,6 +184,13 @@ public abstract class AndTreeFilter extends TreeFilter { } @Override + public boolean shouldTreeWalk(RevCommit c, RevWalk rw, + MutableBoolean cpfUsed) { + return Arrays.stream(subfilters) + .allMatch(t -> t.shouldTreeWalk(c, rw, cpfUsed)); + } + + @Override public TreeFilter clone() { final TreeFilter[] s = new TreeFilter[subfilters.length]; for (int i = 0; i < s.length; i++) diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ChangedPathTreeFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ChangedPathTreeFilter.java new file mode 100644 index 0000000000..a74b9b617f --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/ChangedPathTreeFilter.java @@ -0,0 +1,135 @@ +/* + * Copyright (C) 2025, 
Google LLC and others + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ + +package org.eclipse.jgit.treewalk.filter; + +import org.eclipse.jgit.internal.JGitText; +import org.eclipse.jgit.internal.storage.commitgraph.ChangedPathFilter; +import org.eclipse.jgit.lib.Constants; +import org.eclipse.jgit.revwalk.RevCommit; +import org.eclipse.jgit.revwalk.RevWalk; +import org.eclipse.jgit.treewalk.TreeWalk; +import org.eclipse.jgit.util.StringUtils; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +/** + * Filter tree entries that modified the contents of particular file paths. + * <p> + * Equivalent to AndTreeFilter(PathFilter, AnyDiffFilter). This filter uses + * {@link org.eclipse.jgit.internal.storage.commitgraph.ChangedPathFilter} + * (bloom filters) when available to discard commits without diffing their + * trees. + * + * @since 7.3 + */ +public class ChangedPathTreeFilter extends TreeFilter { + + private TreeFilter pathFilter; + + private List<String> paths; + + private List<byte[]> rawPaths; + + /** + * Create a TreeFilter for trees modifying one or more user supplied paths. + * <p> + * Path strings are relative to the root of the repository. If the user's + * input should be assumed relative to a subdirectory of the repository the + * caller must prepend the subdirectory's path prior to creating the filter. + * <p> + * Path strings use '/' to delimit directories on all platforms. + * <p> + * Paths may appear in any order within the collection. Sorting may be done + * internally when the group is constructed if doing so will improve path + * matching performance. + * + * @param paths + * the paths to test against. Must have at least one entry. + * @return a new filter for the list of paths supplied. + */ + public static ChangedPathTreeFilter create(String... paths) { + return new ChangedPathTreeFilter(paths); + } + + private ChangedPathTreeFilter(String... 
paths) { + List<String> filtered = Arrays.stream(paths) + .map(s -> StringUtils.trim(s, '/')) + .collect(Collectors.toList()); + + if (filtered.size() == 0) + throw new IllegalArgumentException( + JGitText.get().atLeastOnePathIsRequired); + + if (filtered.stream().anyMatch(s -> s.isEmpty() || s.isBlank())) { + throw new IllegalArgumentException( + JGitText.get().emptyPathNotPermitted); + } + + this.paths = filtered; + this.rawPaths = this.paths.stream().map(Constants::encode) + .collect(Collectors.toList()); + if (filtered.size() == 1) { + this.pathFilter = PathFilter.create(paths[0]); + } else { + this.pathFilter = OrTreeFilter.create(Arrays.stream(paths) + .map(PathFilter::create).collect(Collectors.toList())); + } + } + + @Override + public boolean shouldTreeWalk(RevCommit c, RevWalk rw, + MutableBoolean cpfUsed) { + ChangedPathFilter cpf = c.getChangedPathFilter(rw); + if (cpf == null) { + return true; + } + if (cpfUsed != null) { + cpfUsed.orValue(true); + } + // return true if at least one path might exist in cpf + return rawPaths.stream().anyMatch(cpf::maybeContains); + } + + @Override + public boolean include(TreeWalk walker) throws IOException { + return pathFilter.include(walker) && ANY_DIFF.include(walker); + } + + @Override + public boolean shouldBeRecursive() { + return pathFilter.shouldBeRecursive() || ANY_DIFF.shouldBeRecursive(); + } + + @Override + public ChangedPathTreeFilter clone() { + return this; + } + + /** + * Get the paths this filter matches. + * + * @return the paths this filter matches. + */ + public List<String> getPaths() { + return paths; + } + + @SuppressWarnings("nls") + @Override + public String toString() { + return "(CHANGED_PATH(" + pathFilter.toString() + ")" // + + " AND " // + + ANY_DIFF.toString() + ")"; + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/OrTreeFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/OrTreeFilter.java index 3c18a9f98d..ce2382552b 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/OrTreeFilter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/OrTreeFilter.java @@ -12,11 +12,14 @@ package org.eclipse.jgit.treewalk.filter; import java.io.IOException; +import java.util.Arrays; import java.util.Collection; import org.eclipse.jgit.errors.IncorrectObjectTypeException; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.internal.JGitText; +import org.eclipse.jgit.revwalk.RevCommit; +import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.treewalk.TreeWalk; /** @@ -116,6 +119,13 @@ public abstract class OrTreeFilter extends TreeFilter { } @Override + public boolean shouldTreeWalk(RevCommit c, RevWalk rw, + MutableBoolean cpfUsed) { + return a.shouldTreeWalk(c, rw, cpfUsed) + || b.shouldTreeWalk(c, rw, cpfUsed); + } + + @Override public boolean shouldBeRecursive() { return a.shouldBeRecursive() || b.shouldBeRecursive(); } @@ -164,6 +174,13 @@ public abstract class OrTreeFilter extends TreeFilter { } @Override + public boolean shouldTreeWalk(RevCommit c, RevWalk rw, + MutableBoolean cpfUsed) { + return Arrays.stream(subfilters) + .anyMatch(t -> t.shouldTreeWalk(c, rw, cpfUsed)); + } + + @Override public boolean shouldBeRecursive() { for (TreeFilter f : subfilters) if (f.shouldBeRecursive()) diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/TreeFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/TreeFilter.java index a9066dc8f8..8159843312 100644 --- 
a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/TreeFilter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/TreeFilter.java @@ -14,9 +14,12 @@ import java.io.IOException; import java.util.Optional; import java.util.Set; +import org.eclipse.jgit.annotations.Nullable; import org.eclipse.jgit.dircache.DirCacheIterator; import org.eclipse.jgit.errors.IncorrectObjectTypeException; import org.eclipse.jgit.errors.MissingObjectException; +import org.eclipse.jgit.revwalk.RevCommit; +import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.treewalk.TreeWalk; import org.eclipse.jgit.treewalk.WorkingTreeIterator; @@ -210,14 +213,38 @@ public abstract class TreeFilter { public abstract boolean shouldBeRecursive(); /** - * If this filter checks that at least one of the paths in a set has been + * Return true if the tree entries within this commit require + * {@link #include(TreeWalk)} to correctly determine whether they are + * interesting to report. + * <p> + * Otherwise, all tree entries within this commit are UNINTERESTING for this + * tree filter. + * + * @param c + * the commit being considered by the TreeFilter. + * @param rw + * the RevWalk used in retrieving relevant commit data. + * @param cpfUsed + * if not null, it reports if the changedPathFilter was used in + * this method + * @return True if the tree entries within c require + * {@link #include(TreeWalk)}. + * @since 7.3 + */ + public boolean shouldTreeWalk(RevCommit c, RevWalk rw, + @Nullable MutableBoolean cpfUsed) { + return true; + } + + /** + * If this filter checks that a specific set of paths have all been * modified, returns that set of paths to be checked against a changed path * filter. Otherwise, returns empty. * * @return a set of paths, or empty - * - * @since 6.7 + * @deprecated use {@code shouldTreeWalk} instead. */ + @Deprecated(since = "7.3") public Optional<Set<byte[]>> getPathsBestEffort() { return Optional.empty(); } @@ -242,4 +269,33 @@ public abstract class TreeFilter { } return n.replace('$', '.'); } + + /** + * Mutable wrapper to return a boolean in a function parameter. + * + * @since 7.3 + */ + public static class MutableBoolean { + private boolean value; + + /** + * Return the boolean value. + * + * @return The state of the internal boolean value. + */ + public boolean get() { + return value; + } + + void orValue(boolean v) { + value = value || v; + } + + /** + * Reset the boolean value. 
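// Editorial sketch (not part of the patch): ChangedPathTreeFilter, new in this
// patch, is the first filter implementing the shouldTreeWalk hook defined
// above; a RevWalk using it can consult commit-graph changed-path (bloom)
// filters and skip diffing commits that cannot touch the given paths. The
// walk, start commit and paths are illustrative assumptions.
import java.io.IOException;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.treewalk.filter.ChangedPathTreeFilter;

class ChangedPathWalkSketch {
    static void printTouchingCommits(RevWalk walk, RevCommit start)
            throws IOException {
        // Only commits that may have changed one of these paths are tree-walked.
        walk.setTreeFilter(ChangedPathTreeFilter.create("Documentation", "src/main"));
        walk.markStart(start);
        for (RevCommit c : walk) {
            System.out.println(c.name());
        }
    }
}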
+ */ + public void reset() { + value = false; + } + } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/ChangeIdUtil.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/ChangeIdUtil.java index 12af374b2e..c8421d6012 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/util/ChangeIdUtil.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/ChangeIdUtil.java @@ -86,8 +86,8 @@ public class ChangeIdUtil { } } - private static final Pattern issuePattern = Pattern - .compile("^(Bug|Issue)[a-zA-Z0-9-]*:.*$"); //$NON-NLS-1$ + private static final Pattern signedOffByPattern = Pattern + .compile("^Signed-off-by:.*$"); //$NON-NLS-1$ private static final Pattern footerPattern = Pattern .compile("(^[a-zA-Z0-9-]+:(?!//).*$)"); //$NON-NLS-1$ @@ -159,7 +159,7 @@ public class ChangeIdUtil { int footerFirstLine = indexOfFirstFooterLine(lines); int insertAfter = footerFirstLine; for (int i = footerFirstLine; i < lines.length; ++i) { - if (issuePattern.matcher(lines[i]).matches()) { + if (!signedOffByPattern.matcher(lines[i]).matches()) { insertAfter = i + 1; continue; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java index 59bbacfa76..6a40fad1db 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java @@ -363,6 +363,7 @@ public abstract class FS { private static FileStoreAttributes getFileStoreAttributes(Path dir) { FileStore s; + CompletableFuture<Optional<FileStoreAttributes>> f = null; try { if (Files.exists(dir)) { s = Files.getFileStore(dir); @@ -385,7 +386,7 @@ public abstract class FS { return FALLBACK_FILESTORE_ATTRIBUTES; } - CompletableFuture<Optional<FileStoreAttributes>> f = CompletableFuture + f = CompletableFuture .supplyAsync(() -> { Lock lock = locks.computeIfAbsent(s, l -> new ReentrantLock()); @@ -455,10 +456,13 @@ public abstract class FS { } // fall through and return fallback } catch (IOException | ExecutionException | CancellationException e) { + cancel(f); LOG.error(e.getMessage(), e); } catch (TimeoutException | SecurityException e) { + cancel(f); // use fallback } catch (InterruptedException e) { + cancel(f); LOG.error(e.getMessage(), e); Thread.currentThread().interrupt(); } @@ -467,6 +471,13 @@ public abstract class FS { return FALLBACK_FILESTORE_ATTRIBUTES; } + private static void cancel( + CompletableFuture<Optional<FileStoreAttributes>> f) { + if (f != null) { + f.cancel(true); + } + } + @SuppressWarnings("boxing") private static Duration measureMinimalRacyInterval(Path dir) { LOG.debug("{}: start measure minimal racy interval in {}", //$NON-NLS-1$ diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/GitDateFormatter.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/GitDateFormatter.java index 524126b098..332e65985e 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/util/GitDateFormatter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/GitDateFormatter.java @@ -10,10 +10,10 @@ package org.eclipse.jgit.util; -import java.text.DateFormat; -import java.text.SimpleDateFormat; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.time.format.FormatStyle; import java.util.Locale; -import java.util.TimeZone; import org.eclipse.jgit.lib.PersonIdent; @@ -26,9 +26,9 @@ import org.eclipse.jgit.lib.PersonIdent; */ public class GitDateFormatter { - private DateFormat dateTimeInstance; + private DateTimeFormatter dateTimeFormat; - private DateFormat dateTimeInstance2; + private DateTimeFormatter dateTimeFormat2; private 
final Format format; @@ -96,30 +96,34 @@ public class GitDateFormatter { default: break; case DEFAULT: // Not default: - dateTimeInstance = new SimpleDateFormat( + dateTimeFormat = DateTimeFormatter.ofPattern( "EEE MMM dd HH:mm:ss yyyy Z", Locale.US); //$NON-NLS-1$ break; case ISO: - dateTimeInstance = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z", //$NON-NLS-1$ + dateTimeFormat = DateTimeFormatter.ofPattern( + "yyyy-MM-dd HH:mm:ss Z", //$NON-NLS-1$ Locale.US); break; case LOCAL: - dateTimeInstance = new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy", //$NON-NLS-1$ + dateTimeFormat = DateTimeFormatter.ofPattern( + "EEE MMM dd HH:mm:ss yyyy", //$NON-NLS-1$ Locale.US); break; case RFC: - dateTimeInstance = new SimpleDateFormat( + dateTimeFormat = DateTimeFormatter.ofPattern( "EEE, dd MMM yyyy HH:mm:ss Z", Locale.US); //$NON-NLS-1$ break; case SHORT: - dateTimeInstance = new SimpleDateFormat("yyyy-MM-dd", Locale.US); //$NON-NLS-1$ + dateTimeFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd", //$NON-NLS-1$ + Locale.US); break; case LOCALE: case LOCALELOCAL: - SystemReader systemReader = SystemReader.getInstance(); - dateTimeInstance = systemReader.getDateTimeInstance( - DateFormat.DEFAULT, DateFormat.DEFAULT); - dateTimeInstance2 = systemReader.getSimpleDateFormat("Z"); //$NON-NLS-1$ + dateTimeFormat = DateTimeFormatter + .ofLocalizedDateTime(FormatStyle.MEDIUM) + .withLocale(Locale.US); + dateTimeFormat2 = DateTimeFormatter.ofPattern("Z", //$NON-NLS-1$ + Locale.US); break; } } @@ -135,39 +139,45 @@ public class GitDateFormatter { @SuppressWarnings("boxing") public String formatDate(PersonIdent ident) { switch (format) { - case RAW: - int offset = ident.getTimeZoneOffset(); + case RAW: { + int offset = ident.getZoneOffset().getTotalSeconds(); String sign = offset < 0 ? 
"-" : "+"; //$NON-NLS-1$ //$NON-NLS-2$ int offset2; - if (offset < 0) + if (offset < 0) { offset2 = -offset; - else + } else { offset2 = offset; - int hours = offset2 / 60; - int minutes = offset2 % 60; + } + int minutes = (offset2 / 60) % 60; + int hours = offset2 / 60 / 60; return String.format("%d %s%02d%02d", //$NON-NLS-1$ - ident.getWhen().getTime() / 1000, sign, hours, minutes); + ident.getWhenAsInstant().getEpochSecond(), sign, hours, + minutes); + } case RELATIVE: return RelativeDateFormatter.format(ident.getWhenAsInstant()); case LOCALELOCAL: case LOCAL: - dateTimeInstance.setTimeZone(SystemReader.getInstance() - .getTimeZone()); - return dateTimeInstance.format(ident.getWhen()); - case LOCALE: - TimeZone tz = ident.getTimeZone(); - if (tz == null) - tz = SystemReader.getInstance().getTimeZone(); - dateTimeInstance.setTimeZone(tz); - dateTimeInstance2.setTimeZone(tz); - return dateTimeInstance.format(ident.getWhen()) + " " //$NON-NLS-1$ - + dateTimeInstance2.format(ident.getWhen()); - default: - tz = ident.getTimeZone(); - if (tz == null) - tz = SystemReader.getInstance().getTimeZone(); - dateTimeInstance.setTimeZone(ident.getTimeZone()); - return dateTimeInstance.format(ident.getWhen()); + return dateTimeFormat + .withZone(SystemReader.getInstance().getTimeZoneId()) + .format(ident.getWhenAsInstant()); + case LOCALE: { + ZoneId tz = ident.getZoneId(); + if (tz == null) { + tz = SystemReader.getInstance().getTimeZoneId(); + } + return dateTimeFormat.withZone(tz).format(ident.getWhenAsInstant()) + + " " //$NON-NLS-1$ + + dateTimeFormat2.withZone(tz) + .format(ident.getWhenAsInstant()); + } + default: { + ZoneId tz = ident.getZoneId(); + if (tz == null) { + tz = SystemReader.getInstance().getTimeZoneId(); + } + return dateTimeFormat.withZone(tz).format(ident.getWhenAsInstant()); + } } } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/GitTimeParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/GitTimeParser.java index 7d00fcd5ed..acaa1ce563 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/util/GitTimeParser.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/GitTimeParser.java @@ -11,6 +11,7 @@ package org.eclipse.jgit.util; import java.text.MessageFormat; import java.text.ParseException; +import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; @@ -97,6 +98,40 @@ public class GitTimeParser { return parse(dateStr, SystemReader.getInstance().civilNow()); } + /** + * Parses a string into a {@link java.time.Instant} using the default + * locale. Since this parser also supports relative formats (e.g. + * "yesterday") the caller can specify the reference date. These types of + * strings can be parsed: + * <ul> + * <li>"never"</li> + * <li>"now"</li> + * <li>"yesterday"</li> + * <li>"(x) years|months|weeks|days|hours|minutes|seconds ago"<br> + * Multiple specs can be combined like in "2 weeks 3 days ago". Instead of ' + * ' one can use '.' 
to separate the words</li> + * <li>"yyyy-MM-dd HH:mm:ss Z" (ISO)</li> + * <li>"EEE, dd MMM yyyy HH:mm:ss Z" (RFC)</li> + * <li>"yyyy-MM-dd"</li> + * <li>"yyyy.MM.dd"</li> + * <li>"MM/dd/yyyy",</li> + * <li>"dd.MM.yyyy"</li> + * <li>"EEE MMM dd HH:mm:ss yyyy Z" (DEFAULT)</li> + * <li>"EEE MMM dd HH:mm:ss yyyy" (LOCAL)</li> + * </ul> + * + * @param dateStr + * the string to be parsed + * @return the parsed {@link java.time.Instant} + * @throws java.text.ParseException + * if the given dateStr was not recognized + * @since 7.2 + */ + public static Instant parseInstant(String dateStr) throws ParseException { + return parse(dateStr).atZone(SystemReader.getInstance().getTimeZoneId()) + .toInstant(); + } + // Only tests seem to use this method static LocalDateTime parse(String dateStr, LocalDateTime now) throws ParseException { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/SignatureUtils.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/SignatureUtils.java index 820ac2db91..e3e3e04fd9 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/util/SignatureUtils.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/SignatureUtils.java @@ -48,7 +48,7 @@ public final class SignatureUtils { if (verification.creationDate() != null) { // Use the creator's timezone for the signature date PersonIdent dateId = new PersonIdent(creator, - verification.creationDate()); + verification.creationDate().toInstant()); result.append( MessageFormat.format(JGitText.get().verifySignatureMade, formatter.formatDate(dateId))); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/StringUtils.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/StringUtils.java index 2fbd12dcc5..e381a3bcc9 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/util/StringUtils.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/StringUtils.java @@ -278,6 +278,44 @@ public final class StringUtils { } /** + * Remove the specified character from beginning and end of a string + * <p> + * If the character repeats, all copies + * + * @param str input string + * @param c character to remove + * @return the input string with c + * @since 7.2 + */ + public static String trim(String str, char c) { + if (str == null || str.length() == 0) { + return str; + } + + int endPos = str.length()-1; + while (endPos >= 0 && str.charAt(endPos) == c) { + endPos--; + } + + // Whole string is c + if (endPos == -1) { + return EMPTY; + } + + int startPos = 0; + while (startPos < endPos && str.charAt(startPos) == c) { + startPos++; + } + + if (startPos == 0 && endPos == str.length()-1) { + // No need to copy + return str; + } + + return str.substring(startPos, endPos+1); + } + + /** * Appends {@link Constants#DOT_GIT_EXT} unless the given name already ends * with that suffix. * diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/SystemReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/SystemReader.java index 55cc878e02..0b7c6204f2 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/util/SystemReader.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/SystemReader.java @@ -492,6 +492,36 @@ public abstract class SystemReader { } /** + * Gets the directory denoted by environment variable XDG_CACHE_HOME. If + * the variable is not set or empty, return a path for + * {@code $HOME/.cache}. + * + * @param fileSystem + * {@link FS} to get the user's home directory + * @return a {@link Path} denoting the directory, which may exist or not, or + * {@code null} if the environment variable is not set and there is + * no home directory, or the path is invalid. 
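// Editorial sketch (not part of the patch): usage of the new
// GitTimeParser#parseInstant helper documented above. It accepts git's
// absolute and relative date spellings and resolves them in the system
// reader's time zone; the input string is illustrative.
import java.text.ParseException;
import java.time.Instant;
import org.eclipse.jgit.util.GitTimeParser;

class GitDateSketch {
    static Instant cutoff() throws ParseException {
        // Relative specs such as "2 weeks 3 days ago" are also accepted.
        return GitTimeParser.parseInstant("yesterday");
    }
}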
+ * @since 7.3 + */ + public Path getXdgCacheDirectory(FS fileSystem) { + String cacheHomePath = getenv(Constants.XDG_CACHE_HOME); + if (StringUtils.isEmptyOrNull(cacheHomePath)) { + File home = fileSystem.userHome(); + if (home == null) { + return null; + } + cacheHomePath = new File(home, ".cache").getAbsolutePath(); //$NON-NLS-1$ + } + try { + return Paths.get(cacheHomePath); + } catch (InvalidPathException e) { + LOG.error(JGitText.get().logXDGCacheHomeInvalid, cacheHomePath, + e); + } + return null; + } + + /** * Update config and its parents if they seem modified * * @param config @@ -523,7 +553,7 @@ public abstract class SystemReader { * * @deprecated Use {@link #now()} */ - @Deprecated + @Deprecated(since = "7.1") public abstract long getCurrentTime(); /** @@ -569,7 +599,7 @@ public abstract class SystemReader { * * @deprecated Use {@link #getTimeZoneAt(Instant)} instead. */ - @Deprecated + @Deprecated(since = "7.1") public abstract int getTimezone(long when); /** @@ -592,7 +622,7 @@ public abstract class SystemReader { * * @deprecated Use {@link #getTimeZoneId()} */ - @Deprecated + @Deprecated(since = "7.1") public TimeZone getTimeZone() { return TimeZone.getDefault(); } |
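// Editorial sketch (not part of the patch): usage of the new
// SystemReader#getXdgCacheDirectory accessor shown above. It prefers
// $XDG_CACHE_HOME and falls back to ~/.cache, returning null when neither can
// be resolved; FS.DETECTED is used here as an illustrative file-system handle.
import java.nio.file.Path;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.SystemReader;

class XdgCacheSketch {
    static Path cacheDirOrNull() {
        return SystemReader.getInstance().getXdgCacheDirectory(FS.DETECTED);
    }
}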