Date: Mon, 1 Jul 2024 12:24:36 -0700
Subject: DfsPackFile: Enable/disable object size index via DfsReaderOptions
DfsPackFile always uses the object size index if available. That is
the desired final state, but for a safe rollout, we should be able to
disable using the object size index.
Add an option (dfs.useObjectSizeIndex) to enable/disable the use of
the object size index. It defaults to false.
This changes the default from true to false. It only makes a difference
for the DFS stack when writing of the index was explicitly
enabled. This is an optimization, so it shouldn't cause any
regression. Operators can restore the previous behaviour by setting
"dfs.useObjectSizeIndex" to true.
Change-Id: I44bf5a57e3942a4ecfe66d58bfa9175e99f96fcc
---
.../storage/dfs/DfsGarbageCollectorTest.java | 3 +++
.../jgit/internal/storage/dfs/DfsInserterTest.java | 1 +
.../jgit/internal/storage/dfs/DfsPackFileTest.java | 1 +
.../internal/storage/dfs/DfsPackParserTest.java | 1 +
.../jgit/internal/storage/dfs/DfsReaderTest.java | 2 ++
.../jgit/internal/storage/dfs/DfsPackFile.java | 1 +
.../internal/storage/dfs/DfsReaderOptions.java | 29 ++++++++++++++++++++++
.../src/org/eclipse/jgit/lib/ConfigConstants.java | 7 ++++++
8 files changed, 45 insertions(+)
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
index e193de9764..2be11d32ea 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
@@ -18,6 +18,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
+
import org.eclipse.jgit.internal.storage.commitgraph.CommitGraph;
import org.eclipse.jgit.internal.storage.commitgraph.CommitGraphWriter;
import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource;
@@ -1171,6 +1172,7 @@ public class DfsGarbageCollectorTest {
gcWithObjectSizeIndex(10);
+ odb.getReaderOptions().setUseObjectSizeIndex(true);
DfsReader reader = odb.newReader();
DfsPackFile gcPack = findFirstBySource(odb.getPacks(), GC);
assertTrue(gcPack.hasObjectSizeIndex(reader));
@@ -1191,6 +1193,7 @@ public class DfsGarbageCollectorTest {
gcWithObjectSizeIndex(10);
+ odb.getReaderOptions().setUseObjectSizeIndex(true);
DfsReader reader = odb.newReader();
DfsPackFile gcPack = findFirstBySource(odb.getPacks(), GC);
assertTrue(gcPack.hasObjectSizeIndex(reader));
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsInserterTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsInserterTest.java
index b84a0b00ae..0b558edf2c 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsInserterTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsInserterTest.java
@@ -295,6 +295,7 @@ public class DfsInserterTest {
public void testObjectSizeIndexOnInsert() throws IOException {
db.getConfig().setInt(CONFIG_PACK_SECTION, null,
CONFIG_KEY_MIN_BYTES_OBJ_SIZE_INDEX, 0);
+ db.getObjectDatabase().getReaderOptions().setUseObjectSizeIndex(true);
byte[] contents = Constants.encode("foo");
ObjectId fooId;
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java
index d21e51f276..bc851f8dde 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java
@@ -126,6 +126,7 @@ public class DfsPackFileTest {
setObjectSizeIndexMinBytes(0);
ObjectId blobId = setupPack(512, 800);
+ db.getObjectDatabase().getReaderOptions().setUseObjectSizeIndex(true);
DfsReader reader = db.getObjectDatabase().newReader();
DfsPackFile pack = db.getObjectDatabase().getPacks()[0];
assertTrue(pack.hasObjectSizeIndex(reader));
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackParserTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackParserTest.java
index 130af27773..c1cd231c66 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackParserTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackParserTest.java
@@ -61,6 +61,7 @@ public class DfsPackParserTest {
ins.flush();
}
+ repo.getObjectDatabase().getReaderOptions().setUseObjectSizeIndex(true);
DfsReader reader = repo.getObjectDatabase().newReader();
PackList packList = repo.getObjectDatabase().getPackList();
assertEquals(1, packList.packs.length);
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java
index 254184ee80..a0c228906e 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java
@@ -37,6 +37,8 @@ public class DfsReaderTest {
@Before
public void setUp() {
db = new InMemoryRepository(new DfsRepositoryDescription("test"));
+ // These tests assume the object size index is enabled.
+ db.getObjectDatabase().getReaderOptions().setUseObjectSizeIndex(true);
}
@Test
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
index 3e4d4d300c..b94a84a41c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
@@ -354,6 +354,7 @@ public final class DfsPackFile extends BlockBasedFile {
}
if (objectSizeIndexLoadAttempted
+ || !ctx.getOptions().shouldUseObjectSizeIndex()
|| !desc.hasFileExt(OBJECT_SIZE_INDEX)) {
// Pack doesn't have object size index
return null;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderOptions.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderOptions.java
index f2ac461deb..5f5e81977b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderOptions.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderOptions.java
@@ -15,6 +15,7 @@ import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_DFS_SECTION;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_DELTA_BASE_CACHE_LIMIT;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_STREAM_BUFFER;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_STREAM_FILE_THRESHOLD;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_USE_OBJECT_SIZE_INDEX;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.storage.pack.PackConfig;
@@ -36,6 +37,8 @@ public class DfsReaderOptions {
private boolean loadRevIndexInParallel;
+ private boolean useObjectSizeIndex;
+
/**
* Create a default reader configuration.
*/
@@ -136,6 +139,28 @@ public class DfsReaderOptions {
return this;
}
+ /**
+ * Use the object size index if available.
+ *
+ * @return true if the reader should try to use the object size index. If
+ * false, the reader ignores that index.
+ */
+ public boolean shouldUseObjectSizeIndex() {
+ return useObjectSizeIndex;
+ }
+
+ /**
+ * Set whether the reader should try to use the object size index.
+ *
+ * @param useObjectSizeIndex true to use it, false to ignore the object size index
+ *
+ * @return {@code this}
+ */
+ public DfsReaderOptions setUseObjectSizeIndex(boolean useObjectSizeIndex) {
+ this.useObjectSizeIndex = useObjectSizeIndex;
+ return this;
+ }
+
/**
* Update properties by setting fields from the configuration.
*
@@ -168,6 +193,10 @@ public class DfsReaderOptions {
CONFIG_DFS_SECTION,
CONFIG_KEY_STREAM_BUFFER,
getStreamPackBufferSize()));
+
+ setUseObjectSizeIndex(rc.getBoolean(CONFIG_CORE_SECTION,
+ CONFIG_DFS_SECTION, CONFIG_KEY_USE_OBJECT_SIZE_INDEX,
+ false));
return this;
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
index 0edf3c5ad0..e9fbc65696 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
@@ -1012,4 +1012,11 @@ public final class ConfigConstants {
* @since 6.7
*/
public static final String CONFIG_KEY_READ_CHANGED_PATHS = "readChangedPaths";
+
+ /**
+ * The "useObjectSizeIndex" key
+ *
+ * @since 7.0
+ */
+ public static final String CONFIG_KEY_USE_OBJECT_SIZE_INDEX = "useObjectSizeIndex";
}
--
cgit v1.2.3
From b343442bdb8333ec31d4a17a82bb6e02aee3fbe6 Mon Sep 17 00:00:00 2001
From: Laura Hamelin
Date: Fri, 7 Jun 2024 16:12:18 -0700
Subject: DfsBlockCacheConfig: support configurations for dfs cache tables per
extensions
Parse configurations for tables containing a set of extensions,
defined in [core "dfs.*"] sections.
Parse configurations for cache tables according to configurations
defined in [core "dfs.*"] git config sections for sets of
extensions. The current [core "dfs"] is the default to any
extension not listed in any other table.
Configuration falls back to the defaults defined in the
DfsBlockCacheConfig.java file when not set on each cache
table configuration.
Sample format for individual cache tables:
In this example:
1. PACK types would go to the "default" table
2. INDEX and BITMAP_INDEX types would go to the
"multipleExtensionCache" table
3. REFTABLE types would go to the "reftableCache" table
[core "dfs"] // Configuration for the "default" cache table.
blockSize = 512
blockLimit = 100
concurrencyLevel = 5
(...)
[core "dfs.multipleExtensionCache"]
packExtensions = "INDEX BITMAP_INDEX"
blockSize = 512
blockLimit = 100
concurrencyLevel = 5
(...)
[core "dfs.reftableCache"]
packExtensions = "REFTABLE"
blockSize = 512
blockLimit = 100
concurrencyLevel = 5
(...)
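As a rough sketch of how such sections are consumed (mirroring the new
test; "config" is assumed to be a Config holding the sections above, and
getPackExtCacheConfigurations() is package-private, as in the test):
  DfsBlockCacheConfig cacheConfig = new DfsBlockCacheConfig()
      .fromConfig(config);
  // one DfsBlockCachePackExtConfig per [core "dfs.*"] section;
  // extensions not listed anywhere stay on the default table
  List<DfsBlockCachePackExtConfig> tables =
      cacheConfig.getPackExtCacheConfigurations();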
Change-Id: I0e534e6d78b684832e3d3d269cee2590aa0f1911
---
.../storage/dfs/DfsBlockCacheConfigTest.java | 159 ++++++++++++++++
.../org/eclipse/jgit/internal/JGitText.properties | 4 +
.../src/org/eclipse/jgit/internal/JGitText.java | 4 +
.../internal/storage/dfs/DfsBlockCacheConfig.java | 199 +++++++++++++++++----
.../src/org/eclipse/jgit/lib/ConfigConstants.java | 10 ++
5 files changed, 342 insertions(+), 34 deletions(-)
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java
index 2df0ba1b05..6ca0ff6a16 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java
@@ -38,11 +38,27 @@
package org.eclipse.jgit.internal.storage.dfs;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_CORE_SECTION;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_DFS_CACHE_PREFIX;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_DFS_SECTION;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_BLOCK_LIMIT;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_BLOCK_SIZE;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_CONCURRENCY_LEVEL;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_PACK_EXTENSIONS;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_STREAM_RATIO;
import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.closeTo;
+import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThrows;
+import java.util.List;
+import java.util.stream.Collectors;
+
import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCacheConfig.DfsBlockCachePackExtConfig;
+import org.eclipse.jgit.internal.storage.pack.PackExt;
+import org.eclipse.jgit.lib.Config;
import org.junit.Test;
public class DfsBlockCacheConfigTest {
@@ -80,4 +96,147 @@ public class DfsBlockCacheConfigTest {
assertThat(config.getBlockSize(), is(65536));
}
+
+ @Test
+ public void fromConfigs() {
+ Config config = new Config();
+ config.setLong(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+ CONFIG_KEY_BLOCK_LIMIT, 50 * 1024);
+ config.setInt(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+ CONFIG_KEY_BLOCK_SIZE, 1024);
+ config.setInt(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+ CONFIG_KEY_CONCURRENCY_LEVEL, 3);
+ config.setString(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+ CONFIG_KEY_STREAM_RATIO, "0.5");
+
+ DfsBlockCacheConfig cacheConfig = new DfsBlockCacheConfig()
+ .fromConfig(config);
+ assertThat(cacheConfig.getBlockLimit(), is(50L * 1024L));
+ assertThat(cacheConfig.getBlockSize(), is(1024));
+ assertThat(cacheConfig.getConcurrencyLevel(), is(3));
+ assertThat(cacheConfig.getStreamRatio(), closeTo(0.5, 0.0001));
+ }
+
+ @Test
+ public void fromConfig_blockLimitNotAMultipleOfBlockSize_throws() {
+ Config config = new Config();
+ config.setLong(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+ CONFIG_KEY_BLOCK_LIMIT, 1025);
+ config.setInt(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+ CONFIG_KEY_BLOCK_SIZE, 1024);
+
+ assertThrows(IllegalArgumentException.class,
+ () -> new DfsBlockCacheConfig().fromConfig(config));
+ }
+
+ @Test
+ public void fromConfig_streamRatioInvalidFormat_throws() {
+ Config config = new Config();
+ config.setString(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+ CONFIG_KEY_STREAM_RATIO, "0.a5");
+
+ assertThrows(IllegalArgumentException.class,
+ () -> new DfsBlockCacheConfig().fromConfig(config));
+ }
+
+ @Test
+ public void fromConfig_generatesDfsBlockCachePackExtConfigs() {
+ Config config = new Config();
+ addPackExtConfigEntry(config, "pack", List.of(PackExt.PACK),
+ /* blockLimit= */ 20 * 512, /* blockSize= */ 512);
+
+ addPackExtConfigEntry(config, "bitmap", List.of(PackExt.BITMAP_INDEX),
+ /* blockLimit= */ 25 * 1024, /* blockSize= */ 1024);
+
+ addPackExtConfigEntry(config, "index",
+ List.of(PackExt.INDEX, PackExt.OBJECT_SIZE_INDEX,
+ PackExt.REVERSE_INDEX),
+ /* blockLimit= */ 30 * 1024, /* blockSize= */ 1024);
+
+ DfsBlockCacheConfig cacheConfig = new DfsBlockCacheConfig()
+ .fromConfig(config);
+ var configs = cacheConfig.getPackExtCacheConfigurations();
+ assertThat(configs, hasSize(3));
+ var packConfig = getConfigForExt(configs, PackExt.PACK);
+ assertThat(packConfig.getBlockLimit(), is(20L * 512L));
+ assertThat(packConfig.getBlockSize(), is(512));
+
+ var bitmapConfig = getConfigForExt(configs, PackExt.BITMAP_INDEX);
+ assertThat(bitmapConfig.getBlockLimit(), is(25L * 1024L));
+ assertThat(bitmapConfig.getBlockSize(), is(1024));
+
+ var indexConfig = getConfigForExt(configs, PackExt.INDEX);
+ assertThat(indexConfig.getBlockLimit(), is(30L * 1024L));
+ assertThat(indexConfig.getBlockSize(), is(1024));
+ assertThat(getConfigForExt(configs, PackExt.OBJECT_SIZE_INDEX),
+ is(indexConfig));
+ assertThat(getConfigForExt(configs, PackExt.REVERSE_INDEX),
+ is(indexConfig));
+ }
+
+ @Test
+ public void fromConfigs_dfsBlockCachePackExtConfigWithDuplicateExtensions_throws() {
+ Config config = new Config();
+ config.setString(CONFIG_CORE_SECTION, CONFIG_DFS_CACHE_PREFIX + "pack1",
+ CONFIG_KEY_PACK_EXTENSIONS, PackExt.PACK.name());
+
+ config.setString(CONFIG_CORE_SECTION, CONFIG_DFS_CACHE_PREFIX + "pack2",
+ CONFIG_KEY_PACK_EXTENSIONS, PackExt.PACK.name());
+
+ assertThrows(IllegalArgumentException.class,
+ () -> new DfsBlockCacheConfig().fromConfig(config));
+ }
+
+ @Test
+ public void fromConfigs_dfsBlockCachePackExtConfigWithEmptyExtensions_throws() {
+ Config config = new Config();
+ config.setString(CONFIG_CORE_SECTION, CONFIG_DFS_CACHE_PREFIX + "pack1",
+ CONFIG_KEY_PACK_EXTENSIONS, "");
+
+ assertThrows(IllegalArgumentException.class,
+ () -> new DfsBlockCacheConfig().fromConfig(config));
+ }
+
+ @Test
+ public void fromConfigs_dfsBlockCachePackExtConfigWithNoExtensions_throws() {
+ Config config = new Config();
+ config.setInt(CONFIG_CORE_SECTION, CONFIG_DFS_CACHE_PREFIX + "pack1",
+ CONFIG_KEY_BLOCK_SIZE, 0);
+
+ assertThrows(IllegalArgumentException.class,
+ () -> new DfsBlockCacheConfig().fromConfig(config));
+ }
+
+ @Test
+ public void fromConfigs_dfsBlockCachePackExtConfigWithUnknownExtensions_throws() {
+ Config config = new Config();
+ config.setString(CONFIG_CORE_SECTION,
+ CONFIG_DFS_CACHE_PREFIX + "unknownExt",
+ CONFIG_KEY_PACK_EXTENSIONS, "NotAKnownExt");
+
+ assertThrows(IllegalArgumentException.class,
+ () -> new DfsBlockCacheConfig().fromConfig(config));
+ }
+
+ private static void addPackExtConfigEntry(Config config, String configName,
+ List<PackExt> packExts, long blockLimit, int blockSize) {
+ String packExtConfigName = CONFIG_DFS_CACHE_PREFIX + configName;
+ config.setString(CONFIG_CORE_SECTION, packExtConfigName,
+ CONFIG_KEY_PACK_EXTENSIONS, packExts.stream().map(PackExt::name)
+ .collect(Collectors.joining(" ")));
+ config.setLong(CONFIG_CORE_SECTION, packExtConfigName,
+ CONFIG_KEY_BLOCK_LIMIT, blockLimit);
+ config.setInt(CONFIG_CORE_SECTION, packExtConfigName,
+ CONFIG_KEY_BLOCK_SIZE, blockSize);
+ }
+
+ private static DfsBlockCacheConfig getConfigForExt(
+ List<DfsBlockCachePackExtConfig> configs, PackExt packExt) {
+ for (DfsBlockCachePackExtConfig config : configs) {
+ if (config.getPackExts().contains(packExt)) {
+ return config.getPackExtCacheConfiguration();
+ }
+ }
+ return null;
+ }
}
diff --git a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
index 19c90086aa..9d12facb33 100644
--- a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
+++ b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
@@ -285,6 +285,7 @@ DIRCUnrecognizedExtendedFlags=Unrecognized extended flags: {0}
downloadCancelled=Download cancelled
downloadCancelledDuringIndexing=Download cancelled during indexing
duplicateAdvertisementsOf=duplicate advertisements of {0}
+duplicatePackExtensionsSet=Attempting to configure duplicate pack extensions: {0}.{1}.{2} contains {3}
duplicateRef=Duplicate ref: {0}
duplicateRefAttribute=Duplicate ref attribute: {0}
duplicateRemoteRefUpdateIsIllegal=Duplicate remote ref update is illegal. Affected remote name: {0}
@@ -539,6 +540,8 @@ noMergeBase=No merge base could be determined. Reason={0}. {1}
noMergeHeadSpecified=No merge head specified
nonBareLinkFilesNotSupported=Link files are not supported with nonbare repos
nonCommitToHeads=Cannot point a branch to a non-commit object
+noPackExtConfigurationGiven=No PackExt configuration given
+noPackExtGivenForConfiguration=No PackExt given for configuration
noPathAttributesFound=No Attributes found for {0}.
noSuchRef=no such ref
noSuchRefKnown=no such ref: {0}
@@ -829,6 +832,7 @@ unknownObject=unknown object
unknownObjectInIndex=unknown object {0} found in index but not in pack file
unknownObjectType=Unknown object type {0}.
unknownObjectType2=unknown
+unknownPackExtension=Unknown pack extension: {0}.{1}.{2}={3}
unknownPositionEncoding=Unknown position encoding %s
unknownRefStorageFormat=Unknown ref storage format "{0}"
unknownRepositoryFormat=Unknown repository format
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
index 700b54a7a6..311d9c22aa 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
@@ -315,6 +315,7 @@ public class JGitText extends TranslationBundle {
/***/ public String downloadCancelled;
/***/ public String downloadCancelledDuringIndexing;
/***/ public String duplicateAdvertisementsOf;
+ /***/ public String duplicatePackExtensionsSet;
/***/ public String duplicateRef;
/***/ public String duplicateRefAttribute;
/***/ public String duplicateRemoteRefUpdateIsIllegal;
@@ -569,6 +570,8 @@ public class JGitText extends TranslationBundle {
/***/ public String noMergeHeadSpecified;
/***/ public String nonBareLinkFilesNotSupported;
/***/ public String nonCommitToHeads;
+ /***/ public String noPackExtConfigurationGiven;
+ /***/ public String noPackExtGivenForConfiguration;
/***/ public String noPathAttributesFound;
/***/ public String noSuchRef;
/***/ public String noSuchRefKnown;
@@ -858,6 +861,7 @@ public class JGitText extends TranslationBundle {
/***/ public String unknownObjectInIndex;
/***/ public String unknownObjectType;
/***/ public String unknownObjectType2;
+ /***/ public String unknownPackExtension;
/***/ public String unknownPositionEncoding;
/***/ public String unknownRefStorageFormat;
/***/ public String unknownRepositoryFormat;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
index 77273cec61..fa86701de8 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
@@ -11,17 +11,25 @@
package org.eclipse.jgit.internal.storage.dfs;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_CORE_SECTION;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_DFS_CACHE_PREFIX;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_DFS_SECTION;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_BLOCK_LIMIT;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_BLOCK_SIZE;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_CONCURRENCY_LEVEL;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_PACK_EXTENSIONS;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_STREAM_RATIO;
import java.text.MessageFormat;
import java.time.Duration;
+import java.util.ArrayList;
import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.function.Consumer;
+import java.util.stream.Collectors;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.pack.PackExt;
@@ -42,15 +50,21 @@ public class DfsBlockCacheConfig {
public static final int DEFAULT_CACHE_HOT_MAX = 1;
private long blockLimit;
+
private int blockSize;
+
private double streamRatio;
+
private int concurrencyLevel;
private Consumer refLock;
+
private Map cacheHotMap;
private IndexEventConsumer indexEventConsumer;
+ private List<DfsBlockCachePackExtConfig> packExtCacheConfigurations;
+
/**
* Create a default configuration.
*/
@@ -60,6 +74,7 @@ public class DfsBlockCacheConfig {
setStreamRatio(0.30);
setConcurrencyLevel(32);
cacheHotMap = Collections.emptyMap();
+ packExtCacheConfigurations = Collections.emptyList();
}
/**
@@ -77,10 +92,10 @@ public class DfsBlockCacheConfig {
* Set maximum number bytes of heap memory to dedicate to caching pack file
* data.
*
- * It is strongly recommended to set the block limit to be an integer multiple
- * of the block size. This constraint is not enforced by this method (since
- * it may be called before {@link #setBlockSize(int)}), but it is enforced by
- * {@link #fromConfig(Config)}.
+ * It is strongly recommended to set the block limit to be an integer
+ * multiple of the block size. This constraint is not enforced by this
+ * method (since it may be called before {@link #setBlockSize(int)}), but it
+ * is enforced by {@link #fromConfig(Config)}.
*
* @param newLimit
* maximum number bytes of heap memory to dedicate to caching
@@ -89,9 +104,9 @@ public class DfsBlockCacheConfig {
*/
public DfsBlockCacheConfig setBlockLimit(long newLimit) {
if (newLimit <= 0) {
- throw new IllegalArgumentException(MessageFormat.format(
- JGitText.get().blockLimitNotPositive,
- Long.valueOf(newLimit)));
+ throw new IllegalArgumentException(
+ MessageFormat.format(JGitText.get().blockLimitNotPositive,
+ Long.valueOf(newLimit)));
}
blockLimit = newLimit;
return this;
@@ -239,62 +254,101 @@ public class DfsBlockCacheConfig {
return this;
}
+ /**
+ * Get the list of pack ext cache configs.
+ *
+ * @return the list of pack ext cache configs.
+ */
+ List<DfsBlockCachePackExtConfig> getPackExtCacheConfigurations() {
+ return packExtCacheConfigurations;
+ }
+
/**
* Update properties by setting fields from the configuration.
*
* If a property is not defined in the configuration, then it is left
* unmodified.
*
- * Enforces certain constraints on the combination of settings in the config,
- * for example that the block limit is a multiple of the block size.
+ * Enforces certain constraints on the combination of settings in the
+ * config, for example that the block limit is a multiple of the block size.
*
* @param rc
* configuration to read properties from.
* @return {@code this}
*/
public DfsBlockCacheConfig fromConfig(Config rc) {
- long cfgBlockLimit = rc.getLong(
- CONFIG_CORE_SECTION,
- CONFIG_DFS_SECTION,
- CONFIG_KEY_BLOCK_LIMIT,
- getBlockLimit());
- int cfgBlockSize = rc.getInt(
- CONFIG_CORE_SECTION,
- CONFIG_DFS_SECTION,
- CONFIG_KEY_BLOCK_SIZE,
+ fromConfig(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION, rc);
+ loadPackExtConfigs(rc);
+ return this;
+ }
+
+ private void fromConfig(String section, String subSection, Config rc) {
+ long cfgBlockLimit = rc.getLong(section, subSection,
+ CONFIG_KEY_BLOCK_LIMIT, getBlockLimit());
+ int cfgBlockSize = rc.getInt(section, subSection, CONFIG_KEY_BLOCK_SIZE,
getBlockSize());
if (cfgBlockLimit % cfgBlockSize != 0) {
throw new IllegalArgumentException(MessageFormat.format(
JGitText.get().blockLimitNotMultipleOfBlockSize,
- Long.valueOf(cfgBlockLimit),
- Long.valueOf(cfgBlockSize)));
+ Long.valueOf(cfgBlockLimit), Long.valueOf(cfgBlockSize)));
}
setBlockLimit(cfgBlockLimit);
setBlockSize(cfgBlockSize);
- setConcurrencyLevel(rc.getInt(
- CONFIG_CORE_SECTION,
- CONFIG_DFS_SECTION,
- CONFIG_KEY_CONCURRENCY_LEVEL,
- getConcurrencyLevel()));
+ setConcurrencyLevel(rc.getInt(section, subSection,
+ CONFIG_KEY_CONCURRENCY_LEVEL, getConcurrencyLevel()));
- String v = rc.getString(
- CONFIG_CORE_SECTION,
- CONFIG_DFS_SECTION,
- CONFIG_KEY_STREAM_RATIO);
+ String v = rc.getString(section, subSection, CONFIG_KEY_STREAM_RATIO);
if (v != null) {
try {
setStreamRatio(Double.parseDouble(v));
} catch (NumberFormatException e) {
throw new IllegalArgumentException(MessageFormat.format(
- JGitText.get().enumValueNotSupported3,
- CONFIG_CORE_SECTION,
- CONFIG_DFS_SECTION,
- CONFIG_KEY_STREAM_RATIO, v), e);
+ JGitText.get().enumValueNotSupported3, section,
+ subSection, CONFIG_KEY_STREAM_RATIO, v), e);
}
}
- return this;
+ }
+
+ private void loadPackExtConfigs(Config config) {
+ List<String> subSections = config.getSubsections(CONFIG_CORE_SECTION)
+ .stream()
+ .filter(section -> section.startsWith(CONFIG_DFS_CACHE_PREFIX))
+ .collect(Collectors.toList());
+ if (subSections.size() == 0) {
+ return;
+ }
+ ArrayList<DfsBlockCachePackExtConfig> cacheConfigs = new ArrayList<>();
+ Set<PackExt> extensionsSeen = new HashSet<>();
+ for (String subSection : subSections) {
+ var cacheConfig = DfsBlockCachePackExtConfig.fromConfig(config,
+ CONFIG_CORE_SECTION, subSection);
+ Set<PackExt> packExtsDuplicates = intersection(extensionsSeen,
+ cacheConfig.packExts);
+ if (packExtsDuplicates.size() > 0) {
+ String duplicatePackExts = packExtsDuplicates.stream()
+ .map(PackExt::toString)
+ .collect(Collectors.joining(","));
+ throw new IllegalArgumentException(MessageFormat.format(
+ JGitText.get().duplicatePackExtensionsSet,
+ CONFIG_CORE_SECTION, subSection,
+ CONFIG_KEY_PACK_EXTENSIONS, duplicatePackExts));
+ }
+ extensionsSeen.addAll(cacheConfig.packExts);
+ cacheConfigs.add(cacheConfig);
+ }
+ packExtCacheConfigurations = cacheConfigs;
+ }
+
+ private static <T> Set<T> intersection(Set<T> first, Set<T> second) {
+ Set<T> ret = new HashSet<>();
+ for (T entry : second) {
+ if (first.contains(entry)) {
+ ret.add(entry);
+ }
+ }
+ return ret;
}
/** Consumer of DfsBlockCache loading and eviction events for indexes. */
@@ -346,4 +400,81 @@ public class DfsBlockCacheConfig {
return false;
}
}
+
+ /**
+ * A configuration for a single cache table storing one or more pack
+ * extensions.
+ *
+ * The current pack ext cache tables implementation supports the same
+ * parameters as the ClockBlockCacheTable (the current default
+ * implementation).
+ *
+ * Configuration falls back to the default values coded in
+ * {@link DfsBlockCacheConfig} when not set on a cache table
+ * configuration, and NOT to the values of the basic dfs section.
+ *
+ *
+ *
+ * Format:
+ * [core "dfs.packCache"]
+ * packExtensions = "PACK"
+ * blockSize = 512
+ * blockLimit = 100
+ * concurrencyLevel = 5
+ *
+ * [core "dfs.multipleExtensionCache"]
+ * packExtensions = "INDEX REFTABLE BITMAP_INDEX"
+ * blockSize = 512
+ * blockLimit = 100
+ * concurrencyLevel = 5
+ *
+ */
+ static class DfsBlockCachePackExtConfig {
+ // Set of pack extensions that will map to the cache instance.
+ private final EnumSet<PackExt> packExts;
+
+ // Configuration for the cache instance.
+ private final DfsBlockCacheConfig packExtCacheConfiguration;
+
+ private DfsBlockCachePackExtConfig(EnumSet<PackExt> packExts,
+ DfsBlockCacheConfig packExtCacheConfiguration) {
+ this.packExts = packExts;
+ this.packExtCacheConfiguration = packExtCacheConfiguration;
+ }
+
+ Set<PackExt> getPackExts() {
+ return packExts;
+ }
+
+ DfsBlockCacheConfig getPackExtCacheConfiguration() {
+ return packExtCacheConfiguration;
+ }
+
+ private static DfsBlockCachePackExtConfig fromConfig(Config config,
+ String section, String subSection) {
+ String packExtensions = config.getString(section, subSection,
+ CONFIG_KEY_PACK_EXTENSIONS);
+ if (packExtensions == null) {
+ throw new IllegalArgumentException(
+ JGitText.get().noPackExtGivenForConfiguration);
+ }
+ String[] extensions = packExtensions.split(" ", -1);
+ Set<PackExt> packExts = new HashSet<>(extensions.length);
+ for (String extension : extensions) {
+ try {
+ packExts.add(PackExt.valueOf(extension));
+ } catch (IllegalArgumentException e) {
+ throw new IllegalArgumentException(MessageFormat.format(
+ JGitText.get().unknownPackExtension, section,
+ subSection, CONFIG_KEY_PACK_EXTENSIONS, extension),
+ e);
+ }
+ }
+
+ DfsBlockCacheConfig dfsBlockCacheConfig = new DfsBlockCacheConfig();
+ dfsBlockCacheConfig.fromConfig(section, subSection, config);
+ return new DfsBlockCachePackExtConfig(EnumSet.copyOf(packExts),
+ dfsBlockCacheConfig);
+ }
+
+ }
}
\ No newline at end of file
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
index 0edf3c5ad0..61db6a0762 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
@@ -77,6 +77,9 @@ public final class ConfigConstants {
/** The "dfs" section */
public static final String CONFIG_DFS_SECTION = "dfs";
+ /** The dfs cache subsection prefix */
+ public static final String CONFIG_DFS_CACHE_PREFIX = "dfs.";
+
/**
* The "receive" section
* @since 4.6
@@ -331,6 +334,13 @@ public final class ConfigConstants {
/** The "deltaBaseCacheLimit" key */
public static final String CONFIG_KEY_DELTA_BASE_CACHE_LIMIT = "deltaBaseCacheLimit";
+ /**
+ * The "packExtensions" key
+ *
+ * @since 7.0
+ **/
+ public static final String CONFIG_KEY_PACK_EXTENSIONS = "packExtensions";
+
/**
* The "symlinks" key
* @since 3.3
--
cgit v1.2.3
From f9beeb3b33e438fa3a10eb3871206573ae986abb Mon Sep 17 00:00:00 2001
From: Janne Valkealahti
Date: Thu, 4 Jul 2024 07:44:49 +0100
Subject: Add worktrees read support
Based on Andre's earlier derivative work in [1].
This change focuses on adding support for reading the repository
state when branches are checked out using git's worktrees.
I've refactored the original work by removing all unrelated
changes, which were mostly refactorings to extract e.g.
constants and only created noise for review.
I've tried to address the original review comments:
- Not adding non-behavioral changes
- "HEAD" should get resolved from gitDir
- Reftable recently landed in cgit 2.45,
see https://github.com/git/git/blob/master/Documentation/RelNotes/2.45.0.txt#L8
We can add worktree support for reftable in a later change.
- Some new tests read from a linked worktree that is
created manually, since there is no write support yet.
[1] https://git.eclipse.org/r/c/jgit/jgit/+/163940/18
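As an illustration of the read support (adapted from the new
LinkedWorktreeTest; "mainRepo" and the worktree name "wt" are just
placeholders here):
  File worktreeGitDir = new File(
      new File(mainRepo.getDirectory(), "worktrees"), "wt");
  try (Repository repo = new FileRepository(worktreeGitDir)) {
      // HEAD is resolved via the linked worktree's own gitdir
      ObjectId head = repo.resolve(Constants.HEAD);
  }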
Change-Id: Id077d58fb6c09ecb090eb09d5dbc7edc351a581d
---
.../src/org/eclipse/jgit/pgm/Config.java | 2 +-
.../org/eclipse/jgit/api/EolRepositoryTest.java | 2 +-
.../org/eclipse/jgit/api/LinkedWorktreeTest.java | 192 +++++++++++++++++++++
.../jgit/attributes/AttributesHandlerTest.java | 2 +-
.../internal/storage/file/BatchRefUpdateTest.java | 2 +-
.../jgit/internal/storage/file/GcPackRefsTest.java | 2 +-
.../jgit/internal/storage/file/GcReflogTest.java | 2 +-
.../internal/storage/file/RefDirectoryTest.java | 31 ++--
.../internal/storage/file/ReflogReaderTest.java | 2 +-
.../internal/storage/file/ReflogWriterTest.java | 2 +-
.../org/eclipse/jgit/api/SubmoduleAddCommand.java | 2 +-
.../eclipse/jgit/api/SubmoduleUpdateCommand.java | 2 +-
.../eclipse/jgit/dircache/DirCacheCheckout.java | 2 +
.../storage/file/FileReftableDatabase.java | 8 +-
.../jgit/internal/storage/file/FileRepository.java | 39 +++--
.../org/eclipse/jgit/internal/storage/file/GC.java | 2 +-
.../eclipse/jgit/internal/storage/file/GcLog.java | 2 +-
.../internal/storage/file/InfoAttributesNode.java | 2 +-
.../jgit/internal/storage/file/RefDirectory.java | 20 ++-
.../internal/storage/file/ReflogReaderImpl.java | 6 +-
.../eclipse/jgit/lib/BaseRepositoryBuilder.java | 116 +++++++++++--
.../src/org/eclipse/jgit/lib/Constants.java | 52 ++++++
.../src/org/eclipse/jgit/lib/IndexDiff.java | 2 +-
.../src/org/eclipse/jgit/lib/Repository.java | 16 +-
.../src/org/eclipse/jgit/lib/RepositoryCache.java | 51 ++++--
.../eclipse/jgit/transport/TransportGitSsh.java | 6 +
.../org/eclipse/jgit/transport/TransportLocal.java | 1 +
.../eclipse/jgit/treewalk/WorkingTreeIterator.java | 4 +-
org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java | 33 +++-
29 files changed, 520 insertions(+), 85 deletions(-)
create mode 100644 org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LinkedWorktreeTest.java
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java
index 52f40c2957..f5de7045d0 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java
@@ -94,7 +94,7 @@ class Config extends TextBuiltin {
if (global || isListAll())
list(SystemReader.getInstance().openUserConfig(null, fs));
if (local || isListAll())
- list(new FileBasedConfig(fs.resolve(getRepository().getDirectory(),
+ list(new FileBasedConfig(fs.resolve(getRepository().getCommonDirectory(),
Constants.CONFIG), fs));
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/EolRepositoryTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/EolRepositoryTest.java
index b937b1f6a9..4c971ffb6b 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/EolRepositoryTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/EolRepositoryTest.java
@@ -559,7 +559,7 @@ public class EolRepositoryTest extends RepositoryTestCase {
}
if (infoAttributesContent != null) {
- File f = new File(db.getDirectory(), Constants.INFO_ATTRIBUTES);
+ File f = new File(db.getCommonDirectory(), Constants.INFO_ATTRIBUTES);
write(f, infoAttributesContent);
}
config.save();
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LinkedWorktreeTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LinkedWorktreeTest.java
new file mode 100644
index 0000000000..3b60e1b5c0
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LinkedWorktreeTest.java
@@ -0,0 +1,192 @@
+/*
+ * Copyright (C) 2024, Broadcom and others
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * https://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+package org.eclipse.jgit.api;
+
+import static org.eclipse.jgit.lib.Constants.HEAD;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Collection;
+import java.util.Iterator;
+
+import org.eclipse.jgit.internal.storage.file.FileRepository;
+import org.eclipse.jgit.junit.JGitTestUtil;
+import org.eclipse.jgit.junit.RepositoryTestCase;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ReflogEntry;
+import org.eclipse.jgit.revwalk.RevCommit;
+import org.eclipse.jgit.util.FS;
+import org.eclipse.jgit.util.FS.ExecutionResult;
+import org.eclipse.jgit.util.RawParseUtils;
+import org.eclipse.jgit.util.TemporaryBuffer;
+import org.junit.Test;
+
+public class LinkedWorktreeTest extends RepositoryTestCase {
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+
+ try (Git git = new Git(db)) {
+ git.commit().setMessage("Initial commit").call();
+ }
+ }
+
+ @Test
+ public void testWeCanReadFromLinkedWorktreeFromBare() throws Exception {
+ FS fs = db.getFS();
+ File directory = trash.getParentFile();
+ String dbDirName = db.getWorkTree().getName();
+ cloneBare(fs, directory, dbDirName, "bare");
+ File bareDirectory = new File(directory, "bare");
+ worktreeAddExisting(fs, bareDirectory, "master");
+
+ File worktreesDir = new File(bareDirectory, "worktrees");
+ File masterWorktreesDir = new File(worktreesDir, "master");
+
+ FileRepository repository = new FileRepository(masterWorktreesDir);
+ try (Git git = new Git(repository)) {
+ ObjectId objectId = repository.resolve(HEAD);
+ assertNotNull(objectId);
+
+ Iterator<RevCommit> log = git.log().all().call().iterator();
+ assertTrue(log.hasNext());
+ assertTrue("Initial commit".equals(log.next().getShortMessage()));
+
+ // we have reflog entry
+ // depending on git version we either have one or
+ // two entries where extra is zeroid entry with
+ // same message or no message
+ Collection<ReflogEntry> reflog = git.reflog().call();
+ assertNotNull(reflog);
+ assertTrue(reflog.size() > 0);
+ ReflogEntry[] reflogs = reflog.toArray(new ReflogEntry[0]);
+ assertEquals(reflogs[reflogs.length - 1].getComment(),
+ "reset: moving to HEAD");
+
+ // index works with file changes
+ File masterDir = new File(directory, "master");
+ File testFile = new File(masterDir, "test");
+
+ Status status = git.status().call();
+ assertTrue(status.getUncommittedChanges().size() == 0);
+ assertTrue(status.getUntracked().size() == 0);
+
+ JGitTestUtil.write(testFile, "test");
+ status = git.status().call();
+ assertTrue(status.getUncommittedChanges().size() == 0);
+ assertTrue(status.getUntracked().size() == 1);
+
+ git.add().addFilepattern("test").call();
+ status = git.status().call();
+ assertTrue(status.getUncommittedChanges().size() == 1);
+ assertTrue(status.getUntracked().size() == 0);
+ }
+ }
+
+ @Test
+ public void testWeCanReadFromLinkedWorktreeFromNonBare() throws Exception {
+ FS fs = db.getFS();
+ worktreeAddNew(fs, db.getWorkTree(), "wt");
+
+ File worktreesDir = new File(db.getDirectory(), "worktrees");
+ File masterWorktreesDir = new File(worktreesDir, "wt");
+
+ FileRepository repository = new FileRepository(masterWorktreesDir);
+ try (Git git = new Git(repository)) {
+ ObjectId objectId = repository.resolve(HEAD);
+ assertNotNull(objectId);
+
+ Iterator<RevCommit> log = git.log().all().call().iterator();
+ assertTrue(log.hasNext());
+ assertTrue("Initial commit".equals(log.next().getShortMessage()));
+
+ // we have reflog entry
+ Collection<ReflogEntry> reflog = git.reflog().call();
+ assertNotNull(reflog);
+ assertTrue(reflog.size() > 0);
+ ReflogEntry[] reflogs = reflog.toArray(new ReflogEntry[0]);
+ assertEquals(reflogs[reflogs.length - 1].getComment(),
+ "reset: moving to HEAD");
+
+ // index works with file changes
+ File directory = trash.getParentFile();
+ File wtDir = new File(directory, "wt");
+ File testFile = new File(wtDir, "test");
+
+ Status status = git.status().call();
+ assertTrue(status.getUncommittedChanges().size() == 0);
+ assertTrue(status.getUntracked().size() == 0);
+
+ JGitTestUtil.write(testFile, "test");
+ status = git.status().call();
+ assertTrue(status.getUncommittedChanges().size() == 0);
+ assertTrue(status.getUntracked().size() == 1);
+
+ git.add().addFilepattern("test").call();
+ status = git.status().call();
+ assertTrue(status.getUncommittedChanges().size() == 1);
+ assertTrue(status.getUntracked().size() == 0);
+ }
+
+ }
+
+ private static void cloneBare(FS fs, File directory, String from, String to) throws IOException, InterruptedException {
+ ProcessBuilder builder = fs.runInShell("git",
+ new String[] { "clone", "--bare", from, to });
+ builder.directory(directory);
+ builder.environment().put("HOME", fs.userHome().getAbsolutePath());
+ StringBuilder input = new StringBuilder();
+ ExecutionResult result = fs.execute(builder, new ByteArrayInputStream(
+ input.toString().getBytes(StandardCharsets.UTF_8)));
+ String stdOut = toString(result.getStdout());
+ String errorOut = toString(result.getStderr());
+ assertNotNull(stdOut);
+ assertNotNull(errorOut);
+ }
+
+ private static void worktreeAddExisting(FS fs, File directory, String name) throws IOException, InterruptedException {
+ ProcessBuilder builder = fs.runInShell("git",
+ new String[] { "worktree", "add", "../" + name, name });
+ builder.directory(directory);
+ builder.environment().put("HOME", fs.userHome().getAbsolutePath());
+ StringBuilder input = new StringBuilder();
+ ExecutionResult result = fs.execute(builder, new ByteArrayInputStream(
+ input.toString().getBytes(StandardCharsets.UTF_8)));
+ String stdOut = toString(result.getStdout());
+ String errorOut = toString(result.getStderr());
+ assertNotNull(stdOut);
+ assertNotNull(errorOut);
+ }
+
+ private static void worktreeAddNew(FS fs, File directory, String name) throws IOException, InterruptedException {
+ ProcessBuilder builder = fs.runInShell("git",
+ new String[] { "worktree", "add", "-b", name, "../" + name, "master"});
+ builder.directory(directory);
+ builder.environment().put("HOME", fs.userHome().getAbsolutePath());
+ StringBuilder input = new StringBuilder();
+ ExecutionResult result = fs.execute(builder, new ByteArrayInputStream(
+ input.toString().getBytes(StandardCharsets.UTF_8)));
+ String stdOut = toString(result.getStdout());
+ String errorOut = toString(result.getStderr());
+ assertNotNull(stdOut);
+ assertNotNull(errorOut);
+ }
+
+ private static String toString(TemporaryBuffer b) throws IOException {
+ return RawParseUtils.decode(b.toByteArray());
+ }
+
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java
index 7fb98ec53b..c41dd81add 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java
@@ -584,7 +584,7 @@ public class AttributesHandlerTest extends RepositoryTestCase {
}
if (infoAttributesContent != null) {
- File f = new File(db.getDirectory(), Constants.INFO_ATTRIBUTES);
+ File f = new File(db.getCommonDirectory(), Constants.INFO_ATTRIBUTES);
write(f, infoAttributesContent);
}
config.save();
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
index daf4382719..1af42cb229 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
@@ -171,7 +171,7 @@ public class BatchRefUpdateTest extends LocalDiskRepositoryTestCase {
assertEquals(c2.getResult(), ReceiveCommand.Result.OK);
}
- File packed = new File(diskRepo.getDirectory(), "packed-refs");
+ File packed = new File(diskRepo.getCommonDirectory(), "packed-refs");
String packedStr = new String(Files.readAllBytes(packed.toPath()),
UTF_8);
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcPackRefsTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcPackRefsTest.java
index 8baa3cc341..c57295518d 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcPackRefsTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcPackRefsTest.java
@@ -58,7 +58,7 @@ public class GcPackRefsTest extends GcTestCase {
String ref = "dir/ref";
tr.branch(ref).commit().create();
String name = repo.findRef(ref).getName();
- Path dir = repo.getDirectory().toPath().resolve(name).getParent();
+ Path dir = repo.getCommonDirectory().toPath().resolve(name).getParent();
assertNotNull(dir);
gc.packRefs();
assertFalse(Files.exists(dir));
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcReflogTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcReflogTest.java
index e6c1ee5fd6..29f180d76b 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcReflogTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcReflogTest.java
@@ -30,7 +30,7 @@ public class GcReflogTest extends GcTestCase {
BranchBuilder bb = tr.branch("refs/heads/master");
bb.commit().add("A", "A").add("B", "B").create();
bb.commit().add("A", "A2").add("B", "B2").create();
- new File(repo.getDirectory(), Constants.LOGS + "/refs/heads/master")
+ new File(repo.getCommonDirectory(), Constants.LOGS + "/refs/heads/master")
.delete();
stats = gc.getStatistics();
assertEquals(8, stats.numberOfLooseObjects);
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
index 2bafde65d3..baa0182b87 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
@@ -90,25 +90,26 @@ public class RefDirectoryTest extends LocalDiskRepositoryTestCase {
@Test
public void testCreate() throws IOException {
// setUp above created the directory. We just have to test it.
- File d = diskRepo.getDirectory();
+ File gitDir = diskRepo.getDirectory();
+ File commonDir = diskRepo.getCommonDirectory();
assertSame(diskRepo, refdir.getRepository());
- assertTrue(new File(d, "refs").isDirectory());
- assertTrue(new File(d, "logs").isDirectory());
- assertTrue(new File(d, "logs/refs").isDirectory());
- assertFalse(new File(d, "packed-refs").exists());
+ assertTrue(new File(commonDir, "refs").isDirectory());
+ assertTrue(new File(commonDir, "logs").isDirectory());
+ assertTrue(new File(commonDir, "logs/refs").isDirectory());
+ assertFalse(new File(commonDir, "packed-refs").exists());
- assertTrue(new File(d, "refs/heads").isDirectory());
- assertTrue(new File(d, "refs/tags").isDirectory());
- assertEquals(2, new File(d, "refs").list().length);
- assertEquals(0, new File(d, "refs/heads").list().length);
- assertEquals(0, new File(d, "refs/tags").list().length);
+ assertTrue(new File(commonDir, "refs/heads").isDirectory());
+ assertTrue(new File(commonDir, "refs/tags").isDirectory());
+ assertEquals(2, new File(commonDir, "refs").list().length);
+ assertEquals(0, new File(commonDir, "refs/heads").list().length);
+ assertEquals(0, new File(commonDir, "refs/tags").list().length);
- assertTrue(new File(d, "logs/refs/heads").isDirectory());
- assertFalse(new File(d, "logs/HEAD").exists());
- assertEquals(0, new File(d, "logs/refs/heads").list().length);
+ assertTrue(new File(commonDir, "logs/refs/heads").isDirectory());
+ assertFalse(new File(gitDir, "logs/HEAD").exists());
+ assertEquals(0, new File(commonDir, "logs/refs/heads").list().length);
- assertEquals("ref: refs/heads/master\n", read(new File(d, HEAD)));
+ assertEquals("ref: refs/heads/master\n", read(new File(gitDir, HEAD)));
}
@Test(expected = UnsupportedOperationException.class)
@@ -1382,7 +1383,7 @@ public class RefDirectoryTest extends LocalDiskRepositoryTestCase {
}
private void deleteLooseRef(String name) {
- File path = new File(diskRepo.getDirectory(), name);
+ File path = new File(diskRepo.getCommonDirectory(), name);
assertTrue("deleted " + name, path.delete());
}
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogReaderTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogReaderTest.java
index dc0e749373..eb521ff9eb 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogReaderTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogReaderTest.java
@@ -238,7 +238,7 @@ public class ReflogReaderTest extends SampleDataRepositoryTestCase {
private void setupReflog(String logName, byte[] data)
throws FileNotFoundException, IOException {
- File logfile = new File(db.getDirectory(), logName);
+ File logfile = new File(db.getCommonDirectory(), logName);
if (!logfile.getParentFile().mkdirs()
&& !logfile.getParentFile().isDirectory()) {
throw new IOException(
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogWriterTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogWriterTest.java
index 8d0e99dea0..8e9b7b84bd 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogWriterTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogWriterTest.java
@@ -48,7 +48,7 @@ public class ReflogWriterTest extends SampleDataRepositoryTestCase {
private void readReflog(byte[] buffer)
throws FileNotFoundException, IOException {
- File logfile = new File(db.getDirectory(), "logs/refs/heads/master");
+ File logfile = new File(db.getCommonDirectory(), "logs/refs/heads/master");
if (!logfile.getParentFile().mkdirs()
&& !logfile.getParentFile().isDirectory()) {
throw new IOException(
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleAddCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleAddCommand.java
index 8fb5d60b85..401f069e4e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleAddCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleAddCommand.java
@@ -176,7 +176,7 @@ public class SubmoduleAddCommand extends
CloneCommand clone = Git.cloneRepository();
configure(clone);
clone.setDirectory(moduleDirectory);
- clone.setGitDir(new File(new File(repo.getDirectory(),
+ clone.setGitDir(new File(new File(repo.getCommonDirectory(),
Constants.MODULES), path));
clone.setURI(resolvedUri);
if (monitor != null)
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java
index df73164161..751dabcd6b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java
@@ -128,7 +128,7 @@ public class SubmoduleUpdateCommand extends
clone.setURI(url);
clone.setDirectory(generator.getDirectory());
clone.setGitDir(
- new File(new File(repo.getDirectory(), Constants.MODULES),
+ new File(new File(repo.getCommonDirectory(), Constants.MODULES),
generator.getPath()));
if (monitor != null) {
clone.setProgressMonitor(monitor);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
index 6ae5153c12..fa0a82fd58 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
@@ -1647,6 +1647,8 @@ public class DirCacheCheckout {
filterProcessBuilder.directory(repo.getWorkTree());
filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
repo.getDirectory().getAbsolutePath());
+ filterProcessBuilder.environment().put(Constants.GIT_COMMON_DIR_KEY,
+ repo.getCommonDirectory().getAbsolutePath());
ExecutionResult result;
int rc;
try {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java
index ed2516ddd0..80240e5062 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java
@@ -68,14 +68,14 @@ public class FileReftableDatabase extends RefDatabase {
private final FileReftableStack reftableStack;
FileReftableDatabase(FileRepository repo) throws IOException {
- this(repo, new File(new File(repo.getDirectory(), Constants.REFTABLE),
+ this(repo, new File(new File(repo.getCommonDirectory(), Constants.REFTABLE),
Constants.TABLES_LIST));
}
FileReftableDatabase(FileRepository repo, File refstackName) throws IOException {
this.fileRepository = repo;
this.reftableStack = new FileReftableStack(refstackName,
- new File(fileRepository.getDirectory(), Constants.REFTABLE),
+ new File(fileRepository.getCommonDirectory(), Constants.REFTABLE),
() -> fileRepository.fireEvent(new RefsChangedEvent()),
() -> fileRepository.getConfig());
this.reftableDatabase = new ReftableDatabase() {
@@ -318,7 +318,7 @@ public class FileReftableDatabase extends RefDatabase {
@Override
public void create() throws IOException {
FileUtils.mkdir(
- new File(fileRepository.getDirectory(), Constants.REFTABLE),
+ new File(fileRepository.getCommonDirectory(), Constants.REFTABLE),
true);
}
@@ -615,7 +615,7 @@ public class FileReftableDatabase extends RefDatabase {
FileReftableDatabase newDb = null;
File reftableList = null;
try {
- File reftableDir = new File(repo.getDirectory(),
+ File reftableDir = new File(repo.getCommonDirectory(),
Constants.REFTABLE);
reftableList = new File(reftableDir, Constants.TABLES_LIST);
if (!reftableDir.isDirectory()) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
index e5a00d3925..b5d29a3fc8 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
@@ -165,7 +165,7 @@ public class FileRepository extends Repository {
throw new IOException(e.getMessage(), e);
}
repoConfig = new FileBasedConfig(userConfig, getFS().resolve(
- getDirectory(), Constants.CONFIG),
+ getCommonDirectory(), Constants.CONFIG),
getFS());
loadRepoConfig();
@@ -193,7 +193,7 @@ public class FileRepository extends Repository {
options.getObjectDirectory(), //
options.getAlternateObjectDirectories(), //
getFS(), //
- new File(getDirectory(), Constants.SHALLOW));
+ new File(getCommonDirectory(), Constants.SHALLOW));
if (objectDatabase.exists()) {
if (repositoryFormatVersion > 1)
@@ -622,16 +622,17 @@ public class FileRepository extends Repository {
* on IO problem
*/
void convertToPackedRefs(boolean writeLogs, boolean backup) throws IOException {
+ File commonDirectory = getCommonDirectory();
List<Ref> all = refs.getRefs();
- File packedRefs = new File(getDirectory(), Constants.PACKED_REFS);
+ File packedRefs = new File(commonDirectory, Constants.PACKED_REFS);
if (packedRefs.exists()) {
throw new IOException(MessageFormat.format(JGitText.get().fileAlreadyExists,
packedRefs.getName()));
}
- File refsFile = new File(getDirectory(), "refs"); //$NON-NLS-1$
+ File refsFile = new File(commonDirectory, "refs"); //$NON-NLS-1$
File refsHeadsFile = new File(refsFile, "heads");//$NON-NLS-1$
- File headFile = new File(getDirectory(), Constants.HEAD);
+ File headFile = new File(commonDirectory, Constants.HEAD);
FileReftableDatabase oldDb = (FileReftableDatabase) refs;
// Remove the dummy files that ensure compatibility with older git
@@ -701,7 +702,7 @@ public class FileRepository extends Repository {
}
if (!backup) {
- File reftableDir = new File(getDirectory(), Constants.REFTABLE);
+ File reftableDir = new File(commonDirectory, Constants.REFTABLE);
FileUtils.delete(reftableDir,
FileUtils.RECURSIVE | FileUtils.IGNORE_ERRORS);
}
@@ -730,8 +731,10 @@ public class FileRepository extends Repository {
@SuppressWarnings("nls")
void convertToReftable(boolean writeLogs, boolean backup)
throws IOException {
- File reftableDir = new File(getDirectory(), Constants.REFTABLE);
- File headFile = new File(getDirectory(), Constants.HEAD);
+ File commonDirectory = getCommonDirectory();
+ File directory = getDirectory();
+ File reftableDir = new File(commonDirectory, Constants.REFTABLE);
+ File headFile = new File(directory, Constants.HEAD);
if (reftableDir.exists() && FileUtils.hasFiles(reftableDir.toPath())) {
throw new IOException(JGitText.get().reftableDirExists);
}
@@ -739,28 +742,28 @@ public class FileRepository extends Repository {
// Ignore return value, as it is tied to temporary newRefs file.
FileReftableDatabase.convertFrom(this, writeLogs);
- File refsFile = new File(getDirectory(), "refs");
+ File refsFile = new File(commonDirectory, "refs");
// non-atomic: remove old data.
- File packedRefs = new File(getDirectory(), Constants.PACKED_REFS);
- File logsDir = new File(getDirectory(), Constants.LOGS);
+ File packedRefs = new File(commonDirectory, Constants.PACKED_REFS);
+ File logsDir = new File(commonDirectory, Constants.LOGS);
List<String> additional = getRefDatabase().getAdditionalRefs().stream()
.map(Ref::getName).collect(toList());
additional.add(Constants.HEAD);
if (backup) {
- FileUtils.rename(refsFile, new File(getDirectory(), "refs.old"));
+ FileUtils.rename(refsFile, new File(commonDirectory, "refs.old"));
if (packedRefs.exists()) {
- FileUtils.rename(packedRefs, new File(getDirectory(),
+ FileUtils.rename(packedRefs, new File(commonDirectory,
Constants.PACKED_REFS + ".old"));
}
if (logsDir.exists()) {
FileUtils.rename(logsDir,
- new File(getDirectory(), Constants.LOGS + ".old"));
+ new File(commonDirectory, Constants.LOGS + ".old"));
}
for (String r : additional) {
- FileUtils.rename(new File(getDirectory(), r),
- new File(getDirectory(), r + ".old"));
+ FileUtils.rename(new File(commonDirectory, r),
+ new File(commonDirectory, r + ".old"));
}
} else {
FileUtils.delete(packedRefs, FileUtils.SKIP_MISSING);
@@ -770,7 +773,7 @@ public class FileRepository extends Repository {
FileUtils.delete(refsFile,
FileUtils.RECURSIVE | FileUtils.SKIP_MISSING);
for (String r : additional) {
- new File(getDirectory(), r).delete();
+ new File(commonDirectory, r).delete();
}
}
@@ -784,7 +787,7 @@ public class FileRepository extends Repository {
// Some tools might write directly into .git/refs/heads/BRANCH. By
// putting a file here, this fails spectacularly.
- FileUtils.createNewFile(new File(refsFile, "heads"));
+ FileUtils.createNewFile(new File(refsFile, Constants.HEADS));
repoConfig.setString(ConfigConstants.CONFIG_EXTENSIONS_SECTION, null,
ConfigConstants.CONFIG_KEY_REF_STORAGE,
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
index cf26f8d284..4fafc5a088 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
@@ -1047,7 +1047,7 @@ public class GC {
}
private void deleteEmptyRefsFolders() throws IOException {
- Path refs = repo.getDirectory().toPath().resolve(Constants.R_REFS);
+ Path refs = repo.getCommonDirectory().toPath().resolve(Constants.R_REFS);
// Avoid deleting a folder that was created after the threshold so that concurrent
// operations trying to create a reference are not impacted
Instant threshold = Instant.now().minus(30, ChronoUnit.SECONDS);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java
index 628bf5db0c..8647b3e664 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java
@@ -50,7 +50,7 @@ class GcLog {
*/
GcLog(FileRepository repo) {
this.repo = repo;
- logFile = new File(repo.getDirectory(), "gc.log"); //$NON-NLS-1$
+ logFile = new File(repo.getCommonDirectory(), "gc.log"); //$NON-NLS-1$
lock = new LockFile(logFile);
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
index 11d842b246..e8d442b8fb 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
@@ -46,7 +46,7 @@ public class InfoAttributesNode extends AttributesNode {
FS fs = repository.getFS();
- File attributes = fs.resolve(repository.getDirectory(),
+ File attributes = fs.resolve(repository.getCommonDirectory(),
Constants.INFO_ATTRIBUTES);
FileRepository.AttributesNodeProviderImpl.loadRulesFromFile(r, attributes);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
index 8e57bf9f2f..604868133e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
@@ -16,6 +16,7 @@ package org.eclipse.jgit.internal.storage.file;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.eclipse.jgit.lib.Constants.HEAD;
import static org.eclipse.jgit.lib.Constants.LOGS;
+import static org.eclipse.jgit.lib.Constants.L_LOGS;
import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH;
import static org.eclipse.jgit.lib.Constants.PACKED_REFS;
import static org.eclipse.jgit.lib.Constants.R_HEADS;
@@ -124,6 +125,8 @@ public class RefDirectory extends RefDatabase {
private final File gitDir;
+ private final File gitCommonDir;
+
final File refsDir;
final File packedRefsFile;
@@ -188,6 +191,7 @@ public class RefDirectory extends RefDatabase {
RefDirectory(RefDirectory refDb) {
parent = refDb.parent;
gitDir = refDb.gitDir;
+ gitCommonDir = refDb.gitCommonDir;
refsDir = refDb.refsDir;
logsDir = refDb.logsDir;
logsRefsDir = refDb.logsRefsDir;
@@ -204,10 +208,11 @@ public class RefDirectory extends RefDatabase {
final FS fs = db.getFS();
parent = db;
gitDir = db.getDirectory();
- refsDir = fs.resolve(gitDir, R_REFS);
- logsDir = fs.resolve(gitDir, LOGS);
- logsRefsDir = fs.resolve(gitDir, LOGS + '/' + R_REFS);
- packedRefsFile = fs.resolve(gitDir, PACKED_REFS);
+ gitCommonDir = db.getCommonDirectory();
+ refsDir = fs.resolve(gitCommonDir, R_REFS);
+ logsDir = fs.resolve(gitCommonDir, LOGS);
+ logsRefsDir = fs.resolve(gitCommonDir, L_LOGS + R_REFS);
+ packedRefsFile = fs.resolve(gitCommonDir, PACKED_REFS);
looseRefs.set(RefList.<LooseRef> emptyList());
packedRefs.set(NO_PACKED_REFS);
@@ -1329,7 +1334,12 @@ public class RefDirectory extends RefDatabase {
name = name.substring(R_REFS.length());
return new File(refsDir, name);
}
- return new File(gitDir, name);
+ // HEAD needs to get resolved from git dir as resolving it from common dir
+ // would always lead back to current default branch
+ if (name.equals(HEAD)) {
+ return new File(gitDir, name);
+ }
+ return new File(gitCommonDir, name);
}
static int levelsIn(String name) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogReaderImpl.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogReaderImpl.java
index 21b5a54eb7..f1888eb90f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogReaderImpl.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogReaderImpl.java
@@ -10,6 +10,8 @@
package org.eclipse.jgit.internal.storage.file;
+import static org.eclipse.jgit.lib.Constants.HEAD;
+
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -37,7 +39,9 @@ class ReflogReaderImpl implements ReflogReader {
* {@code Ref} name
*/
ReflogReaderImpl(Repository db, String refname) {
- logName = new File(db.getDirectory(), Constants.LOGS + '/' + refname);
+ File logBaseDir = refname.equals(HEAD) ? db.getDirectory()
+ : db.getCommonDirectory();
+ logName = new File(logBaseDir, Constants.L_LOGS + refname);
}
/* (non-Javadoc)
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
index 5dfb648faa..d232be6276 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
@@ -13,13 +13,17 @@ package org.eclipse.jgit.lib;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_CORE_SECTION;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_BARE;
import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_WORKTREE;
+import static org.eclipse.jgit.lib.Constants.CONFIG;
import static org.eclipse.jgit.lib.Constants.DOT_GIT;
import static org.eclipse.jgit.lib.Constants.GIT_ALTERNATE_OBJECT_DIRECTORIES_KEY;
import static org.eclipse.jgit.lib.Constants.GIT_CEILING_DIRECTORIES_KEY;
+import static org.eclipse.jgit.lib.Constants.GIT_COMMON_DIR_KEY;
import static org.eclipse.jgit.lib.Constants.GIT_DIR_KEY;
import static org.eclipse.jgit.lib.Constants.GIT_INDEX_FILE_KEY;
import static org.eclipse.jgit.lib.Constants.GIT_OBJECT_DIRECTORY_KEY;
import static org.eclipse.jgit.lib.Constants.GIT_WORK_TREE_KEY;
+import static org.eclipse.jgit.lib.Constants.OBJECTS;
+import static org.eclipse.jgit.lib.Constants.GITDIR_FILE;
import java.io.File;
import java.io.IOException;
@@ -70,7 +74,21 @@ public class BaseRepositoryBuilder alternateObjectDirectories;
@@ -171,6 +191,30 @@ public class BaseRepositoryBuilder
* This method tries to read the standard Git environment variables, such as
- * {@code GIT_DIR} and {@code GIT_WORK_TREE} to configure this builder
- * instance. If an environment variable is set, it overrides the value
- * already set in this builder.
+ * {@code GIT_DIR}, {@code GIT_COMMON_DIR}, {@code GIT_WORK_TREE} etc. to
+ * configure this builder instance. If an environment variable is set, it
+ * overrides the value already set in this builder.
*
* @return {@code this} (for chaining calls).
*/
@@ -410,9 +454,9 @@ public class BaseRepositoryBuilder
* This method tries to read the standard Git environment variables, such as
- * {@code GIT_DIR} and {@code GIT_WORK_TREE} to configure this builder
- * instance. If a property is already set in the builder, the environment
- * variable is not used.
+ * {@code GIT_DIR}, {@code GIT_COMMON_DIR}, {@code GIT_WORK_TREE} etc. to
+ * configure this builder instance. If a property is already set in the
+ * builder, the environment variable is not used.
*
* @param sr
* the SystemReader abstraction to access the environment.
@@ -425,6 +469,13 @@ public class BaseRepositoryBuilder
@@ -695,8 +761,12 @@ public class BaseRepositoryBuilder
Date: Mon, 15 Jul 2024 11:35:56 -0700
Subject: DfsBlockCacheTable: extract stats get* methods to interface
Extracting the DfsBlockCacheTable stats get* methods to an interface
allows alternative implementations of BlockCacheStats that are not
tied to the current implementation.
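For illustration only (not part of this change), a minimal sketch of what the
new interface enables: a hypothetical NoOpBlockCacheStats that reports zeroed
per-extension counters, assuming it lives in the dfs package where
DfsBlockCacheTable is visible.

  import org.eclipse.jgit.internal.storage.pack.PackExt;

  // Hypothetical alternative implementation of the new interface; it only
  // returns zeroed per-extension counters instead of tracking real values.
  class NoOpBlockCacheStats implements DfsBlockCacheTable.BlockCacheStats {
      private static final long[] ZEROES = new long[PackExt.values().length];

      @Override
      public long[] getCurrentSize() {
          return ZEROES.clone();
      }

      @Override
      public long[] getHitCount() {
          return ZEROES.clone();
      }

      @Override
      public long[] getMissCount() {
          return ZEROES.clone();
      }

      @Override
      public long[] getTotalRequestCount() {
          return ZEROES.clone();
      }

      @Override
      public long[] getHitRatio() {
          return ZEROES.clone();
      }

      @Override
      public long[] getEvictions() {
          return ZEROES.clone();
      }
  }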
Change-Id: I534f7998f46253cdb7a68d5ec21d4f42ea586e8e
---
.../internal/storage/dfs/ClockBlockCacheTable.java | 2 +-
.../jgit/internal/storage/dfs/DfsBlockCache.java | 12 +--
.../internal/storage/dfs/DfsBlockCacheTable.java | 117 ++++++++++++---------
3 files changed, 77 insertions(+), 54 deletions(-)
(limited to 'org.eclipse.jgit/src')
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java
index d0907bcc8d..ce71a71d67 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java
@@ -135,7 +135,7 @@ final class ClockBlockCacheTable implements DfsBlockCacheTable {
}
@Override
- public DfsBlockCacheStats getDfsBlockCacheStats() {
+ public BlockCacheStats getBlockCacheStats() {
return dfsBlockCacheStats;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
index 56719cf0f4..3e1300c867 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
@@ -119,7 +119,7 @@ public final class DfsBlockCache {
* @return total number of bytes in the cache, per pack file extension.
*/
public long[] getCurrentSize() {
- return dfsBlockCacheTable.getDfsBlockCacheStats().getCurrentSize();
+ return dfsBlockCacheTable.getBlockCacheStats().getCurrentSize();
}
/**
@@ -138,7 +138,7 @@ public final class DfsBlockCache {
* extension.
*/
public long[] getHitCount() {
- return dfsBlockCacheTable.getDfsBlockCacheStats().getHitCount();
+ return dfsBlockCacheTable.getBlockCacheStats().getHitCount();
}
/**
@@ -149,7 +149,7 @@ public final class DfsBlockCache {
* extension.
*/
public long[] getMissCount() {
- return dfsBlockCacheTable.getDfsBlockCacheStats().getMissCount();
+ return dfsBlockCacheTable.getBlockCacheStats().getMissCount();
}
/**
@@ -158,7 +158,7 @@ public final class DfsBlockCache {
* @return total number of requests (hit + miss), per pack file extension.
*/
public long[] getTotalRequestCount() {
- return dfsBlockCacheTable.getDfsBlockCacheStats()
+ return dfsBlockCacheTable.getBlockCacheStats()
.getTotalRequestCount();
}
@@ -168,7 +168,7 @@ public final class DfsBlockCache {
* @return hit ratios
*/
public long[] getHitRatio() {
- return dfsBlockCacheTable.getDfsBlockCacheStats().getHitRatio();
+ return dfsBlockCacheTable.getBlockCacheStats().getHitRatio();
}
/**
@@ -179,7 +179,7 @@ public final class DfsBlockCache {
* file extension.
*/
public long[] getEvictions() {
- return dfsBlockCacheTable.getDfsBlockCacheStats().getEvictions();
+ return dfsBlockCacheTable.getBlockCacheStats().getEvictions();
}
/**
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java
index 701d1fdce3..309f2d1595 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java
@@ -129,18 +129,72 @@ public interface DfsBlockCacheTable {
T get(DfsStreamKey key, long position);
/**
- * Get the DfsBlockCacheStats object for this block cache table's
+ * Get the {@link BlockCacheStats} object for this block cache table's
* statistics.
*
- * @return the DfsBlockCacheStats tracking this block cache table's
+ * @return the {@link BlockCacheStats} tracking this block cache table's
* statistics.
*/
- DfsBlockCacheStats getDfsBlockCacheStats();
+ BlockCacheStats getBlockCacheStats();
+
+ /**
+ * Provides methods used with Block Cache statistics.
+ */
+ interface BlockCacheStats {
+ /**
+ * Get total number of bytes in the cache, per pack file extension.
+ *
+ * @return total number of bytes in the cache, per pack file extension.
+ */
+ long[] getCurrentSize();
+
+ /**
+ * Get number of requests for items in the cache, per pack file
+ * extension.
+ *
+ * @return the number of requests for items in the cache, per pack file
+ * extension.
+ */
+ long[] getHitCount();
+
+ /**
+ * Get number of requests for items not in the cache, per pack file
+ * extension.
+ *
+ * @return the number of requests for items not in the cache, per pack
+ * file extension.
+ */
+ long[] getMissCount();
+
+ /**
+ * Get total number of requests (hit + miss), per pack file extension.
+ *
+ * @return total number of requests (hit + miss), per pack file
+ * extension.
+ */
+ long[] getTotalRequestCount();
+
+ /**
+ * Get hit ratios.
+ *
+ * @return hit ratios.
+ */
+ long[] getHitRatio();
+
+ /**
+ * Get number of evictions performed due to cache being full, per pack
+ * file extension.
+ *
+ * @return the number of evictions performed due to cache being full,
+ * per pack file extension.
+ */
+ long[] getEvictions();
+ }
/**
* Keeps track of stats for a Block Cache table.
*/
- class DfsBlockCacheStats {
+ class DfsBlockCacheStats implements BlockCacheStats {
/**
* Number of times a block was found in the cache, per pack file
* extension.
@@ -214,44 +268,23 @@ public interface DfsBlockCacheTable {
getStat(liveBytes, key).addAndGet(size);
}
- /**
- * Get total number of bytes in the cache, per pack file extension.
- *
- * @return total number of bytes in the cache, per pack file extension.
- */
- long[] getCurrentSize() {
+ @Override
+ public long[] getCurrentSize() {
return getStatVals(liveBytes);
}
- /**
- * Get number of requests for items in the cache, per pack file
- * extension.
- *
- * @return the number of requests for items in the cache, per pack file
- * extension.
- */
- long[] getHitCount() {
+ @Override
+ public long[] getHitCount() {
return getStatVals(statHit);
}
- /**
- * Get number of requests for items not in the cache, per pack file
- * extension.
- *
- * @return the number of requests for items not in the cache, per pack
- * file extension.
- */
- long[] getMissCount() {
+ @Override
+ public long[] getMissCount() {
return getStatVals(statMiss);
}
- /**
- * Get total number of requests (hit + miss), per pack file extension.
- *
- * @return total number of requests (hit + miss), per pack file
- * extension.
- */
- long[] getTotalRequestCount() {
+ @Override
+ public long[] getTotalRequestCount() {
AtomicLong[] hit = statHit.get();
AtomicLong[] miss = statMiss.get();
long[] cnt = new long[Math.max(hit.length, miss.length)];
@@ -264,12 +297,8 @@ public interface DfsBlockCacheTable {
return cnt;
}
- /**
- * Get hit ratios.
- *
- * @return hit ratios.
- */
- long[] getHitRatio() {
+ @Override
+ public long[] getHitRatio() {
AtomicLong[] hit = statHit.get();
AtomicLong[] miss = statMiss.get();
long[] ratio = new long[Math.max(hit.length, miss.length)];
@@ -288,14 +317,8 @@ public interface DfsBlockCacheTable {
return ratio;
}
- /**
- * Get number of evictions performed due to cache being full, per pack
- * file extension.
- *
- * @return the number of evictions performed due to cache being full,
- * per pack file extension.
- */
- long[] getEvictions() {
+ @Override
+ public long[] getEvictions() {
return getStatVals(statEvict);
}
--
cgit v1.2.3
From 439512d7f82c503638e6b12689e751bb8debc015 Mon Sep 17 00:00:00 2001
From: Ivan Frade
Date: Wed, 10 Apr 2024 10:52:44 -0700
Subject: DfsPackFile: Do not set local reverse index ref from cache callback
The DfsBlockCache loading callback sets the local reference to the
index in the DfsPackFile. This prevents abstracting the loading to
implement it over multiple backends.
Reorganize the code so that loadReverseIndex does only the loading;
the caller stores the result in DfsBlockCache, and the external code
sets the local reference in DfsPackFile.
This is the same pattern we used for the PackIndex in the parent
commit.
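Condensed, the resulting flow looks like the sketch below (identifiers are
taken from the hunk that follows; error handling and stats counting elided,
so this is not a standalone program).

  // Loader only parses; it does not touch DfsPackFile state.
  AtomicReference<PackReverseIndex> loaded = new AtomicReference<>();
  DfsBlockCache.Ref<PackReverseIndex> cached = cache.getOrLoadRef(revKey,
          REF_POSITION, () -> {
              RefWithSize<PackReverseIndex> ridx = loadReverseIdx(ctx, idx);
              loaded.set(ridx.ref);
              return new DfsBlockCache.Ref<>(revKey, REF_POSITION,
                      ridx.size, ridx.ref);
          });
  // The caller, not the callback, assigns the DfsPackFile field.
  reverseIndex = cached.get() != null ? cached.get() : loaded.get();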
Change-Id: I3a395b347965fa7b0e5a3398c4da311dc11c58a1
---
.../jgit/internal/storage/dfs/DfsPackFile.java | 38 ++++++++++++----------
1 file changed, 21 insertions(+), 17 deletions(-)
(limited to 'org.eclipse.jgit/src')
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
index b94a84a41c..cdd10618e0 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
@@ -330,18 +330,27 @@ public final class DfsPackFile extends BlockBasedFile {
PackIndex idx = idx(ctx);
DfsStreamKey revKey = desc.getStreamKey(REVERSE_INDEX);
- AtomicBoolean cacheHit = new AtomicBoolean(true);
- DfsBlockCache.Ref<PackReverseIndex> revref = cache.getOrLoadRef(revKey,
- REF_POSITION, () -> {
- cacheHit.set(false);
- return loadReverseIdx(ctx, revKey, idx);
+ // Keep the value parsed in the loader, in case the Ref<> is
+ // nullified in ClockBlockCacheTable#reserveSpace
+ // before we read its value.
+ AtomicReference<PackReverseIndex> loadedRef = new AtomicReference<>(
+ null);
+ DfsBlockCache.Ref<PackReverseIndex> cachedRef = cache
+ .getOrLoadRef(revKey, REF_POSITION, () -> {
+ RefWithSize<PackReverseIndex> ridx = loadReverseIdx(ctx,
+ idx);
+ loadedRef.set(ridx.ref);
+ return new DfsBlockCache.Ref<>(revKey, REF_POSITION,
+ ridx.size, ridx.ref);
});
- if (cacheHit.get()) {
+ if (loadedRef.get() == null) {
ctx.stats.ridxCacheHit++;
}
- PackReverseIndex revidx = revref.get();
- if (reverseIndex == null && revidx != null) {
- reverseIndex = revidx;
+ reverseIndex = cachedRef.get() != null ? cachedRef.get()
+ : loadedRef.get();
+ if (reverseIndex == null) {
+ throw new IOException(
+ "Couldn't get a reference to the reverse index"); //$NON-NLS-1$
}
ctx.emitIndexLoad(desc, REVERSE_INDEX, reverseIndex);
return reverseIndex;
@@ -1241,18 +1250,13 @@ public final class DfsPackFile extends BlockBasedFile {
}
}
- private DfsBlockCache.Ref<PackReverseIndex> loadReverseIdx(
- DfsReader ctx, DfsStreamKey revKey, PackIndex idx) {
+ private static RefWithSize<PackReverseIndex> loadReverseIdx(DfsReader ctx,
+ PackIndex idx) {
ctx.stats.readReverseIdx++;
long start = System.nanoTime();
PackReverseIndex revidx = PackReverseIndexFactory.computeFromIndex(idx);
- reverseIndex = revidx;
ctx.stats.readReverseIdxMicros += elapsedMicros(start);
- return new DfsBlockCache.Ref<>(
- revKey,
- REF_POSITION,
- idx.getObjectCount() * 8,
- revidx);
+ return new RefWithSize<>(revidx, idx.getObjectCount() * 8);
}
private DfsBlockCache.Ref<PackObjectSizeIndex> loadObjectSizeIndex(
--
cgit v1.2.3
From 1c9d1a49b0f9fcf7980775f22302df35293a3776 Mon Sep 17 00:00:00 2001
From: Ivan Frade
Date: Fri, 19 Jul 2024 15:44:15 -0700
Subject: PackObjectSizeIndex: Read all bytes and use the byte[] directly
The parser reads N integers one by one from the stream, assuming the
InputStream does some read-ahead from storage. We see some very slow
loading of indexes and suspect that this read-ahead is not happening.
The slow loading can be reproduced in clones; it produces higher
latencies and blocks many threads waiting for the loading.
Read the whole array from storage in one shot to avoid many small IO
reads. Work directly on the resulting byte[], so there is no need for
a second copy to cast to int/long.
This is how other indexes, like the primary index or the commit graph,
work.
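For illustration only, a minimal sketch of the idea; the real code is the
IntArray/LongArray wrappers added below. BulkIntSection is a hypothetical
name and the error message is a placeholder.

  import java.io.IOException;
  import java.io.InputStream;

  import org.eclipse.jgit.util.NB;

  // Read the whole int section with a single IO call and decode values on
  // access from the byte[], so no second int[] copy is needed.
  final class BulkIntSection {
      private final byte[] data;

      BulkIntSection(InputStream in, int count) throws IOException {
          data = in.readNBytes(count * 4); // one read instead of count reads
          if (data.length < count * 4) {
              throw new IOException("short read of object size index"); // placeholder message
          }
      }

      int get(int i) {
          return NB.decodeInt32(data, i * 4); // decode in memory on demand
      }
  }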
Change-Id: I60058606e2c457f60aa4646a1f10ae7b28ce34c2
---
.../org/eclipse/jgit/internal/JGitText.properties | 1 +
.../src/org/eclipse/jgit/internal/JGitText.java | 1 +
.../storage/file/PackObjectSizeIndexV1.java | 181 ++++++++++++++-------
3 files changed, 122 insertions(+), 61 deletions(-)
(limited to 'org.eclipse.jgit/src')
diff --git a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
index 9d12facb33..8c02bbeeab 100644
--- a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
+++ b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
@@ -806,6 +806,7 @@ truncatedHunkOldLinesMissing=Truncated hunk, at least {0} old lines is missing
tSizeMustBeGreaterOrEqual1=tSize must be >= 1
unableToCheckConnectivity=Unable to check connectivity.
unableToCreateNewObject=Unable to create new object: {0}
+unableToReadFullArray=Unable to read an array with {0} elements from the stream
unableToReadFullInt=Unable to read a full int from the stream
unableToReadPackfile=Unable to read packfile {0}
unableToRemovePath=Unable to remove path ''{0}''
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
index 311d9c22aa..cbda506f99 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
@@ -836,6 +836,7 @@ public class JGitText extends TranslationBundle {
/***/ public String unableToCheckConnectivity;
/***/ public String unableToCreateNewObject;
/***/ public String unableToReadFullInt;
+ /***/ public String unableToReadFullArray;
/***/ public String unableToReadPackfile;
/***/ public String unableToRemovePath;
/***/ public String unableToWrite;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackObjectSizeIndexV1.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackObjectSizeIndexV1.java
index a3d74be040..e172f141f5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackObjectSizeIndexV1.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackObjectSizeIndexV1.java
@@ -12,7 +12,7 @@ package org.eclipse.jgit.internal.storage.file;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
-import java.util.Arrays;
+import java.text.MessageFormat;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.util.NB;
@@ -35,7 +35,7 @@ class PackObjectSizeIndexV1 implements PackObjectSizeIndex {
private final UInt24Array positions24;
- private final int[] positions32;
+ private final IntArray positions32;
/**
* Parallel array to concat(positions24, positions32) with the size of the
@@ -45,35 +45,37 @@ class PackObjectSizeIndexV1 implements PackObjectSizeIndex {
* doesn't fit in an int and |value|-1 is the position for the size in the
* size64 array e.g. a value of -1 is sizes64[0], -2 = sizes64[1], ...
*/
- private final int[] sizes32;
+ private final IntArray sizes32;
- private final long[] sizes64;
+ private final LongArray sizes64;
static PackObjectSizeIndex parse(InputStream in) throws IOException {
/** Header and version already out of the input */
- IndexInputStreamReader stream = new IndexInputStreamReader(in);
- int threshold = stream.readInt(); // minSize
- int objCount = stream.readInt();
+ byte[] buffer = new byte[8];
+ in.readNBytes(buffer, 0, 8);
+ int threshold = NB.decodeInt32(buffer, 0); // minSize
+ int objCount = NB.decodeInt32(buffer, 4);
if (objCount == 0) {
return new EmptyPackObjectSizeIndex(threshold);
}
- return new PackObjectSizeIndexV1(stream, threshold, objCount);
+ return new PackObjectSizeIndexV1(in, threshold, objCount);
}
- private PackObjectSizeIndexV1(IndexInputStreamReader stream, int threshold,
+ private PackObjectSizeIndexV1(InputStream stream, int threshold,
int objCount) throws IOException {
this.threshold = threshold;
UInt24Array pos24 = null;
- int[] pos32 = null;
+ IntArray pos32 = null;
+ StreamHelper helper = new StreamHelper();
byte positionEncoding;
- while ((positionEncoding = stream.readByte()) != 0) {
+ while ((positionEncoding = helper.readByte(stream)) != 0) {
if (Byte.compareUnsigned(positionEncoding, BITS_24) == 0) {
- int sz = stream.readInt();
+ int sz = helper.readInt(stream);
pos24 = new UInt24Array(stream.readNBytes(sz * 3));
} else if (Byte.compareUnsigned(positionEncoding, BITS_32) == 0) {
- int sz = stream.readInt();
- pos32 = stream.readIntArray(sz);
+ int sz = helper.readInt(stream);
+ pos32 = IntArray.from(stream, sz);
} else {
throw new UnsupportedEncodingException(
String.format(JGitText.get().unknownPositionEncoding,
@@ -81,16 +83,16 @@ class PackObjectSizeIndexV1 implements PackObjectSizeIndex {
}
}
positions24 = pos24 != null ? pos24 : UInt24Array.EMPTY;
- positions32 = pos32 != null ? pos32 : new int[0];
+ positions32 = pos32 != null ? pos32 : IntArray.EMPTY;
- sizes32 = stream.readIntArray(objCount);
- int c64sizes = stream.readInt();
+ sizes32 = IntArray.from(stream, objCount);
+ int c64sizes = helper.readInt(stream);
if (c64sizes == 0) {
- sizes64 = new long[0];
+ sizes64 = LongArray.EMPTY;
return;
}
- sizes64 = stream.readLongArray(c64sizes);
- int c128sizes = stream.readInt();
+ sizes64 = LongArray.from(stream, c64sizes);
+ int c128sizes = helper.readInt(stream);
if (c128sizes != 0) {
// this MUST be 0 (we don't support 128 bits sizes yet)
throw new IOException(JGitText.get().unsupportedSizesObjSizeIndex);
@@ -102,8 +104,8 @@ class PackObjectSizeIndexV1 implements PackObjectSizeIndex {
int pos = -1;
if (!positions24.isEmpty() && idxOffset <= positions24.getLastValue()) {
pos = positions24.binarySearch(idxOffset);
- } else if (positions32.length > 0 && idxOffset >= positions32[0]) {
- int pos32 = Arrays.binarySearch(positions32, idxOffset);
+ } else if (!positions32.empty() && idxOffset >= positions32.get(0)) {
+ int pos32 = positions32.binarySearch(idxOffset);
if (pos32 >= 0) {
pos = pos32 + positions24.size();
}
@@ -112,17 +114,17 @@ class PackObjectSizeIndexV1 implements PackObjectSizeIndex {
return -1;
}
- int objSize = sizes32[pos];
+ int objSize = sizes32.get(pos);
if (objSize < 0) {
int secondPos = Math.abs(objSize) - 1;
- return sizes64[secondPos];
+ return sizes64.get(secondPos);
}
return objSize;
}
@Override
public long getObjectCount() {
- return (long) positions24.size() + positions32.length;
+ return (long) positions24.size() + positions32.size();
}
@Override
@@ -131,69 +133,126 @@ class PackObjectSizeIndexV1 implements PackObjectSizeIndex {
}
/**
- * Wrapper to read parsed content from the byte stream
+ * A byte[] that should be interpreted as an int[]
*/
- private static class IndexInputStreamReader {
+ private static class IntArray {
+ private static final IntArray EMPTY = new IntArray(new byte[0]);
- private final byte[] buffer = new byte[8];
+ private static final int INT_SIZE = 4;
- private final InputStream in;
+ private final byte[] data;
- IndexInputStreamReader(InputStream in) {
- this.in = in;
- }
+ private final int size;
- int readInt() throws IOException {
- int n = in.readNBytes(buffer, 0, 4);
- if (n < 4) {
- throw new IOException(JGitText.get().unableToReadFullInt);
+ static IntArray from(InputStream in, int ints) throws IOException {
+ int expectedBytes = ints * INT_SIZE;
+ byte[] data = in.readNBytes(expectedBytes);
+ if (data.length < expectedBytes) {
+ throw new IOException(MessageFormat
+ .format(JGitText.get().unableToReadFullArray, ints));
}
- return NB.decodeInt32(buffer, 0);
+ return new IntArray(data);
+ }
+
+ private IntArray(byte[] data) {
+ this.data = data;
+ size = data.length / INT_SIZE;
}
- int[] readIntArray(int intsCount) throws IOException {
- if (intsCount == 0) {
- return new int[0];
+ /**
+ * Returns position of element in array, -1 if not there
+ *
+ * @param needle
+ * element to look for
+ * @return position of the element in the array or -1 if not found
+ */
+ int binarySearch(int needle) {
+ if (size == 0) {
+ return -1;
}
+ int high = size;
+ int low = 0;
+ do {
+ int mid = (low + high) >>> 1;
+ int cmp = Integer.compare(needle, get(mid));
+ if (cmp < 0)
+ high = mid;
+ else if (cmp == 0) {
+ return mid;
+ } else
+ low = mid + 1;
+ } while (low < high);
+ return -1;
+ }
- int[] dest = new int[intsCount];
- for (int i = 0; i < intsCount; i++) {
- dest[i] = readInt();
+ int get(int position) {
+ if (position < 0 || position >= size) {
+ throw new IndexOutOfBoundsException(position);
}
- return dest;
+ return NB.decodeInt32(data, position * INT_SIZE);
}
- long readLong() throws IOException {
- int n = in.readNBytes(buffer, 0, 8);
- if (n < 8) {
- throw new IOException(JGitText.get().unableToReadFullInt);
+ boolean empty() {
+ return size == 0;
+ }
+
+ int size() {
+ return size;
+ }
+ }
+
+ /**
+ * A byte[] that should be interpreted as a long[]
+ */
+ private static class LongArray {
+ private static final LongArray EMPTY = new LongArray(new byte[0]);
+
+ private static final int LONG_SIZE = 8; // bytes
+
+ private final byte[] data;
+
+ private final int size;
+
+ static LongArray from(InputStream in, int longs) throws IOException {
+ byte[] data = in.readNBytes(longs * LONG_SIZE);
+ if (data.length < longs * LONG_SIZE) {
+ throw new IOException(MessageFormat
+ .format(JGitText.get().unableToReadFullArray, longs));
}
- return NB.decodeInt64(buffer, 0);
+ return new LongArray(data);
}
- long[] readLongArray(int longsCount) throws IOException {
- if (longsCount == 0) {
- return new long[0];
+ private LongArray(byte[] data) {
+ this.data = data;
+ size = data.length / LONG_SIZE;
+ }
+
+ long get(int position) {
+ if (position < 0 || position >= size) {
+ throw new IndexOutOfBoundsException(position);
}
+ return NB.decodeInt64(data, position * LONG_SIZE);
+ }
+ }
- long[] dest = new long[longsCount];
- for (int i = 0; i < longsCount; i++) {
- dest[i] = readLong();
+ private static class StreamHelper {
+ private final byte[] buffer = new byte[8];
+
+ int readInt(InputStream in) throws IOException {
+ int n = in.readNBytes(buffer, 0, 4);
+ if (n < 4) {
+ throw new IOException(JGitText.get().unableToReadFullInt);
}
- return dest;
+ return NB.decodeInt32(buffer, 0);
}
- byte readByte() throws IOException {
+ byte readByte(InputStream in) throws IOException {
int n = in.readNBytes(buffer, 0, 1);
if (n != 1) {
throw new IOException(JGitText.get().cannotReadByte);
}
return buffer[0];
}
-
- byte[] readNBytes(int sz) throws IOException {
- return in.readNBytes(sz);
- }
}
private static class EmptyPackObjectSizeIndex
--
cgit v1.2.3
From a2a5cdddd78a1e066aed7caf86ba9510450710c9 Mon Sep 17 00:00:00 2001
From: Laura Hamelin
Date: Fri, 7 Jun 2024 16:11:21 -0700
Subject: PackExtBlockCacheTable: spread extensions over multiple dfs tables
The existing DfsBlockCache uses a single table for all
extensions (idx, ridx, ...).
This change introduces an implementation of the table
interface that can keep extensions in different cache
tables.
This selects the appropriate cache to use for a specific
PackExt or a DfsStreamKey's PackExt type, allowing the
separation of entries from different pack types to help
limit churn in the cache caused by entries of differing sizes.
This is especially useful in fine-tuning caches and
influencing interactions by extension type.
For example, a table holding INDEX types only will
not influence evictions of other PackExt types and
vice versa.
The PackExtBlockCacheTable allows setting the underlying
DfsBlockCacheTables and the mapping directly, letting users
implement and use custom DfsBlockCacheTables.
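For illustration only, a sketch of the routing this enables, using the
fromCacheTables factory exercised by the new tests. CacheWiring and
splitByExtension are hypothetical names, and the sketch assumes it sits in
the dfs package where the (package-private) table classes are visible; how
the individual tables are constructed is left out.

  import java.util.Map;

  import org.eclipse.jgit.internal.storage.pack.PackExt;

  final class CacheWiring {
      // Entries whose PackExt is INDEX or BITMAP_INDEX go to a dedicated
      // table; everything else falls through to the default table.
      static DfsBlockCacheTable splitByExtension(
              DfsBlockCacheTable defaultTable,
              DfsBlockCacheTable indexTable) {
          return PackExtBlockCacheTable.fromCacheTables(defaultTable,
                  Map.of(PackExt.INDEX, indexTable,
                          PackExt.BITMAP_INDEX, indexTable));
      }
  }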
Change-Id: Icee7b644ef6b600aa473d35645469d6aa1bce345
---
.../storage/dfs/PackExtBlockCacheTableTest.java | 588 +++++++++++++++++++++
.../org/eclipse/jgit/internal/JGitText.properties | 2 +
.../src/org/eclipse/jgit/internal/JGitText.java | 2 +
.../internal/storage/dfs/DfsBlockCacheConfig.java | 26 +-
.../storage/dfs/PackExtBlockCacheTable.java | 289 ++++++++++
5 files changed, 905 insertions(+), 2 deletions(-)
create mode 100644 org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTableTest.java
create mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTable.java
(limited to 'org.eclipse.jgit/src')
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTableTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTableTest.java
new file mode 100644
index 0000000000..d506bfba40
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTableTest.java
@@ -0,0 +1,588 @@
+/*
+ * Copyright (c) 2024, Google LLC and others
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.sameInstance;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.when;
+
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Map;
+
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.Ref;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.RefLoader;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCacheConfig.DfsBlockCachePackExtConfig;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCacheTable.DfsBlockCacheStats;
+import org.eclipse.jgit.internal.storage.pack.PackExt;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+public class PackExtBlockCacheTableTest {
+ @Test
+ public void fromBlockCacheConfigs_createsDfsPackExtBlockCacheTables() {
+ DfsBlockCacheConfig cacheConfig = new DfsBlockCacheConfig();
+ cacheConfig.setPackExtCacheConfigurations(
+ List.of(new DfsBlockCachePackExtConfig(EnumSet.of(PackExt.PACK),
+ new DfsBlockCacheConfig())));
+ assertNotNull(
+ PackExtBlockCacheTable.fromBlockCacheConfigs(cacheConfig));
+ }
+
+ @Test
+ public void fromBlockCacheConfigs_noPackExtConfigurationGiven_packExtCacheConfigurationsIsEmpty_throws() {
+ DfsBlockCacheConfig cacheConfig = new DfsBlockCacheConfig();
+ cacheConfig.setPackExtCacheConfigurations(List.of());
+ assertThrows(IllegalArgumentException.class,
+ () -> PackExtBlockCacheTable
+ .fromBlockCacheConfigs(cacheConfig));
+ }
+
+ @Test
+ public void hasBlock0_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+ DfsStreamKey streamKey = new TestKey(PackExt.BITMAP_INDEX);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.hasBlock0(any(DfsStreamKey.class)))
+ .thenReturn(true);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertTrue(tables.hasBlock0(streamKey));
+ }
+
+ @Test
+ public void hasBlock0_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+ DfsStreamKey streamKey = new TestKey(PackExt.PACK);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.hasBlock0(any(DfsStreamKey.class)))
+ .thenReturn(true);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertTrue(tables.hasBlock0(streamKey));
+ }
+
+ @Test
+ public void getOrLoad_packExtMapsToCacheTable_callsBitmapIndexCacheTable()
+ throws Exception {
+ BlockBasedFile blockBasedFile = new BlockBasedFile(null,
+ mock(DfsPackDescription.class), PackExt.BITMAP_INDEX) {
+ };
+ DfsBlock dfsBlock = mock(DfsBlock.class);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.getOrLoad(any(BlockBasedFile.class),
+ anyLong(), any(DfsReader.class),
+ any(DfsBlockCache.ReadableChannelSupplier.class)))
+ .thenReturn(mock(DfsBlock.class));
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.getOrLoad(any(BlockBasedFile.class),
+ anyLong(), any(DfsReader.class),
+ any(DfsBlockCache.ReadableChannelSupplier.class)))
+ .thenReturn(dfsBlock);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertThat(
+ tables.getOrLoad(blockBasedFile, 0, mock(DfsReader.class),
+ mock(DfsBlockCache.ReadableChannelSupplier.class)),
+ sameInstance(dfsBlock));
+ }
+
+ @Test
+ public void getOrLoad_packExtDoesNotMapToCacheTable_callsDefaultCache()
+ throws Exception {
+ BlockBasedFile blockBasedFile = new BlockBasedFile(null,
+ mock(DfsPackDescription.class), PackExt.PACK) {
+ };
+ DfsBlock dfsBlock = mock(DfsBlock.class);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.getOrLoad(any(BlockBasedFile.class),
+ anyLong(), any(DfsReader.class),
+ any(DfsBlockCache.ReadableChannelSupplier.class)))
+ .thenReturn(dfsBlock);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.getOrLoad(any(BlockBasedFile.class),
+ anyLong(), any(DfsReader.class),
+ any(DfsBlockCache.ReadableChannelSupplier.class)))
+ .thenReturn(mock(DfsBlock.class));
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertThat(
+ tables.getOrLoad(blockBasedFile, 0, mock(DfsReader.class),
+ mock(DfsBlockCache.ReadableChannelSupplier.class)),
+ sameInstance(dfsBlock));
+ }
+
+ @Test
+ public void getOrLoadRef_packExtMapsToCacheTable_callsBitmapIndexCacheTable()
+ throws Exception {
+ Ref ref = mock(Ref.class);
+ DfsStreamKey dfsStreamKey = new TestKey(PackExt.BITMAP_INDEX);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.getOrLoadRef(any(DfsStreamKey.class),
+ anyLong(), any(RefLoader.class))).thenReturn(mock(Ref.class));
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.getOrLoadRef(any(DfsStreamKey.class),
+ anyLong(), any(RefLoader.class))).thenReturn(ref);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertThat(tables.getOrLoadRef(dfsStreamKey, 0, mock(RefLoader.class)),
+ sameInstance(ref));
+ }
+
+ @Test
+ public void getOrLoadRef_packExtDoesNotMapToCacheTable_callsDefaultCache()
+ throws Exception {
+ Ref ref = mock(Ref.class);
+ DfsStreamKey dfsStreamKey = new TestKey(PackExt.PACK);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.getOrLoadRef(any(DfsStreamKey.class),
+ anyLong(), any(RefLoader.class))).thenReturn(ref);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.getOrLoadRef(any(DfsStreamKey.class),
+ anyLong(), any(RefLoader.class))).thenReturn(mock(Ref.class));
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertThat(tables.getOrLoadRef(dfsStreamKey, 0, mock(RefLoader.class)),
+ sameInstance(ref));
+ }
+
+ @Test
+ public void putDfsBlock_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+ DfsStreamKey dfsStreamKey = new TestKey(PackExt.BITMAP_INDEX);
+ DfsBlock dfsBlock = new DfsBlock(dfsStreamKey, 0, new byte[0]);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ tables.put(dfsBlock);
+ Mockito.verify(bitmapIndexCacheTable, times(1)).put(dfsBlock);
+ }
+
+ @Test
+ public void putDfsBlock_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+ DfsStreamKey dfsStreamKey = new TestKey(PackExt.PACK);
+ DfsBlock dfsBlock = new DfsBlock(dfsStreamKey, 0, new byte[0]);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ tables.put(dfsBlock);
+ Mockito.verify(defaultBlockCacheTable, times(1)).put(dfsBlock);
+ }
+
+ @Test
+ public void putDfsStreamKey_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+ DfsStreamKey dfsStreamKey = new TestKey(PackExt.BITMAP_INDEX);
+ Ref ref = mock(Ref.class);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.put(any(DfsStreamKey.class), anyLong(),
+ anyLong(), anyInt())).thenReturn(mock(Ref.class));
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.put(any(DfsStreamKey.class), anyLong(),
+ anyLong(), anyInt())).thenReturn(ref);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertThat(tables.put(dfsStreamKey, 0, 0, 0), sameInstance(ref));
+ }
+
+ @Test
+ public void putDfsStreamKey_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+ DfsStreamKey dfsStreamKey = new TestKey(PackExt.PACK);
+ Ref ref = mock(Ref.class);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.put(any(DfsStreamKey.class), anyLong(),
+ anyLong(), anyInt())).thenReturn(ref);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.put(any(DfsStreamKey.class), anyLong(),
+ anyLong(), anyInt())).thenReturn(mock(Ref.class));
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertThat(tables.put(dfsStreamKey, 0, 0, 0), sameInstance(ref));
+ }
+
+ @Test
+ public void putRef_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+ DfsStreamKey dfsStreamKey = new TestKey(PackExt.BITMAP_INDEX);
+ Ref ref = mock(Ref.class);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.putRef(any(DfsStreamKey.class), anyLong(),
+ anyInt())).thenReturn(mock(Ref.class));
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.putRef(any(DfsStreamKey.class), anyLong(),
+ anyInt())).thenReturn(ref);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertThat(tables.putRef(dfsStreamKey, 0, 0), sameInstance(ref));
+ }
+
+ @Test
+ public void putRef_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+ DfsStreamKey dfsStreamKey = new TestKey(PackExt.PACK);
+ Ref ref = mock(Ref.class);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.putRef(any(DfsStreamKey.class), anyLong(),
+ anyInt())).thenReturn(ref);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.putRef(any(DfsStreamKey.class), anyLong(),
+ anyInt())).thenReturn(mock(Ref.class));
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertThat(tables.putRef(dfsStreamKey, 0, 0), sameInstance(ref));
+ }
+
+ @Test
+ public void contains_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+ DfsStreamKey streamKey = new TestKey(PackExt.BITMAP_INDEX);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.contains(any(DfsStreamKey.class), anyLong()))
+ .thenReturn(true);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertTrue(tables.contains(streamKey, 0));
+ }
+
+ @Test
+ public void contains_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+ DfsStreamKey streamKey = new TestKey(PackExt.PACK);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.contains(any(DfsStreamKey.class),
+ anyLong())).thenReturn(true);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertTrue(tables.contains(streamKey, 0));
+ }
+
+ @Test
+ public void get_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+ DfsStreamKey dfsStreamKey = new TestKey(PackExt.BITMAP_INDEX);
+ Ref ref = mock(Ref.class);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.get(any(DfsStreamKey.class), anyLong()))
+ .thenReturn(mock(Ref.class));
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.get(any(DfsStreamKey.class), anyLong()))
+ .thenReturn(ref);
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertThat(tables.get(dfsStreamKey, 0), sameInstance(ref));
+ }
+
+ @Test
+ public void get_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+ DfsStreamKey dfsStreamKey = new TestKey(PackExt.PACK);
+ Ref ref = mock(Ref.class);
+ DfsBlockCacheTable defaultBlockCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(defaultBlockCacheTable.get(any(DfsStreamKey.class), anyLong()))
+ .thenReturn(ref);
+ DfsBlockCacheTable bitmapIndexCacheTable = mock(
+ DfsBlockCacheTable.class);
+ when(bitmapIndexCacheTable.get(any(DfsStreamKey.class), anyLong()))
+ .thenReturn(mock(Ref.class));
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+ defaultBlockCacheTable,
+ Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+ assertThat(tables.get(dfsStreamKey, 0), sameInstance(ref));
+ }
+
+ @Test
+ public void getBlockCacheStats_getCurrentSize_consolidatesAllTableCurrentSizes() {
+ long[] currentSizes = createEmptyStatsArray();
+
+ DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+ packStats.addToLiveBytes(new TestKey(PackExt.PACK), 5);
+ currentSizes[PackExt.PACK.getPosition()] = 5;
+
+ DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+ bitmapStats.addToLiveBytes(new TestKey(PackExt.BITMAP_INDEX), 6);
+ currentSizes[PackExt.BITMAP_INDEX.getPosition()] = 6;
+
+ DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+ indexStats.addToLiveBytes(new TestKey(PackExt.INDEX), 7);
+ currentSizes[PackExt.INDEX.getPosition()] = 7;
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable
+ .fromCacheTables(cacheTableWithStats(packStats),
+ Map.of(PackExt.BITMAP_INDEX,
+ cacheTableWithStats(bitmapStats), PackExt.INDEX,
+ cacheTableWithStats(indexStats)));
+
+ assertArrayEquals(tables.getBlockCacheStats().getCurrentSize(),
+ currentSizes);
+ }
+
+ @Test
+ public void getBlockCacheStats_GetHitCount_consolidatesAllTableHitCounts() {
+ long[] hitCounts = createEmptyStatsArray();
+
+ DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+ incrementCounter(5,
+ () -> packStats.incrementHit(new TestKey(PackExt.PACK)));
+ hitCounts[PackExt.PACK.getPosition()] = 5;
+
+ DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+ incrementCounter(6, () -> bitmapStats
+ .incrementHit(new TestKey(PackExt.BITMAP_INDEX)));
+ hitCounts[PackExt.BITMAP_INDEX.getPosition()] = 6;
+
+ DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+ incrementCounter(7,
+ () -> indexStats.incrementHit(new TestKey(PackExt.INDEX)));
+ hitCounts[PackExt.INDEX.getPosition()] = 7;
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable
+ .fromCacheTables(cacheTableWithStats(packStats),
+ Map.of(PackExt.BITMAP_INDEX,
+ cacheTableWithStats(bitmapStats), PackExt.INDEX,
+ cacheTableWithStats(indexStats)));
+
+ assertArrayEquals(tables.getBlockCacheStats().getHitCount(), hitCounts);
+ }
+
+ @Test
+ public void getBlockCacheStats_getMissCount_consolidatesAllTableMissCounts() {
+ long[] missCounts = createEmptyStatsArray();
+
+ DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+ incrementCounter(5,
+ () -> packStats.incrementMiss(new TestKey(PackExt.PACK)));
+ missCounts[PackExt.PACK.getPosition()] = 5;
+
+ DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+ incrementCounter(6, () -> bitmapStats
+ .incrementMiss(new TestKey(PackExt.BITMAP_INDEX)));
+ missCounts[PackExt.BITMAP_INDEX.getPosition()] = 6;
+
+ DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+ incrementCounter(7,
+ () -> indexStats.incrementMiss(new TestKey(PackExt.INDEX)));
+ missCounts[PackExt.INDEX.getPosition()] = 7;
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable
+ .fromCacheTables(cacheTableWithStats(packStats),
+ Map.of(PackExt.BITMAP_INDEX,
+ cacheTableWithStats(bitmapStats), PackExt.INDEX,
+ cacheTableWithStats(indexStats)));
+
+ assertArrayEquals(tables.getBlockCacheStats().getMissCount(),
+ missCounts);
+ }
+
+ @Test
+ public void getBlockCacheStats_getTotalRequestCount_consolidatesAllTableTotalRequestCounts() {
+ long[] totalRequestCounts = createEmptyStatsArray();
+
+ DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+ incrementCounter(5, () -> {
+ packStats.incrementHit(new TestKey(PackExt.PACK));
+ packStats.incrementMiss(new TestKey(PackExt.PACK));
+ });
+ totalRequestCounts[PackExt.PACK.getPosition()] = 10;
+
+ DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+ incrementCounter(6, () -> {
+ bitmapStats.incrementHit(new TestKey(PackExt.BITMAP_INDEX));
+ bitmapStats.incrementMiss(new TestKey(PackExt.BITMAP_INDEX));
+ });
+ totalRequestCounts[PackExt.BITMAP_INDEX.getPosition()] = 12;
+
+ DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+ incrementCounter(7, () -> {
+ indexStats.incrementHit(new TestKey(PackExt.INDEX));
+ indexStats.incrementMiss(new TestKey(PackExt.INDEX));
+ });
+ totalRequestCounts[PackExt.INDEX.getPosition()] = 14;
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable
+ .fromCacheTables(cacheTableWithStats(packStats),
+ Map.of(PackExt.BITMAP_INDEX,
+ cacheTableWithStats(bitmapStats), PackExt.INDEX,
+ cacheTableWithStats(indexStats)));
+
+ assertArrayEquals(tables.getBlockCacheStats().getTotalRequestCount(),
+ totalRequestCounts);
+ }
+
+ @Test
+ public void getBlockCacheStats_getHitRatio_consolidatesAllTableHitRatios() {
+ long[] hitRatios = createEmptyStatsArray();
+
+ DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+ incrementCounter(5,
+ () -> packStats.incrementHit(new TestKey(PackExt.PACK)));
+ hitRatios[PackExt.PACK.getPosition()] = 100;
+
+ DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+ incrementCounter(6, () -> {
+ bitmapStats.incrementHit(new TestKey(PackExt.BITMAP_INDEX));
+ bitmapStats.incrementMiss(new TestKey(PackExt.BITMAP_INDEX));
+ });
+ hitRatios[PackExt.BITMAP_INDEX.getPosition()] = 50;
+
+ DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+ incrementCounter(7,
+ () -> indexStats.incrementMiss(new TestKey(PackExt.INDEX)));
+ hitRatios[PackExt.INDEX.getPosition()] = 0;
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable
+ .fromCacheTables(cacheTableWithStats(packStats),
+ Map.of(PackExt.BITMAP_INDEX,
+ cacheTableWithStats(bitmapStats), PackExt.INDEX,
+ cacheTableWithStats(indexStats)));
+
+ assertArrayEquals(tables.getBlockCacheStats().getHitRatio(), hitRatios);
+ }
+
+ @Test
+ public void getBlockCacheStats_getEvictions_consolidatesAllTableEvictions() {
+ long[] evictions = createEmptyStatsArray();
+
+ DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+ incrementCounter(5,
+ () -> packStats.incrementEvict(new TestKey(PackExt.PACK)));
+ evictions[PackExt.PACK.getPosition()] = 5;
+
+ DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+ incrementCounter(6, () -> bitmapStats
+ .incrementEvict(new TestKey(PackExt.BITMAP_INDEX)));
+ evictions[PackExt.BITMAP_INDEX.getPosition()] = 6;
+
+ DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+ incrementCounter(7,
+ () -> indexStats.incrementEvict(new TestKey(PackExt.INDEX)));
+ evictions[PackExt.INDEX.getPosition()] = 7;
+
+ PackExtBlockCacheTable tables = PackExtBlockCacheTable
+ .fromCacheTables(cacheTableWithStats(packStats),
+ Map.of(PackExt.BITMAP_INDEX,
+ cacheTableWithStats(bitmapStats), PackExt.INDEX,
+ cacheTableWithStats(indexStats)));
+
+ assertArrayEquals(tables.getBlockCacheStats().getEvictions(),
+ evictions);
+ }
+
+ private static void incrementCounter(int amount, Runnable fn) {
+ for (int i = 0; i < amount; i++) {
+ fn.run();
+ }
+ }
+
+ private static long[] createEmptyStatsArray() {
+ return new long[PackExt.values().length];
+ }
+
+ private static DfsBlockCacheTable cacheTableWithStats(
+ DfsBlockCacheStats dfsBlockCacheStats) {
+ DfsBlockCacheTable cacheTable = mock(DfsBlockCacheTable.class);
+ when(cacheTable.getBlockCacheStats()).thenReturn(dfsBlockCacheStats);
+ return cacheTable;
+ }
+
+ private static class TestKey extends DfsStreamKey {
+ TestKey(PackExt packExt) {
+ super(0, packExt);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ return false;
+ }
+ }
+}
diff --git a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
index 9d12facb33..a15fe1f372 100644
--- a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
+++ b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
@@ -285,6 +285,8 @@ DIRCUnrecognizedExtendedFlags=Unrecognized extended flags: {0}
downloadCancelled=Download cancelled
downloadCancelledDuringIndexing=Download cancelled during indexing
duplicateAdvertisementsOf=duplicate advertisements of {0}
+duplicateCacheTablesGiven=Duplicate cache tables given
+duplicatePackExtensionsForCacheTables=Duplicate pack extension {0} in cache tables
duplicatePackExtensionsSet=Attempting to configure duplicate pack extensions: {0}.{1}.{2} contains {3}
duplicateRef=Duplicate ref: {0}
duplicateRefAttribute=Duplicate ref attribute: {0}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
index 311d9c22aa..e31533aaa2 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
@@ -315,6 +315,8 @@ public class JGitText extends TranslationBundle {
/***/ public String downloadCancelled;
/***/ public String downloadCancelledDuringIndexing;
/***/ public String duplicateAdvertisementsOf;
+ /***/ public String duplicateCacheTablesGiven;
+ /***/ public String duplicatePackExtensionsForCacheTables;
/***/ public String duplicatePackExtensionsSet;
/***/ public String duplicateRef;
/***/ public String duplicateRefAttribute;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
index fa86701de8..20f4666373 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
@@ -263,6 +263,21 @@ public class DfsBlockCacheConfig {
return packExtCacheConfigurations;
}
+ /**
+ * Set the list of pack ext cache configs.
+ *
+ * Made visible for testing.
+ *
+ * @param packExtCacheConfigurations
+ * the list of pack ext cache configs to set.
+ * @return {@code this}
+ */
+ DfsBlockCacheConfig setPackExtCacheConfigurations(
+ List<DfsBlockCachePackExtConfig> packExtCacheConfigurations) {
+ this.packExtCacheConfigurations = packExtCacheConfigurations;
+ return this;
+ }
+
/**
* Update properties by setting fields from the configuration.
* <p>
@@ -435,7 +450,15 @@ public class DfsBlockCacheConfig {
// Configuration for the cache instance.
private final DfsBlockCacheConfig packExtCacheConfiguration;
- private DfsBlockCachePackExtConfig(EnumSet<PackExt> packExts,
+ /**
+ * Made visible for testing.
+ *
+ * @param packExts
+ * Set of {@link PackExt}s associated to this cache config.
+ * @param packExtCacheConfiguration
+ * {@link DfsBlockCacheConfig} for this cache config.
+ */
+ DfsBlockCachePackExtConfig(EnumSet<PackExt> packExts,
DfsBlockCacheConfig packExtCacheConfiguration) {
this.packExts = packExts;
this.packExtCacheConfiguration = packExtCacheConfiguration;
@@ -475,6 +498,5 @@ public class DfsBlockCacheConfig {
return new DfsBlockCachePackExtConfig(EnumSet.copyOf(packExts),
dfsBlockCacheConfig);
}
-
}
}
\ No newline at end of file
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTable.java
new file mode 100644
index 0000000000..858f731b70
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTable.java
@@ -0,0 +1,289 @@
+/*
+ * Copyright (c) 2024, Google LLC and others
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.ReadableChannelSupplier;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.Ref;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.RefLoader;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCacheConfig.DfsBlockCachePackExtConfig;
+import org.eclipse.jgit.internal.storage.pack.PackExt;
+
+/**
+ * A table that holds multiple cache tables accessed by {@link PackExt} types.
+ *
+ *
+ * Allows the separation of entries from different {@link PackExt} types to
+ * limit churn in cache caused by entries of differing sizes.
+ *
+ * Separating these tables enables the fine-tuning of cache tables per extension
+ * type.
+ */
+class PackExtBlockCacheTable implements DfsBlockCacheTable {
+ private final DfsBlockCacheTable defaultBlockCacheTable;
+
+ // Holds the unique tables backing the extBlockCacheTables values.
+ private final List<DfsBlockCacheTable> blockCacheTableList;
+
+ // Holds the mapping of PackExt to DfsBlockCacheTables.
+ // The relation between the size of extBlockCacheTables entries and
+ // blockCacheTableList entries is:
+ // blockCacheTableList.size() <= extBlockCacheTables.size()
+ private final Map<PackExt, DfsBlockCacheTable> extBlockCacheTables;
+
+ /**
+ * Builds the PackExtBlockCacheTable from a list of
+ * {@link DfsBlockCachePackExtConfig}s.
+ *
+ * @param cacheConfig
+ * {@link DfsBlockCacheConfig} containing
+ * {@link DfsBlockCachePackExtConfig}s used to configure
+ * PackExtBlockCacheTable. The {@link DfsBlockCacheConfig} holds
+ * the configuration for the default cache table.
+ * @return the cache table built from the given configs.
+ * @throws IllegalArgumentException
+ * when no {@link DfsBlockCachePackExtConfig} exists in the
+ * {@link DfsBlockCacheConfig}.
+ */
+ static PackExtBlockCacheTable fromBlockCacheConfigs(
+ DfsBlockCacheConfig cacheConfig) {
+ DfsBlockCacheTable defaultTable = new ClockBlockCacheTable(cacheConfig);
+ Map<PackExt, DfsBlockCacheTable> packExtBlockCacheTables = new HashMap<>();
+ List<DfsBlockCachePackExtConfig> packExtConfigs = cacheConfig
+ .getPackExtCacheConfigurations();
+ if (packExtConfigs == null || packExtConfigs.size() == 0) {
+ throw new IllegalArgumentException(
+ JGitText.get().noPackExtConfigurationGiven);
+ }
+ for (DfsBlockCachePackExtConfig packExtCacheConfig : packExtConfigs) {
+ DfsBlockCacheTable table = new ClockBlockCacheTable(
+ packExtCacheConfig.getPackExtCacheConfiguration());
+ for (PackExt packExt : packExtCacheConfig.getPackExts()) {
+ if (packExtBlockCacheTables.containsKey(packExt)) {
+ throw new IllegalArgumentException(MessageFormat.format(
+ JGitText.get().duplicatePackExtensionsForCacheTables,
+ packExt));
+ }
+ packExtBlockCacheTables.put(packExt, table);
+ }
+ }
+ return fromCacheTables(defaultTable, packExtBlockCacheTables);
+ }
+
+ /**
+ * Creates a new PackExtBlockCacheTable from the combination of a default
+ * {@link DfsBlockCacheTable} and a map of {@link PackExt}s to
+ * {@link DfsBlockCacheTable}s.
+ *
+ * This method allows for the PackExtBlockCacheTable to handle a mapping of
+ * {@link PackExt}s to arbitrarily defined {@link DfsBlockCacheTable}
+ * implementations. This is especially useful for users wishing to implement
+ * custom cache tables.
+ *
+ * This is currently made visible for testing.
+ *
+ * @param defaultBlockCacheTable
+ * the default table used when handling a {@link PackExt} type
+ * that does not map to a {@link DfsBlockCacheTable} mapped by
+ * packExtBlockCacheTables.
+ * @param packExtBlockCacheTables
+ * the mapping of {@link PackExt}s to
+ * {@link DfsBlockCacheTable}s. A single
+ * {@link DfsBlockCacheTable} can be defined for multiple
+ * {@link PackExt}s in a many-to-one relationship.
+ * @return the PackExtBlockCacheTable created from the
+ * defaultBlockCacheTable and packExtBlockCacheTables mapping.
+ * @throws IllegalArgumentException
+ * when a {@link PackExt} is defined for multiple
+ * {@link DfsBlockCacheTable}s.
+ */
+ static PackExtBlockCacheTable fromCacheTables(
+ DfsBlockCacheTable defaultBlockCacheTable,
+ Map<PackExt, DfsBlockCacheTable> packExtBlockCacheTables) {
+ Set<DfsBlockCacheTable> blockCacheTables = new HashSet<>();
+ blockCacheTables.add(defaultBlockCacheTable);
+ blockCacheTables.addAll(packExtBlockCacheTables.values());
+ return new PackExtBlockCacheTable(defaultBlockCacheTable,
+ List.copyOf(blockCacheTables), packExtBlockCacheTables);
+ }
+
+ private PackExtBlockCacheTable(DfsBlockCacheTable defaultBlockCacheTable,
+ List<DfsBlockCacheTable> blockCacheTableList,
+ Map<PackExt, DfsBlockCacheTable> extBlockCacheTables) {
+ this.defaultBlockCacheTable = defaultBlockCacheTable;
+ this.blockCacheTableList = blockCacheTableList;
+ this.extBlockCacheTables = extBlockCacheTables;
+ }
+
+ @Override
+ public boolean hasBlock0(DfsStreamKey key) {
+ return getTable(key).hasBlock0(key);
+ }
+
+ @Override
+ public DfsBlock getOrLoad(BlockBasedFile file, long position,
+ DfsReader dfsReader, ReadableChannelSupplier fileChannel)
+ throws IOException {
+ return getTable(file.ext).getOrLoad(file, position, dfsReader,
+ fileChannel);
+ }
+
+ @Override
+ public <T> Ref<T> getOrLoadRef(DfsStreamKey key, long position,
+ RefLoader<T> loader) throws IOException {
+ return getTable(key).getOrLoadRef(key, position, loader);
+ }
+
+ @Override
+ public void put(DfsBlock v) {
+ getTable(v.stream).put(v);
+ }
+
+ @Override
+ public <T> Ref<T> put(DfsStreamKey key, long pos, long size, T v) {
+ return getTable(key).put(key, pos, size, v);
+ }
+
+ @Override
+ public <T> Ref<T> putRef(DfsStreamKey key, long size, T v) {
+ return getTable(key).putRef(key, size, v);
+ }
+
+ @Override
+ public boolean contains(DfsStreamKey key, long position) {
+ return getTable(key).contains(key, position);
+ }
+
+ @Override
+ public <T> T get(DfsStreamKey key, long position) {
+ return getTable(key).get(key, position);
+ }
+
+ @Override
+ public BlockCacheStats getBlockCacheStats() {
+ return new CacheStats(blockCacheTableList.stream()
+ .map(DfsBlockCacheTable::getBlockCacheStats)
+ .collect(Collectors.toList()));
+ }
+
+ private DfsBlockCacheTable getTable(PackExt packExt) {
+ return extBlockCacheTables.getOrDefault(packExt,
+ defaultBlockCacheTable);
+ }
+
+ private DfsBlockCacheTable getTable(DfsStreamKey key) {
+ return extBlockCacheTables.getOrDefault(getPackExt(key),
+ defaultBlockCacheTable);
+ }
+
+ private static PackExt getPackExt(DfsStreamKey key) {
+ return PackExt.values()[key.packExtPos];
+ }
+
+ private static class CacheStats implements BlockCacheStats {
+ private final List<BlockCacheStats> blockCacheStats;
+
+ private CacheStats(List<BlockCacheStats> blockCacheStats) {
+ this.blockCacheStats = blockCacheStats;
+ }
+
+ @Override
+ public long[] getCurrentSize() {
+ long[] sums = emptyPackStats();
+ for (BlockCacheStats blockCacheStatsEntry : blockCacheStats) {
+ sums = add(sums, blockCacheStatsEntry.getCurrentSize());
+ }
+ return sums;
+ }
+
+ @Override
+ public long[] getHitCount() {
+ long[] sums = emptyPackStats();
+ for (BlockCacheStats blockCacheStatsEntry : blockCacheStats) {
+ sums = add(sums, blockCacheStatsEntry.getHitCount());
+ }
+ return sums;
+ }
+
+ @Override
+ public long[] getMissCount() {
+ long[] sums = emptyPackStats();
+ for (BlockCacheStats blockCacheStatsEntry : blockCacheStats) {
+ sums = add(sums, blockCacheStatsEntry.getMissCount());
+ }
+ return sums;
+ }
+
+ @Override
+ public long[] getTotalRequestCount() {
+ long[] sums = emptyPackStats();
+ for (BlockCacheStats blockCacheStatsEntry : blockCacheStats) {
+ sums = add(sums, blockCacheStatsEntry.getTotalRequestCount());
+ }
+ return sums;
+ }
+
+ @Override
+ public long[] getHitRatio() {
+ long[] hit = getHitCount();
+ long[] miss = getMissCount();
+ long[] ratio = new long[Math.max(hit.length, miss.length)];
+ for (int i = 0; i < ratio.length; i++) {
+ if (i >= hit.length) {
+ ratio[i] = 0;
+ } else if (i >= miss.length) {
+ ratio[i] = 100;
+ } else {
+ long total = hit[i] + miss[i];
+ ratio[i] = total == 0 ? 0 : hit[i] * 100 / total;
+ }
+ }
+ return ratio;
+ }
+
+ @Override
+ public long[] getEvictions() {
+ long[] sums = emptyPackStats();
+ for (BlockCacheStats blockCacheStatsEntry : blockCacheStats) {
+ sums = add(sums, blockCacheStatsEntry.getEvictions());
+ }
+ return sums;
+ }
+
+ private static long[] emptyPackStats() {
+ return new long[PackExt.values().length];
+ }
+
+ private static long[] add(long[] first, long[] second) {
+ long[] sums = new long[Integer.max(first.length, second.length)];
+ int i;
+ for (i = 0; i < Integer.min(first.length, second.length); i++) {
+ sums[i] = first[i] + second[i];
+ }
+ for (int j = i; j < first.length; j++) {
+ sums[j] = first[j];
+ }
+ for (int j = i; j < second.length; j++) {
+ sums[j] = second[j];
+ }
+ return sums;
+ }
+ }
+}
--
cgit v1.2.3
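The fromCacheTables entry point in the new PackExtBlockCacheTable is meant to let callers wire arbitrary DfsBlockCacheTable implementations per pack extension, with unmapped extensions falling back to the default table. A minimal sketch of such wiring, assuming it lives in the org.eclipse.jgit.internal.storage.dfs package (the classes involved are package-private) and using purely illustrative size limits:

import java.util.Map;

import org.eclipse.jgit.internal.storage.pack.PackExt;

class PackExtCacheWiringSketch {
	static DfsBlockCacheTable buildTables() {
		// Dedicated, generously sized table for pack data blocks.
		DfsBlockCacheTable packTable = new ClockBlockCacheTable(
				new DfsBlockCacheConfig().setBlockLimit(512 * 1024 * 1024));
		// Smaller table for primary indexes.
		DfsBlockCacheTable indexTable = new ClockBlockCacheTable(
				new DfsBlockCacheConfig().setBlockLimit(64 * 1024 * 1024));
		// Everything else (bitmaps, commit graphs, ...) shares the default.
		DfsBlockCacheTable defaultTable = new ClockBlockCacheTable(
				new DfsBlockCacheConfig());
		return PackExtBlockCacheTable.fromCacheTables(defaultTable,
				Map.of(PackExt.PACK, packTable, PackExt.INDEX, indexTable));
	}
}

Lookups then route by the extension encoded in the DfsStreamKey: a .pack block lands in packTable, a .idx block in indexTable, and anything else falls through to defaultTable, with getBlockCacheStats() consolidating the per-table counters as the tests above exercise.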
From 31e534193ff531e3aad1f115b16d8bda97bfc9e9 Mon Sep 17 00:00:00 2001
From: Ivan Frade
Date: Wed, 10 Apr 2024 13:52:56 -0700
Subject: DfsPackFile: Abstract the loading of pack indexes
DfsPackFile assumes that the indexes are stored in file streams and
their references need to be cached in DFS. This doesn't allow us to
experiment with other storage options, like key-value databases. In these
experiments not all indexes are together in the same storage.
Define an interface per index to load it, so implementors can focus on
the specifics of their index. Put them together in the IndexFactory
interface. The implementation of the IndexFactory chooses the right
combination of storages.
At the moment we do this only for primary and reverse
indexes. Follow-up changes can do the same for other indexes.
Change-Id: Icf790d8ba64b34dbe984759f1c6d8ec702554149
---
.../jgit/internal/storage/dfs/DfsPackFile.java | 253 +++++++++++++++------
1 file changed, 183 insertions(+), 70 deletions(-)
(limited to 'org.eclipse.jgit/src')
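To illustrate the intent of the abstraction, a custom IndexFactory could load the primary index from any backend and derive the reverse index from it. A hedged sketch, where KeyValueStore is a hypothetical storage client defined only for this example (not part of JGit):

import java.io.ByteArrayInputStream;
import java.io.IOException;

import org.eclipse.jgit.internal.storage.dfs.DfsPackFile;
import org.eclipse.jgit.internal.storage.dfs.DfsReader;
import org.eclipse.jgit.internal.storage.file.PackIndex;
import org.eclipse.jgit.internal.storage.file.PackReverseIndex;
import org.eclipse.jgit.internal.storage.file.PackReverseIndexFactory;

class KeyValueIndexFactory implements DfsPackFile.IndexFactory {
	/** Hypothetical key-value client, used only for this sketch. */
	interface KeyValueStore {
		byte[] get(String key) throws IOException;
	}

	private final KeyValueStore store;
	private final String packName;

	KeyValueIndexFactory(KeyValueStore store, String packName) {
		this.store = store;
		this.packName = packName;
	}

	@Override
	public PackIndexes getPackIndexes() {
		return new PackIndexes() {
			@Override
			public PackIndex index(DfsReader ctx) throws IOException {
				// Raw index bytes come from the key-value store instead of a
				// DFS stream; parsing is unchanged.
				byte[] raw = store.get(packName + ".idx");
				return PackIndex.read(new ByteArrayInputStream(raw));
			}

			@Override
			public PackReverseIndex reverseIndex(DfsReader ctx)
					throws IOException {
				// No stored reverse index in this backend; compute it from
				// the primary index in memory.
				return PackReverseIndexFactory.computeFromIndex(index(ctx));
			}
		};
	}
}

The factory decides whether and how to cache; DfsPackFile holds onto whatever reference it gets back, as the diff below shows.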
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
index cdd10618e0..845feab5ac 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
@@ -107,6 +107,53 @@ public final class DfsPackFile extends BlockBasedFile {
/** Lock for {@link #corruptObjects}. */
private final Object corruptObjectsLock = new Object();
+ private final IndexFactory indexFactory;
+
+ /**
+ * Returns the indexes for this pack.
+ *
+ * We define indexes in different sub interfaces to allow implementing the
+ * indexes over different combinations of backends.
+ *
+ * Implementations decide if/how to cache the indexes. The calling
+ * DfsPackFile will keep the reference to the index as long as it needs it.
+ */
+ public interface IndexFactory {
+ /**
+ * Take care of loading the primary and reverse indexes for this pack.
+ */
+ interface PackIndexes {
+ /**
+ * Load the primary index for the pack.
+ *
+ * @param ctx
+ * reader to find the raw bytes
+ * @return a primary index
+ * @throws IOException
+ * a problem finding/parsing the index
+ */
+ PackIndex index(DfsReader ctx) throws IOException;
+
+ /**
+ * Load the reverse index of the pack
+ *
+ * @param ctx
+ * reader to find the raw bytes
+ * @return the reverse index of the pack
+ * @throws IOException
+ * a problem finding/parsing the reverse index
+ */
+ PackReverseIndex reverseIndex(DfsReader ctx) throws IOException;
+ }
+
+ /**
+ * Returns a provider of the primary and reverse indexes of this pack
+ *
+ * @return an implementation of the {@link PackIndexes} interface
+ */
+ PackIndexes getPackIndexes();
+ }
+
/**
* Construct a reader for an existing, packfile.
*
@@ -116,7 +163,8 @@ public final class DfsPackFile extends BlockBasedFile {
* description of the pack within the DFS.
*/
DfsPackFile(DfsBlockCache cache, DfsPackDescription desc) {
- this(cache, desc, DEFAULT_BITMAP_LOADER);
+ this(cache, desc, DEFAULT_BITMAP_LOADER,
+ new CachedStreamIndexFactory(cache, desc));
}
/**
@@ -128,9 +176,11 @@ public final class DfsPackFile extends BlockBasedFile {
* description of the pack within the DFS
* @param bitmapLoader
* loader to get the bitmaps of this pack (if any)
+ * @param indexFactory
+ * an IndexFactory to get references to the indexes of this pack
*/
public DfsPackFile(DfsBlockCache cache, DfsPackDescription desc,
- PackBitmapIndexLoader bitmapLoader) {
+ PackBitmapIndexLoader bitmapLoader, IndexFactory indexFactory) {
super(cache, desc, PACK);
int bs = desc.getBlockSize(PACK);
@@ -142,6 +192,7 @@ public final class DfsPackFile extends BlockBasedFile {
length = sz > 0 ? sz : -1;
this.bitmapLoader = bitmapLoader;
+ this.indexFactory = indexFactory;
}
/**
@@ -196,22 +247,7 @@ public final class DfsPackFile extends BlockBasedFile {
Repository.getGlobalListenerList()
.dispatch(new BeforeDfsPackIndexLoadedEvent(this));
try {
- DfsStreamKey idxKey = desc.getStreamKey(INDEX);
- // Keep the value parsed in the loader, in case the Ref<> is
- // nullified in ClockBlockCacheTable#reserveSpace
- // before we read its value.
- AtomicReference<PackIndex> loadedRef = new AtomicReference<>(null);
- DfsBlockCache.Ref<PackIndex> cachedRef = cache.getOrLoadRef(idxKey,
- REF_POSITION, () -> {
- RefWithSize<PackIndex> idx = loadPackIndex(ctx);
- loadedRef.set(idx.ref);
- return new DfsBlockCache.Ref<>(idxKey, REF_POSITION,
- idx.size, idx.ref);
- });
- if (loadedRef.get() == null) {
- ctx.stats.idxCacheHit++;
- }
- index = cachedRef.get() != null ? cachedRef.get() : loadedRef.get();
+ index = indexFactory.getPackIndexes().index(ctx);
if (index == null) {
throw new IOException(
"Couldn't get a reference to the primary index"); //$NON-NLS-1$
@@ -328,26 +364,7 @@ public final class DfsPackFile extends BlockBasedFile {
return reverseIndex;
}
- PackIndex idx = idx(ctx);
- DfsStreamKey revKey = desc.getStreamKey(REVERSE_INDEX);
- // Keep the value parsed in the loader, in case the Ref<> is
- // nullified in ClockBlockCacheTable#reserveSpace
- // before we read its value.
- AtomicReference<PackReverseIndex> loadedRef = new AtomicReference<>(
- null);
- DfsBlockCache.Ref<PackReverseIndex> cachedRef = cache
- .getOrLoadRef(revKey, REF_POSITION, () -> {
- RefWithSize<PackReverseIndex> ridx = loadReverseIdx(ctx,
- idx);
- loadedRef.set(ridx.ref);
- return new DfsBlockCache.Ref<>(revKey, REF_POSITION,
- ridx.size, ridx.ref);
- });
- if (loadedRef.get() == null) {
- ctx.stats.ridxCacheHit++;
- }
- reverseIndex = cachedRef.get() != null ? cachedRef.get()
- : loadedRef.get();
+ reverseIndex = indexFactory.getPackIndexes().reverseIndex(ctx);
if (reverseIndex == null) {
throw new IOException(
"Couldn't get a reference to the reverse index"); //$NON-NLS-1$
@@ -1227,38 +1244,6 @@ public final class DfsPackFile extends BlockBasedFile {
}
}
- private RefWithSize<PackIndex> loadPackIndex(DfsReader ctx)
- throws IOException {
- try {
- ctx.stats.readIdx++;
- long start = System.nanoTime();
- try (ReadableChannel rc = ctx.db.openFile(desc, INDEX)) {
- PackIndex idx = PackIndex.read(alignTo8kBlocks(rc));
- ctx.stats.readIdxBytes += rc.position();
- return new RefWithSize<>(idx, idx.getObjectCount() * REC_SIZE);
- } finally {
- ctx.stats.readIdxMicros += elapsedMicros(start);
- }
- } catch (EOFException e) {
- throw new IOException(MessageFormat.format(
- DfsText.get().shortReadOfIndex,
- desc.getFileName(INDEX)), e);
- } catch (IOException e) {
- throw new IOException(MessageFormat.format(
- DfsText.get().cannotReadIndex,
- desc.getFileName(INDEX)), e);
- }
- }
-
- private static RefWithSize<PackReverseIndex> loadReverseIdx(DfsReader ctx,
- PackIndex idx) {
- ctx.stats.readReverseIdx++;
- long start = System.nanoTime();
- PackReverseIndex revidx = PackReverseIndexFactory.computeFromIndex(idx);
- ctx.stats.readReverseIdxMicros += elapsedMicros(start);
- return new RefWithSize<>(revidx, idx.getObjectCount() * 8);
- }
-
private DfsBlockCache.Ref<PackObjectSizeIndex> loadObjectSizeIndex(
DfsReader ctx, DfsStreamKey objectSizeIndexKey) throws IOException {
ctx.stats.readObjectSizeIndex++;
@@ -1457,6 +1442,134 @@ public final class DfsPackFile extends BlockBasedFile {
}
}
+ /**
+ * An index factory backed by Dfs streams and references cached in
+ * DfsBlockCache
+ */
+ public static final class CachedStreamIndexFactory implements IndexFactory {
+ private final CachedStreamPackIndexes indexes;
+
+ /**
+ * An index factory
+ *
+ * @param cache
+ * DFS block cache to use for the references
+ * @param desc
* This factory loads indexes for this pack
+ */
+ public CachedStreamIndexFactory(DfsBlockCache cache,
+ DfsPackDescription desc) {
+ this.indexes = new CachedStreamPackIndexes(cache, desc);
+ }
+
+ @Override
+ public PackIndexes getPackIndexes() {
+ return indexes;
+ }
+ }
+
+ /**
+ * Load primary and reverse index from Dfs streams and cache the references
+ * in DfsBlockCache.
+ */
+ public static final class CachedStreamPackIndexes implements IndexFactory.PackIndexes {
+ private final DfsBlockCache cache;
+
+ private final DfsPackDescription desc;
+
+ /**
+ * An index factory
+ *
+ * @param cache
+ * DFS block cache to use for the references
* @param desc This factory loads indexes for this pack
+ */
+ public CachedStreamPackIndexes(DfsBlockCache cache,
+ DfsPackDescription desc) {
+ this.cache = cache;
+ this.desc = desc;
+ }
+
+ @Override
+ public PackIndex index(DfsReader ctx) throws IOException {
+ DfsStreamKey idxKey = desc.getStreamKey(INDEX);
+ // Keep the value parsed in the loader, in case the Ref<> is
+ // nullified in ClockBlockCacheTable#reserveSpace
+ // before we read its value.
+ AtomicReference<PackIndex> loadedRef = new AtomicReference<>(null);
+ DfsBlockCache.Ref<PackIndex> cachedRef = cache.getOrLoadRef(idxKey,
+ REF_POSITION, () -> {
+ RefWithSize<PackIndex> idx = loadPackIndex(ctx, desc);
+ loadedRef.set(idx.ref);
+ return new DfsBlockCache.Ref<>(idxKey, REF_POSITION,
+ idx.size, idx.ref);
+ });
+ if (loadedRef.get() == null) {
+ ctx.stats.idxCacheHit++;
+ }
+ return cachedRef.get() != null ? cachedRef.get() : loadedRef.get();
+ }
+
+ private static RefWithSize<PackIndex> loadPackIndex(DfsReader ctx,
+ DfsPackDescription desc) throws IOException {
+ try {
+ ctx.stats.readIdx++;
+ long start = System.nanoTime();
+ try (ReadableChannel rc = ctx.db.openFile(desc, INDEX)) {
+ PackIndex idx = PackIndex.read(alignTo8kBlocks(rc));
+ ctx.stats.readIdxBytes += rc.position();
+ return new RefWithSize<>(idx,
+ idx.getObjectCount() * REC_SIZE);
+ } finally {
+ ctx.stats.readIdxMicros += elapsedMicros(start);
+ }
+ } catch (EOFException e) {
+ throw new IOException(
+ MessageFormat.format(DfsText.get().shortReadOfIndex,
+ desc.getFileName(INDEX)),
+ e);
+ } catch (IOException e) {
+ throw new IOException(
+ MessageFormat.format(DfsText.get().cannotReadIndex,
+ desc.getFileName(INDEX)),
+ e);
+ }
+ }
+
+ @Override
+ public PackReverseIndex reverseIndex(DfsReader ctx) throws IOException {
+ PackIndex idx = index(ctx);
+ DfsStreamKey revKey = desc.getStreamKey(REVERSE_INDEX);
+ // Keep the value parsed in the loader, in case the Ref<> is
+ // nullified in ClockBlockCacheTable#reserveSpace
+ // before we read its value.
+ AtomicReference<PackReverseIndex> loadedRef = new AtomicReference<>(
+ null);
+ DfsBlockCache.Ref<PackReverseIndex> cachedRef = cache
+ .getOrLoadRef(revKey, REF_POSITION, () -> {
+ RefWithSize<PackReverseIndex> ridx = loadReverseIdx(ctx,
+ idx);
+ loadedRef.set(ridx.ref);
+ return new DfsBlockCache.Ref<>(revKey, REF_POSITION,
+ ridx.size, ridx.ref);
+ });
+ if (loadedRef.get() == null) {
+ ctx.stats.ridxCacheHit++;
+ }
+ return cachedRef.get() != null ? cachedRef.get() : loadedRef.get();
+ }
+
+ private static RefWithSize<PackReverseIndex> loadReverseIdx(
+ DfsReader ctx, PackIndex idx) {
+ ctx.stats.readReverseIdx++;
+ long start = System.nanoTime();
+ PackReverseIndex revidx = PackReverseIndexFactory
+ .computeFromIndex(idx);
+ ctx.stats.readReverseIdxMicros += elapsedMicros(start);
+ return new RefWithSize<>(revidx, idx.getObjectCount() * 8);
+ }
+ }
+
private static final class RefWithSize<V> {
final V ref;
final long size;
--
cgit v1.2.3
From 81199f02fb49f79ef17b2daa3efac00d16040766 Mon Sep 17 00:00:00 2001
From: granny
Date: Tue, 2 Jul 2024 15:15:37 -0700
Subject: Lib: Fix ssh value for gpg.format throwing an
IllegalArgumentException
Git version 2.34 and later supports signing commits and tags with SSH keys. This means gpg.format now supports "ssh" as a value.
Change-Id: Iee1e5a68a816bec149a17a73a6916d2884a54163
---
.../tst/org/eclipse/jgit/lib/GpgConfigTest.java | 10 ++++++++++
org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java | 4 +++-
2 files changed, 13 insertions(+), 1 deletion(-)
(limited to 'org.eclipse.jgit/src')
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/GpgConfigTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/GpgConfigTest.java
index 32f6766d47..5c2b190777 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/GpgConfigTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/GpgConfigTest.java
@@ -95,6 +95,16 @@ public class GpgConfigTest {
assertEquals(GpgConfig.GpgFormat.X509, new GpgConfig(c).getKeyFormat());
}
+ @Test
+ public void testGetKeyFormat_ssh() throws Exception {
+ Config c = parse("" //
+ + "[gpg]\n" //
+ + " format = ssh\n" //
+ );
+
+ assertEquals(GpgConfig.GpgFormat.SSH, new GpgConfig(c).getKeyFormat());
+ }
+
@Test
public void testGetSigningKey() throws Exception {
Config c = parse("" //
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java
index 427a235f3b..4b0c07983f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java
@@ -24,7 +24,9 @@ public class GpgConfig {
/** Value for openpgp */
OPENPGP("openpgp"), //$NON-NLS-1$
/** Value for x509 */
- X509("x509"); //$NON-NLS-1$
+ X509("x509"), //$NON-NLS-1$
+ /** Value for ssh */
+ SSH("ssh"); //$NON-NLS-1$
private final String configValue;
--
cgit v1.2.3
From d82bea9d8bbcb438459f756f3d57068997aeb782 Mon Sep 17 00:00:00 2001
From: Sergey Zakharov
Date: Sun, 21 Jul 2024 16:06:19 +0300
Subject: ObjectWalk: Remove duplicated word "the" in class documentation
Change-Id: I0ca0b5fc65b5cafc768a053b1de40aea9f14231c
---
org.eclipse.jgit/src/org/eclipse/jgit/revwalk/ObjectWalk.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
(limited to 'org.eclipse.jgit/src')
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/ObjectWalk.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/ObjectWalk.java
index 82671d9abb..385f073a77 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/ObjectWalk.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/ObjectWalk.java
@@ -43,7 +43,7 @@ import org.eclipse.jgit.util.RawParseUtils;
* Tree and blob objects reachable from interesting commits are automatically
* scheduled for inclusion in the results of {@link #nextObject()}, returning
* each object exactly once. Objects are sorted and returned according to the
- * the commits that reference them and the order they appear within a tree.
+ * commits that reference them and the order they appear within a tree.
* Ordering can be affected by changing the
* {@link org.eclipse.jgit.revwalk.RevSort} used to order the commits that are
* returned first.
--
cgit v1.2.3
From 9b73ec4d12d65f3366e7cc5c3533d84d8c442a67 Mon Sep 17 00:00:00 2001
From: Matthias Sohn
Date: Fri, 9 Aug 2024 11:53:01 +0200
Subject: Fix "Comparison of narrow type with wide type in loop condition"
This issue was detected by a GitHub CodeQL security scan run on JGit
source code.
Description of the error raised by the security scan:
"In a loop condition, comparison of a value of a narrow type with a
value of a wide type may always evaluate to true if the wider value is
sufficiently large (or small). This is because the narrower value may
overflow. This can lead to an infinite loop."
Fix this by using type `long` for the local variable `done`.
Change-Id: Ibd4f71299e3f2e40d4331227bd143569a4264d8c
---
org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
(limited to 'org.eclipse.jgit/src')
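The failure mode is easiest to see in a small standalone example (illustrative only, not JGit code): once an int counter wraps around, the comparison against a wider long bound can never become false.

class NarrowLoopOverflow {
	public static void main(String[] args) {
		long expectedObjectCount = Integer.MAX_VALUE + 1L; // 2147483648
		int done = Integer.MAX_VALUE;
		done++; // wraps to Integer.MIN_VALUE
		// The int is widened to long for the comparison, so the wrapped
		// negative value is still below the bound: a loop condition like
		// "done < expectedObjectCount" would stay true forever.
		System.out.println(done);                        // -2147483648
		System.out.println(done < expectedObjectCount);  // true

		long fixedDone = Integer.MAX_VALUE;
		fixedDone++; // no wrap-around with a long counter, as in the fix
		System.out.println(fixedDone);                        // 2147483648
		System.out.println(fixedDone < expectedObjectCount);  // false
	}
}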
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java
index 715cbb48fb..cfceed00e8 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java
@@ -531,7 +531,7 @@ public abstract class PackParser {
receiving.beginTask(JGitText.get().receivingObjects,
(int) expectedObjectCount);
try {
- for (int done = 0; done < expectedObjectCount; done++) {
+ for (long done = 0; done < expectedObjectCount; done++) {
indexOneObject();
receiving.update(1);
if (receiving.isCancelled())
--
cgit v1.2.3
From a5e123349c921bd6850e8d11d00ea142422dddba Mon Sep 17 00:00:00 2001
From: Thomas Wolf
Date: Sat, 10 Aug 2024 19:27:07 +0200
Subject: ConfigConstants: Add missing @since 7.0
Change-Id: I1ea31c1f0735b7c8fd09fbedc413d613e4baa803
Signed-off-by: Thomas Wolf
---
org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
(limited to 'org.eclipse.jgit/src')
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
index f4e43c9f53..4be0cceedc 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
@@ -77,7 +77,11 @@ public final class ConfigConstants {
/** The "dfs" section */
public static final String CONFIG_DFS_SECTION = "dfs";
- /** The dfs cache subsection prefix */
+ /**
+ * The dfs cache subsection prefix.
+ *
+ * @since 7.0
+ */
public static final String CONFIG_DFS_CACHE_PREFIX = "dfs.";
/**
--
cgit v1.2.3
From 888cd56583b23b88f714ac3f0596e23c690c1a74 Mon Sep 17 00:00:00 2001
From: Ivan Frade
Date: Tue, 20 Aug 2024 11:56:41 -0700
Subject: DfsReaderIoStats: Order fields and methods consistently
As fields and getters were added, we didn't respect the (hard to see)
existing order.
Reorder with the following criteria:
Methods:
xCacheHits() (for all indexes in index order),
xCount() (same),
xBytes() (same),
xMicros() (same).
Index order: primary, reverse, bitmap, commit-graph, object-size
Change-Id: I28f1d8121070d4357d566e3683947a26ceb3ba04
---
.../internal/storage/dfs/DfsReaderIoStats.java | 68 +++++++++++-----------
1 file changed, 34 insertions(+), 34 deletions(-)
(limited to 'org.eclipse.jgit/src')
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java
index adb4673ae4..1e5e439598 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderIoStats.java
@@ -40,12 +40,12 @@ public class DfsReaderIoStats {
/** Total number of complete pack indexes read into memory. */
long readIdx;
- /** Total number of complete bitmap indexes read into memory. */
- long readBitmap;
-
/** Total number of reverse indexes added into memory. */
long readReverseIdx;
+ /** Total number of complete bitmap indexes read into memory. */
+ long readBitmap;
+
/** Total number of complete commit graphs read into memory. */
long readCommitGraph;
@@ -55,6 +55,9 @@ public class DfsReaderIoStats {
/** Total number of bytes read from pack indexes. */
long readIdxBytes;
+ /** Total number of bytes read from bitmap indexes. */
+ long readBitmapIdxBytes;
+
/** Total number of bytes read from commit graphs. */
long readCommitGraphBytes;
@@ -67,18 +70,15 @@ public class DfsReaderIoStats {
/** Total microseconds spent creating reverse indexes. */
long readReverseIdxMicros;
+ /** Total microseconds spent reading bitmap indexes. */
+ long readBitmapIdxMicros;
+
/** Total microseconds spent creating commit graphs. */
long readCommitGraphMicros;
/** Total microseconds spent creating object size indexes */
long readObjectSizeIndexMicros;
- /** Total number of bytes read from bitmap indexes. */
- long readBitmapIdxBytes;
-
- /** Total microseconds spent reading bitmap indexes. */
- long readBitmapIdxMicros;
-
/** Total number of block cache hits. */
long blockCacheHit;
@@ -195,21 +195,21 @@ public class DfsReaderIoStats {
}
/**
- * Get total number of times the commit graph read into memory.
+ * Get total number of complete bitmap indexes read into memory.
*
- * @return total number of commit graph read into memory.
+ * @return total number of complete bitmap indexes read into memory.
*/
- public long getReadCommitGraphCount() {
- return stats.readCommitGraph;
+ public long getReadBitmapIndexCount() {
+ return stats.readBitmap;
}
/**
- * Get total number of complete bitmap indexes read into memory.
+ * Get total number of times the commit graph read into memory.
*
- * @return total number of complete bitmap indexes read into memory.
+ * @return total number of commit graph read into memory.
*/
- public long getReadBitmapIndexCount() {
- return stats.readBitmap;
+ public long getReadCommitGraphCount() {
+ return stats.readCommitGraph;
}
/**
@@ -230,6 +230,15 @@ public class DfsReaderIoStats {
return stats.readIdxBytes;
}
+ /**
+ * Get total number of bytes read from bitmap indexes.
+ *
+ * @return total number of bytes read from bitmap indexes.
+ */
+ public long getReadBitmapIndexBytes() {
+ return stats.readBitmapIdxBytes;
+ }
+
/**
* Get total number of bytes read from commit graphs.
*
@@ -258,30 +267,21 @@ public class DfsReaderIoStats {
}
/**
- * Get total microseconds spent reading commit graphs.
- *
- * @return total microseconds spent reading commit graphs.
- */
- public long getReadCommitGraphMicros() {
- return stats.readCommitGraphMicros;
- }
-
- /**
- * Get total number of bytes read from bitmap indexes.
+ * Get total microseconds spent reading bitmap indexes.
*
- * @return total number of bytes read from bitmap indexes.
+ * @return total microseconds spent reading bitmap indexes.
*/
- public long getReadBitmapIndexBytes() {
- return stats.readBitmapIdxBytes;
+ public long getReadBitmapIndexMicros() {
+ return stats.readBitmapIdxMicros;
}
/**
- * Get total microseconds spent reading bitmap indexes.
+ * Get total microseconds spent reading commit graphs.
*
- * @return total microseconds spent reading bitmap indexes.
+ * @return total microseconds spent reading commit graphs.
*/
- public long getReadBitmapIndexMicros() {
- return stats.readBitmapIdxMicros;
+ public long getReadCommitGraphMicros() {
+ return stats.readCommitGraphMicros;
}
/**
--
cgit v1.2.3
From 81067f18e6ee9ac36602926ac0e47945a18e8e3c Mon Sep 17 00:00:00 2001
From: Thomas Wolf
Date: Tue, 20 Aug 2024 22:40:10 +0200
Subject: GpgConfig: Add missing @since
Change-Id: Ie56e7d8f2defe10a87565056a1763288d5b1e1a6
Signed-off-by: Thomas Wolf
---
org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
(limited to 'org.eclipse.jgit/src')
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java
index 4b0c07983f..f5064df061 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java
@@ -25,7 +25,11 @@ public class GpgConfig {
OPENPGP("openpgp"), //$NON-NLS-1$
/** Value for x509 */
X509("x509"), //$NON-NLS-1$
- /** Value for ssh */
+ /**
+ * Value for ssh.
+ *
+ * @since 7.0
+ */
SSH("ssh"); //$NON-NLS-1$
private final String configValue;
--
cgit v1.2.3
From d66175d7dca30be7ab96af0e3998f795a174b891 Mon Sep 17 00:00:00 2001
From: Kaushik Lingarkar
Date: Tue, 20 Aug 2024 15:50:20 -0700
Subject: LockFile: Retry lock creation if parent dirs were removed
In the small window between creation of the lock file's parent dirs and
the lock file itself, the parent dirs may be cleaned by an external
process packing refs in the repository. When this scenario occurs, retry
creating the lock file (along with its parent dirs).
Change-Id: Id7ec60c3f7f373b59f1dc8de6b8fa6df6bdf2570
Signed-off-by: Kaushik Lingarkar
---
.../eclipse/jgit/internal/storage/file/LockFile.java | 17 +++++++++++++++--
1 file changed, 15 insertions(+), 2 deletions(-)
(limited to 'org.eclipse.jgit/src')
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java
index a2d8bd0140..1983541e4a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java
@@ -24,6 +24,7 @@ import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.FileTime;
import java.text.MessageFormat;
@@ -141,9 +142,8 @@ public class LockFile {
throw new IllegalStateException(
MessageFormat.format(JGitText.get().lockAlreadyHeld, ref));
}
- FileUtils.mkdirs(lck.getParentFile(), true);
try {
- token = FS.DETECTED.createNewFileAtomic(lck);
+ token = createLockFileWithRetry();
} catch (IOException e) {
LOG.error(JGitText.get().failedCreateLockFile, lck, e);
throw e;
@@ -160,6 +160,19 @@ public class LockFile {
return obtainedLock;
}
+ private FS.LockToken createLockFileWithRetry() throws IOException {
+ try {
+ return createLockFile();
+ } catch (NoSuchFileException e) {
+ return createLockFile();
+ }
+ }
+
+ private FS.LockToken createLockFile() throws IOException {
+ FileUtils.mkdirs(lck.getParentFile(), true);
+ return FS.DETECTED.createNewFileAtomic(lck);
+ }
+
/**
* Try to establish the lock for appending.
*
--
cgit v1.2.3
From a78e6eaef63754902f46dffe657a783403c44bfe Mon Sep 17 00:00:00 2001
From: Thomas Wolf
Date: Tue, 20 Aug 2024 22:41:45 +0200
Subject: Signing: refactor interfaces
This is a big API-breaking change cleaning up the signing interfaces.
Initially, these interfaces were GPG/OpenPGP-specific. When EGit added
new signers and signature verifiers that called an external GPG
executable, they were found inadequate and were extended to be able to
pass in the GpgConfig to get access to the "gpg.program" setting.
With the introduction of X.509 S/MIME signing, it was discovered that
the interfaces were still not quite adequate, and the "Gpg" prefix on
the class names was confusing.
Since 7.0 is a major version bump, I'm taking this chance to overhaul
these interfaces from ground up.
For signing, there is a new Signer interface. With it goes a
SignerFactory SPI interface, and a final Signers class managing the
currently set signers. By default, signers for the different signature
types are created from the signer factories, which are discovered via
the ServiceLoader. External code can install its own signers, overriding
the default factories.
For signature verification, exactly the same mechanism is used.
This simplifies the setup of signers and signature verifiers, and makes
it all more regular. Signer instances just get a byte[] to sign and
don't have to worry about ObjectBuilders at all. SignatureVerifier
instances also just get the data and signature as byte[] and don't have
to worry about extracting the signature from a commit or tag, or about
what kind of signature it is.
Both Signers and SignatureVerifiers always get passed the Repository
and the GpgConfig. The repository will be needed in an implementation
for SSH signatures because gpg.ssh.* configs may need to be loaded
explicitly, and some of those values need the current workspace
location.
For signature verification, there is exactly one place in core JGit in
SignatureVerifiers that extracts signatures, determines the signature
type, and then calls the right signature verifier.
Change RevTag to recognize all signature types known in git (GPG, X509,
and SSH).
Change-Id: I26d2731e7baebb38976c87b7f328b63a239760d5
Signed-off-by: Thomas Wolf
---
org.eclipse.jgit.gpg.bc/META-INF/MANIFEST.MF | 5 +-
org.eclipse.jgit.gpg.bc/pom.xml | 2 +-
...rg.eclipse.jgit.lib.GpgSignatureVerifierFactory | 1 -
.../services/org.eclipse.jgit.lib.GpgSigner | 1 -
.../org.eclipse.jgit.lib.SignatureVerifierFactory | 1 +
.../services/org.eclipse.jgit.lib.SignerFactory | 1 +
.../jgit/gpg/bc/BouncyCastleGpgSignerFactory.java | 34 ---
.../internal/BouncyCastleGpgSignatureVerifier.java | 138 ++----------
.../BouncyCastleGpgSignatureVerifierFactory.java | 23 +-
.../gpg/bc/internal/BouncyCastleGpgSigner.java | 127 ++++-------
.../bc/internal/BouncyCastleGpgSignerFactory.java | 31 +++
.../src/org/eclipse/jgit/pgm/Log.java | 24 +--
.../src/org/eclipse/jgit/pgm/Show.java | 27 +--
.../src/org/eclipse/jgit/pgm/Tag.java | 5 +-
.../jgit/pgm/internal/VerificationUtils.java | 2 +-
.../org/eclipse/jgit/api/CommitCommandTest.java | 73 +++++--
.../org/eclipse/jgit/internal/JGitText.properties | 3 +-
.../src/org/eclipse/jgit/api/CommitCommand.java | 47 ++--
.../src/org/eclipse/jgit/api/TagCommand.java | 45 ++--
.../org/eclipse/jgit/api/VerificationResult.java | 5 +-
.../eclipse/jgit/api/VerifySignatureCommand.java | 57 +----
.../src/org/eclipse/jgit/internal/JGitText.java | 3 +-
.../jgit/lib/AbstractGpgSignatureVerifier.java | 71 ------
.../src/org/eclipse/jgit/lib/Constants.java | 14 ++
.../src/org/eclipse/jgit/lib/GpgConfig.java | 24 +--
.../src/org/eclipse/jgit/lib/GpgObjectSigner.java | 95 --------
.../org/eclipse/jgit/lib/GpgSignatureVerifier.java | 184 ----------------
.../jgit/lib/GpgSignatureVerifierFactory.java | 92 --------
.../src/org/eclipse/jgit/lib/GpgSigner.java | 141 ------------
.../org/eclipse/jgit/lib/SignatureVerifier.java | 106 +++++++++
.../eclipse/jgit/lib/SignatureVerifierFactory.java | 37 ++++
.../org/eclipse/jgit/lib/SignatureVerifiers.java | 239 +++++++++++++++++++++
.../src/org/eclipse/jgit/lib/Signer.java | 139 ++++++++++++
.../src/org/eclipse/jgit/lib/SignerFactory.java | 37 ++++
.../src/org/eclipse/jgit/lib/Signers.java | 101 +++++++++
.../src/org/eclipse/jgit/revwalk/RevTag.java | 27 ++-
.../src/org/eclipse/jgit/util/SignatureUtils.java | 26 +--
37 files changed, 944 insertions(+), 1044 deletions(-)
delete mode 100644 org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.GpgSignatureVerifierFactory
delete mode 100644 org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.GpgSigner
create mode 100644 org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.SignatureVerifierFactory
create mode 100644 org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.SignerFactory
delete mode 100644 org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/BouncyCastleGpgSignerFactory.java
create mode 100644 org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignerFactory.java
delete mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/AbstractGpgSignatureVerifier.java
delete mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgObjectSigner.java
delete mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgSignatureVerifier.java
delete mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgSignatureVerifierFactory.java
delete mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgSigner.java
create mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/SignatureVerifier.java
create mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/SignatureVerifierFactory.java
create mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/SignatureVerifiers.java
create mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/Signer.java
create mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/SignerFactory.java
create mode 100644 org.eclipse.jgit/src/org/eclipse/jgit/lib/Signers.java
(limited to 'org.eclipse.jgit/src')
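The ServiceLoader-based discovery described above can be pictured with a short sketch. The SignatureVerifierFactory methods (getType, create) are the ones shown in the diff below; how the real Signers/SignatureVerifiers registries resolve multiple factories for the same format is not shown here and may differ.

import java.util.EnumMap;
import java.util.Map;
import java.util.ServiceLoader;

import org.eclipse.jgit.lib.GpgConfig.GpgFormat;
import org.eclipse.jgit.lib.SignatureVerifier;
import org.eclipse.jgit.lib.SignatureVerifierFactory;

class VerifierDiscoverySketch {
	static Map<GpgFormat, SignatureVerifier> discoverVerifiers() {
		Map<GpgFormat, SignatureVerifier> byFormat = new EnumMap<>(
				GpgFormat.class);
		// META-INF/services/org.eclipse.jgit.lib.SignatureVerifierFactory
		// entries (such as the BouncyCastle one registered in this patch)
		// are picked up here without any explicit wiring.
		for (SignatureVerifierFactory factory : ServiceLoader
				.load(SignatureVerifierFactory.class)) {
			byFormat.put(factory.getType(), factory.create());
		}
		return byFormat;
	}
}

An external verifier or signer can therefore replace or supplement the bundled BouncyCastle implementation simply by publishing its own factory on the class path.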
diff --git a/org.eclipse.jgit.gpg.bc/META-INF/MANIFEST.MF b/org.eclipse.jgit.gpg.bc/META-INF/MANIFEST.MF
index 9749ac1111..7a62d1d232 100644
--- a/org.eclipse.jgit.gpg.bc/META-INF/MANIFEST.MF
+++ b/org.eclipse.jgit.gpg.bc/META-INF/MANIFEST.MF
@@ -28,9 +28,6 @@ Import-Package: org.bouncycastle.asn1;version="[1.69.0,2.0.0)",
org.bouncycastle.util;version="[1.69.0,2.0.0)",
org.bouncycastle.util.encoders;version="[1.69.0,2.0.0)",
org.bouncycastle.util.io;version="[1.69.0,2.0.0)",
- org.eclipse.jgit.annotations;version="[7.0.0,7.1.0)",
- org.eclipse.jgit.api.errors;version="[7.0.0,7.1.0)",
org.slf4j;version="[1.7.0,3.0.0)"
-Export-Package: org.eclipse.jgit.gpg.bc;version="7.0.0",
- org.eclipse.jgit.gpg.bc.internal;version="7.0.0";x-friends:="org.eclipse.jgit.gpg.bc.test",
+Export-Package: org.eclipse.jgit.gpg.bc.internal;version="7.0.0";x-friends:="org.eclipse.jgit.gpg.bc.test",
org.eclipse.jgit.gpg.bc.internal.keys;version="7.0.0";x-friends:="org.eclipse.jgit.gpg.bc.test"
diff --git a/org.eclipse.jgit.gpg.bc/pom.xml b/org.eclipse.jgit.gpg.bc/pom.xml
index f4dce68fab..ad907d3809 100644
--- a/org.eclipse.jgit.gpg.bc/pom.xml
+++ b/org.eclipse.jgit.gpg.bc/pom.xml
@@ -160,7 +160,7 @@
false
false
false
- false
+ true
true
false
diff --git a/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.GpgSignatureVerifierFactory b/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.GpgSignatureVerifierFactory
deleted file mode 100644
index 17ab30fba7..0000000000
--- a/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.GpgSignatureVerifierFactory
+++ /dev/null
@@ -1 +0,0 @@
-org.eclipse.jgit.gpg.bc.internal.BouncyCastleGpgSignatureVerifierFactory
\ No newline at end of file
diff --git a/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.GpgSigner b/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.GpgSigner
deleted file mode 100644
index 6752b64ddf..0000000000
--- a/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.GpgSigner
+++ /dev/null
@@ -1 +0,0 @@
-org.eclipse.jgit.gpg.bc.internal.BouncyCastleGpgSigner
diff --git a/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.SignatureVerifierFactory b/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.SignatureVerifierFactory
new file mode 100644
index 0000000000..17ab30fba7
--- /dev/null
+++ b/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.SignatureVerifierFactory
@@ -0,0 +1 @@
+org.eclipse.jgit.gpg.bc.internal.BouncyCastleGpgSignatureVerifierFactory
\ No newline at end of file
diff --git a/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.SignerFactory b/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.SignerFactory
new file mode 100644
index 0000000000..c0b214db39
--- /dev/null
+++ b/org.eclipse.jgit.gpg.bc/resources/META-INF/services/org.eclipse.jgit.lib.SignerFactory
@@ -0,0 +1 @@
+org.eclipse.jgit.gpg.bc.internal.BouncyCastleGpgSignerFactory
diff --git a/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/BouncyCastleGpgSignerFactory.java b/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/BouncyCastleGpgSignerFactory.java
deleted file mode 100644
index fdd1a2b11a..0000000000
--- a/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/BouncyCastleGpgSignerFactory.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (C) 2021 Thomas Wolf and others
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Distribution License v. 1.0 which is available at
- * https://www.eclipse.org/org/documents/edl-v10.php.
- *
- * SPDX-License-Identifier: BSD-3-Clause
- */
-package org.eclipse.jgit.gpg.bc;
-
-import org.eclipse.jgit.gpg.bc.internal.BouncyCastleGpgSigner;
-import org.eclipse.jgit.lib.GpgSigner;
-
-/**
- * Factory for creating a {@link GpgSigner} based on Bouncy Castle.
- *
- * @since 5.11
- */
-public final class BouncyCastleGpgSignerFactory {
-
- private BouncyCastleGpgSignerFactory() {
- // No instantiation
- }
-
- /**
- * Creates a new {@link GpgSigner}.
- *
- * @return the {@link GpgSigner}
- */
- public static GpgSigner create() {
- return new BouncyCastleGpgSigner();
- }
-}
diff --git a/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignatureVerifier.java b/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignatureVerifier.java
index 3378bb3969..5a3d43ba54 100644
--- a/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignatureVerifier.java
+++ b/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignatureVerifier.java
@@ -12,7 +12,6 @@ package org.eclipse.jgit.gpg.bc.internal;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.security.Security;
import java.text.MessageFormat;
import java.time.Instant;
import java.util.Date;
@@ -20,7 +19,6 @@ import java.util.List;
import java.util.Locale;
import org.bouncycastle.bcpg.sig.IssuerFingerprint;
-import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.openpgp.PGPCompressedData;
import org.bouncycastle.openpgp.PGPException;
import org.bouncycastle.openpgp.PGPPublicKey;
@@ -31,33 +29,20 @@ import org.bouncycastle.openpgp.PGPUtil;
import org.bouncycastle.openpgp.jcajce.JcaPGPObjectFactory;
import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentVerifierBuilderProvider;
import org.bouncycastle.util.encoders.Hex;
-import org.eclipse.jgit.annotations.NonNull;
import org.eclipse.jgit.api.errors.JGitInternalException;
-import org.eclipse.jgit.lib.AbstractGpgSignatureVerifier;
import org.eclipse.jgit.lib.GpgConfig;
-import org.eclipse.jgit.lib.GpgSignatureVerifier;
+import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.SignatureVerifier;
import org.eclipse.jgit.util.LRUMap;
import org.eclipse.jgit.util.StringUtils;
/**
- * A {@link GpgSignatureVerifier} to verify GPG signatures using BouncyCastle.
+ * A {@link SignatureVerifier} to verify GPG signatures using BouncyCastle.
*/
public class BouncyCastleGpgSignatureVerifier
- extends AbstractGpgSignatureVerifier {
+ implements SignatureVerifier {
- private static void registerBouncyCastleProviderIfNecessary() {
- if (Security.getProvider(BouncyCastleProvider.PROVIDER_NAME) == null) {
- Security.addProvider(new BouncyCastleProvider());
- }
- }
-
- /**
- * Creates a new instance and registers the BouncyCastle security provider
- * if needed.
- */
- public BouncyCastleGpgSignatureVerifier() {
- registerBouncyCastleProviderIfNecessary();
- }
+ private static final String NAME = "bc"; //$NON-NLS-1$
// To support more efficient signature verification of multiple objects we
// cache public keys once found in a LRU cache.
@@ -70,7 +55,7 @@ public class BouncyCastleGpgSignatureVerifier
@Override
public String getName() {
- return "bc"; //$NON-NLS-1$
+ return NAME;
}
static PGPSignature parseSignature(InputStream in)
@@ -90,9 +75,8 @@ public class BouncyCastleGpgSignatureVerifier
}
@Override
- public SignatureVerification verify(@NonNull GpgConfig config, byte[] data,
- byte[] signatureData)
- throws IOException {
+ public SignatureVerification verify(Repository repository, GpgConfig config,
+ byte[] data, byte[] signatureData) throws IOException {
PGPSignature signature = null;
String fingerprint = null;
String signer = null;
@@ -127,14 +111,15 @@ public class BouncyCastleGpgSignatureVerifier
}
Date signatureCreatedAt = signature.getCreationTime();
if (fingerprint == null && signer == null && keyId == null) {
- return new VerificationResult(signatureCreatedAt, null, null, null,
- false, false, TrustLevel.UNKNOWN,
+ return new SignatureVerification(NAME, signatureCreatedAt,
+ null, null, null, false, false, TrustLevel.UNKNOWN,
BCText.get().signatureNoKeyInfo);
}
if (fingerprint != null && keyId != null
&& !fingerprint.endsWith(keyId)) {
- return new VerificationResult(signatureCreatedAt, signer, fingerprint,
- signer, false, false, TrustLevel.UNKNOWN,
+ return new SignatureVerification(NAME, signatureCreatedAt,
+ signer, fingerprint, signer, false, false,
+ TrustLevel.UNKNOWN,
MessageFormat.format(BCText.get().signatureInconsistent,
keyId, fingerprint));
}
@@ -175,15 +160,16 @@ public class BouncyCastleGpgSignatureVerifier
bySigner.put(signer, NO_KEY);
}
}
- return new VerificationResult(signatureCreatedAt, signer,
- fingerprint, signer, false, false, TrustLevel.UNKNOWN,
- BCText.get().signatureNoPublicKey);
+ return new SignatureVerification(NAME, signatureCreatedAt,
+ signer, fingerprint, signer, false, false,
+ TrustLevel.UNKNOWN, BCText.get().signatureNoPublicKey);
}
if (fingerprint != null && !publicKey.isExactMatch()) {
// We did find _some_ signing key for the signer, but it doesn't
// match the given fingerprint.
- return new VerificationResult(signatureCreatedAt, signer,
- fingerprint, signer, false, false, TrustLevel.UNKNOWN,
+ return new SignatureVerification(NAME, signatureCreatedAt,
+ signer, fingerprint, signer, false, false,
+ TrustLevel.UNKNOWN,
MessageFormat.format(BCText.get().signatureNoSigningKey,
fingerprint));
}
@@ -229,8 +215,7 @@ public class BouncyCastleGpgSignatureVerifier
boolean verified = false;
try {
signature.init(
- new JcaPGPContentVerifierBuilderProvider()
- .setProvider(BouncyCastleProvider.PROVIDER_NAME),
+ new JcaPGPContentVerifierBuilderProvider(),
pubKey);
signature.update(data);
verified = signature.verify();
@@ -238,15 +223,8 @@ public class BouncyCastleGpgSignatureVerifier
throw new JGitInternalException(
BCText.get().signatureVerificationError, e);
}
- return new VerificationResult(signatureCreatedAt, signer, fingerprint, user,
- verified, expired, trust, null);
- }
-
- @Override
- public SignatureVerification verify(byte[] data, byte[] signatureData)
- throws IOException {
- throw new UnsupportedOperationException(
- "Call verify(GpgConfig, byte[], byte[]) instead."); //$NON-NLS-1$
+ return new SignatureVerification(NAME, signatureCreatedAt, signer,
+ fingerprint, user, verified, expired, trust, null);
}
private TrustLevel parseGpgTrustPacket(byte[] packet) {
@@ -282,76 +260,4 @@ public class BouncyCastleGpgSignatureVerifier
byFingerprint.clear();
bySigner.clear();
}
-
- private static class VerificationResult implements SignatureVerification {
-
- private final Date creationDate;
-
- private final String signer;
-
- private final String keyUser;
-
- private final String fingerprint;
-
- private final boolean verified;
-
- private final boolean expired;
-
- private final @NonNull TrustLevel trustLevel;
-
- private final String message;
-
- public VerificationResult(Date creationDate, String signer,
- String fingerprint, String user, boolean verified,
- boolean expired, @NonNull TrustLevel trust, String message) {
- this.creationDate = creationDate;
- this.signer = signer;
- this.fingerprint = fingerprint;
- this.keyUser = user;
- this.verified = verified;
- this.expired = expired;
- this.trustLevel = trust;
- this.message = message;
- }
-
- @Override
- public Date getCreationDate() {
- return creationDate;
- }
-
- @Override
- public String getSigner() {
- return signer;
- }
-
- @Override
- public String getKeyUser() {
- return keyUser;
- }
-
- @Override
- public String getKeyFingerprint() {
- return fingerprint;
- }
-
- @Override
- public boolean isExpired() {
- return expired;
- }
-
- @Override
- public TrustLevel getTrustLevel() {
- return trustLevel;
- }
-
- @Override
- public String getMessage() {
- return message;
- }
-
- @Override
- public boolean getVerified() {
- return verified;
- }
- }
}
diff --git a/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignatureVerifierFactory.java b/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignatureVerifierFactory.java
index ae82b758a6..566ad1bf91 100644
--- a/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignatureVerifierFactory.java
+++ b/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignatureVerifierFactory.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2021, Thomas Wolf and others
+ * Copyright (C) 2021, 2024 Thomas Wolf and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@@ -9,20 +9,27 @@
*/
package org.eclipse.jgit.gpg.bc.internal;
-import org.eclipse.jgit.lib.GpgSignatureVerifier;
-import org.eclipse.jgit.lib.GpgSignatureVerifierFactory;
+import org.eclipse.jgit.lib.GpgConfig.GpgFormat;
+import org.eclipse.jgit.lib.SignatureVerifier;
+import org.eclipse.jgit.lib.SignatureVerifierFactory;
/**
- * A {@link GpgSignatureVerifierFactory} that creates
- * {@link GpgSignatureVerifier} instances that verify GPG signatures using
- * BouncyCastle and that do cache public keys.
+ * A {@link SignatureVerifierFactory} that creates {@link SignatureVerifier}
+ * instances that verify GPG signatures using BouncyCastle and that do cache
+ * public keys.
*/
public final class BouncyCastleGpgSignatureVerifierFactory
- extends GpgSignatureVerifierFactory {
+ implements SignatureVerifierFactory {
@Override
- public GpgSignatureVerifier getVerifier() {
+ public GpgFormat getType() {
+ return GpgFormat.OPENPGP;
+ }
+
+ @Override
+ public SignatureVerifier create() {
return new BouncyCastleGpgSignatureVerifier();
}
+
}
diff --git a/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSigner.java b/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSigner.java
index 763b7f7526..1d187a5db2 100644
--- a/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSigner.java
+++ b/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSigner.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2018, 2021, Salesforce and others
+ * Copyright (C) 2018, 2024, Salesforce and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0 which is available at
@@ -14,13 +14,11 @@ import java.io.IOException;
import java.net.URISyntaxException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
-import java.security.Security;
import java.util.Iterator;
import org.bouncycastle.bcpg.ArmoredOutputStream;
import org.bouncycastle.bcpg.BCPGOutputStream;
import org.bouncycastle.bcpg.HashAlgorithmTags;
-import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.openpgp.PGPException;
import org.bouncycastle.openpgp.PGPPrivateKey;
import org.bouncycastle.openpgp.PGPPublicKey;
@@ -30,79 +28,23 @@ import org.bouncycastle.openpgp.PGPSignatureGenerator;
import org.bouncycastle.openpgp.PGPSignatureSubpacketGenerator;
import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentSignerBuilder;
import org.bouncycastle.openpgp.operator.jcajce.JcePBESecretKeyDecryptorBuilder;
-import org.eclipse.jgit.annotations.NonNull;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.api.errors.CanceledException;
import org.eclipse.jgit.api.errors.JGitInternalException;
import org.eclipse.jgit.api.errors.UnsupportedSigningFormatException;
import org.eclipse.jgit.errors.UnsupportedCredentialItem;
-import org.eclipse.jgit.internal.JGitText;
-import org.eclipse.jgit.lib.CommitBuilder;
import org.eclipse.jgit.lib.GpgConfig;
-import org.eclipse.jgit.lib.GpgObjectSigner;
import org.eclipse.jgit.lib.GpgSignature;
-import org.eclipse.jgit.lib.GpgSigner;
-import org.eclipse.jgit.lib.ObjectBuilder;
import org.eclipse.jgit.lib.PersonIdent;
-import org.eclipse.jgit.lib.GpgConfig.GpgFormat;
+import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.Signer;
import org.eclipse.jgit.transport.CredentialsProvider;
import org.eclipse.jgit.util.StringUtils;
/**
* GPG Signer using the BouncyCastle library.
*/
-public class BouncyCastleGpgSigner extends GpgSigner
- implements GpgObjectSigner {
-
- private static void registerBouncyCastleProviderIfNecessary() {
- if (Security.getProvider(BouncyCastleProvider.PROVIDER_NAME) == null) {
- Security.addProvider(new BouncyCastleProvider());
- }
- }
-
- /**
- * Create a new instance.
- *
- * The BounceCastleProvider will be registered if necessary.
- *
- */
- public BouncyCastleGpgSigner() {
- registerBouncyCastleProviderIfNecessary();
- }
-
- @Override
- public boolean canLocateSigningKey(@Nullable String gpgSigningKey,
- PersonIdent committer, CredentialsProvider credentialsProvider)
- throws CanceledException {
- try {
- return canLocateSigningKey(gpgSigningKey, committer,
- credentialsProvider, null);
- } catch (UnsupportedSigningFormatException e) {
- // Cannot occur with a null config
- return false;
- }
- }
-
- @Override
- public boolean canLocateSigningKey(@Nullable String gpgSigningKey,
- PersonIdent committer, CredentialsProvider credentialsProvider,
- GpgConfig config)
- throws CanceledException, UnsupportedSigningFormatException {
- if (config != null && config.getKeyFormat() != GpgFormat.OPENPGP) {
- throw new UnsupportedSigningFormatException(
- JGitText.get().onlyOpenPgpSupportedForSigning);
- }
- try (BouncyCastleGpgKeyPassphrasePrompt passphrasePrompt = new BouncyCastleGpgKeyPassphrasePrompt(
- credentialsProvider)) {
- BouncyCastleGpgKey gpgKey = locateSigningKey(gpgSigningKey,
- committer, passphrasePrompt);
- return gpgKey != null;
- } catch (CanceledException e) {
- throw e;
- } catch (Exception e) {
- return false;
- }
- }
+public class BouncyCastleGpgSigner implements Signer {
private BouncyCastleGpgKey locateSigningKey(@Nullable String gpgSigningKey,
PersonIdent committer,
@@ -121,38 +63,24 @@ public class BouncyCastleGpgSigner extends GpgSigner
}
@Override
- public void sign(@NonNull CommitBuilder commit,
- @Nullable String gpgSigningKey, @NonNull PersonIdent committer,
- CredentialsProvider credentialsProvider) throws CanceledException {
- try {
- signObject(commit, gpgSigningKey, committer, credentialsProvider,
- null);
- } catch (UnsupportedSigningFormatException e) {
- // Cannot occur with a null config
- }
- }
-
- @Override
- public void signObject(@NonNull ObjectBuilder object,
- @Nullable String gpgSigningKey, @NonNull PersonIdent committer,
- CredentialsProvider credentialsProvider, GpgConfig config)
- throws CanceledException, UnsupportedSigningFormatException {
- if (config != null && config.getKeyFormat() != GpgFormat.OPENPGP) {
- throw new UnsupportedSigningFormatException(
- JGitText.get().onlyOpenPgpSupportedForSigning);
+ public GpgSignature sign(Repository repository, GpgConfig config,
+ byte[] data, PersonIdent committer, String signingKey,
+ CredentialsProvider credentialsProvider) throws CanceledException,
+ IOException, UnsupportedSigningFormatException {
+ String gpgSigningKey = signingKey;
+ if (gpgSigningKey == null) {
+ gpgSigningKey = config.getSigningKey();
}
try (BouncyCastleGpgKeyPassphrasePrompt passphrasePrompt = new BouncyCastleGpgKeyPassphrasePrompt(
credentialsProvider)) {
BouncyCastleGpgKey gpgKey = locateSigningKey(gpgSigningKey,
- committer,
- passphrasePrompt);
+ committer, passphrasePrompt);
PGPSecretKey secretKey = gpgKey.getSecretKey();
if (secretKey == null) {
throw new JGitInternalException(
BCText.get().unableToSignCommitNoSecretKey);
}
- JcePBESecretKeyDecryptorBuilder decryptorBuilder = new JcePBESecretKeyDecryptorBuilder()
- .setProvider(BouncyCastleProvider.PROVIDER_NAME);
+ JcePBESecretKeyDecryptorBuilder decryptorBuilder = new JcePBESecretKeyDecryptorBuilder();
PGPPrivateKey privateKey = null;
if (!passphrasePrompt.hasPassphrase()) {
// Either the key is not encrypted, or it was read from the
@@ -177,8 +105,7 @@ public class BouncyCastleGpgSigner extends GpgSigner
PGPSignatureGenerator signatureGenerator = new PGPSignatureGenerator(
new JcaPGPContentSignerBuilder(
publicKey.getAlgorithm(),
- HashAlgorithmTags.SHA256).setProvider(
- BouncyCastleProvider.PROVIDER_NAME));
+ HashAlgorithmTags.SHA256));
signatureGenerator.init(PGPSignature.BINARY_DOCUMENT, privateKey);
PGPSignatureSubpacketGenerator subpackets = new PGPSignatureSubpacketGenerator();
subpackets.setIssuerFingerprint(false, publicKey);
@@ -202,16 +129,36 @@ public class BouncyCastleGpgSigner extends GpgSigner
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
try (BCPGOutputStream out = new BCPGOutputStream(
new ArmoredOutputStream(buffer))) {
- signatureGenerator.update(object.build());
+ signatureGenerator.update(data);
signatureGenerator.generate().encode(out);
}
- object.setGpgSignature(new GpgSignature(buffer.toByteArray()));
- } catch (PGPException | IOException | NoSuchAlgorithmException
+ return new GpgSignature(buffer.toByteArray());
+ } catch (PGPException | NoSuchAlgorithmException
| NoSuchProviderException | URISyntaxException e) {
throw new JGitInternalException(e.getMessage(), e);
}
}
+ @Override
+ public boolean canLocateSigningKey(Repository repository, GpgConfig config,
+ PersonIdent committer, String signingKey,
+ CredentialsProvider credentialsProvider) throws CanceledException {
+ String gpgSigningKey = signingKey;
+ if (gpgSigningKey == null) {
+ gpgSigningKey = config.getSigningKey();
+ }
+ try (BouncyCastleGpgKeyPassphrasePrompt passphrasePrompt = new BouncyCastleGpgKeyPassphrasePrompt(
+ credentialsProvider)) {
+ BouncyCastleGpgKey gpgKey = locateSigningKey(gpgSigningKey,
+ committer, passphrasePrompt);
+ return gpgKey != null;
+ } catch (CanceledException e) {
+ throw e;
+ } catch (Exception e) {
+ return false;
+ }
+ }
+
static String extractSignerId(String pgpUserId) {
int from = pgpUserId.indexOf('<');
if (from >= 0) {
diff --git a/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignerFactory.java b/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignerFactory.java
new file mode 100644
index 0000000000..92ab65d7e4
--- /dev/null
+++ b/org.eclipse.jgit.gpg.bc/src/org/eclipse/jgit/gpg/bc/internal/BouncyCastleGpgSignerFactory.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2021, 2024 Thomas Wolf and others
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * https://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+package org.eclipse.jgit.gpg.bc.internal;
+
+import org.eclipse.jgit.lib.GpgConfig.GpgFormat;
+import org.eclipse.jgit.lib.Signer;
+import org.eclipse.jgit.lib.SignerFactory;
+
+/**
+ * Factory for creating a {@link Signer} for OPENPGP signatures based on Bouncy
+ * Castle.
+ */
+public final class BouncyCastleGpgSignerFactory implements SignerFactory {
+
+ @Override
+ public GpgFormat getType() {
+ return GpgFormat.OPENPGP;
+ }
+
+ @Override
+ public Signer create() {
+ return new BouncyCastleGpgSigner();
+ }
+}
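
The new factory pairs with the Signers registry used elsewhere in this change (CommitCommand, TagCommand, CommitCommandTest): signers are looked up by GpgFormat instead of through the old global GpgSigner default. A minimal sketch of working with that registry, assuming Signers.get/Signers.set are public static methods as their call sites in this patch suggest, and that BouncyCastleGpgSigner keeps a public no-arg constructor after the explicit one is removed:

import org.eclipse.jgit.gpg.bc.internal.BouncyCastleGpgSigner;
import org.eclipse.jgit.lib.GpgConfig;
import org.eclipse.jgit.lib.GpgConfig.GpgFormat;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.Signer;
import org.eclipse.jgit.lib.Signers;

class SignerRegistrySketch {

	// Resolve the signer for the configured gpg.format; may be null if no
	// signer is registered for that format.
	static Signer forRepository(Repository repo) {
		GpgConfig config = new GpgConfig(repo.getConfig());
		return Signers.get(config.getKeyFormat());
	}

	// Replace the OpenPGP signer process-wide, as the tests in this patch do.
	static void useBouncyCastle() {
		Signers.set(GpgFormat.OPENPGP, new BouncyCastleGpgSigner());
	}
}
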
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Log.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Log.java
index 852a4b377b..958e566986 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Log.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Log.java
@@ -32,13 +32,12 @@ import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.GpgConfig;
-import org.eclipse.jgit.lib.GpgSignatureVerifier;
-import org.eclipse.jgit.lib.GpgSignatureVerifier.SignatureVerification;
-import org.eclipse.jgit.lib.GpgSignatureVerifierFactory;
+import org.eclipse.jgit.lib.SignatureVerifier.SignatureVerification;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.SignatureVerifiers;
import org.eclipse.jgit.notes.NoteMap;
import org.eclipse.jgit.pgm.internal.CLIText;
import org.eclipse.jgit.pgm.internal.VerificationUtils;
@@ -174,8 +173,6 @@ class Log extends RevWalkTextBuiltin {
// END -- Options shared with Diff
- private GpgSignatureVerifier verifier;
-
private GpgConfig config;
Log() {
@@ -227,9 +224,6 @@ class Log extends RevWalkTextBuiltin {
throw die(e.getMessage(), e);
} finally {
diffFmt.close();
- if (verifier != null) {
- verifier.clear();
- }
}
}
@@ -293,21 +287,13 @@ class Log extends RevWalkTextBuiltin {
if (c.getRawGpgSignature() == null) {
return;
}
- if (verifier == null) {
- GpgSignatureVerifierFactory factory = GpgSignatureVerifierFactory
- .getDefault();
- if (factory == null) {
- throw die(CLIText.get().logNoSignatureVerifier, null);
- }
- verifier = factory.getVerifier();
- }
- SignatureVerification verification = verifier.verifySignature(c,
- config);
+ SignatureVerification verification = SignatureVerifiers.verify(db,
+ config, c);
if (verification == null) {
return;
}
VerificationUtils.writeVerification(outw, verification,
- verifier.getName(), c.getCommitterIdent());
+ verification.verifierName(), c.getCommitterIdent());
}
/**
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Show.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Show.java
index 4feb090032..1576792234 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Show.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Show.java
@@ -30,12 +30,11 @@ import org.eclipse.jgit.errors.RevisionSyntaxException;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.GpgConfig;
-import org.eclipse.jgit.lib.GpgSignatureVerifier;
-import org.eclipse.jgit.lib.GpgSignatureVerifierFactory;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Repository;
-import org.eclipse.jgit.lib.GpgSignatureVerifier.SignatureVerification;
+import org.eclipse.jgit.lib.SignatureVerifier.SignatureVerification;
+import org.eclipse.jgit.lib.SignatureVerifiers;
import org.eclipse.jgit.pgm.internal.CLIText;
import org.eclipse.jgit.pgm.internal.VerificationUtils;
import org.eclipse.jgit.pgm.opt.PathTreeFilterHandler;
@@ -335,23 +334,13 @@ class Show extends TextBuiltin {
if (c.getRawGpgSignature() == null) {
return;
}
- GpgSignatureVerifierFactory factory = GpgSignatureVerifierFactory
- .getDefault();
- if (factory == null) {
- throw die(CLIText.get().logNoSignatureVerifier, null);
- }
- GpgSignatureVerifier verifier = factory.getVerifier();
GpgConfig config = new GpgConfig(db.getConfig());
- try {
- SignatureVerification verification = verifier.verifySignature(c,
- config);
- if (verification == null) {
- return;
- }
- VerificationUtils.writeVerification(outw, verification,
- verifier.getName(), c.getCommitterIdent());
- } finally {
- verifier.clear();
+ SignatureVerification verification = SignatureVerifiers.verify(db,
+ config, c);
+ if (verification == null) {
+ throw die(CLIText.get().logNoSignatureVerifier, null);
}
+ VerificationUtils.writeVerification(outw, verification,
+ verification.verifierName(), c.getCommitterIdent());
}
}
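
Log and Show now delegate to the static SignatureVerifiers helper instead of creating and clearing a GpgSignatureVerifier at each call site. A rough sketch of the resulting caller pattern, assuming the helper returns null when the object is unsigned or no verifier handles its signature type, and that it declares IOException like the old verifySignature method did:

import java.io.IOException;

import org.eclipse.jgit.lib.GpgConfig;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.SignatureVerifier.SignatureVerification;
import org.eclipse.jgit.lib.SignatureVerifiers;
import org.eclipse.jgit.revwalk.RevCommit;

class VerifySketch {

	// Returns the name of the verifier that checked the commit, or null if
	// the commit is unsigned or no verifier is registered for its format.
	static String verifierFor(Repository repo, RevCommit commit)
			throws IOException {
		if (commit.getRawGpgSignature() == null) {
			return null; // nothing to verify
		}
		GpgConfig config = new GpgConfig(repo.getConfig());
		SignatureVerification v = SignatureVerifiers.verify(repo, config,
				commit);
		return v == null ? null : v.verifierName();
	}
}
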
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Tag.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Tag.java
index 4ea67ab92c..6be30c9447 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Tag.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Tag.java
@@ -27,10 +27,10 @@ import org.eclipse.jgit.api.VerifySignatureCommand;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.RefAlreadyExistsException;
import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.GpgSignatureVerifier.SignatureVerification;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.SignatureVerifier.SignatureVerification;
import org.eclipse.jgit.pgm.internal.CLIText;
import org.eclipse.jgit.pgm.internal.VerificationUtils;
import org.eclipse.jgit.revwalk.RevCommit;
@@ -106,7 +106,8 @@ class Tag extends TextBuiltin {
if (error != null) {
throw die(error.getMessage(), error);
}
- writeVerification(verifySig.getVerifier().getName(),
+ writeVerification(
+ verification.getVerification().verifierName(),
(RevTag) verification.getObject(),
verification.getVerification());
}
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/internal/VerificationUtils.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/internal/VerificationUtils.java
index c1f8a86a8c..64ee602620 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/internal/VerificationUtils.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/internal/VerificationUtils.java
@@ -11,7 +11,7 @@ package org.eclipse.jgit.pgm.internal;
import java.io.IOException;
-import org.eclipse.jgit.lib.GpgSignatureVerifier.SignatureVerification;
+import org.eclipse.jgit.lib.SignatureVerifier.SignatureVerification;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.util.GitDateFormatter;
import org.eclipse.jgit.util.SignatureUtils;
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CommitCommandTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CommitCommandTest.java
index 35de73e204..e74e234297 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CommitCommandTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CommitCommandTest.java
@@ -27,6 +27,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.eclipse.jgit.api.CherryPickResult.CherryPickStatus;
import org.eclipse.jgit.api.errors.CanceledException;
import org.eclipse.jgit.api.errors.EmptyCommitException;
+import org.eclipse.jgit.api.errors.UnsupportedSigningFormatException;
import org.eclipse.jgit.api.errors.WrongRepositoryStateException;
import org.eclipse.jgit.diff.DiffEntry;
import org.eclipse.jgit.dircache.DirCache;
@@ -34,19 +35,23 @@ import org.eclipse.jgit.dircache.DirCacheBuilder;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.junit.time.TimeUtil;
-import org.eclipse.jgit.lib.CommitBuilder;
+import org.eclipse.jgit.lib.CommitConfig.CleanupMode;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.FileMode;
-import org.eclipse.jgit.lib.GpgSigner;
+import org.eclipse.jgit.lib.GpgConfig;
+import org.eclipse.jgit.lib.GpgConfig.GpgFormat;
+import org.eclipse.jgit.lib.GpgSignature;
+import org.eclipse.jgit.lib.ObjectBuilder;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.RefUpdate.Result;
import org.eclipse.jgit.lib.ReflogEntry;
import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.Signer;
+import org.eclipse.jgit.lib.Signers;
import org.eclipse.jgit.lib.StoredConfig;
-import org.eclipse.jgit.lib.CommitConfig.CleanupMode;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.submodule.SubmoduleWalk;
@@ -839,21 +844,39 @@ public class CommitCommandTest extends RepositoryTestCase {
String[] signingKey = new String[1];
PersonIdent[] signingCommitters = new PersonIdent[1];
AtomicInteger callCount = new AtomicInteger();
- GpgSigner.setDefault(new GpgSigner() {
+ // Since GpgFormat defaults to OpenPGP just set a new signer for
+ // that.
+ Signers.set(GpgFormat.OPENPGP, new Signer() {
+
@Override
- public void sign(CommitBuilder commit, String gpgSigningKey,
- PersonIdent signingCommitter, CredentialsProvider credentialsProvider) {
- signingKey[0] = gpgSigningKey;
+ public void signObject(Repository repo, GpgConfig config,
+ ObjectBuilder builder, PersonIdent signingCommitter,
+ String signingKeySpec,
+ CredentialsProvider credentialsProvider)
+ throws CanceledException,
+ UnsupportedSigningFormatException {
+ signingKey[0] = signingKeySpec;
signingCommitters[0] = signingCommitter;
callCount.incrementAndGet();
}
@Override
- public boolean canLocateSigningKey(String gpgSigningKey,
- PersonIdent signingCommitter,
+ public GpgSignature sign(Repository repo, GpgConfig config,
+ byte[] data, PersonIdent signingCommitter,
+ String signingKeySpec,
+ CredentialsProvider credentialsProvider)
+ throws CanceledException,
+ UnsupportedSigningFormatException {
+ throw new CanceledException("Unexpected call");
+ }
+
+ @Override
+ public boolean canLocateSigningKey(Repository repo,
+ GpgConfig config, PersonIdent signingCommitter,
+ String signingKeySpec,
CredentialsProvider credentialsProvider)
throws CanceledException {
- return false;
+ throw new CanceledException("Unexpected call");
}
});
@@ -904,19 +927,37 @@ public class CommitCommandTest extends RepositoryTestCase {
git.add().addFilepattern("file1").call();
AtomicInteger callCount = new AtomicInteger();
- GpgSigner.setDefault(new GpgSigner() {
+ // Since GpgFormat defaults to OpenPGP just set a new signer for
+ // that.
+ Signers.set(GpgFormat.OPENPGP, new Signer() {
+
@Override
- public void sign(CommitBuilder commit, String gpgSigningKey,
- PersonIdent signingCommitter, CredentialsProvider credentialsProvider) {
+ public void signObject(Repository repo, GpgConfig config,
+ ObjectBuilder builder, PersonIdent signingCommitter,
+ String signingKeySpec,
+ CredentialsProvider credentialsProvider)
+ throws CanceledException,
+ UnsupportedSigningFormatException {
callCount.incrementAndGet();
}
@Override
- public boolean canLocateSigningKey(String gpgSigningKey,
- PersonIdent signingCommitter,
+ public GpgSignature sign(Repository repo, GpgConfig config,
+ byte[] data, PersonIdent signingCommitter,
+ String signingKeySpec,
+ CredentialsProvider credentialsProvider)
+ throws CanceledException,
+ UnsupportedSigningFormatException {
+ throw new CanceledException("Unexpected call");
+ }
+
+ @Override
+ public boolean canLocateSigningKey(Repository repo,
+ GpgConfig config, PersonIdent signingCommitter,
+ String signingKeySpec,
CredentialsProvider credentialsProvider)
throws CanceledException {
- return false;
+ throw new CanceledException("Unexpected call");
}
});
diff --git a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
index c9f7336609..11435b8ce4 100644
--- a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
+++ b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
@@ -576,7 +576,6 @@ obtainingCommitsForCherryPick=Obtaining commits that need to be cherry-picked
oldIdMustNotBeNull=Expected old ID must not be null
onlyOneFetchSupported=Only one fetch supported
onlyOneOperationCallPerConnectionIsSupported=Only one operation call per connection is supported.
-onlyOpenPgpSupportedForSigning=OpenPGP is the only supported signing option with JGit at this time (gpg.format must be set to openpgp).
openFilesMustBeAtLeast1=Open files must be >= 1
openingConnection=Opening connection
operationCanceled=Operation {0} was canceled
@@ -723,6 +722,8 @@ shortSkipOfBlock=Short skip of block.
shutdownCleanup=Cleanup {} during JVM shutdown
shutdownCleanupFailed=Cleanup during JVM shutdown failed
shutdownCleanupListenerFailed=Cleanup of {0} during JVM shutdown failed
+signatureServiceConflict={0} conflict for type {1}. Already registered is {2}; additional factory {3} is ignored.
+signatureTypeUnknown=No signer for {0} signatures. Use another signature type for git config gpg.format, or do not sign.
signatureVerificationError=Signature verification failed
signatureVerificationUnavailable=No signature verifier registered
signedTagMessageNoLf=A non-empty message of a signed tag must end in LF.
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
index a1a2cc09d2..a7d409c3f5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
@@ -51,9 +51,6 @@ import org.eclipse.jgit.lib.CommitConfig.CleanupMode;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.GpgConfig;
-import org.eclipse.jgit.lib.GpgConfig.GpgFormat;
-import org.eclipse.jgit.lib.GpgObjectSigner;
-import org.eclipse.jgit.lib.GpgSigner;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.PersonIdent;
@@ -62,6 +59,8 @@ import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.RefUpdate.Result;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.RepositoryState;
+import org.eclipse.jgit.lib.Signer;
+import org.eclipse.jgit.lib.Signers;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevTag;
@@ -129,7 +128,7 @@ public class CommitCommand extends GitCommand<RevCommit> {
private String signingKey;
- private GpgSigner gpgSigner;
+ private Signer signer;
private GpgConfig gpgConfig;
@@ -319,30 +318,22 @@ public class CommitCommand extends GitCommand<RevCommit> {
}
}
- private void sign(CommitBuilder commit) throws ServiceUnavailableException,
- CanceledException, UnsupportedSigningFormatException {
- if (gpgSigner == null) {
- gpgSigner = GpgSigner.getDefault();
- if (gpgSigner == null) {
- throw new ServiceUnavailableException(
- JGitText.get().signingServiceUnavailable);
+ private void sign(CommitBuilder commit)
+ throws CanceledException, IOException,
+ UnsupportedSigningFormatException {
+ if (signer == null) {
+ signer = Signers.get(gpgConfig.getKeyFormat());
+ if (signer == null) {
+ throw new UnsupportedSigningFormatException(MessageFormat
+ .format(JGitText.get().signatureTypeUnknown,
+ gpgConfig.getKeyFormat().toConfigValue()));
}
}
if (signingKey == null) {
signingKey = gpgConfig.getSigningKey();
}
- if (gpgSigner instanceof GpgObjectSigner) {
- ((GpgObjectSigner) gpgSigner).signObject(commit,
- signingKey, committer, credentialsProvider,
- gpgConfig);
- } else {
- if (gpgConfig.getKeyFormat() != GpgFormat.OPENPGP) {
- throw new UnsupportedSigningFormatException(JGitText
- .get().onlyOpenPgpSupportedForSigning);
- }
- gpgSigner.sign(commit, signingKey, committer,
- credentialsProvider);
- }
+ signer.signObject(repo, gpgConfig, commit, committer, signingKey,
+ credentialsProvider);
}
private void updateRef(RepositoryState state, ObjectId headId,
@@ -1097,22 +1088,22 @@ public class CommitCommand extends GitCommand<RevCommit> {
}
/**
- * Sets the {@link GpgSigner} to use if the commit is to be signed.
+ * Sets the {@link Signer} to use if the commit is to be signed.
*
* @param signer
* to use; if {@code null}, the default signer will be used
* @return {@code this}
- * @since 5.11
+ * @since 7.0
*/
- public CommitCommand setGpgSigner(GpgSigner signer) {
+ public CommitCommand setSigner(Signer signer) {
checkCallable();
- this.gpgSigner = signer;
+ this.signer = signer;
return this;
}
/**
* Sets an external {@link GpgConfig} to use. Whether it will be used is at
- * the discretion of the {@link #setGpgSigner(GpgSigner)}.
+ * the discretion of the {@link #setSigner(Signer)}.
*
* @param config
* to set; if {@code null}, the config will be loaded from the
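
Since CommitCommand now resolves its Signer from gpg.format via Signers, callers normally only need to turn signing on; the renamed setter remains for supplying a signer explicitly. A hedged usage sketch, with the working-tree File and the custom Signer as placeholders:

import java.io.File;
import java.io.IOException;

import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.Signer;
import org.eclipse.jgit.revwalk.RevCommit;

class SignedCommitSketch {

	// Create a signed commit; a null signer falls back to the default
	// resolved from gpg.format via the Signers registry.
	static RevCommit commitSigned(File workTree, Signer signer)
			throws IOException, GitAPIException {
		try (Git git = Git.open(workTree)) {
			return git.commit()
					.setMessage("signed commit")
					.setSign(Boolean.TRUE)
					.setSigner(signer)
					.call();
		}
	}
}
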
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/TagCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/TagCommand.java
index 3edaf5e748..cc8589fa1c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/TagCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/TagCommand.java
@@ -18,14 +18,11 @@ import org.eclipse.jgit.api.errors.InvalidTagNameException;
import org.eclipse.jgit.api.errors.JGitInternalException;
import org.eclipse.jgit.api.errors.NoHeadException;
import org.eclipse.jgit.api.errors.RefAlreadyExistsException;
-import org.eclipse.jgit.api.errors.ServiceUnavailableException;
import org.eclipse.jgit.api.errors.UnsupportedSigningFormatException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.GpgConfig;
import org.eclipse.jgit.lib.GpgConfig.GpgFormat;
-import org.eclipse.jgit.lib.GpgObjectSigner;
-import org.eclipse.jgit.lib.GpgSigner;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.PersonIdent;
@@ -33,6 +30,8 @@ import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.RefUpdate.Result;
import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.Signer;
+import org.eclipse.jgit.lib.Signers;
import org.eclipse.jgit.lib.TagBuilder;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevWalk;
@@ -79,7 +78,7 @@ public class TagCommand extends GitCommand<Ref> {
private GpgConfig gpgConfig;
- private GpgObjectSigner gpgSigner;
+ private Signer signer;
private CredentialsProvider credentialsProvider;
@@ -133,9 +132,9 @@ public class TagCommand extends GitCommand<Ref> {
newTag.setTagger(tagger);
newTag.setObjectId(id);
- if (gpgSigner != null) {
- gpgSigner.signObject(newTag, signingKey, tagger,
- credentialsProvider, gpgConfig);
+ if (signer != null) {
+ signer.signObject(repo, gpgConfig, newTag, tagger, signingKey,
+ credentialsProvider);
}
// write the tag object
@@ -196,15 +195,12 @@ public class TagCommand extends GitCommand<Ref> {
*
* @throws InvalidTagNameException
* if the tag name is null or invalid
- * @throws ServiceUnavailableException
- * if the tag should be signed but no signer can be found
* @throws UnsupportedSigningFormatException
* if the tag should be signed but {@code gpg.format} is not
* {@link GpgFormat#OPENPGP}
*/
private void processOptions()
- throws InvalidTagNameException, ServiceUnavailableException,
- UnsupportedSigningFormatException {
+ throws InvalidTagNameException, UnsupportedSigningFormatException {
if (name == null
|| !Repository.isValidRefName(Constants.R_TAGS + name)) {
throw new InvalidTagNameException(
@@ -230,16 +226,15 @@ public class TagCommand extends GitCommand<Ref> {
doSign = gpgConfig.isSignAnnotated();
}
if (doSign) {
- if (signingKey == null) {
- signingKey = gpgConfig.getSigningKey();
- }
- if (gpgSigner == null) {
- GpgSigner signer = GpgSigner.getDefault();
- if (!(signer instanceof GpgObjectSigner)) {
- throw new ServiceUnavailableException(
- JGitText.get().signingServiceUnavailable);
+ if (signer == null) {
+ signer = Signers.get(gpgConfig.getKeyFormat());
+ if (signer == null) {
+ throw new UnsupportedSigningFormatException(
+ MessageFormat.format(
+ JGitText.get().signatureTypeUnknown,
+ gpgConfig.getKeyFormat()
+ .toConfigValue()));
}
- gpgSigner = (GpgObjectSigner) signer;
}
// The message of a signed tag must end in a newline because
// the signature will be appended.
@@ -326,22 +321,22 @@ public class TagCommand extends GitCommand<Ref> {
}
/**
- * Sets the {@link GpgSigner} to use if the commit is to be signed.
+ * Sets the {@link Signer} to use if the commit is to be signed.
*
* @param signer
* to use; if {@code null}, the default signer will be used
* @return {@code this}
- * @since 5.11
+ * @since 7.0
*/
- public TagCommand setGpgSigner(GpgObjectSigner signer) {
+ public TagCommand setSigner(Signer signer) {
checkCallable();
- this.gpgSigner = signer;
+ this.signer = signer;
return this;
}
/**
* Sets an external {@link GpgConfig} to use. Whether it will be used is at
- * the discretion of the {@link #setGpgSigner(GpgObjectSigner)}.
+ * the discretion of the {@link #setSigner(Signer)}.
*
* @param config
* to set; if {@code null}, the config will be loaded from the
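
TagCommand follows the same pattern; signing an annotated tag no longer needs a GpgObjectSigner cast. A brief sketch under the same assumptions as the commit example above:

import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Signer;

class SignedTagSketch {

	// Create a signed annotated tag; a null signer means "use the default
	// resolved from gpg.format".
	static Ref tagSigned(Git git, Signer signer) throws GitAPIException {
		return git.tag()
				.setName("v1.0")
				.setMessage("signed tag")
				.setSigned(true)
				.setSigner(signer)
				.call();
	}
}
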
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/VerificationResult.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/VerificationResult.java
index 21cddf75b7..f5f4b06e45 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/VerificationResult.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/VerificationResult.java
@@ -9,7 +9,7 @@
*/
package org.eclipse.jgit.api;
-import org.eclipse.jgit.lib.GpgSignatureVerifier;
+import org.eclipse.jgit.lib.SignatureVerifier;
import org.eclipse.jgit.revwalk.RevObject;
/**
@@ -34,8 +34,9 @@ public interface VerificationResult {
* Retrieves the signature verification result.
*
* @return the result, or {@code null} if none was computed
+ * @since 7.0
*/
- GpgSignatureVerifier.SignatureVerification getVerification();
+ SignatureVerifier.SignatureVerification getVerification();
/**
* Retrieves the git object of which the signature was verified.
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/VerifySignatureCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/VerifySignatureCommand.java
index 6a2a44ea2d..487ff04323 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/VerifySignatureCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/VerifySignatureCommand.java
@@ -25,11 +25,10 @@ import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.GpgConfig;
-import org.eclipse.jgit.lib.GpgSignatureVerifier;
-import org.eclipse.jgit.lib.GpgSignatureVerifier.SignatureVerification;
-import org.eclipse.jgit.lib.GpgSignatureVerifierFactory;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.SignatureVerifier.SignatureVerification;
+import org.eclipse.jgit.lib.SignatureVerifiers;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevWalk;
@@ -65,12 +64,8 @@ public class VerifySignatureCommand extends GitCommand<Map<String, VerificationResult>> {