14 files changed, 384 insertions, 288 deletions
diff --git a/SECURITY.md b/SECURITY.md
index e6f57c6986..468a1dbfdf 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -2,48 +2,41 @@
 _ISO 27005 defines vulnerability as: "A weakness of an asset or group of assets that can be exploited by one or more threats."_
-## The Eclipse Security Team
+## Reporting a Security Vulnerability
-The Eclipse Security Team provides help and advice to Eclipse projects
-on vulnerability issues and is the first point of contact
-for handling security vulnerabilities.
-Members of the Security Team are committers on Eclipse Projects
-and members of the Eclipse Architecture Council.
+Vulnerabilities can be reported either via
+[email to the Eclipse Security Team](security@eclipse-foundation.org)
+or using the
+[dedicated security issue tracker](https://gitlab.eclipse.org/security/vulnerability-reports/-/issues/new?issuable_template=new_vulnerability).
-Contact the [Eclipse Security Team](mailto:security@eclipse.org).
+## Additional Information
-**Note that, as a matter of policy, the security team does not open attachments.**
+**The Eclipse Foundation Security Team** provides help and advice to Eclipse Foundation projects on
+vulnerability issues and is the first point of contact for handling security vulnerabilities.
+Members of the Eclipse Foundation Security Team are selected among committers on Eclipse Projects,
+members of the Eclipse Architecture Council, and Eclipse Foundation staff.
-## Reporting a Security Vulnerability
+The general security mailing list address is security@eclipse-foundation.org. Members of the Eclipse
+Foundation Security Team will receive messages sent to this address. This address should be used
+only for reporting undisclosed vulnerabilities; regular issue reports and questions unrelated to
+vulnerabilities in Eclipse Foundation software will be ignored. Note that email sent to this
+address is not encrypted.
-Vulnerabilities can be reported either via email to the Eclipse Security Team
-or directly with a project via the Eclipse Foundation's Bugzilla instance.
-
-The general security mailing list address is security@eclipse.org.
-Members of the Eclipse Security Team will receive messages sent to this address.
-This address should be used only for reporting undisclosed vulnerabilities;
-regular issue reports and questions unrelated to vulnerabilities in Eclipse software
-will be ignored.
-Note that this email address is not encrypted.
+**Note that, as a matter of policy, the security team does not open attachments.**
 The community is also encouraged to report vulnerabilities using the
-[Eclipse Foundation's Bugzilla instance](https://bugs.eclipse.org/bugs/enter_bug.cgi?product=Community&component=Vulnerability%20Reports&keywords=security&groups=Security_Advisories).
-Note that you will require an Eclipse Foundation account to create an issue report,
+[Eclipse Foundation’s issue tracker](https://gitlab.eclipse.org/security/vulnerability-reports/-/issues/new?issuable_template=new_vulnerability).
+Note that you will need an Eclipse Foundation account to create an issue report
+([create an account here if you do not have one](https://accounts.eclipse.org/user/register?destination=user)),
 but by doing so you will be able to participate directly in the resolution of the issue.
-Issue reports related to vulnerabilities must be marked as "committers-only",
-either automatically by clicking the provided link, by the reporter,
-or by a committer during the triage process.
-Note that issues marked "committers-only" are visible to all Eclipse committers.
-By default, a "committers-only" issue is also accessible to the reporter
-and individuals explicitly indicated in the "cc" list.
+Issue reports related to vulnerabilities must be marked as “confidential”, either automatically by
+the reporter clicking the provided link, or by a committer during the triage process.
 ## Disclosure
-Disclosure is initially limited to the reporter and all Eclipse Committers,
-but is expanded to include other individuals, and the general public.
 The timing and manner of disclosure is governed by the
-[Eclipse Security Policy](https://www.eclipse.org/security/policy.php).
+[Eclipse Foundation Vulnerability Reporting Policy](https://www.eclipse.org/security/policy).
 Publicly disclosed issues are listed on the
-[Disclosed Vulnerabilities Page](https://www.eclipse.org/security/known.php).
\ No newline at end of file
+[Disclosed Vulnerabilities page](https://www.eclipse.org/security/known).
\ No newline at end of file
diff --git a/org.eclipse.jgit.ssh.apache/src/org/eclipse/jgit/internal/transport/sshd/JGitPublicKeyAuthentication.java b/org.eclipse.jgit.ssh.apache/src/org/eclipse/jgit/internal/transport/sshd/JGitPublicKeyAuthentication.java index eb55ec045b..b0b1028daa 100644 --- a/org.eclipse.jgit.ssh.apache/src/org/eclipse/jgit/internal/transport/sshd/JGitPublicKeyAuthentication.java +++ b/org.eclipse.jgit.ssh.apache/src/org/eclipse/jgit/internal/transport/sshd/JGitPublicKeyAuthentication.java @@ -48,9 +48,7 @@ import org.apache.sshd.client.config.hosts.HostConfigEntry; import org.apache.sshd.client.session.ClientSession; import org.apache.sshd.common.FactoryManager; import org.apache.sshd.common.NamedFactory; -import org.apache.sshd.common.config.keys.AuthorizedKeyEntry; import org.apache.sshd.common.config.keys.KeyUtils; -import org.apache.sshd.common.config.keys.PublicKeyEntryResolver; import org.apache.sshd.common.config.keys.u2f.SecurityKeyPublicKey; import org.apache.sshd.common.signature.Signature; import org.apache.sshd.common.signature.SignatureFactoriesManager; @@ -314,6 +312,8 @@ public class JGitPublicKeyAuthentication extends UserAuthPublicKey { private class KeyIterator extends UserAuthPublicKeyIterator { + private static final String PUB_KEY_SUFFIX = ".pub"; //$NON-NLS-1$ + public KeyIterator(ClientSession session, SignatureFactoriesManager manager) throws Exception { @@ -326,22 +326,53 @@ public class JGitPublicKeyAuthentication extends UserAuthPublicKey { return null; } return explicitFiles.stream().map(s -> { - try { - Path p = Paths.get(s + ".pub"); //$NON-NLS-1$ - if (Files.isRegularFile(p, LinkOption.NOFOLLOW_LINKS)) { - return AuthorizedKeyEntry.readAuthorizedKeys(p).get(0) - .resolvePublicKey(null, - PublicKeyEntryResolver.IGNORING); - } - } catch (InvalidPathException | IOException - | GeneralSecurityException e) { - log.warn("{}", //$NON-NLS-1$ - format(SshdText.get().cannotReadPublicKey, s), e); + // assume the explicit key is a public key + PublicKey publicKey = readPublicKey(s, false); + if (publicKey == null && !s.endsWith(PUB_KEY_SUFFIX)) { + // if this is not the case, try to look up a public key with + // the same filename and extension .pub + publicKey = readPublicKey(s + PUB_KEY_SUFFIX, true); } - return null; + return publicKey; }).filter(Objects::nonNull).collect(Collectors.toList()); } + /** + * + * @param keyFile + * the path to a public key + * @param isDerived + * {@code false} in case the given {@code keyFile} is set in + * the SSH config as is, otherwise {@code true} + * @return the public key read from the key file + */ + private PublicKey readPublicKey(String keyFile, boolean isDerived) { + try { + Path p = Paths.get(keyFile); + if (Files.isRegularFile(p, LinkOption.NOFOLLOW_LINKS)) { + return KeyUtils.loadPublicKey(p); + } + // only warn about non-existing files in case the key file is + // not derived + if (!isDerived) { + log.warn("{}", //$NON-NLS-1$ + format(SshdText.get().cannotReadPublicKey, keyFile)); + } + } catch (InvalidPathException | IOException e) { + log.warn("{}", //$NON-NLS-1$ + format(SshdText.get().cannotReadPublicKey, keyFile), e); + } catch (GeneralSecurityException e) { + // ignore in case this is not a derived key path, as in most + // cases this specifies a private key + if (isDerived) { + log.warn("{}", //$NON-NLS-1$ + format(SshdText.get().cannotReadPublicKey, keyFile), + e); + } + } + return null; + } + @Override protected Iterable<KeyAgentIdentity> initializeAgentIdentities( ClientSession session) throws 
IOException { diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java index 77e5b7cb14..44694acc8d 100644 --- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java +++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java @@ -14,6 +14,8 @@ import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_MIN_BYTES_OBJ_SIZE import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_PACK_SECTION; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; @@ -24,14 +26,18 @@ import java.util.zip.Deflater; import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource; import org.eclipse.jgit.internal.storage.dfs.DfsReader.PackLoadListener; +import org.eclipse.jgit.internal.storage.file.PackBitmapIndex; import org.eclipse.jgit.internal.storage.pack.PackExt; import org.eclipse.jgit.internal.storage.pack.PackOutputStream; import org.eclipse.jgit.internal.storage.pack.PackWriter; import org.eclipse.jgit.junit.JGitTestUtil; import org.eclipse.jgit.junit.TestRng; +import org.eclipse.jgit.lib.BatchRefUpdate; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.NullProgressMonitor; import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.revwalk.RevWalk; +import org.eclipse.jgit.transport.ReceiveCommand; import org.junit.Before; import org.junit.Test; @@ -123,6 +129,41 @@ public class DfsPackFileTest { } @Test + public void testGetBitmapIndex() throws IOException { + bypassCache = false; + clearCache = true; + ObjectId objectId = setupPack(512, 800); + + // Add a ref for GC + BatchRefUpdate batchRefUpdate = db.getRefDatabase().newBatchUpdate(); + batchRefUpdate.addCommand(new ReceiveCommand(ObjectId.zeroId(), + objectId, "refs/heads/master")); + try (RevWalk rw = new RevWalk(db)) { + batchRefUpdate.execute(rw, NullProgressMonitor.INSTANCE); + } + DfsGarbageCollector gc = new DfsGarbageCollector(db); + gc.pack(NullProgressMonitor.INSTANCE); + + DfsReader reader = db.getObjectDatabase().newReader(); + PackBitmapIndex bitmapIndex = db.getObjectDatabase().getPacks()[0] + .getBitmapIndex(reader); + assertNotNull(bitmapIndex); + assertEquals(1, bitmapIndex.getObjectCount()); + } + + @Test + public void testGetBitmapIndex_noBitmaps() throws IOException { + bypassCache = false; + clearCache = true; + setupPack(512, 800); + + DfsReader reader = db.getObjectDatabase().newReader(); + PackBitmapIndex bitmapIndex = db.getObjectDatabase().getPacks()[0] + .getBitmapIndex(reader); + assertNull(bitmapIndex); + } + + @Test public void testLoadObjectSizeIndex_noIndex() throws IOException { bypassCache = false; clearCache = true; diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java index eb8ceecd81..254184ee80 100644 --- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java +++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java @@ -12,6 +12,7 @@ package org.eclipse.jgit.internal.storage.dfs; import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_MIN_BYTES_OBJ_SIZE_INDEX; import static 
org.eclipse.jgit.lib.ConfigConstants.CONFIG_PACK_SECTION; import static org.eclipse.jgit.lib.Constants.OBJ_BLOB; +import static org.eclipse.jgit.lib.Constants.OBJ_COMMIT; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -39,8 +40,56 @@ public class DfsReaderTest { } @Test - public void isNotLargerThan_objAboveThreshold() - throws IOException { + public void getObjectSize_noIndex_blob() throws IOException { + ObjectId obj = insertBlobWithSize(100); + try (DfsReader ctx = db.getObjectDatabase().newReader()) { + long size = ctx.getObjectSize(obj, OBJ_BLOB); + assertEquals(100, size); + } + } + + @Test + public void getObjectSize_noIndex_commit() throws IOException { + ObjectId obj = insertObjectWithSize(OBJ_COMMIT, 110); + try (DfsReader ctx = db.getObjectDatabase().newReader()) { + long size = ctx.getObjectSize(obj, OBJ_COMMIT); + assertEquals(110, size); + } + } + + @Test + public void getObjectSize_index_indexedBlob() throws IOException { + setObjectSizeIndexMinBytes(100); + ObjectId obj = insertBlobWithSize(200); + try (DfsReader ctx = db.getObjectDatabase().newReader()) { + long size = ctx.getObjectSize(obj, OBJ_BLOB); + assertEquals(200, size); + } + } + + @Test + public void getObjectSize_index_nonIndexedBlob() throws IOException { + setObjectSizeIndexMinBytes(100); + ObjectId obj = insertBlobWithSize(50); + try (DfsReader ctx = db.getObjectDatabase().newReader()) { + long size = ctx.getObjectSize(obj, OBJ_BLOB); + assertEquals(50, size); + } + } + + @Test + public void getObjectSize_index_commit() throws IOException { + setObjectSizeIndexMinBytes(100); + insertBlobWithSize(110); + ObjectId obj = insertObjectWithSize(OBJ_COMMIT, 120); + try (DfsReader ctx = db.getObjectDatabase().newReader()) { + long size = ctx.getObjectSize(obj, OBJ_COMMIT); + assertEquals(120, size); + } + } + + @Test + public void isNotLargerThan_objAboveThreshold() throws IOException { setObjectSizeIndexMinBytes(100); ObjectId obj = insertBlobWithSize(200); try (DfsReader ctx = db.getObjectDatabase().newReader()) { @@ -76,10 +125,8 @@ public class DfsReaderTest { } } - @Test - public void isNotLargerThan_objBelowThreshold() - throws IOException { + public void isNotLargerThan_objBelowThreshold() throws IOException { setObjectSizeIndexMinBytes(100); insertBlobWithSize(1000); // index not empty ObjectId obj = insertBlobWithSize(50); @@ -168,22 +215,26 @@ public class DfsReaderTest { ctx.addPackLoadListener(listener); boolean has = ctx.has(obj); assertTrue(has); - assertEquals(Integer.valueOf(1), listener.callsPerExt.get(PackExt.INDEX)); + assertEquals(Integer.valueOf(1), + listener.callsPerExt.get(PackExt.INDEX)); } } @Test - public void packLoadListener_notLargerThan_openMultipleIndices() throws IOException { - setObjectSizeIndexMinBytes(100); - ObjectId obj = insertBlobWithSize(200); - try (DfsReader ctx = db.getObjectDatabase().newReader()) { - CounterPackLoadListener listener = new CounterPackLoadListener(); - ctx.addPackLoadListener(listener); - boolean notLargerThan = ctx.isNotLargerThan(obj, OBJ_BLOB, 1000); - assertTrue(notLargerThan); - assertEquals(Integer.valueOf(1), listener.callsPerExt.get(PackExt.INDEX)); - assertEquals(Integer.valueOf(1), listener.callsPerExt.get(PackExt.OBJECT_SIZE_INDEX)); - } + public void packLoadListener_notLargerThan_openMultipleIndices() + throws IOException { + setObjectSizeIndexMinBytes(100); + ObjectId obj = insertBlobWithSize(200); + try (DfsReader ctx = db.getObjectDatabase().newReader()) 
{ + CounterPackLoadListener listener = new CounterPackLoadListener(); + ctx.addPackLoadListener(listener); + boolean notLargerThan = ctx.isNotLargerThan(obj, OBJ_BLOB, 1000); + assertTrue(notLargerThan); + assertEquals(Integer.valueOf(1), + listener.callsPerExt.get(PackExt.INDEX)); + assertEquals(Integer.valueOf(1), + listener.callsPerExt.get(PackExt.OBJECT_SIZE_INDEX)); + } } @Test @@ -195,20 +246,24 @@ public class DfsReaderTest { try (DfsReader ctx = db.getObjectDatabase().newReader()) { CounterPackLoadListener listener = new CounterPackLoadListener(); ctx.addPackLoadListener(listener); - ObjectId oid = ObjectId.fromString("aa48de2aa61d9dffa8a05439dc115fe82f10f129"); + ObjectId oid = ObjectId + .fromString("aa48de2aa61d9dffa8a05439dc115fe82f10f129"); boolean has = ctx.has(oid); assertFalse(has); // Open 3 indices trying to find the pack - assertEquals(Integer.valueOf(3), listener.callsPerExt.get(PackExt.INDEX)); + assertEquals(Integer.valueOf(3), + listener.callsPerExt.get(PackExt.INDEX)); } } - @Test - public void packLoadListener_has_repeatedCalls_openMultipleIndices() throws IOException { + public void packLoadListener_has_repeatedCalls_openMultipleIndices() + throws IOException { // Two objects NOT in the repo - ObjectId oid = ObjectId.fromString("aa48de2aa61d9dffa8a05439dc115fe82f10f129"); - ObjectId oid2 = ObjectId.fromString("aa48de2aa61d9dffa8a05439dc115fe82f10f130"); + ObjectId oid = ObjectId + .fromString("aa48de2aa61d9dffa8a05439dc115fe82f10f129"); + ObjectId oid2 = ObjectId + .fromString("aa48de2aa61d9dffa8a05439dc115fe82f10f130"); setObjectSizeIndexMinBytes(100); insertBlobWithSize(200); @@ -222,7 +277,8 @@ public class DfsReaderTest { ctx.has(oid2); assertFalse(has); // The 3 indices were loaded only once each - assertEquals(Integer.valueOf(3), listener.callsPerExt.get(PackExt.INDEX)); + assertEquals(Integer.valueOf(3), + listener.callsPerExt.get(PackExt.INDEX)); } } @@ -231,8 +287,8 @@ public class DfsReaderTest { @SuppressWarnings("boxing") @Override - public void onIndexLoad(String packName, PackSource src, PackExt ext, long size, - Object loadedIdx) { + public void onIndexLoad(String packName, PackSource src, PackExt ext, + long size, Object loadedIdx) { callsPerExt.merge(ext, 1, Integer::sum); } @@ -243,13 +299,16 @@ public class DfsReaderTest { } } - private ObjectId insertBlobWithSize(int size) + private ObjectId insertBlobWithSize(int size) throws IOException { + return insertObjectWithSize(OBJ_BLOB, size); + } + + private ObjectId insertObjectWithSize(int object_type, int size) throws IOException { TestRng testRng = new TestRng(JGitTestUtil.getName()); ObjectId oid; try (ObjectInserter ins = db.newObjectInserter()) { - oid = ins.insert(OBJ_BLOB, - testRng.nextBytes(size)); + oid = ins.insert(object_type, testRng.nextBytes(size)); ins.flush(); } return oid; diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/TreeRevFilterTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/TreeRevFilterTest.java index f67a623ff6..298facfd15 100644 --- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/TreeRevFilterTest.java +++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/TreeRevFilterTest.java @@ -21,7 +21,6 @@ import org.junit.Test; public class TreeRevFilterTest extends RevWalkTestCase { private RevFilter treeRevFilter() { - rw.setRewriteParents(false); return new TreeRevFilter(rw, TreeFilter.ANY_DIFF); } diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/TreeRevFilterWithRewriteParentsTest.java 
b/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/TreeRevFilterWithRewriteParentsTest.java deleted file mode 100644 index 100f2e4164..0000000000 --- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/TreeRevFilterWithRewriteParentsTest.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (C) 2023, Google LLC and others - * - * This program and the accompanying materials are made available under the - * terms of the Eclipse Distribution License v. 1.0 which is available at - * https://www.eclipse.org/org/documents/edl-v10.php. - * - * SPDX-License-Identifier: BSD-3-Clause - */ - -package org.eclipse.jgit.revwalk; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -import org.eclipse.jgit.revwalk.filter.RevFilter; -import org.eclipse.jgit.treewalk.filter.TreeFilter; -import org.junit.Test; - -public class TreeRevFilterWithRewriteParentsTest extends RevWalkTestCase { - private RevFilter treeRevFilter() { - rw.setRewriteParents(true); - return new TreeRevFilter(rw, TreeFilter.ANY_DIFF); - } - - @Test - public void testStringOfPearls_FilePath1() - throws Exception { - RevCommit a = commit(tree(file("d/f", blob("a")))); - RevCommit b = commit(tree(file("d/f", blob("a"))), a); - RevCommit c = commit(tree(file("d/f", blob("b"))), b); - rw.setRevFilter(treeRevFilter()); - markStart(c); - - assertCommit(c, rw.next()); - assertEquals(1, c.getParentCount()); - assertCommit(a, c.getParent(0)); - - assertCommit(a, rw.next()); // b was skipped - assertEquals(0, a.getParentCount()); - assertNull(rw.next()); - } - - @Test - public void testStringOfPearls_FilePath2() throws Exception { - RevCommit a = commit(tree(file("d/f", blob("a")))); - RevCommit b = commit(tree(file("d/f", blob("a"))), a); - RevCommit c = commit(tree(file("d/f", blob("b"))), b); - RevCommit d = commit(tree(file("d/f", blob("b"))), c); - rw.setRevFilter(treeRevFilter()); - markStart(d); - - // d was skipped - assertCommit(c, rw.next()); - assertEquals(1, c.getParentCount()); - assertCommit(a, c.getParent(0)); - - // b was skipped - assertCommit(a, rw.next()); - assertEquals(0, a.getParentCount()); - assertNull(rw.next()); - } - - @Test - public void testStringOfPearls_DirPath2() throws Exception { - RevCommit a = commit(tree(file("d/f", blob("a")))); - RevCommit b = commit(tree(file("d/f", blob("a"))), a); - RevCommit c = commit(tree(file("d/f", blob("b"))), b); - RevCommit d = commit(tree(file("d/f", blob("b"))), c); - rw.setRevFilter(treeRevFilter()); - markStart(d); - - // d was skipped - assertCommit(c, rw.next()); - assertEquals(1, c.getParentCount()); - assertCommit(a, c.getParent(0)); - - // b was skipped - assertCommit(a, rw.next()); - assertEquals(0, a.getParentCount()); - assertNull(rw.next()); - } - - @Test - public void testStringOfPearls_FilePath3() throws Exception { - RevCommit a = commit(tree(file("d/f", blob("a")))); - RevCommit b = commit(tree(file("d/f", blob("a"))), a); - RevCommit c = commit(tree(file("d/f", blob("b"))), b); - RevCommit d = commit(tree(file("d/f", blob("b"))), c); - RevCommit e = commit(tree(file("d/f", blob("b"))), d); - RevCommit f = commit(tree(file("d/f", blob("b"))), e); - RevCommit g = commit(tree(file("d/f", blob("b"))), f); - RevCommit h = commit(tree(file("d/f", blob("b"))), g); - RevCommit i = commit(tree(file("d/f", blob("c"))), h); - rw.setRevFilter(treeRevFilter()); - markStart(i); - - assertCommit(i, rw.next()); - assertEquals(1, i.getParentCount()); - assertCommit(c, i.getParent(0)); - - // h..d was skipped - assertCommit(c, 
rw.next()); - assertEquals(1, c.getParentCount()); - assertCommit(a, c.getParent(0)); - - // b was skipped - assertCommit(a, rw.next()); - assertEquals(0, a.getParentCount()); - assertNull(rw.next()); - } -} diff --git a/org.eclipse.jgit/.settings/.api_filters b/org.eclipse.jgit/.settings/.api_filters index e15ed48e28..dda62f22a3 100644 --- a/org.eclipse.jgit/.settings/.api_filters +++ b/org.eclipse.jgit/.settings/.api_filters @@ -1,35 +1,5 @@ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <component id="org.eclipse.jgit" version="2"> - <resource path="src/org/eclipse/jgit/api/GarbageCollectCommand.java" type="org.eclipse.jgit.api.GarbageCollectCommand"> - <filter id="1142947843"> - <message_arguments> - <message_argument value="5.13.3"/> - <message_argument value="setPackKeptObjects(boolean)"/> - </message_arguments> - </filter> - </resource> - <resource path="src/org/eclipse/jgit/lib/BitmapIndex.java" type="org.eclipse.jgit.lib.BitmapIndex"> - <filter id="404000815"> - <message_arguments> - <message_argument value="org.eclipse.jgit.lib.BitmapIndex"/> - <message_argument value="addBitmapLookupListener(BitmapIndex.BitmapLookupListener)"/> - </message_arguments> - </filter> - </resource> - <resource path="src/org/eclipse/jgit/lib/ConfigConstants.java" type="org.eclipse.jgit.lib.ConfigConstants"> - <filter id="1142947843"> - <message_arguments> - <message_argument value="5.13.3"/> - <message_argument value="CONFIG_KEY_PACK_KEPT_OBJECTS"/> - </message_arguments> - </filter> - <filter id="1142947843"> - <message_arguments> - <message_argument value="5.13.3"/> - <message_argument value="CONFIG_REPACK_SECTION"/> - </message_arguments> - </filter> - </resource> <resource path="src/org/eclipse/jgit/lib/GpgSignatureVerifier.java" type="org.eclipse.jgit.lib.GpgSignatureVerifier"> <filter id="404000815"> <message_arguments> diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java index dfab7bab6f..a07d8416c4 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java @@ -217,7 +217,7 @@ public class DfsInserter extends ObjectInserter { db.commitPack(Collections.singletonList(packDsc), null); rollback = false; - DfsPackFile p = new DfsPackFile(cache, packDsc); + DfsPackFile p = db.createDfsPackFile(cache, packDsc); if (index != null) p.setPackIndex(index); db.addPack(p); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java index 2375698303..9f6eb10256 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java @@ -592,7 +592,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase { if (oldPack != null) { newPacks.add(oldPack); } else if (dsc.hasFileExt(PackExt.PACK)) { - newPacks.add(new DfsPackFile(cache, dsc)); + newPacks.add(createDfsPackFile(cache, dsc)); foundNew = true; } @@ -617,6 +617,23 @@ public abstract class DfsObjDatabase extends ObjectDatabase { newReftables.toArray(new DfsReftable[0])); } + /** + * Create instances of DfsPackFile + * + * Implementors can decide to construct or wrap DfsPackFile in different + * ways. 
+ * + * @param cache + * block cache + * @param dsc + * pack description + * @return the dfs packfile + */ + protected DfsPackFile createDfsPackFile(DfsBlockCache cache, + DfsPackDescription dsc) { + return new DfsPackFile(cache, dsc); + } + private static Map<DfsPackDescription, DfsPackFile> packMap(PackList old) { Map<DfsPackDescription, DfsPackFile> forReuse = new HashMap<>(); for (DfsPackFile p : old.packs) { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java index e0f0e13553..42b1d235bf 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java @@ -65,8 +65,11 @@ import org.eclipse.jgit.util.LongList; */ public final class DfsPackFile extends BlockBasedFile { private static final int REC_SIZE = Constants.OBJECT_ID_LENGTH + 8; + private static final long REF_POSITION = 0; + private static final PackBitmapIndexLoader DEFAULT_BITMAP_LOADER = new StreamPackBitmapIndexLoader(); + /** Index mapping {@link ObjectId} to position within the pack stream. */ private volatile PackIndex index; @@ -79,6 +82,8 @@ public final class DfsPackFile extends BlockBasedFile { /** Index of compressed commit graph mapping entire object graph. */ private volatile CommitGraph commitGraph; + private final PackBitmapIndexLoader bitmapLoader; + /** Index by size */ private boolean objectSizeIndexLoadAttempted; @@ -105,6 +110,10 @@ public final class DfsPackFile extends BlockBasedFile { * description of the pack within the DFS. */ DfsPackFile(DfsBlockCache cache, DfsPackDescription desc) { + this(cache, desc, DEFAULT_BITMAP_LOADER); + } + + DfsPackFile(DfsBlockCache cache, DfsPackDescription desc, PackBitmapIndexLoader bitmapLoader) { super(cache, desc, PACK); int bs = desc.getBlockSize(PACK); @@ -114,6 +123,8 @@ public final class DfsPackFile extends BlockBasedFile { long sz = desc.getFileSize(PACK); length = sz > 0 ? sz : -1; + + this.bitmapLoader = bitmapLoader; } /** @@ -206,7 +217,7 @@ public final class DfsPackFile extends BlockBasedFile { * the bitmap index is not available, or is corrupt. 
*/ public PackBitmapIndex getBitmapIndex(DfsReader ctx) throws IOException { - if (invalid || isGarbage() || !desc.hasFileExt(BITMAP_INDEX)) { + if (invalid || isGarbage() || !bitmapLoader.hasBitmaps(desc)) { return null; } @@ -214,6 +225,15 @@ return bitmapIndex; } + if (!bitmapLoader.keepInDfs(desc)) { + PackBitmapIndexLoader.LoadResult result = bitmapLoader + .loadPackBitmapIndex(ctx, this); + if (bitmapIndex == null && result.bitmapIndex != null) { + bitmapIndex = result.bitmapIndex; + } + return bitmapIndex; + } + DfsStreamKey bitmapKey = desc.getStreamKey(BITMAP_INDEX); AtomicBoolean cacheHit = new AtomicBoolean(true); DfsBlockCache.Ref<PackBitmapIndex> idxref = cache @@ -1252,31 +1272,11 @@ private DfsBlockCache.Ref<PackBitmapIndex> loadBitmapIndex(DfsReader ctx, DfsStreamKey bitmapKey) throws IOException { ctx.stats.readBitmap++; - long start = System.nanoTime(); - try (ReadableChannel rc = ctx.db.openFile(desc, BITMAP_INDEX)) { - long size; - PackBitmapIndex bmidx; - try { - bmidx = PackBitmapIndex.read(alignTo8kBlocks(rc), - () -> idx(ctx), () -> getReverseIdx(ctx), - ctx.getOptions().shouldLoadRevIndexInParallel()); - } finally { - size = rc.position(); - ctx.stats.readBitmapIdxBytes += size; - ctx.stats.readBitmapIdxMicros += elapsedMicros(start); - } - bitmapIndex = bmidx; - return new DfsBlockCache.Ref<>( - bitmapKey, REF_POSITION, size, bmidx); - } catch (EOFException e) { - throw new IOException(MessageFormat.format( - DfsText.get().shortReadOfIndex, - desc.getFileName(BITMAP_INDEX)), e); - } catch (IOException e) { - throw new IOException(MessageFormat.format( - DfsText.get().cannotReadIndex, - desc.getFileName(BITMAP_INDEX)), e); - } + PackBitmapIndexLoader.LoadResult result = bitmapLoader + .loadPackBitmapIndex(ctx, this); + bitmapIndex = result.bitmapIndex; + return new DfsBlockCache.Ref<>(bitmapKey, REF_POSITION, + result.bytesRead, result.bitmapIndex); } private DfsBlockCache.Ref<CommitGraph> loadCommitGraph(DfsReader ctx, @@ -1317,4 +1317,105 @@ } return new BufferedInputStream(in, bs); } + + /** + * Loads the PackBitmapIndex associated with this packfile + */ + public interface PackBitmapIndexLoader { + /** + * Does this pack have bitmaps associated? + * + * @param desc + * the pack + * @return true if the pack has bitmaps + */ + boolean hasBitmaps(DfsPackDescription desc); + + /** + * Whether the returned instance must be kept in the DFS cache + * + * It should be true when the instance is expensive to load and can be + * reused. + * + * @param desc + * the pack + * @return true if the returned bitmap index should be kept in DFS + */ + boolean keepInDfs(DfsPackDescription desc); + + /** + * Returns a PackBitmapIndex for this pack, if the pack has bitmaps + * associated. + * + * @param ctx + * the reader + * @param pack + * the pack + * @return the pack bitmap index and bytes size (when applicable) + * @throws IOException + * error accessing storage + */ + LoadResult loadPackBitmapIndex(DfsReader ctx, DfsPackFile pack) + throws IOException; + + /** + * The instance of the pack bitmap index and the amount of bytes loaded. + * + * The bytes can be 0, if the implementation doesn't do any initial + * loading. 
+ */ + class LoadResult { + final PackBitmapIndex bitmapIndex; + + final long bytesRead; + + LoadResult(PackBitmapIndex packBitmapIndex, long bytesRead) { + this.bitmapIndex = packBitmapIndex; + this.bytesRead = bytesRead; + } + } + } + + /** + * Load the packbitmapindex from the BITMAP_INDEX pack extension + */ + private static final class StreamPackBitmapIndexLoader implements PackBitmapIndexLoader { + @Override + public boolean hasBitmaps(DfsPackDescription desc) { + return desc.hasFileExt(BITMAP_INDEX); + } + + @Override + public boolean keepInDfs(DfsPackDescription desc) { + return true; + } + + @Override + public LoadResult loadPackBitmapIndex(DfsReader ctx, DfsPackFile pack) + throws IOException { + DfsPackDescription desc = pack.getPackDescription(); + try (ReadableChannel rc = ctx.db.openFile(desc, BITMAP_INDEX)) { + long size; + PackBitmapIndex bmidx; + try { + bmidx = PackBitmapIndex.read(alignTo8kBlocks(rc), + () -> pack.idx(ctx), () -> pack.getReverseIdx(ctx), + ctx.getOptions().shouldLoadRevIndexInParallel()); + } finally { + size = rc.position(); + } + return new LoadResult(bmidx, size); + } catch (EOFException e) { + throw new IOException( + MessageFormat.format(DfsText.get().shortReadOfIndex, + desc.getFileName(BITMAP_INDEX)), + e); + } catch (IOException e) { + throw new IOException( + MessageFormat.format(DfsText.get().cannotReadIndex, + desc.getFileName(BITMAP_INDEX)), + e); + } + } + } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java index a38ce91ccd..b19f65c69b 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java @@ -126,7 +126,7 @@ public class DfsPackParser extends PackParser { objdb.commitPack(Collections.singletonList(packDsc), null); rollback = false; - DfsPackFile p = new DfsPackFile(blockCache, packDsc); + DfsPackFile p = objdb.createDfsPackFile(blockCache, packDsc); p.setBlockSize(blockSize); if (packIndex != null) p.setPackIndex(packIndex); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java index c722c06e9c..a342796cbe 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java @@ -503,30 +503,28 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs { public long getObjectSize(AnyObjectId objectId, int typeHint) throws MissingObjectException, IncorrectObjectTypeException, IOException { - if (last != null && !skipGarbagePack(last)) { - long sz = last.getObjectSize(this, objectId); - if (0 <= sz) { - return sz; + DfsPackFile pack = findPackWithObject(objectId); + if (pack == null) { + if (typeHint == OBJ_ANY) { + throw new MissingObjectException(objectId.copy(), + JGitText.get().unknownObjectType2); } + throw new MissingObjectException(objectId.copy(), typeHint); } - PackList packList = db.getPackList(); - long sz = getObjectSizeImpl(packList, objectId); - if (0 <= sz) { - return sz; - } - if (packList.dirty()) { - sz = getObjectSizeImpl(packList, objectId); - if (0 <= sz) { - return sz; - } + if (typeHint != Constants.OBJ_BLOB || !pack.hasObjectSizeIndex(this)) { + return pack.getObjectSize(this, objectId); } - if (typeHint == OBJ_ANY) { - throw new 
MissingObjectException(objectId.copy(), - JGitText.get().unknownObjectType2); + long sz = pack.getIndexedObjectSize(this, objectId); + if (sz >= 0) { + stats.objectSizeIndexHit += 1; + return sz; } - throw new MissingObjectException(objectId.copy(), typeHint); + + // Object wasn't in the index + stats.objectSizeIndexMiss += 1; + return pack.getObjectSize(this, objectId); } @@ -582,21 +580,6 @@ return null; } - private long getObjectSizeImpl(PackList packList, AnyObjectId objectId) - throws IOException { - for (DfsPackFile pack : packList.packs) { - if (pack == last || skipGarbagePack(pack)) { - continue; - } - long sz = pack.getObjectSize(this, objectId); - if (0 <= sz) { - last = pack; - return sz; - } - } - return -1; - } - @Override public DfsObjectToPack newObjectToPack(AnyObjectId objectId, int type) { return new DfsObjectToPack(objectId, type); } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/StartGenerator.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/StartGenerator.java index 61a91e70dc..6854b6083d 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/StartGenerator.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/StartGenerator.java @@ -98,14 +98,14 @@ class StartGenerator extends Generator { } else { pending = RevWalk.newDateRevQueue(q); } - if (rf != RevFilter.ALL && w.getRewriteParents()) { - pendingOutputType |= HAS_REWRITE | NEEDS_REWRITE; - } if (tf != TreeFilter.ALL) { + int rewriteFlag; if (w.getRewriteParents()) { pendingOutputType |= HAS_REWRITE | NEEDS_REWRITE; - } - rf = AndRevFilter.create(new TreeRevFilter(w, tf), rf); + rewriteFlag = RevWalk.REWRITE; + } else + rewriteFlag = 0; + rf = AndRevFilter.create(new TreeRevFilter(w, tf, rewriteFlag), rf); } walker.queue = q; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java index 4085954638..43571a6868 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/TreeRevFilter.java @@ -60,8 +60,21 @@ public class TreeRevFilter extends RevFilter { * Create a {@link org.eclipse.jgit.revwalk.filter.RevFilter} from a * {@link org.eclipse.jgit.treewalk.filter.TreeFilter}. * - * When revWalk's rewrite parent flag is set, it creates a filter for the - * first phase of a parent-rewriting limited revision walk. + * @param walker + * walker used for reading trees. + * @param t + * filter to compare against any changed paths in each commit. If + * a {@link org.eclipse.jgit.revwalk.FollowFilter}, will be + * replaced with a new filter following new paths after a rename. + * @since 3.5 + */ + public TreeRevFilter(RevWalk walker, TreeFilter t) { + this(walker, t, 0); + } + + /** + * Create a filter for the first phase of a parent-rewriting limited + * revision walk. * <p> * This filter is ANDed to evaluate before all other filters and ties the * configured {@link TreeFilter} into the revision walking process. @@ -78,13 +91,14 @@ * filter to compare against any changed paths in each commit. If * a {@link FollowFilter}, will be replaced with a new filter * following new paths after a rename. - * @since 3.5 + * @param rewriteFlag + * flag to color commits to be removed from the simplified DAG. 
*/ - public TreeRevFilter(RevWalk walker, TreeFilter t) { + TreeRevFilter(RevWalk walker, TreeFilter t, int rewriteFlag) { pathFilter = new TreeWalk(walker.reader); pathFilter.setFilter(t); pathFilter.setRecursive(t.shouldBeRecursive()); - this.rewriteFlag = walker.getRewriteParents() ? RevWalk.REWRITE : 0; + this.rewriteFlag = rewriteFlag; } @Override
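
The combination of the new protected DfsObjDatabase.createDfsPackFile() hook and the DfsPackFile.PackBitmapIndexLoader interface lets a DFS backend decide where pack bitmaps come from. Below is only a minimal sketch of a custom loader: the SideStore type and all class and method names other than the PackBitmapIndexLoader contract itself are invented for illustration, and because the loader-accepting DfsPackFile constructor and the LoadResult constructor introduced here are package-private, such a loader would have to live in the org.eclipse.jgit.internal.storage.dfs package as the patch stands.

package org.eclipse.jgit.internal.storage.dfs;

import java.io.IOException;

import org.eclipse.jgit.internal.storage.dfs.DfsPackFile.PackBitmapIndexLoader;
import org.eclipse.jgit.internal.storage.file.PackBitmapIndex;

/**
 * Illustrative PackBitmapIndexLoader that serves bitmap indexes from a
 * hypothetical side store instead of the pack's BITMAP_INDEX extension.
 * Only the PackBitmapIndexLoader contract comes from the patch; SideStore
 * and all other names here are assumptions.
 */
class SideStoreBitmapLoader implements PackBitmapIndexLoader {

	private final SideStore store;

	SideStoreBitmapLoader(SideStore store) {
		this.store = store;
	}

	@Override
	public boolean hasBitmaps(DfsPackDescription desc) {
		return store.hasBitmap(desc);
	}

	@Override
	public boolean keepInDfs(DfsPackDescription desc) {
		// Cheap to re-fetch, so skip the DfsBlockCache path and let
		// DfsPackFile call loadPackBitmapIndex() directly.
		return false;
	}

	@Override
	public LoadResult loadPackBitmapIndex(DfsReader ctx, DfsPackFile pack)
			throws IOException {
		PackBitmapIndex idx = store.read(pack.getPackDescription());
		// 0 bytes read from pack storage; the side store did the I/O.
		return new LoadResult(idx, 0);
	}

	/** Hypothetical external source of pre-built bitmap indexes. */
	interface SideStore {
		boolean hasBitmap(DfsPackDescription desc);

		PackBitmapIndex read(DfsPackDescription desc) throws IOException;
	}
}

Wiring such a loader in would then amount to overriding createDfsPackFile(cache, desc) in a custom DfsObjDatabase so that it passes the loader to the three-argument DfsPackFile constructor added by this change.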
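
For the JGitPublicKeyAuthentication change, the lookup order for an explicit IdentityFile entry is: read the configured path itself as a public key first, and only if that fails and the path does not already end in ".pub", retry with the conventional ".pub" companion file. The following is a self-contained sketch of that order under stated assumptions; only KeyUtils.loadPublicKey(Path) is taken from the patch, while ExplicitKeyLookup, resolve and tryRead are illustrative names.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.security.PublicKey;

import org.apache.sshd.common.config.keys.KeyUtils;

final class ExplicitKeyLookup {

	private ExplicitKeyLookup() {
		// no instances
	}

	/** Resolve an IdentityFile value to a public key, or null. */
	static PublicKey resolve(String configuredPath) {
		// First assume the entry already names a public key file.
		PublicKey key = tryRead(configuredPath);
		if (key == null && !configuredPath.endsWith(".pub")) {
			// The entry most likely names the private key; try the
			// conventional "<private key>.pub" companion file.
			key = tryRead(configuredPath + ".pub");
		}
		return key;
	}

	private static PublicKey tryRead(String path) {
		try {
			Path p = Paths.get(path);
			if (Files.isRegularFile(p, LinkOption.NOFOLLOW_LINKS)) {
				return KeyUtils.loadPublicKey(p);
			}
		} catch (InvalidPathException | IOException
				| GeneralSecurityException e) {
			// Private keys (or unreadable files) end up here; the real code
			// decides per call site whether this deserves a warning.
		}
		return null;
	}
}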