org.eclipse.jgit.pgm.Version
org.eclipse.jgit.pgm.debug.DiffAlgorithms
+org.eclipse.jgit.pgm.debug.Gc
org.eclipse.jgit.pgm.debug.MakeCacheTree
org.eclipse.jgit.pgm.debug.ReadDirCache
org.eclipse.jgit.pgm.debug.RebuildCommitGraph
--- /dev/null
+/*
+ * Copyright (C) 2012, Christian Halstrick <christian.halstrick@sap.com>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.pgm.debug;
+
+import org.eclipse.jgit.lib.TextProgressMonitor;
+import org.eclipse.jgit.pgm.TextBuiltin;
+import org.eclipse.jgit.storage.file.FileRepository;
+import org.eclipse.jgit.storage.file.GC;
+
+class Gc extends TextBuiltin {
+ @Override
+ protected void run() throws Exception {
+ GC gc = new GC((FileRepository) db);
+ gc.setProgressMonitor(new TextProgressMonitor());
+ gc.gc();
+ }
+}
--- /dev/null
+/*
+ * Copyright (C) 2012, Christian Halstrick <christian.halstrick@sap.com>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.storage.file;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+
+import org.eclipse.jgit.junit.LocalDiskRepositoryTestCase;
+import org.eclipse.jgit.junit.TestRepository;
+import org.eclipse.jgit.junit.TestRepository.BranchBuilder;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.revwalk.RevCommit;
+import org.eclipse.jgit.storage.file.GC.RepoStatistics;
+import org.eclipse.jgit.storage.file.PackIndex.MutableEntry;
+import org.eclipse.jgit.util.FileUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class GCTest extends LocalDiskRepositoryTestCase {
+ private TestRepository<FileRepository> tr;
+
+ private FileRepository repo;
+
+ private GC gc;
+
+ private RepoStatistics stats;
+
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+ repo = createWorkRepository();
+ tr = new TestRepository<FileRepository>((repo));
+ gc = new GC(repo);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ super.tearDown();
+ }
+
+ @Test
+ public void testPackAllObjectsInOnePack() throws Exception {
+ tr.branch("refs/heads/master").commit().add("A", "A").add("B", "B")
+ .create();
+ stats = gc.getStatistics();
+ assertEquals(4, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+ gc.gc();
+ stats = gc.getStatistics();
+ assertEquals(0, stats.numberOfLooseObjects);
+ assertEquals(4, stats.numberOfPackedObjects);
+ assertEquals(1, stats.numberOfPackFiles);
+ }
+
+ @Test
+ public void testKeepFiles() throws Exception {
+ BranchBuilder bb = tr.branch("refs/heads/master");
+ bb.commit().add("A", "A").add("B", "B").create();
+ stats = gc.getStatistics();
+ assertEquals(4, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+ assertEquals(0, stats.numberOfPackFiles);
+ gc.gc();
+ stats = gc.getStatistics();
+ assertEquals(0, stats.numberOfLooseObjects);
+ assertEquals(4, stats.numberOfPackedObjects);
+ assertEquals(1, stats.numberOfPackFiles);
+
+ Iterator<PackFile> packIt = repo.getObjectDatabase().getPacks()
+ .iterator();
+ PackFile singlePack = packIt.next();
+ assertFalse(packIt.hasNext());
+ File keepFile = new File(singlePack.getPackFile().getPath() + ".keep");
+ assertFalse(keepFile.exists());
+ assertTrue(keepFile.createNewFile());
+ bb.commit().add("A", "A2").add("B", "B2").create();
+ stats = gc.getStatistics();
+ assertEquals(4, stats.numberOfLooseObjects);
+ assertEquals(4, stats.numberOfPackedObjects);
+ assertEquals(1, stats.numberOfPackFiles);
+ gc.gc();
+ stats = gc.getStatistics();
+ assertEquals(0, stats.numberOfLooseObjects);
+ assertEquals(8, stats.numberOfPackedObjects);
+ assertEquals(2, stats.numberOfPackFiles);
+
+ // check that no object is packed twice
+ Iterator<PackFile> packs = repo.getObjectDatabase().getPacks()
+ .iterator();
+ PackIndex ind1 = packs.next().getIndex();
+ assertEquals(4, ind1.getObjectCount());
+ PackIndex ind2 = packs.next().getIndex();
+ assertEquals(4, ind2.getObjectCount());
+ for (MutableEntry e: ind1)
+ if (ind2.hasObject(e.toObjectId()))
+ assertFalse(
+ "the following object is in both packfiles: "
+ + e.toObjectId(), ind2.hasObject(e.toObjectId()));
+ }
+
+ @Test
+ public void testPackRepoWithNoRefs() throws Exception {
+ tr.commit().add("A", "A").add("B", "B").create();
+ stats = gc.getStatistics();
+ assertEquals(4, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+ gc.gc();
+ stats = gc.getStatistics();
+ assertEquals(4, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+ assertEquals(0, stats.numberOfPackFiles);
+ }
+
+ @Test
+ public void testPack2Commits() throws Exception {
+ BranchBuilder bb = tr.branch("refs/heads/master");
+ bb.commit().add("A", "A").add("B", "B").create();
+ bb.commit().add("A", "A2").add("B", "B2").create();
+
+ stats = gc.getStatistics();
+ assertEquals(8, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+ gc.gc();
+ stats = gc.getStatistics();
+ assertEquals(0, stats.numberOfLooseObjects);
+ assertEquals(8, stats.numberOfPackedObjects);
+ assertEquals(1, stats.numberOfPackFiles);
+ }
+
+ @Test
+ public void testPackCommitsAndLooseOne() throws Exception {
+ BranchBuilder bb = tr.branch("refs/heads/master");
+ RevCommit first = bb.commit().add("A", "A").add("B", "B").create();
+ bb.commit().add("A", "A2").add("B", "B2").create();
+ tr.update("refs/heads/master", first);
+
+ stats = gc.getStatistics();
+ assertEquals(8, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+ gc.gc();
+ stats = gc.getStatistics();
+ assertEquals(0, stats.numberOfLooseObjects);
+ assertEquals(8, stats.numberOfPackedObjects);
+ assertEquals(2, stats.numberOfPackFiles);
+ }
+
+ @Test
+ public void testNotPackTwice() throws Exception {
+ BranchBuilder bb = tr.branch("refs/heads/master");
+ RevCommit first = bb.commit().message("M").add("M", "M").create();
+ bb.commit().message("B").add("B", "Q").create();
+ bb.commit().message("A").add("A", "A").create();
+ RevCommit second = tr.commit().parent(first).message("R").add("R", "Q")
+ .create();
+ tr.update("refs/tags/t1", second);
+
+ Collection<PackFile> oldPacks = tr.getRepository().getObjectDatabase()
+ .getPacks();
+ assertEquals(0, oldPacks.size());
+ stats = gc.getStatistics();
+ assertEquals(11, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+
+ gc.setExpireAgeMillis(0);
+ gc.gc();
+ stats = gc.getStatistics();
+ assertEquals(0, stats.numberOfLooseObjects);
+
+ Iterator<PackFile> pIt = repo.getObjectDatabase().getPacks().iterator();
+ long c = pIt.next().getObjectCount();
+ if (c == 9)
+ assertEquals(2, pIt.next().getObjectCount());
+ else {
+ assertEquals(2, c);
+ assertEquals(9, pIt.next().getObjectCount());
+ }
+ }
+
+ @Test
+ public void testPackCommitsAndLooseOneNoReflog() throws Exception {
+ BranchBuilder bb = tr.branch("refs/heads/master");
+ RevCommit first = bb.commit().add("A", "A").add("B", "B").create();
+ bb.commit().add("A", "A2").add("B", "B2").create();
+ tr.update("refs/heads/master", first);
+
+ stats = gc.getStatistics();
+ assertEquals(8, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+
+ FileUtils.delete(new File(repo.getDirectory(), "logs/HEAD"),
+ FileUtils.RETRY | FileUtils.SKIP_MISSING);
+ FileUtils.delete(
+ new File(repo.getDirectory(), "logs/refs/heads/master"),
+ FileUtils.RETRY | FileUtils.SKIP_MISSING);
+ gc.gc();
+
+ stats = gc.getStatistics();
+ assertEquals(4, stats.numberOfLooseObjects);
+ assertEquals(4, stats.numberOfPackedObjects);
+ assertEquals(1, stats.numberOfPackFiles);
+ }
+
+ @Test
+ public void testPackCommitsAndLooseOneWithPruneNow() throws Exception {
+ BranchBuilder bb = tr.branch("refs/heads/master");
+ RevCommit first = bb.commit().add("A", "A").add("B", "B").create();
+ bb.commit().add("A", "A2").add("B", "B2").create();
+ tr.update("refs/heads/master", first);
+
+ stats = gc.getStatistics();
+ assertEquals(8, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+ gc.setExpireAgeMillis(0);
+ gc.gc();
+ stats = gc.getStatistics();
+ assertEquals(0, stats.numberOfLooseObjects);
+ assertEquals(8, stats.numberOfPackedObjects);
+ assertEquals(2, stats.numberOfPackFiles);
+ }
+
+ @Test
+ public void testPackCommitsAndLooseOneWithPruneNowNoReflog()
+ throws Exception {
+ BranchBuilder bb = tr.branch("refs/heads/master");
+ RevCommit first = bb.commit().add("A", "A").add("B", "B").create();
+ bb.commit().add("A", "A2").add("B", "B2").create();
+ tr.update("refs/heads/master", first);
+
+ stats = gc.getStatistics();
+ assertEquals(8, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+
+ FileUtils.delete(new File(repo.getDirectory(), "logs/HEAD"),
+ FileUtils.RETRY | FileUtils.SKIP_MISSING);
+ FileUtils.delete(
+ new File(repo.getDirectory(), "logs/refs/heads/master"),
+ FileUtils.RETRY | FileUtils.SKIP_MISSING);
+ gc.setExpireAgeMillis(0);
+ gc.gc();
+
+ stats = gc.getStatistics();
+ assertEquals(0, stats.numberOfLooseObjects);
+ assertEquals(4, stats.numberOfPackedObjects);
+ assertEquals(1, stats.numberOfPackFiles);
+ }
+
+ @Test
+ public void testIndexSavesObjects() throws Exception {
+ BranchBuilder bb = tr.branch("refs/heads/master");
+ bb.commit().add("A", "A").add("B", "B").create();
+ bb.commit().add("A", "A2").add("B", "B2").create();
+ bb.commit().add("A", "A3"); // this new content in index should survive
+ stats = gc.getStatistics();
+ assertEquals(9, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+ gc.gc();
+ stats = gc.getStatistics();
+ assertEquals(1, stats.numberOfLooseObjects);
+ assertEquals(8, stats.numberOfPackedObjects);
+ assertEquals(1, stats.numberOfPackFiles);
+ }
+
+ @Test
+ public void testIndexSavesObjectsWithPruneNow() throws Exception {
+ BranchBuilder bb = tr.branch("refs/heads/master");
+ bb.commit().add("A", "A").add("B", "B").create();
+ bb.commit().add("A", "A2").add("B", "B2").create();
+ bb.commit().add("A", "A3"); // this new content in index should survive
+ stats = gc.getStatistics();
+ assertEquals(9, stats.numberOfLooseObjects);
+ assertEquals(0, stats.numberOfPackedObjects);
+ gc.setExpireAgeMillis(0);
+ gc.gc();
+ stats = gc.getStatistics();
+ assertEquals(0, stats.numberOfLooseObjects);
+ assertEquals(8, stats.numberOfPackedObjects);
+ assertEquals(1, stats.numberOfPackFiles);
+ }
+
+ @Test
+ public void testPruneNone() throws Exception {
+ BranchBuilder bb = tr.branch("refs/heads/master");
+ bb.commit().add("A", "A").add("B", "B").create();
+ bb.commit().add("A", "A2").add("B", "B2").create();
+ new File(repo.getDirectory(), Constants.LOGS + "/refs/heads/master")
+ .delete();
+ stats = gc.getStatistics();
+ assertEquals(8, stats.numberOfLooseObjects);
+ gc.setExpireAgeMillis(0);
+ gc.prune(Collections.<ObjectId> emptySet());
+ stats = gc.getStatistics();
+ assertEquals(8, stats.numberOfLooseObjects);
+ tr.blob("x");
+ stats = gc.getStatistics();
+ assertEquals(9, stats.numberOfLooseObjects);
+ gc.prune(Collections.<ObjectId> emptySet());
+ stats = gc.getStatistics();
+ assertEquals(8, stats.numberOfLooseObjects);
+ }
+}
import java.io.File;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.Ref.Storage;
import org.eclipse.jgit.lib.RefDatabase;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
public class RefDirectoryTest extends LocalDiskRepositoryTestCase {
private Repository diskRepo;
- private TestRepository repo;
+ private TestRepository<Repository> repo;
private RefDirectory refdir;
diskRepo = createBareRepository();
refdir = (RefDirectory) diskRepo.getRefDatabase();
- repo = new TestRepository(diskRepo);
+ repo = new TestRepository<Repository>(diskRepo);
A = repo.commit().create();
B = repo.commit(repo.getRevWalk().parseCommit(A));
v1_0 = repo.tag("v1_0", B);
assertEquals(v1_0.getObject(), tag.getPeeledObjectId());
}
+ @Test
+ public void test_repack() throws Exception {
+ Map<String, Ref> all;
+
+ writePackedRefs("# pack-refs with: peeled \n" + //
+ A.name() + " refs/heads/master\n" + //
+ B.name() + " refs/heads/other\n" + //
+ v1_0.name() + " refs/tags/v1.0\n" + //
+ "^" + v1_0.getObject().name() + "\n");
+ all = refdir.getRefs(RefDatabase.ALL);
+
+ assertEquals(4, all.size());
+ assertEquals(Storage.LOOSE, all.get(HEAD).getStorage());
+ assertEquals(Storage.PACKED, all.get("refs/heads/master").getStorage());
+ assertEquals(A.getId(), all.get("refs/heads/master").getObjectId());
+ assertEquals(Storage.PACKED, all.get("refs/heads/other").getStorage());
+ assertEquals(Storage.PACKED, all.get("refs/tags/v1.0").getStorage());
+
+ repo.update("refs/heads/master", B.getId());
+ RevTag v0_1 = repo.tag("v0.1", A);
+ repo.update("refs/tags/v0.1", v0_1);
+
+ all = refdir.getRefs(RefDatabase.ALL);
+ assertEquals(5, all.size());
+ assertEquals(Storage.LOOSE, all.get(HEAD).getStorage());
+ // Why isn't the next ref LOOSE_PACKED?
+ assertEquals(Storage.LOOSE, all.get("refs/heads/master")
+ .getStorage());
+ assertEquals(B.getId(), all.get("refs/heads/master").getObjectId());
+ assertEquals(Storage.PACKED, all.get("refs/heads/other").getStorage());
+ assertEquals(Storage.PACKED, all.get("refs/tags/v1.0").getStorage());
+ assertEquals(Storage.LOOSE, all.get("refs/tags/v0.1").getStorage());
+ assertEquals(v0_1.getId(), all.get("refs/tags/v0.1").getObjectId());
+
+ all = refdir.getRefs(RefDatabase.ALL);
+ refdir.pack(new ArrayList<String>(all.keySet()));
+
+ all = refdir.getRefs(RefDatabase.ALL);
+ assertEquals(5, all.size());
+ assertEquals(Storage.LOOSE, all.get(HEAD).getStorage());
+ // Why isn't the next ref LOOSE_PACKED?
+ assertEquals(Storage.PACKED, all.get("refs/heads/master").getStorage());
+ assertEquals(B.getId(), all.get("refs/heads/master").getObjectId());
+ assertEquals(Storage.PACKED, all.get("refs/heads/other").getStorage());
+ assertEquals(Storage.PACKED, all.get("refs/tags/v1.0").getStorage());
+ assertEquals(Storage.PACKED, all.get("refs/tags/v0.1").getStorage());
+ assertEquals(v0_1.getId(), all.get("refs/tags/v0.1").getObjectId());
+ }
+
@Test
public void testGetRef_EmptyDatabase() throws IOException {
Ref r;
cannotCreateConfig=cannot create config
cannotCreateDirectory=Cannot create directory {0}
cannotCreateHEAD=cannot create HEAD
+cannotCreateIndexfile=Cannot create an index file with name {0}
cannotDeleteCheckedOutBranch=Branch {0} is checked out and can not be deleted
cannotDeleteFile=Cannot delete file: {0}
cannotDeleteStaleTrackingRef=Cannot delete stale tracking ref {0}
packHasUnresolvedDeltas=pack has unresolved deltas
packingCancelledDuringObjectsWriting=Packing cancelled during objects writing
packObjectCountMismatch=Pack object count mismatch: pack {0} index {1}: {2}
+packRefs=Pack refs
packTooLargeForIndexVersion1=Pack too large for index version 1
packWriterStatistics=Total {0,number,#0} (delta {1,number,#0}), reused {2,number,#0} (delta {3,number,#0})
+panicCantRenameIndexFile=Panic: index file {0} must be renamed to replace {1}; until then repository is corrupt
patchApplyException=Cannot apply: {0}
patchFormatException=Format error: {0}
pathIsNotInWorkingDir=Path is not in working dir
problemWithResolvingPushRefSpecsLocally=Problem with resolving push ref specs locally: {0}
progressMonUploading=Uploading {0}
propertyIsAlreadyNonNull=Property is already non null
+pruneLoosePackedObjects=Prune loose objects also found in pack files
+pruneLooseUnreferencedObjects=Prune loose, unreferenced objects
pullOnRepoWithoutHEADCurrentlyNotSupported=Pull on repository without HEAD currently not supported
pullTaskName=Pull
pushCancelled=push cancelled
/***/ public String cannotCreateConfig;
/***/ public String cannotCreateDirectory;
/***/ public String cannotCreateHEAD;
+ /***/ public String cannotCreateIndexfile;
/***/ public String cannotDeleteCheckedOutBranch;
/***/ public String cannotDeleteFile;
/***/ public String cannotDeleteStaleTrackingRef;
/***/ public String packHasUnresolvedDeltas;
/***/ public String packingCancelledDuringObjectsWriting;
/***/ public String packObjectCountMismatch;
+ /***/ public String packRefs;
/***/ public String packTooLargeForIndexVersion1;
/***/ public String packWriterStatistics;
+ /***/ public String panicCantRenameIndexFile;
/***/ public String patchApplyException;
/***/ public String patchFormatException;
/***/ public String pathIsNotInWorkingDir;
/***/ public String problemWithResolvingPushRefSpecsLocally;
/***/ public String progressMonUploading;
/***/ public String propertyIsAlreadyNonNull;
+ /***/ public String pruneLoosePackedObjects;
+ /***/ public String pruneLooseUnreferencedObjects;
/***/ public String pullOnRepoWithoutHEADCurrentlyNotSupported;
/***/ public String pullTaskName;
/***/ public String pushCancelled;
--- /dev/null
+/*
+ * Copyright (C) 2012, Christian Halstrick <christian.halstrick@sap.com>
+ * Copyright (C) 2011, Shawn O. Pearce <spearce@spearce.org>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.storage.file;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.channels.Channels;
+import java.nio.channels.FileChannel;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.eclipse.jgit.dircache.DirCacheIterator;
+import org.eclipse.jgit.errors.CorruptObjectException;
+import org.eclipse.jgit.errors.IncorrectObjectTypeException;
+import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.errors.NoWorkTreeException;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.FileMode;
+import org.eclipse.jgit.lib.NullProgressMonitor;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ProgressMonitor;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.revwalk.ObjectWalk;
+import org.eclipse.jgit.revwalk.RevObject;
+import org.eclipse.jgit.revwalk.RevWalk;
+import org.eclipse.jgit.storage.pack.PackWriter;
+import org.eclipse.jgit.treewalk.TreeWalk;
+import org.eclipse.jgit.treewalk.filter.TreeFilter;
+import org.eclipse.jgit.util.FileUtils;
+
+/**
+ * A garbage collector for git {@link FileRepository}. Instances of this class
+ * are not thread-safe. Don't use the same instance from multiple threads.
+ *
+ * This class started as a copy of DfsGarbageCollector from Shawn O. Pearce
+ * adapted to FileRepositories.
+ */
+public class GC {
+ private final FileRepository repo;
+
+ private ProgressMonitor pm;
+
+ private long expireAgeMillis;
+
+ /**
+ * the refs which existed during the last call to {@link #repack()}. This is
+ * needed during {@link #prune(Set)} where we can optimize by looking at the
+ * difference between the current refs and the refs which existed during
+ * last {@link #repack()}.
+ */
+ private Map<String, Ref> lastPackedRefs;
+
+ /**
+ * Holds the starting time of the last repack() execution. This is needed in
+ * prune() to inspect only those reflog entries which have been added since
+ * last repack().
+ */
+ private long lastRepackTime;
+
+ /**
+ * Creates a new garbage collector with default values. An expirationTime of
+ * two weeks and a <code>NullProgressMonitor</code> will be used.
+ *
+ * @param repo
+ * the repo to work on
+ */
+ public GC(FileRepository repo) {
+ this.repo = repo;
+ this.pm = NullProgressMonitor.INSTANCE;
+ this.expireAgeMillis = 14 * 24 * 60 * 60 * 1000L;
+ }
+
+ /**
+ * Runs a garbage collector on a {@link FileRepository}. It will
+ * <ul>
+ * <li>pack loose references into packed-refs</li>
+ * <li>repack all reachable objects into new pack files and delete the old
+ * pack files</li>
+ * <li>prune all loose objects which are now reachable by packs</li>
+ * </ul>
+ *
+ * @return the collection of {@link PackFile}'s which are newly created
+ * @throws IOException
+ */
+ public Collection<PackFile> gc() throws IOException {
+ packRefs();
+ // TODO: implement reflog_expire(pm, repo);
+ Collection<PackFile> newPacks = repack();
+ prune(Collections.<ObjectId> emptySet());
+ // TODO: implement rerere_gc(pm);
+ return newPacks;
+ }
+
+ /**
+ * Delete old pack files. What is 'old' is defined by specifying a set of
+ * old pack files and a set of new pack files. Each pack file contained in
+ * old pack files but not contained in new pack files will be deleted.
+ *
+ * @param oldPacks
+ * @param newPacks
+ * @param ignoreErrors
+ * <code>true</code> if we should ignore the fact that certain
+ * pack files or index files couldn't be deleted.
+ * <code>false</code> if an exception should be thrown in such
+ * cases
+ * @throws IOException
+ * if a pack file couldn't be deleted and
+ * <code>ignoreErrors</code> is set to <code>false</code>
+ */
+ private void deleteOldPacks(Collection<PackFile> oldPacks,
+ Collection<PackFile> newPacks, boolean ignoreErrors)
+ throws IOException {
+ int deleteOptions = FileUtils.RETRY | FileUtils.SKIP_MISSING;
+ if (ignoreErrors)
+ deleteOptions |= FileUtils.IGNORE_ERRORS;
+ oldPackLoop: for (PackFile oldPack : oldPacks) {
+ String oldName = oldPack.getPackName();
+ // check whether an old pack file is also among the list of new
+ // pack files. Then we must not delete it.
+ for (PackFile newPack : newPacks)
+ if (oldName.equals(newPack.getPackName()))
+ continue oldPackLoop;
+
+ if (!oldPack.shouldBeKept()) {
+ oldPack.close();
+ FileUtils.delete(nameFor(oldName, ".pack"), deleteOptions);
+ FileUtils.delete(nameFor(oldName, ".idx"), deleteOptions);
+ }
+ }
+ // close the complete object database. That is the only way to force
+ // rescanning and to detect that certain pack files are now deleted.
+ repo.getObjectDatabase().close();
+ }
+
+ /**
+ * Like "git prune-packed" this method tries to prune all loose objects
+ * which can be found in packs. If certain objects can't be pruned (e.g.
+ * because the filesystem delete operation fails) this is silently ignored.
+ *
+ * @throws IOException
+ */
+ public void prunePacked() throws IOException {
+ ObjectDirectory objdb = repo.getObjectDatabase();
+ Collection<PackFile> packs = objdb.getPacks();
+ File objects = repo.getObjectsDirectory();
+ String[] fanout = objects.list();
+
+ if (fanout != null && fanout.length > 0) {
+ pm.beginTask(JGitText.get().pruneLoosePackedObjects, fanout.length);
+ try {
+ for (String d : fanout) {
+ pm.update(1);
+ if (d.length() != 2)
+ continue;
+ String[] entries = new File(objects, d).list();
+ if (entries == null)
+ continue;
+ for (String e : entries) {
+ if (e.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
+ continue;
+ ObjectId id;
+ try {
+ id = ObjectId.fromString(d + e);
+ } catch (IllegalArgumentException notAnObject) {
+ // ignoring the file that does not represent loose
+ // object
+ continue;
+ }
+ boolean found = false;
+ for (PackFile p : packs)
+ if (p.hasObject(id)) {
+ found = true;
+ break;
+ }
+ if (found)
+ FileUtils.delete(objdb.fileFor(id), FileUtils.RETRY
+ | FileUtils.SKIP_MISSING
+ | FileUtils.IGNORE_ERRORS);
+ }
+ }
+ } finally {
+ pm.endTask();
+ }
+ }
+ }
+
+ /**
+ * Like "git prune" this method tries to prune all loose objects which are
+ * unreferenced. If certain objects can't be pruned (e.g. because the
+ * filesystem delete operation fails) this is silently ignored.
+ *
+ * @param objectsToKeep
+ * a set of objects which should explicitly not be pruned
+ *
+ * @throws IOException
+ */
+ public void prune(Set<ObjectId> objectsToKeep)
+ throws IOException {
+ long expireDate = (expireAgeMillis == 0) ? Long.MAX_VALUE : System
+ .currentTimeMillis() - expireAgeMillis;
+
+ // Collect all loose objects which are old enough, not referenced from
+ // the index and not in objectsToKeep
+ Map<ObjectId, File> deletionCandidates = new HashMap<ObjectId, File>();
+ Set<ObjectId> indexObjects = null;
+ File objects = repo.getObjectsDirectory();
+ String[] fanout = objects.list();
+ if (fanout != null && fanout.length > 0) {
+ pm.beginTask(JGitText.get().pruneLooseUnreferencedObjects,
+ fanout.length);
+ for (String d : fanout) {
+ pm.update(1);
+ if (d.length() != 2)
+ continue;
+ File[] entries = new File(objects, d).listFiles();
+ if (entries == null)
+ continue;
+ for (File f : entries) {
+ String fName = f.getName();
+ if (fName.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
+ continue;
+ if (f.lastModified() >= expireDate)
+ continue;
+ try {
+ ObjectId id = ObjectId.fromString(d + fName);
+ if (objectsToKeep.contains(id))
+ continue;
+ if (indexObjects == null)
+ indexObjects = listNonHEADIndexObjects();
+ if (indexObjects.contains(id))
+ continue;
+ deletionCandidates.put(id, f);
+ } catch (IllegalArgumentException notAnObject) {
+ // ignoring the file that does not represent loose
+ // object
+ continue;
+ }
+ }
+ }
+ }
+ if (deletionCandidates.isEmpty())
+ return;
+
+ // From the set of current refs remove all those which have been handled
+ // during last repack(). Only those refs will survive which have been
+ // added or modified since the last repack. Only these can save existing
+ // loose refs from being pruned.
+ Map<String, Ref> newRefs;
+ if (lastPackedRefs == null || lastPackedRefs.isEmpty())
+ newRefs = getAllRefs();
+ else {
+ newRefs = new HashMap<String, Ref>();
+ for (Iterator<Map.Entry<String, Ref>> i = getAllRefs().entrySet()
+ .iterator(); i.hasNext();) {
+ Entry<String, Ref> newEntry = i.next();
+ Ref old = lastPackedRefs.get(newEntry.getKey());
+ if (!equals(newEntry.getValue(), old))
+ newRefs.put(newEntry.getKey(), newEntry.getValue());
+ }
+ }
+
+ if (!newRefs.isEmpty()) {
+ // There are new/modified refs! Check which loose objects are now
+ // referenced by these modified refs (or their reflog entries).
+ // Remove these loose objects
+ // from the deletionCandidates. When the last candidate is removed
+ // leave this method.
+ ObjectWalk w = new ObjectWalk(repo);
+ try {
+ for (Ref cr : newRefs.values())
+ w.markStart(w.parseAny(cr.getObjectId()));
+ if (lastPackedRefs != null)
+ for (Ref lpr : lastPackedRefs.values())
+ w.markUninteresting(w.parseAny(lpr.getObjectId()));
+ removeReferenced(deletionCandidates, w);
+ } finally {
+ w.dispose();
+ }
+ }
+
+ if (deletionCandidates.isEmpty())
+ return;
+
+ // Since we have not left the method yet there are still
+ // deletionCandidates. Last chance for these objects not to be pruned is
+ // that they are referenced by reflog entries. Even refs which currently
+ // point to the same object as during last repack() may have
+ // additional reflog entries not handled during last repack()
+ ObjectWalk w = new ObjectWalk(repo);
+ try {
+ for (Ref ar : getAllRefs().values())
+ for (ObjectId id : listRefLogObjects(ar, lastRepackTime))
+ w.markStart(w.parseAny(id));
+ if (lastPackedRefs != null)
+ for (Ref lpr : lastPackedRefs.values())
+ w.markUninteresting(w.parseAny(lpr.getObjectId()));
+ removeReferenced(deletionCandidates, w);
+ } finally {
+ w.dispose();
+ }
+
+ if (deletionCandidates.isEmpty())
+ return;
+
+ // delete all candidates which have survived: these are unreferenced
+ // loose objects
+ for (File f : deletionCandidates.values())
+ f.delete();
+
+ repo.getObjectDatabase().close();
+ }
+
+	/**
+	 * Remove all entries from a map which key is the id of an object referenced
+	 * by the given ObjectWalk. Returns early as soon as the map is empty.
+	 *
+	 * @param id2File
+	 *            map from object ids to candidate loose-object files
+	 * @param w
+	 *            walk enumerating every referenced object
+	 * @throws MissingObjectException
+	 * @throws IncorrectObjectTypeException
+	 * @throws IOException
+	 */
+	private void removeReferenced(Map<ObjectId, File> id2File,
+			ObjectWalk w) throws MissingObjectException,
+			IncorrectObjectTypeException, IOException {
+		// First drain the commits of the walk ...
+		for (RevObject obj = w.next(); obj != null; obj = w.next()) {
+			if (id2File.remove(obj.getId()) != null && id2File.isEmpty())
+				return;
+		}
+		// ... then the remaining trees, blobs and annotated tags.
+		for (RevObject obj = w.nextObject(); obj != null; obj = w.nextObject()) {
+			if (id2File.remove(obj.getId()) != null && id2File.isEmpty())
+				return;
+		}
+	}
+
+	/**
+	 * Check whether two refs are equal: symbolic refs must point at the same
+	 * target name, object refs must point at the same object id.
+	 * NOTE(review): assumes a non-symbolic ref has a non-null object id.
+	 *
+	 * @param r1
+	 * @param r2
+	 * @return true if both refs are non-null and denote the same target
+	 */
+	private static boolean equals(Ref r1, Ref r2) {
+		if (r1 == null || r2 == null)
+			return false;
+		if (r1.isSymbolic() != r2.isSymbolic())
+			return false;
+		return r1.isSymbolic()
+				? r1.getTarget().getName().equals(r2.getTarget().getName())
+				: r1.getObjectId().equals(r2.getObjectId());
+	}
+
+	/**
+	 * Packs all non-symbolic, loose refs into packed-refs.
+	 *
+	 * @throws IOException
+	 */
+	public void packRefs() throws IOException {
+		final Collection<Ref> allRefs = repo.getAllRefs().values();
+		final List<String> looseRefNames = new ArrayList<String>(
+				allRefs.size());
+		pm.beginTask(JGitText.get().packRefs, allRefs.size());
+		try {
+			// Collect the names of all refs which can be packed: only
+			// non-symbolic refs whose storage is currently loose qualify.
+			for (Ref candidate : allRefs) {
+				if (!candidate.isSymbolic()
+						&& candidate.getStorage().isLoose())
+					looseRefNames.add(candidate.getName());
+				pm.update(1);
+			}
+			((RefDirectory) repo.getRefDatabase()).pack(looseRefNames);
+		} finally {
+			pm.endTask();
+		}
+	}
+
+ /**
+ * Packs all objects which reachable from any of the heads into one pack
+ * file. Additionally all objects which are not reachable from any head but
+ * which are reachable from any of the other refs (e.g. tags), special refs
+ * (e.g. FETCH_HEAD) or index are packed into a separate pack file. Objects
+ * included in pack files which have a .keep file associated are never
+ * repacked. All old pack files which existed before are deleted.
+ *
+ * @return a collection of the newly created pack files
+ * @throws IOException
+ * when during reading of refs, index, packfiles, objects,
+ * reflog-entries or during writing to the packfiles
+ * {@link IOException} occurs
+ */
+ public Collection<PackFile> repack() throws IOException {
+ // Remember the packs existing now; they are deleted at the end after
+ // the replacement packs have been written.
+ Collection<PackFile> toBeDeleted = repo.getObjectDatabase().getPacks();
+
+ // Snapshot time and refs before scanning. prune() later uses these to
+ // detect refs added or modified after this repack.
+ long time = System.currentTimeMillis();
+ Map<String, Ref> refsBefore = getAllRefs();
+
+ Set<ObjectId> allHeads = new HashSet<ObjectId>();
+ Set<ObjectId> nonHeads = new HashSet<ObjectId>();
+ Set<ObjectId> tagTargets = new HashSet<ObjectId>();
+ // Objects referenced only by the index are packed with the non-heads.
+ Set<ObjectId> indexObjects = listNonHEADIndexObjects();
+
+ for (Ref ref : refsBefore.values()) {
+ // Reflog entries of every ref also go into the non-heads pack.
+ nonHeads.addAll(listRefLogObjects(ref, 0));
+ if (ref.isSymbolic() || ref.getObjectId() == null)
+ continue;
+ if (ref.getName().startsWith(Constants.R_HEADS))
+ allHeads.add(ref.getObjectId());
+ else
+ nonHeads.add(ref.getObjectId());
+ if (ref.getPeeledObjectId() != null)
+ tagTargets.add(ref.getPeeledObjectId());
+ }
+
+ // Packs protected by a .keep file keep their objects: exclude their
+ // indexes so those objects are not written again.
+ List<PackIndex> excluded = new LinkedList<PackIndex>();
+ for (PackFile f : repo.getObjectDatabase().getPacks())
+ if (f.shouldBeKept())
+ excluded.add(f.getIndex());
+
+ tagTargets.addAll(allHeads);
+ nonHeads.addAll(indexObjects);
+
+ List<PackFile> ret = new ArrayList<PackFile>(2);
+ PackFile heads = null;
+ if (!allHeads.isEmpty()) {
+ heads = writePack(allHeads, Collections.<ObjectId> emptySet(),
+ tagTargets, excluded);
+ if (heads != null) {
+ ret.add(heads);
+ // Prepend the new heads pack to the exclusion list so the
+ // second pack does not duplicate its objects.
+ excluded.add(0, heads.getIndex());
+ }
+ }
+ if (!nonHeads.isEmpty()) {
+ PackFile rest = writePack(nonHeads, allHeads, tagTargets, excluded);
+ if (rest != null)
+ ret.add(rest);
+ }
+ deleteOldPacks(toBeDeleted, ret, true);
+ prunePacked();
+
+ // Record the state used by prune() to compute what changed since the
+ // last repack.
+ lastPackedRefs = refsBefore;
+ lastRepackTime = time;
+ return ret;
+ }
+
+	/**
+	 * Enumerates the object ids mentioned by the reflog of a ref (both the
+	 * old and the new id of each entry).
+	 *
+	 * @param ref
+	 *            the ref which log should be inspected
+	 * @param minTime
+	 *            only reflog entries not older then this time are processed
+	 * @return the {@link ObjectId}s contained in the reflog
+	 * @throws IOException
+	 */
+	private Set<ObjectId> listRefLogObjects(Ref ref, long minTime)
+			throws IOException {
+		List<ReflogEntry> entries = repo.getReflogReader(ref.getName())
+				.getReverseEntries();
+		if (entries == null || entries.isEmpty())
+			return Collections.<ObjectId> emptySet();
+		Set<ObjectId> result = new HashSet<ObjectId>();
+		for (ReflogEntry entry : entries) {
+			// Entries come newest-first: stop at the first one older than
+			// minTime.
+			if (entry.getWho().getWhen().getTime() < minTime)
+				break;
+			result.add(entry.getNewId());
+			ObjectId oldId = entry.getOldId();
+			if (oldId != null && !ObjectId.zeroId().equals(oldId))
+				result.add(oldId);
+		}
+		return result;
+	}
+
+	/**
+	 * Returns a map of all refs and additional refs (e.g. FETCH_HEAD,
+	 * MERGE_HEAD, ...)
+	 *
+	 * @return a map where names of refs point to ref objects
+	 * @throws IOException
+	 */
+	private Map<String, Ref> getAllRefs() throws IOException {
+		Map<String, Ref> refs = repo.getAllRefs();
+		// Merge the additional refs into the regular ones, keyed by name.
+		for (Ref additional : repo.getRefDatabase().getAdditionalRefs())
+			refs.put(additional.getName(), additional);
+		return refs;
+	}
+
+	/**
+	 * Return a list of those objects in the index which differ from whats in
+	 * HEAD
+	 *
+	 * @return a set of ObjectIds of changed objects in the index
+	 * @throws IOException
+	 * @throws CorruptObjectException
+	 * @throws NoWorkTreeException
+	 */
+	private Set<ObjectId> listNonHEADIndexObjects()
+			throws CorruptObjectException, IOException {
+		RevWalk revWalk = null;
+		try {
+			// Bare repositories have no index: nothing to report.
+			if (repo.getIndexFile() == null)
+				return Collections.emptySet();
+		} catch (NoWorkTreeException e) {
+			return Collections.emptySet();
+		}
+		TreeWalk treeWalk = new TreeWalk(repo);
+		try {
+			treeWalk.addTree(new DirCacheIterator(repo.readDirCache()));
+			ObjectId headID = repo.resolve(Constants.HEAD);
+			if (headID != null) {
+				revWalk = new RevWalk(repo);
+				treeWalk.addTree(revWalk.parseTree(headID));
+				revWalk.dispose();
+				revWalk = null;
+			}
+
+			// Walk only the entries which differ between index and HEAD.
+			treeWalk.setFilter(TreeFilter.ANY_DIFF);
+			treeWalk.setRecursive(true);
+			Set<ObjectId> ret = new HashSet<ObjectId>();
+
+			while (treeWalk.next()) {
+				ObjectId objectId = treeWalk.getObjectId(0);
+				switch (treeWalk.getRawMode(0) & FileMode.TYPE_MASK) {
+				case FileMode.TYPE_MISSING:
+				case FileMode.TYPE_GITLINK:
+					continue;
+				case FileMode.TYPE_TREE:
+				case FileMode.TYPE_FILE:
+				case FileMode.TYPE_SYMLINK:
+					ret.add(objectId);
+					continue;
+				default:
+					// Fixed: each value must be a separate argument of
+					// MessageFormat.format(). Previously the misplaced
+					// parentheses passed them all to the inner
+					// String.format() call, so the message template only
+					// received its first argument.
+					throw new IOException(MessageFormat.format(
+							JGitText.get().corruptObjectInvalidMode3,
+							String.format("%o",
+									Integer.valueOf(treeWalk.getRawMode(0))),
+							(objectId == null) ? "null" : objectId.name(),
+							treeWalk.getPathString(),
+							repo.getIndexFile()));
+				}
+			}
+			return ret;
+		} finally {
+			if (revWalk != null)
+				revWalk.dispose();
+			treeWalk.release();
+		}
+	}
+
+	/**
+	 * Writes one new pack file containing the closure of {@code want} minus
+	 * what is reachable from {@code have}, excluding everything already
+	 * stored in {@code excludeObjects}. Pack and index are written to
+	 * temporary files first and then renamed into place.
+	 *
+	 * @param want
+	 *            objects to be packed
+	 * @param have
+	 *            objects assumed to be present; used as uninteresting set
+	 * @param tagTargets
+	 *            objects pointed to by tags, passed to the writer as hints
+	 * @param excludeObjects
+	 *            indexes of packs whose objects must not be written again
+	 * @return the newly created pack, or {@code null} when no object had to
+	 *         be packed or the pack file could not be renamed into place
+	 * @throws IOException
+	 */
+	private PackFile writePack(Set<? extends ObjectId> want,
+			Set<? extends ObjectId> have, Set<ObjectId> tagTargets,
+			List<PackIndex> excludeObjects) throws IOException {
+		File tmpPack = null;
+		File tmpIdx = null;
+		PackWriter pw = new PackWriter(repo);
+		try {
+			// prepare the PackWriter
+			pw.setDeltaBaseAsOffset(true);
+			pw.setReuseDeltaCommits(false);
+			if (tagTargets != null)
+				pw.setTagTargets(tagTargets);
+			if (excludeObjects != null)
+				for (PackIndex idx : excludeObjects)
+					pw.excludeObjects(idx);
+			pw.preparePack(pm, want, have);
+			if (pw.getObjectCount() == 0)
+				return null;
+
+			// create temporary files
+			String id = pw.computeName().getName();
+			File packdir = new File(repo.getObjectsDirectory(), "pack");
+			tmpPack = File.createTempFile("gc_", ".pack_tmp", packdir);
+			tmpIdx = new File(packdir, tmpPack.getName().substring(0,
+					tmpPack.getName().lastIndexOf('.'))
+					+ ".idx_tmp");
+
+			if (!tmpIdx.createNewFile())
+				throw new IOException(MessageFormat.format(
+						JGitText.get().cannotCreateIndexfile, tmpIdx.getPath()));
+
+			// write the packfile
+			FileChannel channel = new FileOutputStream(tmpPack).getChannel();
+			OutputStream channelStream = Channels.newOutputStream(channel);
+			try {
+				pw.writePack(pm, pm, channelStream);
+			} finally {
+				channel.force(true);
+				channelStream.close();
+				channel.close();
+			}
+
+			// write the packindex
+			FileChannel idxChannel = new FileOutputStream(tmpIdx).getChannel();
+			OutputStream idxStream = Channels.newOutputStream(idxChannel);
+			try {
+				pw.writeIndex(idxStream);
+			} finally {
+				idxChannel.force(true);
+				idxStream.close();
+				idxChannel.close();
+			}
+
+			// rename the temporary files to real files
+			File realPack = nameFor(id, ".pack");
+			tmpPack.setReadOnly();
+			File realIdx = nameFor(id, ".idx");
+			// Fixed: mark the temporary index read-only, not the target
+			// file. realIdx does not exist yet, so setReadOnly() on it was
+			// a silent no-op and the final index was left writable.
+			tmpIdx.setReadOnly();
+			boolean delete = true;
+			try {
+				if (!tmpPack.renameTo(realPack))
+					return null;
+				delete = false;
+				if (!tmpIdx.renameTo(realIdx)) {
+					// The pack is already in place; losing the index now
+					// would corrupt the repository, so park it under a
+					// ".new" name and fail loudly.
+					File newIdx = new File(realIdx.getParentFile(),
+							realIdx.getName() + ".new");
+					if (!tmpIdx.renameTo(newIdx))
+						newIdx = tmpIdx;
+					throw new IOException(MessageFormat.format(
+							JGitText.get().panicCantRenameIndexFile, newIdx,
+							realIdx));
+				}
+			} finally {
+				if (delete && tmpPack.exists())
+					tmpPack.delete();
+				if (delete && tmpIdx.exists())
+					tmpIdx.delete();
+			}
+			return repo.getObjectDatabase().openPack(realPack, realIdx);
+		} finally {
+			pw.release();
+			if (tmpPack != null && tmpPack.exists())
+				tmpPack.delete();
+			if (tmpIdx != null && tmpIdx.exists())
+				tmpIdx.delete();
+		}
+	}
+
+ private File nameFor(String name, String ext) {
+ File packdir = new File(repo.getObjectsDirectory(), "pack");
+ return new File(packdir, "pack-" + name + ext);
+ }
+
+ /**
+ * A class holding statistical data for a FileRepository regarding how many
+ * objects are stored as loose or packed objects. Instances are produced by
+ * {@code getStatistics()}.
+ */
+ public class RepoStatistics {
+ /**
+ * The number of objects stored in pack files. If the same object is
+ * stored in multiple pack files then it is counted as often as it
+ * occurs in pack files.
+ */
+ public long numberOfPackedObjects;
+
+ /**
+ * The number of pack files in the objects/pack directory.
+ */
+ public long numberOfPackFiles;
+
+ /**
+ * The number of objects stored as loose objects.
+ */
+ public long numberOfLooseObjects;
+ }
+
+	/**
+	 * Gathers statistics about this repository: how many objects are stored
+	 * in pack files, how many pack files exist and how many objects are
+	 * stored loose. If an object is contained in multiple pack files it is
+	 * counted as often as it occurs.
+	 *
+	 * @return the repository statistics
+	 * @throws IOException
+	 *             when reading the pack indexes fails
+	 */
+	public RepoStatistics getStatistics() throws IOException {
+		RepoStatistics ret = new RepoStatistics();
+		Collection<PackFile> packs = repo.getObjectDatabase().getPacks();
+		for (PackFile f : packs)
+			ret.numberOfPackedObjects += f.getIndex().getObjectCount();
+		ret.numberOfPackFiles = packs.size();
+		File objDir = repo.getObjectsDirectory();
+		String[] fanout = objDir.list();
+		if (fanout != null && fanout.length > 0) {
+			for (String d : fanout) {
+				// fanout directories carry the first two hex digits of the
+				// object name
+				if (d.length() != 2)
+					continue;
+				String[] entries = new File(objDir, d).list();
+				if (entries == null)
+					continue;
+				for (String e : entries) {
+					// loose object files carry the remaining hex digits
+					if (e.length() != Constants.OBJECT_ID_STRING_LENGTH - 2)
+						continue;
+					ret.numberOfLooseObjects++;
+				}
+			}
+		}
+		return ret;
+	}
+
+	/**
+	 * Set the progress monitor used for garbage collection methods.
+	 *
+	 * @param pm
+	 *            the monitor to report progress to; {@code null} installs
+	 *            {@link NullProgressMonitor#INSTANCE}
+	 * @return this
+	 */
+	public GC setProgressMonitor(ProgressMonitor pm) {
+		if (pm == null)
+			this.pm = NullProgressMonitor.INSTANCE;
+		else
+			this.pm = pm;
+		return this;
+	}
+
+ /**
+ * During gc() or prune() each unreferenced, loose object which has been
+ * created or modified within the last <code>expireAgeMillis</code>
+ * milliseconds will not be pruned. Only older objects may be pruned. If
+ * set to 0 then every object is a candidate for pruning.
+ *
+ * @param expireAgeMillis
+ * minimal age of objects to be pruned in milliseconds.
+ */
+ public void setExpireAgeMillis(long expireAgeMillis) {
+ this.expireAgeMillis = expireAgeMillis;
+ }
+}
private final File packFile;
+ private File keepFile;
+
private volatile String packName;
final int hash;
return packFile;
}
+ /**
+ * Get the index for this pack file. Delegates to {@code idx()};
+ * NOTE(review): loading/caching semantics depend on idx(), which is not
+ * visible here -- confirm before relying on repeated-call cost.
+ *
+ * @return the index for this pack file.
+ * @throws IOException
+ */
+ public PackIndex getIndex() throws IOException {
+ return idx();
+ }
+
/** @return name extracted from {@code pack-*.pack} pattern. */
public String getPackName() {
String name = packName;
return 0 < offset && !isCorrupt(offset);
}
+ /**
+ * Determines whether a .keep file exists for this pack file.
+ *
+ * @return true if a .keep file exist.
+ */
+ public boolean shouldBeKept() {
+ if (keepFile == null)
+ keepFile = new File(packFile.getPath() + ".keep");
+ return keepFile.exists();
+ }
+
/**
* Get an object from this pack.
*
fireRefsChanged();
}
+ /**
+ * Adds a set of refs to the set of packed-refs. Only non-symbolic refs are
+ * added. If a ref with the given name already existed in packed-refs it is
+ * updated with the new value. Each loose ref which was added to the
+ * packed-ref file is deleted. If a given ref can't be locked it will not be
+ * added to the pack file.
+ *
+ * @param refs
+ * the refs to be added. Must be fully qualified.
+ * @throws IOException
+ */
+ public void pack(List<String> refs) throws IOException {
+ if (refs.size() == 0)
+ return;
+ FS fs = parent.getFS();
+
+ // Lock the packed refs file and read the content
+ LockFile lck = new LockFile(packedRefsFile, fs);
+ if (!lck.lock())
+ throw new IOException(MessageFormat.format(
+ JGitText.get().cannotLock, packedRefsFile));
+
+ try {
+ final PackedRefList packed = getPackedRefs();
+ RefList<Ref> cur = readPackedRefs();
+
+ // Iterate over all refs to be packed
+ for (String refName : refs) {
+ Ref ref = readRef(refName, cur);
+ if (ref.isSymbolic())
+ continue; // can't pack symbolic refs
+ // Add/Update it to packed-refs
+ // (negative find() result presumably encodes the insertion
+ // point consumed by add() -- confirm in RefList)
+ int idx = cur.find(refName);
+ if (idx >= 0)
+ cur = cur.set(idx, peeledPackedRef(ref));
+ else
+ cur = cur.add(idx, peeledPackedRef(ref));
+ }
+
+ // The new content for packed-refs is collected. Persist it.
+ commitPackedRefs(lck, cur, packed);
+
+ // Now delete the loose refs which are now packed. Each loose ref
+ // is locked individually; refs that can't be locked are simply
+ // left loose.
+ for (String refName : refs) {
+ // Lock the loose ref
+ File refFile = fileFor(refName);
+ if (!refFile.exists())
+ continue;
+ LockFile rLck = new LockFile(refFile,
+ parent.getFS());
+ if (!rLck.lock())
+ continue;
+ try {
+ LooseRef currentLooseRef = scanRef(null, refName);
+ if (currentLooseRef == null || currentLooseRef.isSymbolic())
+ continue;
+ Ref packedRef = cur.get(refName);
+ ObjectId clr_oid = currentLooseRef.getObjectId();
+ // Only delete the loose ref if it still points at the
+ // value we just packed; otherwise a concurrent update
+ // would be lost.
+ if (clr_oid != null
+ && clr_oid.equals(packedRef.getObjectId())) {
+ // CAS-loop: drop the ref from the in-memory loose
+ // list, retrying if another thread changed it.
+ RefList<LooseRef> curLoose, newLoose;
+ do {
+ curLoose = looseRefs.get();
+ int idx = curLoose.find(refName);
+ if (idx < 0)
+ break;
+ newLoose = curLoose.remove(idx);
+ } while (!looseRefs.compareAndSet(curLoose, newLoose));
+ int levels = levelsIn(refName) - 2;
+ delete(fileFor(refName), levels);
+ }
+ } finally {
+ rLck.unlock();
+ }
+ }
+ // Don't fire refsChanged. The refs have not change, only their
+ // storage.
+ } finally {
+ lck.unlock();
+ }
+ }
+
+ /**
+ * Make sure a ref is peeled and has the Storage PACKED. If the given ref
+ * has this attributes simply return it. Otherwise create a new peeled
+ * {@link ObjectIdRef} where Storage is set to PACKED.
+ *
+ * @param f
+ * @return a ref for Storage PACKED having the same name, id, peeledId as f
+ * @throws MissingObjectException
+ * @throws IOException
+ */
+ private Ref peeledPackedRef(Ref f)
+ throws MissingObjectException, IOException {
+ if (f.getStorage().isPacked() && f.isPeeled())
+ return f;
+ if (!f.isPeeled())
+ f = peel(f);
+ if (f.getPeeledObjectId() != null)
+ return new ObjectIdRef.PeeledTag(PACKED, f.getName(),
+ f.getObjectId(), f.getPeeledObjectId());
+ else
+ return new ObjectIdRef.PeeledNonTag(PACKED, f.getName(),
+ f.getObjectId());
+ }
+
void log(final RefUpdate update, final String msg, final boolean deref)
throws IOException {
logWriter.log(update, msg, deref);