package org.eclipse.jgit.pgm;
import java.io.BufferedInputStream;
-import java.io.File;
-import org.kohsuke.args4j.Argument;
-import org.kohsuke.args4j.Option;
-import org.eclipse.jgit.lib.CoreConfig;
+import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.TextProgressMonitor;
+import org.eclipse.jgit.storage.file.ObjectDirectoryPackParser;
+import org.eclipse.jgit.transport.PackParser;
+import org.kohsuke.args4j.Option;
class IndexPack extends TextBuiltin {
@Option(name = "--fix-thin", usage = "usage_fixAThinPackToBeComplete")
@Option(name = "--index-version", usage = "usage_indexFileFormatToCreate")
private int indexVersion = -1;
- @Argument(index = 0, required = true, metaVar = "metaVar_base")
- private File base;
-
@Override
protected void run() throws Exception {
- if (indexVersion == -1)
- indexVersion = db.getConfig().get(CoreConfig.KEY)
- .getPackIndexVersion();
- final BufferedInputStream in;
- final org.eclipse.jgit.transport.IndexPack ip;
- in = new BufferedInputStream(System.in);
- ip = new org.eclipse.jgit.transport.IndexPack(db, in, base);
- ip.setFixThin(fixThin);
- ip.setIndexVersion(indexVersion);
- ip.index(new TextProgressMonitor());
+ BufferedInputStream in = new BufferedInputStream(System.in);
+ ObjectInserter inserter = db.newObjectInserter();
+ try {
+ PackParser p = inserter.newPackParser(in);
+ p.setAllowThin(fixThin);
+ if (indexVersion != -1 && p instanceof ObjectDirectoryPackParser) {
+ ObjectDirectoryPackParser imp = (ObjectDirectoryPackParser) p;
+ imp.setIndexVersion(indexVersion);
+ }
+ p.parse(new TextProgressMonitor());
+ inserter.flush();
+ } finally {
+ inserter.release();
+ }
}
}
import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.revwalk.RevBlob;
import org.eclipse.jgit.storage.pack.DeltaEncoder;
-import org.eclipse.jgit.transport.IndexPack;
+import org.eclipse.jgit.transport.PackParser;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.NB;
import org.eclipse.jgit.util.TemporaryBuffer;
deflate(pack, delta3);
digest(pack);
- final byte[] raw = pack.toByteArray();
- IndexPack ip = IndexPack.create(repo, new ByteArrayInputStream(raw));
- ip.setFixThin(true);
- ip.index(NullProgressMonitor.INSTANCE);
- ip.renameAndOpenPack();
+ PackParser ip = index(pack.toByteArray());
+ ip.setAllowThin(true);
+ ip.parse(NullProgressMonitor.INSTANCE);
assertTrue("has blob", wc.has(id3));
deflate(pack, delta3);
digest(pack);
- final byte[] raw = pack.toByteArray();
- IndexPack ip = IndexPack.create(repo, new ByteArrayInputStream(raw));
- ip.setFixThin(true);
- ip.index(NullProgressMonitor.INSTANCE);
- ip.renameAndOpenPack();
+ PackParser ip = index(pack.toByteArray());
+ ip.setAllowThin(true);
+ ip.parse(NullProgressMonitor.INSTANCE);
assertTrue("has blob", wc.has(id3));
md.update(buf.toByteArray());
buf.write(md.digest());
}
+
+ private ObjectInserter inserter;
+
+ @After
+ public void release() {
+ if (inserter != null)
+ inserter.release();
+ }
+
+ private PackParser index(byte[] raw) throws IOException {
+ if (inserter == null)
+ inserter = repo.newObjectInserter();
+ return inserter.newPackParser(new ByteArrayInputStream(raw));
+ }
}
package org.eclipse.jgit.storage.file;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
-import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import org.eclipse.jgit.junit.JGitTestUtil;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.SampleDataRepositoryTestCase;
-import org.eclipse.jgit.lib.TextProgressMonitor;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.file.PackIndex.MutableEntry;
import org.eclipse.jgit.storage.pack.PackConfig;
import org.eclipse.jgit.storage.pack.PackWriter;
-import org.eclipse.jgit.transport.IndexPack;
+import org.eclipse.jgit.transport.PackParser;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
private ByteArrayOutputStream os;
- private File packBase;
-
- private File packFile;
+ private PackFile pack;
- private File indexFile;
+ private ObjectInserter inserter;
- private PackFile pack;
+ private FileRepository dst;
@Before
public void setUp() throws Exception {
super.setUp();
os = new ByteArrayOutputStream();
- packBase = new File(trash, "tmp_pack");
- packFile = new File(trash, "tmp_pack.pack");
- indexFile = new File(trash, "tmp_pack.idx");
config = new PackConfig(db);
+
+ dst = createBareRepository();
+ File alt = new File(dst.getObjectDatabase().getDirectory(), "info/alternates");
+ alt.getParentFile().mkdirs();
+ write(alt, db.getObjectDatabase().getDirectory().getAbsolutePath() + "\n");
}
@After
public void tearDown() throws Exception {
- if (writer != null)
+ if (writer != null) {
writer.release();
+ writer = null;
+ }
+ if (inserter != null) {
+ inserter.release();
+ inserter = null;
+ }
super.tearDown();
}
config.setIndexVersion(2);
writeVerifyPack4(false);
+ File packFile = pack.getPackFile();
+ String name = packFile.getName();
+ String base = name.substring(0, name.lastIndexOf('.'));
+ File indexFile = new File(packFile.getParentFile(), base + ".idx");
+
// Validate that IndexPack came up with the right CRC32 value.
final PackIndex idx1 = PackIndex.open(indexFile);
assertTrue(idx1 instanceof PackIndexV2);
}
private void verifyOpenPack(final boolean thin) throws IOException {
+ final byte[] packData = os.toByteArray();
+
if (thin) {
- final InputStream is = new ByteArrayInputStream(os.toByteArray());
- final IndexPack indexer = new IndexPack(db, is, packBase);
+ PackParser p = index(packData);
try {
- indexer.index(new TextProgressMonitor());
+ p.parse(NullProgressMonitor.INSTANCE);
fail("indexer should grumble about missing object");
} catch (IOException x) {
// expected
}
}
- final InputStream is = new ByteArrayInputStream(os.toByteArray());
- final IndexPack indexer = new IndexPack(db, is, packBase);
- indexer.setKeepEmpty(true);
- indexer.setFixThin(thin);
- indexer.setIndexVersion(2);
- indexer.index(new TextProgressMonitor());
- pack = new PackFile(indexFile, packFile);
+
+ ObjectDirectoryPackParser p = (ObjectDirectoryPackParser) index(packData);
+ p.setKeepEmpty(true);
+ p.setAllowThin(thin);
+ p.setIndexVersion(2);
+ p.parse(NullProgressMonitor.INSTANCE);
+ pack = p.getPackFile();
+ assertNotNull("have PackFile after parsing", pack);
+ }
+
+ private PackParser index(final byte[] packData) throws IOException {
+ if (inserter == null)
+ inserter = dst.newObjectInserter();
+ return inserter.newPackParser(new ByteArrayInputStream(packData));
}
private void verifyObjectsOrder(final ObjectId objectsOrder[]) {
+++ /dev/null
-/*
- * Copyright (C) 2009, Google Inc.
- * Copyright (C) 2008, Imran M Yousuf <imyousuf@smartitengineering.com>
- * Copyright (C) 2007-2008, Robin Rosenberg <robin.rosenberg@dewire.com>
- * Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- * copyright notice, this list of conditions and the following
- * disclaimer in the documentation and/or other materials provided
- * with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- * names of its contributors may be used to endorse or promote
- * products derived from this software without specific prior
- * written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.transport;
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.MessageDigest;
-import java.util.zip.Deflater;
-
-import org.eclipse.jgit.junit.JGitTestUtil;
-import org.eclipse.jgit.junit.TestRepository;
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.NullProgressMonitor;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.Repository;
-import org.eclipse.jgit.lib.RepositoryTestCase;
-import org.eclipse.jgit.lib.TextProgressMonitor;
-import org.eclipse.jgit.revwalk.RevBlob;
-import org.eclipse.jgit.storage.file.PackFile;
-import org.eclipse.jgit.util.NB;
-import org.eclipse.jgit.util.TemporaryBuffer;
-import org.junit.Test;
-
-/**
- * Test indexing of git packs. A pack is read from a stream, copied
- * to a new pack and an index is created. Then the packs are tested
- * to make sure they contain the expected objects (well we don't test
- * for all of them unless the packs are very small).
- */
-public class IndexPackTest extends RepositoryTestCase {
-
- /**
- * Test indexing one of the test packs in the egit repo. It has deltas.
- *
- * @throws IOException
- */
- @Test
- public void test1() throws IOException {
- File packFile = JGitTestUtil.getTestResourceFile("pack-34be9032ac282b11fa9babdc2b2a93ca996c9c2f.pack");
- final InputStream is = new FileInputStream(packFile);
- try {
- IndexPack pack = new IndexPack(db, is, new File(trash, "tmp_pack1"));
- pack.index(new TextProgressMonitor());
- PackFile file = new PackFile(new File(trash, "tmp_pack1.idx"), new File(trash, "tmp_pack1.pack"));
- assertTrue(file.hasObject(ObjectId.fromString("4b825dc642cb6eb9a060e54bf8d69288fbee4904")));
- assertTrue(file.hasObject(ObjectId.fromString("540a36d136cf413e4b064c2b0e0a4db60f77feab")));
- assertTrue(file.hasObject(ObjectId.fromString("5b6e7c66c276e7610d4a73c70ec1a1f7c1003259")));
- assertTrue(file.hasObject(ObjectId.fromString("6ff87c4664981e4397625791c8ea3bbb5f2279a3")));
- assertTrue(file.hasObject(ObjectId.fromString("82c6b885ff600be425b4ea96dee75dca255b69e7")));
- assertTrue(file.hasObject(ObjectId.fromString("902d5476fa249b7abc9d84c611577a81381f0327")));
- assertTrue(file.hasObject(ObjectId.fromString("aabf2ffaec9b497f0950352b3e582d73035c2035")));
- assertTrue(file.hasObject(ObjectId.fromString("c59759f143fb1fe21c197981df75a7ee00290799")));
- } finally {
- is.close();
- }
- }
-
- /**
- * This is just another pack. It so happens that we have two convenient pack to
- * test with in the repository.
- *
- * @throws IOException
- */
- @Test
- public void test2() throws IOException {
- File packFile = JGitTestUtil.getTestResourceFile("pack-df2982f284bbabb6bdb59ee3fcc6eb0983e20371.pack");
- final InputStream is = new FileInputStream(packFile);
- try {
- IndexPack pack = new IndexPack(db, is, new File(trash, "tmp_pack2"));
- pack.index(new TextProgressMonitor());
- PackFile file = new PackFile(new File(trash, "tmp_pack2.idx"), new File(trash, "tmp_pack2.pack"));
- assertTrue(file.hasObject(ObjectId.fromString("02ba32d3649e510002c21651936b7077aa75ffa9")));
- assertTrue(file.hasObject(ObjectId.fromString("0966a434eb1a025db6b71485ab63a3bfbea520b6")));
- assertTrue(file.hasObject(ObjectId.fromString("09efc7e59a839528ac7bda9fa020dc9101278680")));
- assertTrue(file.hasObject(ObjectId.fromString("0a3d7772488b6b106fb62813c4d6d627918d9181")));
- assertTrue(file.hasObject(ObjectId.fromString("1004d0d7ac26fbf63050a234c9b88a46075719d3")));
- assertTrue(file.hasObject(ObjectId.fromString("10da5895682013006950e7da534b705252b03be6")));
- assertTrue(file.hasObject(ObjectId.fromString("1203b03dc816ccbb67773f28b3c19318654b0bc8")));
- assertTrue(file.hasObject(ObjectId.fromString("15fae9e651043de0fd1deef588aa3fbf5a7a41c6")));
- assertTrue(file.hasObject(ObjectId.fromString("16f9ec009e5568c435f473ba3a1df732d49ce8c3")));
- assertTrue(file.hasObject(ObjectId.fromString("1fd7d579fb6ae3fe942dc09c2c783443d04cf21e")));
- assertTrue(file.hasObject(ObjectId.fromString("20a8ade77639491ea0bd667bf95de8abf3a434c8")));
- assertTrue(file.hasObject(ObjectId.fromString("2675188fd86978d5bc4d7211698b2118ae3bf658")));
- // and lots more...
- } finally {
- is.close();
- }
- }
-
- @Test
- public void testTinyThinPack() throws Exception {
- TestRepository d = new TestRepository(db);
- RevBlob a = d.blob("a");
-
- TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(1024);
-
- packHeader(pack, 1);
-
- pack.write((Constants.OBJ_REF_DELTA) << 4 | 4);
- a.copyRawTo(pack);
- deflate(pack, new byte[] { 0x1, 0x1, 0x1, 'b' });
-
- digest(pack);
-
- final byte[] raw = pack.toByteArray();
- IndexPack ip = IndexPack.create(db, new ByteArrayInputStream(raw));
- ip.setFixThin(true);
- ip.index(NullProgressMonitor.INSTANCE);
- ip.renameAndOpenPack();
- }
-
- @Test
- public void testPackWithDuplicateBlob() throws Exception {
- final byte[] data = Constants.encode("0123456789abcdefg");
- TestRepository<Repository> d = new TestRepository<Repository>(db);
- assertTrue(db.hasObject(d.blob(data)));
-
- TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(1024);
- packHeader(pack, 1);
- pack.write((Constants.OBJ_BLOB) << 4 | 0x80 | 1);
- pack.write(1);
- deflate(pack, data);
- digest(pack);
-
- final byte[] raw = pack.toByteArray();
- IndexPack ip = IndexPack.create(db, new ByteArrayInputStream(raw));
- ip.index(NullProgressMonitor.INSTANCE);
- ip.renameAndOpenPack();
- }
-
- private void packHeader(TemporaryBuffer.Heap tinyPack, int cnt)
- throws IOException {
- final byte[] hdr = new byte[8];
- NB.encodeInt32(hdr, 0, 2);
- NB.encodeInt32(hdr, 4, cnt);
-
- tinyPack.write(Constants.PACK_SIGNATURE);
- tinyPack.write(hdr, 0, 8);
- }
-
- private void deflate(TemporaryBuffer.Heap tinyPack, final byte[] content)
- throws IOException {
- final Deflater deflater = new Deflater();
- final byte[] buf = new byte[128];
- deflater.setInput(content, 0, content.length);
- deflater.finish();
- do {
- final int n = deflater.deflate(buf, 0, buf.length);
- if (n > 0)
- tinyPack.write(buf, 0, n);
- } while (!deflater.finished());
- }
-
- private void digest(TemporaryBuffer.Heap buf) throws IOException {
- MessageDigest md = Constants.newMessageDigest();
- md.update(buf.toByteArray());
- buf.write(md.digest());
- }
-}
--- /dev/null
+/*
+ * Copyright (C) 2009, Google Inc.
+ * Copyright (C) 2008, Imran M Yousuf <imyousuf@smartitengineering.com>
+ * Copyright (C) 2007-2008, Robin Rosenberg <robin.rosenberg@dewire.com>
+ * Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.transport;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.MessageDigest;
+import java.util.zip.Deflater;
+
+import org.eclipse.jgit.junit.JGitTestUtil;
+import org.eclipse.jgit.junit.TestRepository;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.NullProgressMonitor;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectInserter;
+import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.RepositoryTestCase;
+import org.eclipse.jgit.revwalk.RevBlob;
+import org.eclipse.jgit.storage.file.ObjectDirectoryPackParser;
+import org.eclipse.jgit.storage.file.PackFile;
+import org.eclipse.jgit.util.NB;
+import org.eclipse.jgit.util.TemporaryBuffer;
+import org.junit.After;
+import org.junit.Test;
+
+/**
+ * Test indexing of git packs. A pack is read from a stream, copied
+ * to a new pack and an index is created. Then the packs are tested
+ * to make sure they contain the expected objects (well we don't test
+ * for all of them unless the packs are very small).
+ */
+public class PackParserTest extends RepositoryTestCase {
+ /**
+ * Test indexing one of the test packs in the egit repo. It has deltas.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void test1() throws IOException {
+ File packFile = JGitTestUtil.getTestResourceFile("pack-34be9032ac282b11fa9babdc2b2a93ca996c9c2f.pack");
+ final InputStream is = new FileInputStream(packFile);
+ try {
+ ObjectDirectoryPackParser p = (ObjectDirectoryPackParser) index(is);
+ p.parse(NullProgressMonitor.INSTANCE);
+ PackFile file = p.getPackFile();
+
+ assertTrue(file.hasObject(ObjectId.fromString("4b825dc642cb6eb9a060e54bf8d69288fbee4904")));
+ assertTrue(file.hasObject(ObjectId.fromString("540a36d136cf413e4b064c2b0e0a4db60f77feab")));
+ assertTrue(file.hasObject(ObjectId.fromString("5b6e7c66c276e7610d4a73c70ec1a1f7c1003259")));
+ assertTrue(file.hasObject(ObjectId.fromString("6ff87c4664981e4397625791c8ea3bbb5f2279a3")));
+ assertTrue(file.hasObject(ObjectId.fromString("82c6b885ff600be425b4ea96dee75dca255b69e7")));
+ assertTrue(file.hasObject(ObjectId.fromString("902d5476fa249b7abc9d84c611577a81381f0327")));
+ assertTrue(file.hasObject(ObjectId.fromString("aabf2ffaec9b497f0950352b3e582d73035c2035")));
+ assertTrue(file.hasObject(ObjectId.fromString("c59759f143fb1fe21c197981df75a7ee00290799")));
+ } finally {
+ is.close();
+ }
+ }
+
+ /**
+ * This is just another pack. It so happens that we have two convenient pack to
+ * test with in the repository.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void test2() throws IOException {
+ File packFile = JGitTestUtil.getTestResourceFile("pack-df2982f284bbabb6bdb59ee3fcc6eb0983e20371.pack");
+ final InputStream is = new FileInputStream(packFile);
+ try {
+ ObjectDirectoryPackParser p = (ObjectDirectoryPackParser) index(is);
+ p.parse(NullProgressMonitor.INSTANCE);
+ PackFile file = p.getPackFile();
+
+ assertTrue(file.hasObject(ObjectId.fromString("02ba32d3649e510002c21651936b7077aa75ffa9")));
+ assertTrue(file.hasObject(ObjectId.fromString("0966a434eb1a025db6b71485ab63a3bfbea520b6")));
+ assertTrue(file.hasObject(ObjectId.fromString("09efc7e59a839528ac7bda9fa020dc9101278680")));
+ assertTrue(file.hasObject(ObjectId.fromString("0a3d7772488b6b106fb62813c4d6d627918d9181")));
+ assertTrue(file.hasObject(ObjectId.fromString("1004d0d7ac26fbf63050a234c9b88a46075719d3")));
+ assertTrue(file.hasObject(ObjectId.fromString("10da5895682013006950e7da534b705252b03be6")));
+ assertTrue(file.hasObject(ObjectId.fromString("1203b03dc816ccbb67773f28b3c19318654b0bc8")));
+ assertTrue(file.hasObject(ObjectId.fromString("15fae9e651043de0fd1deef588aa3fbf5a7a41c6")));
+ assertTrue(file.hasObject(ObjectId.fromString("16f9ec009e5568c435f473ba3a1df732d49ce8c3")));
+ assertTrue(file.hasObject(ObjectId.fromString("1fd7d579fb6ae3fe942dc09c2c783443d04cf21e")));
+ assertTrue(file.hasObject(ObjectId.fromString("20a8ade77639491ea0bd667bf95de8abf3a434c8")));
+ assertTrue(file.hasObject(ObjectId.fromString("2675188fd86978d5bc4d7211698b2118ae3bf658")));
+ // and lots more...
+ } finally {
+ is.close();
+ }
+ }
+
+ @Test
+ public void testTinyThinPack() throws Exception {
+ TestRepository d = new TestRepository(db);
+ RevBlob a = d.blob("a");
+
+ TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(1024);
+
+ packHeader(pack, 1);
+
+ pack.write((Constants.OBJ_REF_DELTA) << 4 | 4);
+ a.copyRawTo(pack);
+ deflate(pack, new byte[] { 0x1, 0x1, 0x1, 'b' });
+
+ digest(pack);
+
+ PackParser p = index(new ByteArrayInputStream(pack.toByteArray()));
+ p.setAllowThin(true);
+ p.parse(NullProgressMonitor.INSTANCE);
+ }
+
+ @Test
+ public void testPackWithDuplicateBlob() throws Exception {
+ final byte[] data = Constants.encode("0123456789abcdefg");
+ TestRepository<Repository> d = new TestRepository<Repository>(db);
+ assertTrue(db.hasObject(d.blob(data)));
+
+ TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(1024);
+ packHeader(pack, 1);
+ pack.write((Constants.OBJ_BLOB) << 4 | 0x80 | 1);
+ pack.write(1);
+ deflate(pack, data);
+ digest(pack);
+
+ PackParser p = index(new ByteArrayInputStream(pack.toByteArray()));
+ p.setAllowThin(false);
+ p.parse(NullProgressMonitor.INSTANCE);
+ }
+
+ private void packHeader(TemporaryBuffer.Heap tinyPack, int cnt)
+ throws IOException {
+ final byte[] hdr = new byte[8];
+ NB.encodeInt32(hdr, 0, 2);
+ NB.encodeInt32(hdr, 4, cnt);
+
+ tinyPack.write(Constants.PACK_SIGNATURE);
+ tinyPack.write(hdr, 0, 8);
+ }
+
+ private void deflate(TemporaryBuffer.Heap tinyPack, final byte[] content)
+ throws IOException {
+ final Deflater deflater = new Deflater();
+ final byte[] buf = new byte[128];
+ deflater.setInput(content, 0, content.length);
+ deflater.finish();
+ do {
+ final int n = deflater.deflate(buf, 0, buf.length);
+ if (n > 0)
+ tinyPack.write(buf, 0, n);
+ } while (!deflater.finished());
+ }
+
+ private void digest(TemporaryBuffer.Heap buf) throws IOException {
+ MessageDigest md = Constants.newMessageDigest();
+ md.update(buf.toByteArray());
+ buf.write(md.digest());
+ }
+
+ private ObjectInserter inserter;
+
+ @After
+ public void release() {
+ if (inserter != null)
+ inserter.release();
+ }
+
+ private PackParser index(InputStream in) throws IOException {
+ if (inserter == null)
+ inserter = db.newObjectInserter();
+ return inserter.newPackParser(in);
+ }
+}
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
buf.write(md.digest());
}
+ private ObjectInserter inserter;
+
+ @After
+ public void release() {
+ if (inserter != null)
+ inserter.release();
+ }
+
private void openPack(TemporaryBuffer.Heap buf) throws IOException {
+ if (inserter == null)
+ inserter = src.newObjectInserter();
+
final byte[] raw = buf.toByteArray();
- IndexPack ip = IndexPack.create(src, new ByteArrayInputStream(raw));
- ip.setFixThin(true);
- ip.index(PM);
- ip.renameAndOpenPack();
+ PackParser p = inserter.newPackParser(new ByteArrayInputStream(raw));
+ p.setAllowThin(true);
+ p.parse(PM);
}
private static PacketLineIn asPacketLineIn(TemporaryBuffer.Heap buf)
/**
* @return the preferred pack index file format; 0 for oldest possible.
- * @see org.eclipse.jgit.transport.IndexPack
*/
public int getPackIndexVersion() {
return packIndexVersion;
import java.io.InputStream;
import java.security.MessageDigest;
+import org.eclipse.jgit.transport.PackParser;
+
/**
* Inserts objects into an existing {@code ObjectDatabase}.
* <p>
throw new UnsupportedOperationException();
}
+ @Override
+ public PackParser newPackParser(InputStream in) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
@Override
public void flush() throws IOException {
// Do nothing.
public abstract ObjectId insert(int objectType, long length, InputStream in)
throws IOException;
+ /**
+ * Initialize a parser to read from a pack formatted stream.
+ *
+ * @param in
+ * the input stream. The stream is not closed by the parser, and
+ * must instead be closed by the caller once parsing is complete.
+	 * @return the parser instance, which can be configured and then used to
+	 *         parse objects into the ObjectDatabase.
+	 * @throws IOException
+	 *             the parser could not be created.
+ */
+ public abstract PackParser newPackParser(InputStream in) throws IOException;
+
/**
* Make all inserted objects visible.
* <p>
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.storage.pack.ObjectToPack;
import org.eclipse.jgit.storage.pack.PackWriter;
+import org.eclipse.jgit.util.FS;
/**
* The cached instance of an {@link ObjectDirectory}.
return wrapped.getConfig();
}
+ @Override
+ FS getFS() {
+ return wrapped.getFS();
+ }
+
@Override
AlternateHandle[] myAlternates() {
if (alts == null) {
return result;
}
+ @Override
+ PackFile openPack(File pack, File idx) throws IOException {
+ return wrapped.openPack(pack, idx);
+ }
+
@Override
void selectObjectRepresentation(PackWriter packer, ObjectToPack otp,
WindowCursor curs) throws IOException {
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.storage.pack.ObjectToPack;
import org.eclipse.jgit.storage.pack.PackWriter;
+import org.eclipse.jgit.util.FS;
abstract class FileObjectDatabase extends ObjectDatabase {
static enum InsertLooseObjectResult {
abstract Config getConfig();
+ abstract FS getFS();
+
/**
* Open an object from this database.
* <p>
abstract InsertLooseObjectResult insertUnpackedObject(File tmp,
ObjectId id, boolean createDuplicate) throws IOException;
+ abstract PackFile openPack(File pack, File idx) throws IOException;
+
abstract FileObjectDatabase newCachedFileObjectDatabase();
static class AlternateHandle {
* path of the pack file to open.
* @param idx
* path of the corresponding index file.
+ * @return the pack that was opened and added to the database.
* @throws IOException
* index file could not be opened, read, or is not recognized as
* a Git pack file index.
*/
- public void openPack(final File pack, final File idx) throws IOException {
+ public PackFile openPack(final File pack, final File idx)
+ throws IOException {
final String p = pack.getName();
final String i = idx.getName();
if (!p.substring(0, 45).equals(i.substring(0, 45)))
throw new IOException(MessageFormat.format(JGitText.get().packDoesNotMatchIndex, pack));
- insertPack(new PackFile(idx, pack));
+ PackFile res = new PackFile(idx, pack);
+ insertPack(res);
+ return res;
}
@Override
return config;
}
+ @Override
+ FS getFS() {
+ return fs;
+ }
+
private void insertPack(final PackFile pf) {
PackList o, n;
do {
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
+import org.eclipse.jgit.transport.PackParser;
import org.eclipse.jgit.util.FileUtils;
/** Creates loose objects in a {@link ObjectDirectory}. */
throw new ObjectWritingException("Unable to create new object: " + dst);
}
+ @Override
+ public PackParser newPackParser(InputStream in) throws IOException {
+ return new ObjectDirectoryPackParser(db, in);
+ }
+
@Override
public void flush() throws IOException {
// Do nothing. Objects are immediately visible.
--- /dev/null
+/*
+ * Copyright (C) 2008-2011, Google Inc.
+ * Copyright (C) 2007-2008, Robin Rosenberg <robin.rosenberg@dewire.com>
+ * Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.storage.file;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.RandomAccessFile;
+import java.security.MessageDigest;
+import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.List;
+import java.util.zip.CRC32;
+import java.util.zip.Deflater;
+
+import org.eclipse.jgit.JGitText;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.CoreConfig;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectInserter;
+import org.eclipse.jgit.lib.ProgressMonitor;
+import org.eclipse.jgit.transport.PackParser;
+import org.eclipse.jgit.transport.PackedObjectInfo;
+import org.eclipse.jgit.util.FileUtils;
+import org.eclipse.jgit.util.NB;
+
+/**
+ * Consumes a pack stream and stores as a pack file in {@link ObjectDirectory}.
+ * <p>
+ * To obtain an instance of a parser, applications should use
+ * {@link ObjectInserter#newPackParser(InputStream)}.
+ */
+public class ObjectDirectoryPackParser extends PackParser {
+ private final FileObjectDatabase db;
+
+ /** CRC-32 computation for objects that are appended onto the pack. */
+ private final CRC32 crc;
+
+ /** Running SHA-1 of any base objects appended after {@link #origEnd}. */
+ private final MessageDigest tailDigest;
+
+ /** Preferred format version of the pack-*.idx file to generate. */
+ private int indexVersion;
+
+ /** If true, pack with 0 objects will be stored. Usually these are deleted. */
+ private boolean keepEmpty;
+
+ /** Path of the temporary file holding the pack data. */
+ private File tmpPack;
+
+ /**
+ * Path of the index created for the pack, to find objects quickly at read
+ * time.
+ */
+ private File tmpIdx;
+
+ /** Read/write handle to {@link #tmpPack} while it is being parsed. */
+ private RandomAccessFile out;
+
+ /** Length of the original pack stream, before missing bases were appended. */
+ private long origEnd;
+
+ /** The original checksum of data up to {@link #origEnd}. */
+ private byte[] origHash;
+
+ /** Current end of the pack file. */
+ private long packEnd;
+
+ /** Checksum of the entire pack file. */
+ private byte[] packHash;
+
+ /** Compresses delta bases when completing a thin pack. */
+ private Deflater def;
+
+ /** The pack that was created, if parsing was successful. */
+ private PackFile newPack;
+
+ ObjectDirectoryPackParser(FileObjectDatabase odb, InputStream src) {
+ super(odb, src);
+ this.db = odb;
+ this.crc = new CRC32();
+ this.tailDigest = Constants.newMessageDigest();
+
+ indexVersion = db.getConfig().get(CoreConfig.KEY).getPackIndexVersion();
+ }
+
+ /**
+ * Set the pack index file format version this instance will create.
+ *
+ * @param version
+ * the version to write. The special version 0 designates the
+ * oldest (most compatible) format available for the objects.
+ * @see PackIndexWriter
+ */
+ public void setIndexVersion(int version) {
+ indexVersion = version;
+ }
+
+ /**
+ * Configure this index pack instance to keep an empty pack.
+ * <p>
+ * By default an empty pack (a pack with no objects) is not kept, as doing so
+ * is completely pointless. With no objects in the pack there is no data stored
+ * by it, so the pack is unnecessary.
+ *
+ * @param empty
+ * true to enable keeping an empty pack.
+ */
+ public void setKeepEmpty(final boolean empty) {
+ keepEmpty = empty;
+ }
+
+ /**
+ * Get the imported {@link PackFile}.
+ * <p>
+ * This method is supplied only to support testing; applications shouldn't
+ * be using it directly to access the imported data.
+ *
+ * @return the imported PackFile, if parsing was successful.
+ */
+ public PackFile getPackFile() {
+ return newPack;
+ }
+
+ @Override
+ public PackLock parse(ProgressMonitor progress) throws IOException {
+ tmpPack = File.createTempFile("incoming_", ".pack", db.getDirectory());
+ tmpIdx = new File(db.getDirectory(), baseName(tmpPack) + ".idx");
+ try {
+ out = new RandomAccessFile(tmpPack, "rw");
+
+ super.parse(progress);
+
+ out.seek(packEnd);
+ out.write(packHash);
+ out.getChannel().force(true);
+ out.close();
+
+ writeIdx();
+
+ tmpPack.setReadOnly();
+ tmpIdx.setReadOnly();
+
+ return renameAndOpenPack(getLockMessage());
+ } finally {
+ if (def != null)
+ def.end();
+ try {
+ if (out != null && out.getChannel().isOpen())
+ out.close();
+ } catch (IOException closeError) {
+ // Ignored. We want to delete the file.
+ }
+ cleanupTemporaryFiles();
+ }
+ }
+
+ @Override
+ protected void onBeginWholeObject(long streamPosition, int type,
+ long inflatedSize) throws IOException {
+ crc.reset();
+ }
+
+ @Override
+ protected void onEndWholeObject(PackedObjectInfo info) throws IOException {
+ info.setCRC((int) crc.getValue());
+ }
+
+ @Override
+ protected void onBeginOfsDelta(long streamPosition,
+ long baseStreamPosition, long inflatedSize) throws IOException {
+ crc.reset();
+ }
+
+ @Override
+ protected void onBeginRefDelta(long streamPosition, AnyObjectId baseId,
+ long inflatedSize) throws IOException {
+ crc.reset();
+ }
+
+ @Override
+ protected UnresolvedDelta onEndDelta() throws IOException {
+ UnresolvedDelta delta = new UnresolvedDelta();
+ delta.setCRC((int) crc.getValue());
+ return delta;
+ }
+
+ @Override
+ protected void onObjectHeader(Source src, byte[] raw, int pos, int len)
+ throws IOException {
+ crc.update(raw, pos, len);
+ }
+
+ @Override
+ protected void onObjectData(Source src, byte[] raw, int pos, int len)
+ throws IOException {
+ crc.update(raw, pos, len);
+ }
+
+ @Override
+ protected void onStoreStream(byte[] raw, int pos, int len)
+ throws IOException {
+ out.write(raw, pos, len);
+ }
+
+ @Override
+ protected void onPackFooter(byte[] hash) throws IOException {
+ packEnd = out.getFilePointer();
+ origEnd = packEnd;
+ origHash = hash;
+ packHash = hash;
+ }
+
+ @Override
+ protected ObjectTypeAndSize seekDatabase(UnresolvedDelta delta,
+ ObjectTypeAndSize info) throws IOException {
+ out.seek(delta.getOffset());
+ crc.reset();
+ return readObjectHeader(info);
+ }
+
+ @Override
+ protected ObjectTypeAndSize seekDatabase(PackedObjectInfo obj,
+ ObjectTypeAndSize info) throws IOException {
+ out.seek(obj.getOffset());
+ crc.reset();
+ return readObjectHeader(info);
+ }
+
+ @Override
+ protected int readDatabase(byte[] dst, int pos, int cnt) throws IOException {
+ return out.read(dst, pos, cnt);
+ }
+
+ @Override
+ protected boolean checkCRC(int oldCRC) {
+ return oldCRC == (int) crc.getValue();
+ }
+
+ private static String baseName(File tmpPack) {
+ String name = tmpPack.getName();
+ return name.substring(0, name.lastIndexOf('.'));
+ }
+
+ private void cleanupTemporaryFiles() {
+ if (tmpIdx != null && !tmpIdx.delete() && tmpIdx.exists())
+ tmpIdx.deleteOnExit();
+ if (tmpPack != null && !tmpPack.delete() && tmpPack.exists())
+ tmpPack.deleteOnExit();
+ }
+
+ @Override
+ protected boolean onAppendBase(final int typeCode, final byte[] data,
+ final PackedObjectInfo info) throws IOException {
+ info.setOffset(packEnd);
+
+ final byte[] buf = buffer();
+ int sz = data.length;
+ int len = 0;
+ buf[len++] = (byte) ((typeCode << 4) | sz & 15);
+ sz >>>= 4;
+ while (sz > 0) {
+ buf[len - 1] |= 0x80;
+ buf[len++] = (byte) (sz & 0x7f);
+ sz >>>= 7;
+ }
+
+ tailDigest.update(buf, 0, len);
+ crc.reset();
+ crc.update(buf, 0, len);
+ out.seek(packEnd);
+ out.write(buf, 0, len);
+ packEnd += len;
+
+ if (def == null)
+ def = new Deflater(Deflater.DEFAULT_COMPRESSION, false);
+ else
+ def.reset();
+ def.setInput(data);
+ def.finish();
+
+ while (!def.finished()) {
+ len = def.deflate(buf);
+ tailDigest.update(buf, 0, len);
+ crc.update(buf, 0, len);
+ out.write(buf, 0, len);
+ packEnd += len;
+ }
+
+ info.setCRC((int) crc.getValue());
+ return true;
+ }
+
+ @Override
+ protected void onEndThinPack() throws IOException {
+ final byte[] tailHash = this.tailDigest.digest();
+ final byte[] buf = buffer();
+
+ final MessageDigest origDigest = Constants.newMessageDigest();
+ final MessageDigest tailDigest = Constants.newMessageDigest();
+ final MessageDigest packDigest = Constants.newMessageDigest();
+
+ long origRemaining = origEnd;
+ out.seek(0);
+ out.readFully(buf, 0, 12);
+ origDigest.update(buf, 0, 12);
+ origRemaining -= 12;
+
+ NB.encodeInt32(buf, 8, getObjectCount());
+ out.seek(0);
+ out.write(buf, 0, 12);
+ packDigest.update(buf, 0, 12);
+
+ for (;;) {
+ final int n = out.read(buf);
+ if (n < 0)
+ break;
+ if (origRemaining != 0) {
+ final int origCnt = (int) Math.min(n, origRemaining);
+ origDigest.update(buf, 0, origCnt);
+ origRemaining -= origCnt;
+ if (origRemaining == 0)
+ tailDigest.update(buf, origCnt, n - origCnt);
+ } else
+ tailDigest.update(buf, 0, n);
+
+ packDigest.update(buf, 0, n);
+ }
+
+ if (!Arrays.equals(origDigest.digest(), origHash)
+ || !Arrays.equals(tailDigest.digest(), tailHash))
+ throw new IOException(
+ JGitText.get().packCorruptedWhileWritingToFilesystem);
+
+ packHash = packDigest.digest();
+ }
+
+ private void writeIdx() throws IOException {
+ List<PackedObjectInfo> list = getSortedObjectList(null /* by ObjectId */);
+ final FileOutputStream os = new FileOutputStream(tmpIdx);
+ try {
+ final PackIndexWriter iw;
+ if (indexVersion <= 0)
+ iw = PackIndexWriter.createOldestPossible(os, list);
+ else
+ iw = PackIndexWriter.createVersion(os, indexVersion);
+ iw.write(list, packHash);
+ os.getChannel().force(true);
+ } finally {
+ os.close();
+ }
+ }
+
+ private PackLock renameAndOpenPack(final String lockMessage)
+ throws IOException {
+ if (!keepEmpty && getObjectCount() == 0) {
+ cleanupTemporaryFiles();
+ return null;
+ }
+
+ final MessageDigest d = Constants.newMessageDigest();
+ final byte[] oeBytes = new byte[Constants.OBJECT_ID_LENGTH];
+ for (int i = 0; i < getObjectCount(); i++) {
+ final PackedObjectInfo oe = getObject(i);
+ oe.copyRawTo(oeBytes, 0);
+ d.update(oeBytes);
+ }
+
+ final String name = ObjectId.fromRaw(d.digest()).name();
+ final File packDir = new File(db.getDirectory(), "pack");
+ final File finalPack = new File(packDir, "pack-" + name + ".pack");
+ final File finalIdx = new File(packDir, "pack-" + name + ".idx");
+ final PackLock keep = new PackLock(finalPack, db.getFS());
+
+ if (!packDir.exists() && !packDir.mkdir() && !packDir.exists()) {
+ // The objects/pack directory isn't present, and we are unable
+ // to create it. There is no way to move this pack in.
+ //
+ cleanupTemporaryFiles();
+ throw new IOException(MessageFormat.format(
+ JGitText.get().cannotCreateDirectory, packDir
+ .getAbsolutePath()));
+ }
+
+ if (finalPack.exists()) {
+ // If the pack is already present we should never replace it.
+ //
+ cleanupTemporaryFiles();
+ return null;
+ }
+
+ if (lockMessage != null) {
+ // If we have a reason to create a keep file for this pack, do
+ // so, or fail fast and don't put the pack in place.
+ //
+ try {
+ if (!keep.lock(lockMessage))
+ throw new IOException(MessageFormat.format(
+ JGitText.get().cannotLockPackIn, finalPack));
+ } catch (IOException e) {
+ cleanupTemporaryFiles();
+ throw e;
+ }
+ }
+
+ if (!tmpPack.renameTo(finalPack)) {
+ cleanupTemporaryFiles();
+ keep.unlock();
+ throw new IOException(MessageFormat.format(
+ JGitText.get().cannotMovePackTo, finalPack));
+ }
+
+ if (!tmpIdx.renameTo(finalIdx)) {
+ cleanupTemporaryFiles();
+ keep.unlock();
+ if (!finalPack.delete())
+ finalPack.deleteOnExit();
+ throw new IOException(MessageFormat.format(
+ JGitText.get().cannotMoveIndexTo, finalIdx));
+ }
+
+ try {
+ newPack = db.openPack(finalPack, finalIdx);
+ } catch (IOException err) {
+ keep.unlock();
+ if (finalPack.exists())
+ FileUtils.delete(finalPack);
+ if (finalIdx.exists())
+ FileUtils.delete(finalIdx);
+ throw err;
+ }
+
+ return lockMessage != null ? keep : null;
+ }
+}
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.MutableObjectId;
import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Config.SectionParser;
}
private void receivePack(final ProgressMonitor monitor) throws IOException {
- final IndexPack ip;
-
InputStream input = in;
if (sideband)
input = new SideBandInputStream(input, monitor, getMessageWriter());
- ip = IndexPack.create(local, input);
- ip.setFixThin(thinPack);
- ip.setObjectChecking(transport.isCheckFetchedObjects());
- ip.index(monitor);
- packLock = ip.renameAndOpenPack(lockMessage);
+ ObjectInserter ins = local.newObjectInserter();
+ try {
+ PackParser parser = ins.newPackParser(input);
+ parser.setAllowThin(thinPack);
+ parser.setObjectChecking(transport.isCheckFetchedObjects());
+ parser.setLockMessage(lockMessage);
+ packLock = parser.parse(monitor);
+ ins.flush();
+ } finally {
+ ins.release();
+ }
}
private static class CancelledException extends Exception {
import org.eclipse.jgit.errors.PackProtocolException;
import org.eclipse.jgit.errors.TransportException;
import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdRef;
+import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.revwalk.RevCommit;
BundleFetchConnection(Transport transportBundle, final InputStream src) throws TransportException {
transport = transportBundle;
- bin = new BufferedInputStream(src, IndexPack.BUFFER_SIZE);
+ bin = new BufferedInputStream(src);
try {
switch (readSignature()) {
case 2:
throws TransportException {
verifyPrerequisites();
try {
- final IndexPack ip = newIndexPack();
- ip.index(monitor);
- packLock = ip.renameAndOpenPack(lockMessage);
+ ObjectInserter ins = transport.local.newObjectInserter();
+ try {
+ PackParser parser = ins.newPackParser(bin);
+ parser.setAllowThin(true);
+ parser.setObjectChecking(transport.isCheckFetchedObjects());
+ parser.setLockMessage(lockMessage);
+ packLock = parser.parse(NullProgressMonitor.INSTANCE);
+ ins.flush();
+ } finally {
+ ins.release();
+ }
} catch (IOException err) {
close();
throw new TransportException(transport.uri, err.getMessage(), err);
return Collections.<PackLock> emptyList();
}
- private IndexPack newIndexPack() throws IOException {
- final IndexPack ip = IndexPack.create(transport.local, bin);
- ip.setFixThin(true);
- ip.setObjectChecking(transport.isCheckFetchedObjects());
- return ip;
- }
-
private void verifyPrerequisites() throws TransportException {
if (prereqs.isEmpty())
return;
+++ /dev/null
-/*
- * Copyright (C) 2008-2010, Google Inc.
- * Copyright (C) 2007-2008, Robin Rosenberg <robin.rosenberg@dewire.com>
- * Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- * copyright notice, this list of conditions and the following
- * disclaimer in the documentation and/or other materials provided
- * with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- * names of its contributors may be used to endorse or promote
- * products derived from this software without specific prior
- * written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.transport;
-
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.RandomAccessFile;
-import java.security.MessageDigest;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.zip.CRC32;
-import java.util.zip.DataFormatException;
-import java.util.zip.Deflater;
-import java.util.zip.Inflater;
-
-import org.eclipse.jgit.JGitText;
-import org.eclipse.jgit.errors.CorruptObjectException;
-import org.eclipse.jgit.errors.MissingObjectException;
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.CoreConfig;
-import org.eclipse.jgit.lib.InflaterCache;
-import org.eclipse.jgit.lib.MutableObjectId;
-import org.eclipse.jgit.lib.ObjectChecker;
-import org.eclipse.jgit.lib.ObjectDatabase;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectIdSubclassMap;
-import org.eclipse.jgit.lib.ObjectLoader;
-import org.eclipse.jgit.lib.ObjectReader;
-import org.eclipse.jgit.lib.ObjectStream;
-import org.eclipse.jgit.lib.ProgressMonitor;
-import org.eclipse.jgit.lib.Repository;
-import org.eclipse.jgit.storage.file.PackIndexWriter;
-import org.eclipse.jgit.storage.file.PackLock;
-import org.eclipse.jgit.storage.pack.BinaryDelta;
-import org.eclipse.jgit.util.FileUtils;
-import org.eclipse.jgit.util.IO;
-import org.eclipse.jgit.util.NB;
-
-/** Indexes Git pack files for local use. */
-public class IndexPack {
- /**
- * Size of the internal stream buffer.
- * <p>
- * If callers are going to be supplying IndexPack a BufferedInputStream they
- * should use this buffer size as the size of the buffer for that
- * BufferedInputStream, and any other its may be wrapping. This way the
- * buffers will cascade efficiently and only the IndexPack buffer will be
- * receiving the bulk of the data stream.
- */
- public static final int BUFFER_SIZE = 8192;
-
- /**
- * Create an index pack instance to load a new pack into a repository.
- * <p>
- * The received pack data and generated index will be saved to temporary
- * files within the repository's <code>objects</code> directory. To use the
- * data contained within them call {@link #renameAndOpenPack()} once the
- * indexing is complete.
- *
- * @param db
- * the repository that will receive the new pack.
- * @param is
- * stream to read the pack data from. If the stream is buffered
- * use {@link #BUFFER_SIZE} as the buffer size for the stream.
- * @return a new index pack instance.
- * @throws IOException
- * a temporary file could not be created.
- */
- public static IndexPack create(final Repository db, final InputStream is)
- throws IOException {
- final String suffix = ".pack";
- final File objdir = db.getObjectsDirectory();
- final File tmp = File.createTempFile("incoming_", suffix, objdir);
- final String n = tmp.getName();
- final File base;
-
- base = new File(objdir, n.substring(0, n.length() - suffix.length()));
- final IndexPack ip = new IndexPack(db, is, base);
- ip.setIndexVersion(db.getConfig().get(CoreConfig.KEY)
- .getPackIndexVersion());
- return ip;
- }
-
- private static enum Source {
- /** Data is read from the incoming stream. */
- INPUT,
-
- /**
- * Data is read from the spooled pack file.
- * <p>
- * During streaming, some (or all) data might be saved into the spooled
- * pack file so it can be randomly accessed later.
- */
- FILE;
- }
-
- private final Repository repo;
-
- /**
- * Object database used for loading existing objects
- */
- private final ObjectDatabase objectDatabase;
-
- private InflaterStream inflater;
-
- private byte[] readBuffer;
-
- private final MessageDigest objectDigest;
-
- private final MutableObjectId tempObjectId;
-
- private InputStream in;
-
- private byte[] buf;
-
- private long bBase;
-
- private int bOffset;
-
- private int bAvail;
-
- private ObjectChecker objCheck;
-
- private boolean fixThin;
-
- private boolean keepEmpty;
-
- private boolean needBaseObjectIds;
-
- private int outputVersion;
-
- private final File dstPack;
-
- private final File dstIdx;
-
- private long objectCount;
-
- private PackedObjectInfo[] entries;
-
- /**
- * Every object contained within the incoming pack.
- * <p>
- * This is a subset of {@link #entries}, as thin packs can add additional
- * objects to {@code entries} by copying already existing objects from the
- * repository onto the end of the thin pack to make it self-contained.
- */
- private ObjectIdSubclassMap<ObjectId> newObjectIds;
-
- private int deltaCount;
-
- private int entryCount;
-
- private final CRC32 crc = new CRC32();
-
- private ObjectIdSubclassMap<DeltaChain> baseById;
-
- /**
- * Objects referenced by their name from deltas, that aren't in this pack.
- * <p>
- * This is the set of objects that were copied onto the end of this pack to
- * make it complete. These objects were not transmitted by the remote peer,
- * but instead were assumed to already exist in the local repository.
- */
- private ObjectIdSubclassMap<ObjectId> baseObjectIds;
-
- private LongMap<UnresolvedDelta> baseByPos;
-
- /** Blobs whose contents need to be double-checked after indexing. */
- private List<PackedObjectInfo> deferredCheckBlobs;
-
- private MessageDigest packDigest;
-
- private RandomAccessFile packOut;
-
- private byte[] packcsum;
-
- /** If {@link #fixThin} this is the last byte of the original checksum. */
- private long originalEOF;
-
- private ObjectReader readCurs;
-
- /**
- * Create a new pack indexer utility.
- *
- * @param db
- * @param src
- * stream to read the pack data from. If the stream is buffered
- * use {@link #BUFFER_SIZE} as the buffer size for the stream.
- * @param dstBase
- * @throws IOException
- * the output packfile could not be created.
- */
- public IndexPack(final Repository db, final InputStream src,
- final File dstBase) throws IOException {
- repo = db;
- objectDatabase = db.getObjectDatabase().newCachedDatabase();
- in = src;
- inflater = new InflaterStream();
- readCurs = objectDatabase.newReader();
- buf = new byte[BUFFER_SIZE];
- readBuffer = new byte[BUFFER_SIZE];
- objectDigest = Constants.newMessageDigest();
- tempObjectId = new MutableObjectId();
- packDigest = Constants.newMessageDigest();
-
- if (dstBase != null) {
- final File dir = dstBase.getParentFile();
- final String nam = dstBase.getName();
- dstPack = new File(dir, nam + ".pack");
- dstIdx = new File(dir, nam + ".idx");
- packOut = new RandomAccessFile(dstPack, "rw");
- packOut.setLength(0);
- } else {
- dstPack = null;
- dstIdx = null;
- }
- }
-
- /**
- * Set the pack index file format version this instance will create.
- *
- * @param version
- * the version to write. The special version 0 designates the
- * oldest (most compatible) format available for the objects.
- * @see PackIndexWriter
- */
- public void setIndexVersion(final int version) {
- outputVersion = version;
- }
-
- /**
- * Configure this index pack instance to make a thin pack complete.
- * <p>
- * Thin packs are sometimes used during network transfers to allow a delta
- * to be sent without a base object. Such packs are not permitted on disk.
- * They can be fixed by copying the base object onto the end of the pack.
- *
- * @param fix
- * true to enable fixing a thin pack.
- */
- public void setFixThin(final boolean fix) {
- fixThin = fix;
- }
-
- /**
- * Configure this index pack instance to keep an empty pack.
- * <p>
- * By default an empty pack (a pack with no objects) is not kept, as doing
- * so is completely pointless. With no objects in the pack there is no data
- * stored by it, so the pack is unnecessary.
- *
- * @param empty true to enable keeping an empty pack.
- */
- public void setKeepEmpty(final boolean empty) {
- keepEmpty = empty;
- }
-
- /**
- * Configure this index pack instance to keep track of new objects.
- * <p>
- * By default an index pack doesn't save the new objects that were created
- * when it was instantiated. Setting this flag to {@code true} allows the
- * caller to use {@link #getNewObjectIds()} to retrieve that list.
- *
- * @param b {@code true} to enable keeping track of new objects.
- */
- public void setNeedNewObjectIds(boolean b) {
- if (b)
- newObjectIds = new ObjectIdSubclassMap<ObjectId>();
- else
- newObjectIds = null;
- }
-
- private boolean needNewObjectIds() {
- return newObjectIds != null;
- }
-
- /**
- * Configure this index pack instance to keep track of the objects assumed
- * for delta bases.
- * <p>
- * By default an index pack doesn't save the objects that were used as delta
- * bases. Setting this flag to {@code true} will allow the caller to
- * use {@link #getBaseObjectIds()} to retrieve that list.
- *
- * @param b {@code true} to enable keeping track of delta bases.
- */
- public void setNeedBaseObjectIds(boolean b) {
- this.needBaseObjectIds = b;
- }
-
- /** @return the new objects that were sent by the user */
- public ObjectIdSubclassMap<ObjectId> getNewObjectIds() {
- if (newObjectIds != null)
- return newObjectIds;
- return new ObjectIdSubclassMap<ObjectId>();
- }
-
- /** @return set of objects the incoming pack assumed for delta purposes */
- public ObjectIdSubclassMap<ObjectId> getBaseObjectIds() {
- if (baseObjectIds != null)
- return baseObjectIds;
- return new ObjectIdSubclassMap<ObjectId>();
- }
-
- /**
- * Configure the checker used to validate received objects.
- * <p>
- * Usually object checking isn't necessary, as Git implementations only
- * create valid objects in pack files. However, additional checking may be
- * useful if processing data from an untrusted source.
- *
- * @param oc
- * the checker instance; null to disable object checking.
- */
- public void setObjectChecker(final ObjectChecker oc) {
- objCheck = oc;
- }
-
- /**
- * Configure the checker used to validate received objects.
- * <p>
- * Usually object checking isn't necessary, as Git implementations only
- * create valid objects in pack files. However, additional checking may be
- * useful if processing data from an untrusted source.
- * <p>
- * This is shorthand for:
- *
- * <pre>
- * setObjectChecker(on ? new ObjectChecker() : null);
- * </pre>
- *
- * @param on
- * true to enable the default checker; false to disable it.
- */
- public void setObjectChecking(final boolean on) {
- setObjectChecker(on ? new ObjectChecker() : null);
- }
-
- /**
- * Consume data from the input stream until the packfile is indexed.
- *
- * @param progress
- * progress feedback
- *
- * @throws IOException
- */
- public void index(final ProgressMonitor progress) throws IOException {
- progress.start(2 /* tasks */);
- try {
- try {
- readPackHeader();
-
- entries = new PackedObjectInfo[(int) objectCount];
- baseById = new ObjectIdSubclassMap<DeltaChain>();
- baseByPos = new LongMap<UnresolvedDelta>();
- deferredCheckBlobs = new ArrayList<PackedObjectInfo>();
-
- progress.beginTask(JGitText.get().receivingObjects,
- (int) objectCount);
- for (int done = 0; done < objectCount; done++) {
- indexOneObject();
- progress.update(1);
- if (progress.isCancelled())
- throw new IOException(JGitText.get().downloadCancelled);
- }
- readPackFooter();
- endInput();
- if (!deferredCheckBlobs.isEmpty())
- doDeferredCheckBlobs();
- progress.endTask();
- if (deltaCount > 0) {
- if (packOut == null)
- throw new IOException(JGitText.get().needPackOut);
- resolveDeltas(progress);
- if (entryCount < objectCount) {
- if (!fixThin) {
- throw new IOException(MessageFormat.format(
- JGitText.get().packHasUnresolvedDeltas, (objectCount - entryCount)));
- }
- fixThinPack(progress);
- }
- }
- if (packOut != null && (keepEmpty || entryCount > 0))
- packOut.getChannel().force(true);
-
- packDigest = null;
- baseById = null;
- baseByPos = null;
-
- if (dstIdx != null && (keepEmpty || entryCount > 0))
- writeIdx();
-
- } finally {
- try {
- if (readCurs != null)
- readCurs.release();
- } finally {
- readCurs = null;
- }
-
- try {
- inflater.release();
- } finally {
- inflater = null;
- objectDatabase.close();
- }
-
- progress.endTask();
- if (packOut != null)
- packOut.close();
- }
-
- if (keepEmpty || entryCount > 0) {
- if (dstPack != null)
- dstPack.setReadOnly();
- if (dstIdx != null)
- dstIdx.setReadOnly();
- }
- } catch (IOException err) {
- if (dstPack != null)
- FileUtils.delete(dstPack);
- if (dstIdx != null)
- FileUtils.delete(dstIdx);
- throw err;
- }
- }
-
- private void resolveDeltas(final ProgressMonitor progress)
- throws IOException {
- progress.beginTask(JGitText.get().resolvingDeltas, deltaCount);
- final int last = entryCount;
- for (int i = 0; i < last; i++) {
- final int before = entryCount;
- resolveDeltas(entries[i]);
- progress.update(entryCount - before);
- if (progress.isCancelled())
- throw new IOException(JGitText.get().downloadCancelledDuringIndexing);
- }
- progress.endTask();
- }
-
- private void resolveDeltas(final PackedObjectInfo oe) throws IOException {
- UnresolvedDelta children = firstChildOf(oe);
- if (children == null)
- return;
-
- DeltaVisit visit = new DeltaVisit();
- visit.nextChild = children;
-
- crc.reset();
- position(oe.getOffset());
- int c = readFrom(Source.FILE);
- final int typeCode = (c >> 4) & 7;
- long sz = c & 15;
- int shift = 4;
- while ((c & 0x80) != 0) {
- c = readFrom(Source.FILE);
- sz += (c & 0x7f) << shift;
- shift += 7;
- }
-
- switch (typeCode) {
- case Constants.OBJ_COMMIT:
- case Constants.OBJ_TREE:
- case Constants.OBJ_BLOB:
- case Constants.OBJ_TAG:
- visit.data = inflateAndReturn(Source.FILE, sz);
- break;
- default:
- throw new IOException(MessageFormat.format(
- JGitText.get().unknownObjectType, typeCode));
- }
-
- if (oe.getCRC() != (int) crc.getValue()) {
- throw new IOException(MessageFormat.format(
- JGitText.get().corruptionDetectedReReadingAt,
- oe.getOffset()));
- }
-
- resolveDeltas(visit.next(), typeCode);
- }
-
- private void resolveDeltas(DeltaVisit visit, final int type)
- throws IOException {
- do {
- final long pos = visit.delta.position;
- crc.reset();
- position(pos);
- int c = readFrom(Source.FILE);
- final int typeCode = (c >> 4) & 7;
- long sz = c & 15;
- int shift = 4;
- while ((c & 0x80) != 0) {
- c = readFrom(Source.FILE);
- sz += (c & 0x7f) << shift;
- shift += 7;
- }
-
- switch (typeCode) {
- case Constants.OBJ_OFS_DELTA: {
- c = readFrom(Source.FILE) & 0xff;
- while ((c & 128) != 0)
- c = readFrom(Source.FILE) & 0xff;
- visit.data = BinaryDelta.apply(visit.parent.data, inflateAndReturn(Source.FILE, sz));
- break;
- }
- case Constants.OBJ_REF_DELTA: {
- crc.update(buf, fill(Source.FILE, 20), 20);
- use(20);
- visit.data = BinaryDelta.apply(visit.parent.data, inflateAndReturn(Source.FILE, sz));
- break;
- }
- default:
- throw new IOException(MessageFormat.format(JGitText.get().unknownObjectType, typeCode));
- }
-
- final int crc32 = (int) crc.getValue();
- if (visit.delta.crc != crc32)
- throw new IOException(MessageFormat.format(JGitText.get().corruptionDetectedReReadingAt, pos));
-
- objectDigest.update(Constants.encodedTypeString(type));
- objectDigest.update((byte) ' ');
- objectDigest.update(Constants.encodeASCII(visit.data.length));
- objectDigest.update((byte) 0);
- objectDigest.update(visit.data);
- tempObjectId.fromRaw(objectDigest.digest(), 0);
-
- verifySafeObject(tempObjectId, type, visit.data);
-
- PackedObjectInfo oe;
- oe = new PackedObjectInfo(pos, crc32, tempObjectId);
- addObjectAndTrack(oe);
-
- visit.nextChild = firstChildOf(oe);
- visit = visit.next();
- } while (visit != null);
- }
-
- private UnresolvedDelta removeBaseById(final AnyObjectId id){
- final DeltaChain d = baseById.get(id);
- return d != null ? d.remove() : null;
- }
-
- private static UnresolvedDelta reverse(UnresolvedDelta c) {
- UnresolvedDelta tail = null;
- while (c != null) {
- final UnresolvedDelta n = c.next;
- c.next = tail;
- tail = c;
- c = n;
- }
- return tail;
- }
-
- private UnresolvedDelta firstChildOf(PackedObjectInfo oe) {
- UnresolvedDelta a = reverse(removeBaseById(oe));
- UnresolvedDelta b = reverse(baseByPos.remove(oe.getOffset()));
-
- if (a == null)
- return b;
- if (b == null)
- return a;
-
- UnresolvedDelta first = null;
- UnresolvedDelta last = null;
- while (a != null || b != null) {
- UnresolvedDelta curr;
- if (b == null || (a != null && a.position < b.position)) {
- curr = a;
- a = a.next;
- } else {
- curr = b;
- b = b.next;
- }
- if (last != null)
- last.next = curr;
- else
- first = curr;
- last = curr;
- curr.next = null;
- }
- return first;
- }
-
- private void fixThinPack(final ProgressMonitor progress) throws IOException {
- growEntries();
-
- if (needBaseObjectIds)
- baseObjectIds = new ObjectIdSubclassMap<ObjectId>();
-
- packDigest.reset();
- originalEOF = packOut.length() - 20;
- final Deflater def = new Deflater(Deflater.DEFAULT_COMPRESSION, false);
- final List<DeltaChain> missing = new ArrayList<DeltaChain>(64);
- long end = originalEOF;
- for (final DeltaChain baseId : baseById) {
- if (baseId.head == null)
- continue;
- if (needBaseObjectIds)
- baseObjectIds.add(baseId);
- final ObjectLoader ldr;
- try {
- ldr = readCurs.open(baseId);
- } catch (MissingObjectException notFound) {
- missing.add(baseId);
- continue;
- }
-
- final DeltaVisit visit = new DeltaVisit();
- visit.data = ldr.getCachedBytes(Integer.MAX_VALUE);
- final int typeCode = ldr.getType();
- final PackedObjectInfo oe;
-
- crc.reset();
- packOut.seek(end);
- writeWhole(def, typeCode, visit.data);
- oe = new PackedObjectInfo(end, (int) crc.getValue(), baseId);
- entries[entryCount++] = oe;
- end = packOut.getFilePointer();
-
- visit.nextChild = firstChildOf(oe);
- resolveDeltas(visit.next(), typeCode);
-
- if (progress.isCancelled())
- throw new IOException(JGitText.get().downloadCancelledDuringIndexing);
- }
- def.end();
-
- for (final DeltaChain base : missing) {
- if (base.head != null)
- throw new MissingObjectException(base, "delta base");
- }
-
- if (end - originalEOF < 20) {
- // Ugly corner case; if what we appended on to complete deltas
- // doesn't completely cover the SHA-1 we have to truncate off
- // we need to shorten the file, otherwise we will include part
- // of the old footer as object content.
- packOut.setLength(end);
- }
-
- fixHeaderFooter(packcsum, packDigest.digest());
- }
-
- private void writeWhole(final Deflater def, final int typeCode,
- final byte[] data) throws IOException {
- int sz = data.length;
- int hdrlen = 0;
- buf[hdrlen++] = (byte) ((typeCode << 4) | sz & 15);
- sz >>>= 4;
- while (sz > 0) {
- buf[hdrlen - 1] |= 0x80;
- buf[hdrlen++] = (byte) (sz & 0x7f);
- sz >>>= 7;
- }
- packDigest.update(buf, 0, hdrlen);
- crc.update(buf, 0, hdrlen);
- packOut.write(buf, 0, hdrlen);
- def.reset();
- def.setInput(data);
- def.finish();
- while (!def.finished()) {
- final int datlen = def.deflate(buf);
- packDigest.update(buf, 0, datlen);
- crc.update(buf, 0, datlen);
- packOut.write(buf, 0, datlen);
- }
- }
-
- private void fixHeaderFooter(final byte[] origcsum, final byte[] tailcsum)
- throws IOException {
- final MessageDigest origDigest = Constants.newMessageDigest();
- final MessageDigest tailDigest = Constants.newMessageDigest();
- long origRemaining = originalEOF;
-
- packOut.seek(0);
- bAvail = 0;
- bOffset = 0;
- fill(Source.FILE, 12);
-
- {
- final int origCnt = (int) Math.min(bAvail, origRemaining);
- origDigest.update(buf, 0, origCnt);
- origRemaining -= origCnt;
- if (origRemaining == 0)
- tailDigest.update(buf, origCnt, bAvail - origCnt);
- }
-
- NB.encodeInt32(buf, 8, entryCount);
- packOut.seek(0);
- packOut.write(buf, 0, 12);
- packOut.seek(bAvail);
-
- packDigest.reset();
- packDigest.update(buf, 0, bAvail);
- for (;;) {
- final int n = packOut.read(buf);
- if (n < 0)
- break;
- if (origRemaining != 0) {
- final int origCnt = (int) Math.min(n, origRemaining);
- origDigest.update(buf, 0, origCnt);
- origRemaining -= origCnt;
- if (origRemaining == 0)
- tailDigest.update(buf, origCnt, n - origCnt);
- } else
- tailDigest.update(buf, 0, n);
-
- packDigest.update(buf, 0, n);
- }
-
- if (!Arrays.equals(origDigest.digest(), origcsum)
- || !Arrays.equals(tailDigest.digest(), tailcsum))
- throw new IOException(JGitText.get().packCorruptedWhileWritingToFilesystem);
-
- packcsum = packDigest.digest();
- packOut.write(packcsum);
- }
-
- private void growEntries() {
- final PackedObjectInfo[] ne;
-
- ne = new PackedObjectInfo[(int) objectCount + baseById.size()];
- System.arraycopy(entries, 0, ne, 0, entryCount);
- entries = ne;
- }
-
- private void writeIdx() throws IOException {
- Arrays.sort(entries, 0, entryCount);
- List<PackedObjectInfo> list = Arrays.asList(entries);
- if (entryCount < entries.length)
- list = list.subList(0, entryCount);
-
- final FileOutputStream os = new FileOutputStream(dstIdx);
- try {
- final PackIndexWriter iw;
- if (outputVersion <= 0)
- iw = PackIndexWriter.createOldestPossible(os, list);
- else
- iw = PackIndexWriter.createVersion(os, outputVersion);
- iw.write(list, packcsum);
- os.getChannel().force(true);
- } finally {
- os.close();
- }
- }
-
- private void readPackHeader() throws IOException {
- final int hdrln = Constants.PACK_SIGNATURE.length + 4 + 4;
- final int p = fill(Source.INPUT, hdrln);
- for (int k = 0; k < Constants.PACK_SIGNATURE.length; k++)
- if (buf[p + k] != Constants.PACK_SIGNATURE[k])
- throw new IOException(JGitText.get().notAPACKFile);
-
- final long vers = NB.decodeUInt32(buf, p + 4);
- if (vers != 2 && vers != 3)
- throw new IOException(MessageFormat.format(JGitText.get().unsupportedPackVersion, vers));
- objectCount = NB.decodeUInt32(buf, p + 8);
- use(hdrln);
- }
-
- private void readPackFooter() throws IOException {
- sync();
- final byte[] cmpcsum = packDigest.digest();
- final int c = fill(Source.INPUT, 20);
- packcsum = new byte[20];
- System.arraycopy(buf, c, packcsum, 0, 20);
- use(20);
- if (packOut != null)
- packOut.write(packcsum);
-
- if (!Arrays.equals(cmpcsum, packcsum))
- throw new CorruptObjectException(JGitText.get().corruptObjectPackfileChecksumIncorrect);
- }
-
- // Cleanup all resources associated with our input parsing.
- private void endInput() {
- in = null;
- }
-
- // Read one entire object or delta from the input.
- private void indexOneObject() throws IOException {
- final long pos = position();
-
- crc.reset();
- int c = readFrom(Source.INPUT);
- final int typeCode = (c >> 4) & 7;
- long sz = c & 15;
- int shift = 4;
- while ((c & 0x80) != 0) {
- c = readFrom(Source.INPUT);
- sz += (c & 0x7f) << shift;
- shift += 7;
- }
-
- switch (typeCode) {
- case Constants.OBJ_COMMIT:
- case Constants.OBJ_TREE:
- case Constants.OBJ_BLOB:
- case Constants.OBJ_TAG:
- whole(typeCode, pos, sz);
- break;
- case Constants.OBJ_OFS_DELTA: {
- c = readFrom(Source.INPUT);
- long ofs = c & 127;
- while ((c & 128) != 0) {
- ofs += 1;
- c = readFrom(Source.INPUT);
- ofs <<= 7;
- ofs += (c & 127);
- }
- final long base = pos - ofs;
- final UnresolvedDelta n;
- inflateAndSkip(Source.INPUT, sz);
- n = new UnresolvedDelta(pos, (int) crc.getValue());
- n.next = baseByPos.put(base, n);
- deltaCount++;
- break;
- }
- case Constants.OBJ_REF_DELTA: {
- c = fill(Source.INPUT, 20);
- crc.update(buf, c, 20);
- final ObjectId base = ObjectId.fromRaw(buf, c);
- use(20);
- DeltaChain r = baseById.get(base);
- if (r == null) {
- r = new DeltaChain(base);
- baseById.add(r);
- }
- inflateAndSkip(Source.INPUT, sz);
- r.add(new UnresolvedDelta(pos, (int) crc.getValue()));
- deltaCount++;
- break;
- }
- default:
- throw new IOException(MessageFormat.format(JGitText.get().unknownObjectType, typeCode));
- }
- }
-
- private void whole(final int type, final long pos, final long sz)
- throws IOException {
- objectDigest.update(Constants.encodedTypeString(type));
- objectDigest.update((byte) ' ');
- objectDigest.update(Constants.encodeASCII(sz));
- objectDigest.update((byte) 0);
-
- boolean checkContentLater = false;
- if (type == Constants.OBJ_BLOB) {
- InputStream inf = inflate(Source.INPUT, sz);
- long cnt = 0;
- while (cnt < sz) {
- int r = inf.read(readBuffer);
- if (r <= 0)
- break;
- objectDigest.update(readBuffer, 0, r);
- cnt += r;
- }
- inf.close();
- tempObjectId.fromRaw(objectDigest.digest(), 0);
- checkContentLater = readCurs.has(tempObjectId);
-
- } else {
- final byte[] data = inflateAndReturn(Source.INPUT, sz);
- objectDigest.update(data);
- tempObjectId.fromRaw(objectDigest.digest(), 0);
- verifySafeObject(tempObjectId, type, data);
- }
-
- final int crc32 = (int) crc.getValue();
- PackedObjectInfo obj = new PackedObjectInfo(pos, crc32, tempObjectId);
- addObjectAndTrack(obj);
- if (checkContentLater)
- deferredCheckBlobs.add(obj);
- }
-
- private void verifySafeObject(final AnyObjectId id, final int type,
- final byte[] data) throws IOException {
- if (objCheck != null) {
- try {
- objCheck.check(type, data);
- } catch (CorruptObjectException e) {
- throw new IOException(MessageFormat.format(JGitText.get().invalidObject
- , Constants.typeString(type) , id.name() , e.getMessage()));
- }
- }
-
- try {
- final ObjectLoader ldr = readCurs.open(id, type);
- final byte[] existingData = ldr.getCachedBytes(data.length);
- if (!Arrays.equals(data, existingData)) {
- throw new IOException(MessageFormat.format(JGitText.get().collisionOn, id.name()));
- }
- } catch (MissingObjectException notLocal) {
- // This is OK, we don't have a copy of the object locally
- // but the API throws when we try to read it as usually its
- // an error to read something that doesn't exist.
- }
- }
-
- private void doDeferredCheckBlobs() throws IOException {
- final byte[] curBuffer = new byte[readBuffer.length];
- for (PackedObjectInfo obj : deferredCheckBlobs) {
- position(obj.getOffset());
-
- int c = readFrom(Source.FILE);
- final int type = (c >> 4) & 7;
- long sz = c & 15;
- int shift = 4;
- while ((c & 0x80) != 0) {
- c = readFrom(Source.FILE);
- sz += (c & 0x7f) << shift;
- shift += 7;
- }
-
- if (type != Constants.OBJ_BLOB)
- throw new IOException(MessageFormat.format(
- JGitText.get().unknownObjectType, type));
-
- ObjectStream cur = readCurs.open(obj, type).openStream();
- try {
- if (cur.getSize() != sz)
- throw new IOException(MessageFormat.format(
- JGitText.get().collisionOn, obj.name()));
- InputStream pck = inflate(Source.FILE, sz);
- while (0 < sz) {
- int n = (int) Math.min(readBuffer.length, sz);
- IO.readFully(cur, curBuffer, 0, n);
- IO.readFully(pck, readBuffer, 0, n);
- for (int i = 0; i < n; i++) {
- if (curBuffer[i] != readBuffer[i])
- throw new IOException(MessageFormat.format(JGitText
- .get().collisionOn, obj.name()));
- }
- sz -= n;
- }
- pck.close();
- } finally {
- cur.close();
- }
- }
- }
-
- // Current position of {@link #bOffset} within the entire file.
- private long position() {
- return bBase + bOffset;
- }
-
- private void position(final long pos) throws IOException {
- packOut.seek(pos);
- bBase = pos;
- bOffset = 0;
- bAvail = 0;
- }
-
- // Consume exactly one byte from the buffer and return it.
- private int readFrom(final Source src) throws IOException {
- if (bAvail == 0)
- fill(src, 1);
- bAvail--;
- final int b = buf[bOffset++] & 0xff;
- crc.update(b);
- return b;
- }
-
- // Consume cnt bytes from the buffer.
- private void use(final int cnt) {
- bOffset += cnt;
- bAvail -= cnt;
- }
-
- // Ensure at least need bytes are available in in {@link #buf}.
- private int fill(final Source src, final int need) throws IOException {
- while (bAvail < need) {
- int next = bOffset + bAvail;
- int free = buf.length - next;
- if (free + bAvail < need) {
- switch(src){
- case INPUT:
- sync();
- break;
- case FILE:
- if (bAvail > 0)
- System.arraycopy(buf, bOffset, buf, 0, bAvail);
- bOffset = 0;
- break;
- }
- next = bAvail;
- free = buf.length - next;
- }
- switch(src){
- case INPUT:
- next = in.read(buf, next, free);
- break;
- case FILE:
- next = packOut.read(buf, next, free);
- break;
- }
- if (next <= 0)
- throw new EOFException(JGitText.get().packfileIsTruncated);
- bAvail += next;
- }
- return bOffset;
- }
-
- // Store consumed bytes in {@link #buf} up to {@link #bOffset}.
- private void sync() throws IOException {
- packDigest.update(buf, 0, bOffset);
- if (packOut != null)
- packOut.write(buf, 0, bOffset);
- if (bAvail > 0)
- System.arraycopy(buf, bOffset, buf, 0, bAvail);
- bBase += bOffset;
- bOffset = 0;
- }
-
- private void inflateAndSkip(final Source src, final long inflatedSize)
- throws IOException {
- final InputStream inf = inflate(src, inflatedSize);
- IO.skipFully(inf, inflatedSize);
- inf.close();
- }
-
- private byte[] inflateAndReturn(final Source src, final long inflatedSize)
- throws IOException {
- final byte[] dst = new byte[(int) inflatedSize];
- final InputStream inf = inflate(src, inflatedSize);
- IO.readFully(inf, dst, 0, dst.length);
- inf.close();
- return dst;
- }
-
- private InputStream inflate(final Source src, final long inflatedSize)
- throws IOException {
- inflater.open(src, inflatedSize);
- return inflater;
- }
-
- private static class DeltaChain extends ObjectId {
- UnresolvedDelta head;
-
- DeltaChain(final AnyObjectId id) {
- super(id);
- }
-
- UnresolvedDelta remove() {
- final UnresolvedDelta r = head;
- if (r != null)
- head = null;
- return r;
- }
-
- void add(final UnresolvedDelta d) {
- d.next = head;
- head = d;
- }
- }
-
- private static class UnresolvedDelta {
- final long position;
-
- final int crc;
-
- UnresolvedDelta next;
-
- UnresolvedDelta(final long headerOffset, final int crc32) {
- position = headerOffset;
- crc = crc32;
- }
- }
-
- private static class DeltaVisit {
- final UnresolvedDelta delta;
-
- byte[] data;
-
- DeltaVisit parent;
-
- UnresolvedDelta nextChild;
-
- DeltaVisit() {
- this.delta = null; // At the root of the stack we have a base.
- }
-
- DeltaVisit(DeltaVisit parent) {
- this.parent = parent;
- this.delta = parent.nextChild;
- parent.nextChild = delta.next;
- }
-
- DeltaVisit next() {
- // If our parent has no more children, discard it.
- if (parent != null && parent.nextChild == null) {
- parent.data = null;
- parent = parent.parent;
- }
-
- if (nextChild != null)
- return new DeltaVisit(this);
-
- // If we have no child ourselves, our parent must (if it exists),
- // due to the discard rule above. With no parent, we are done.
- if (parent != null)
- return new DeltaVisit(parent);
- return null;
- }
- }
-
- /**
- * Rename the pack to it's final name and location and open it.
- * <p>
- * If the call completes successfully the repository this IndexPack instance
- * was created with will have the objects in the pack available for reading
- * and use, without needing to scan for packs.
- *
- * @throws IOException
- * The pack could not be inserted into the repository's objects
- * directory. The pack no longer exists on disk, as it was
- * removed prior to throwing the exception to the caller.
- */
- public void renameAndOpenPack() throws IOException {
- renameAndOpenPack(null);
- }
-
- /**
- * Rename the pack to it's final name and location and open it.
- * <p>
- * If the call completes successfully the repository this IndexPack instance
- * was created with will have the objects in the pack available for reading
- * and use, without needing to scan for packs.
- *
- * @param lockMessage
- * message to place in the pack-*.keep file. If null, no lock
- * will be created, and this method returns null.
- * @return the pack lock object, if lockMessage is not null.
- * @throws IOException
- * The pack could not be inserted into the repository's objects
- * directory. The pack no longer exists on disk, as it was
- * removed prior to throwing the exception to the caller.
- */
- public PackLock renameAndOpenPack(final String lockMessage)
- throws IOException {
- if (!keepEmpty && entryCount == 0) {
- cleanupTemporaryFiles();
- return null;
- }
-
- final MessageDigest d = Constants.newMessageDigest();
- final byte[] oeBytes = new byte[Constants.OBJECT_ID_LENGTH];
- for (int i = 0; i < entryCount; i++) {
- final PackedObjectInfo oe = entries[i];
- oe.copyRawTo(oeBytes, 0);
- d.update(oeBytes);
- }
-
- final String name = ObjectId.fromRaw(d.digest()).name();
- final File packDir = new File(repo.getObjectsDirectory(), "pack");
- final File finalPack = new File(packDir, "pack-" + name + ".pack");
- final File finalIdx = new File(packDir, "pack-" + name + ".idx");
- final PackLock keep = new PackLock(finalPack, repo.getFS());
-
- if (!packDir.exists() && !packDir.mkdir() && !packDir.exists()) {
- // The objects/pack directory isn't present, and we are unable
- // to create it. There is no way to move this pack in.
- //
- cleanupTemporaryFiles();
- throw new IOException(MessageFormat.format(JGitText.get().cannotCreateDirectory, packDir.getAbsolutePath()));
- }
-
- if (finalPack.exists()) {
- // If the pack is already present we should never replace it.
- //
- cleanupTemporaryFiles();
- return null;
- }
-
- if (lockMessage != null) {
- // If we have a reason to create a keep file for this pack, do
- // so, or fail fast and don't put the pack in place.
- //
- try {
- if (!keep.lock(lockMessage))
- throw new IOException(MessageFormat.format(JGitText.get().cannotLockPackIn, finalPack));
- } catch (IOException e) {
- cleanupTemporaryFiles();
- throw e;
- }
- }
-
- if (!dstPack.renameTo(finalPack)) {
- cleanupTemporaryFiles();
- keep.unlock();
- throw new IOException(MessageFormat.format(JGitText.get().cannotMovePackTo, finalPack));
- }
-
- if (!dstIdx.renameTo(finalIdx)) {
- cleanupTemporaryFiles();
- keep.unlock();
- if (!finalPack.delete())
- finalPack.deleteOnExit();
- throw new IOException(MessageFormat.format(JGitText.get().cannotMoveIndexTo, finalIdx));
- }
-
- try {
- repo.openPack(finalPack, finalIdx);
- } catch (IOException err) {
- keep.unlock();
- FileUtils.delete(finalPack);
- FileUtils.delete(finalIdx);
- throw err;
- }
-
- return lockMessage != null ? keep : null;
- }
-
- private void cleanupTemporaryFiles() {
- if (!dstIdx.delete())
- dstIdx.deleteOnExit();
- if (!dstPack.delete())
- dstPack.deleteOnExit();
- }
-
- private void addObjectAndTrack(PackedObjectInfo oe) {
- entries[entryCount++] = oe;
- if (needNewObjectIds())
- newObjectIds.add(oe);
- }
-
- private class InflaterStream extends InputStream {
- private final Inflater inf;
-
- private final byte[] skipBuffer;
-
- private Source src;
-
- private long expectedSize;
-
- private long actualSize;
-
- private int p;
-
- InflaterStream() {
- inf = InflaterCache.get();
- skipBuffer = new byte[512];
- }
-
- void release() {
- inf.reset();
- InflaterCache.release(inf);
- }
-
- void open(Source source, long inflatedSize) throws IOException {
- src = source;
- expectedSize = inflatedSize;
- actualSize = 0;
-
- p = fill(src, 24);
- inf.setInput(buf, p, bAvail);
- }
-
- @Override
- public long skip(long toSkip) throws IOException {
- long n = 0;
- while (n < toSkip) {
- final int cnt = (int) Math.min(skipBuffer.length, toSkip - n);
- final int r = read(skipBuffer, 0, cnt);
- if (r <= 0)
- break;
- n += r;
- }
- return n;
- }
-
- @Override
- public int read() throws IOException {
- int n = read(skipBuffer, 0, 1);
- return n == 1 ? skipBuffer[0] & 0xff : -1;
- }
-
- @Override
- public int read(byte[] dst, int pos, int cnt) throws IOException {
- try {
- int n = 0;
- while (n < cnt) {
- int r = inf.inflate(dst, pos + n, cnt - n);
- if (r == 0) {
- if (inf.finished())
- break;
- if (inf.needsInput()) {
- crc.update(buf, p, bAvail);
- use(bAvail);
-
- p = fill(src, 24);
- inf.setInput(buf, p, bAvail);
- } else {
- throw new CorruptObjectException(
- MessageFormat
- .format(
- JGitText.get().packfileCorruptionDetected,
- JGitText.get().unknownZlibError));
- }
- } else {
- n += r;
- }
- }
- actualSize += n;
- return 0 < n ? n : -1;
- } catch (DataFormatException dfe) {
- throw new CorruptObjectException(MessageFormat.format(JGitText
- .get().packfileCorruptionDetected, dfe.getMessage()));
- }
- }
-
- @Override
- public void close() throws IOException {
- // We need to read here to enter the loop above and pump the
- // trailing checksum into the Inflater. It should return -1 as the
- // caller was supposed to consume all content.
- //
- if (read(skipBuffer) != -1 || actualSize != expectedSize) {
- throw new CorruptObjectException(MessageFormat.format(JGitText
- .get().packfileCorruptionDetected,
- JGitText.get().wrongDecompressedLength));
- }
-
- int used = bAvail - inf.getRemaining();
- if (0 < used) {
- crc.update(buf, p, used);
- use(used);
- }
-
- inf.reset();
- }
- }
-}
package org.eclipse.jgit.transport;
/**
- * Simple Map<long,Object> helper for {@link IndexPack}.
+ * Simple Map<long,Object> helper for {@link PackParser}.
*
* @param <V>
* type of the value instance.
--- /dev/null
+/*
+ * Copyright (C) 2008-2011, Google Inc.
+ * Copyright (C) 2007-2008, Robin Rosenberg <robin.rosenberg@dewire.com>
+ * Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.transport;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.MessageDigest;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.zip.DataFormatException;
+import java.util.zip.Inflater;
+
+import org.eclipse.jgit.JGitText;
+import org.eclipse.jgit.errors.CorruptObjectException;
+import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.InflaterCache;
+import org.eclipse.jgit.lib.MutableObjectId;
+import org.eclipse.jgit.lib.NullProgressMonitor;
+import org.eclipse.jgit.lib.ObjectChecker;
+import org.eclipse.jgit.lib.ObjectDatabase;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdSubclassMap;
+import org.eclipse.jgit.lib.ObjectInserter;
+import org.eclipse.jgit.lib.ObjectLoader;
+import org.eclipse.jgit.lib.ObjectReader;
+import org.eclipse.jgit.lib.ObjectStream;
+import org.eclipse.jgit.lib.ProgressMonitor;
+import org.eclipse.jgit.storage.file.PackLock;
+import org.eclipse.jgit.storage.pack.BinaryDelta;
+import org.eclipse.jgit.util.IO;
+import org.eclipse.jgit.util.NB;
+
+/**
+ * Parses a pack stream and imports it for an {@link ObjectInserter}.
+ * <p>
+ * Applications can acquire an instance of a parser from ObjectInserter's
+ * {@link ObjectInserter#newPackParser(InputStream)} method.
+ * <p>
+ * Implementations of {@link ObjectInserter} should subclass this type and
+ * provide their own logic for the various {@code on*()} event methods declared
+ * to be abstract.
+ */
+public abstract class PackParser {
+ /** Size of the internal stream buffer. */
+ private static final int BUFFER_SIZE = 8192;
+
+ /** Location data is being obtained from. */
+ public static enum Source {
+ /** Data is read from the incoming stream. */
+ INPUT,
+
+ /** Data is read back from the database's buffers. */
+ DATABASE;
+ }
+
+ /** Object database used for loading existing objects. */
+ private final ObjectDatabase objectDatabase;
+
+ private InflaterStream inflater;
+
+ private byte[] tempBuffer;
+
+ private byte[] hdrBuf;
+
+ private final MessageDigest objectDigest;
+
+ private final MutableObjectId tempObjectId;
+
+ private InputStream in;
+
+ private byte[] buf;
+
+ /** Position in the input stream of {@code buf[0]}. */
+ private long bBase;
+
+ private int bOffset;
+
+ private int bAvail;
+
+ private ObjectChecker objCheck;
+
+ private boolean allowThin;
+
+ private boolean needBaseObjectIds;
+
+ private long objectCount;
+
+ private PackedObjectInfo[] entries;
+
+ /**
+ * Every object contained within the incoming pack.
+ * <p>
+ * This is a subset of {@link #entries}, as thin packs can add additional
+ * objects to {@code entries} by copying already existing objects from the
+ * repository onto the end of the thin pack to make it self-contained.
+ */
+ private ObjectIdSubclassMap<ObjectId> newObjectIds;
+
+ private int deltaCount;
+
+ private int entryCount;
+
+ private ObjectIdSubclassMap<DeltaChain> baseById;
+
+ /**
+ * Objects referenced by their name from deltas, that aren't in this pack.
+ * <p>
+ * This is the set of objects that were copied onto the end of this pack to
+ * make it complete. These objects were not transmitted by the remote peer,
+ * but instead were assumed to already exist in the local repository.
+ */
+ private ObjectIdSubclassMap<ObjectId> baseObjectIds;
+
+ private LongMap<UnresolvedDelta> baseByPos;
+
+ /** Blobs whose contents need to be double-checked after indexing. */
+ private List<PackedObjectInfo> deferredCheckBlobs;
+
+ private MessageDigest packDigest;
+
+ private ObjectReader readCurs;
+
+ /** Message to protect the pack data from garbage collection. */
+ private String lockMessage;
+
+ /**
+ * Initialize a pack parser.
+ *
+ * @param odb
+ * database the parser will write its objects into.
+ * @param src
+ * the stream the parser will read.
+ */
+ protected PackParser(final ObjectDatabase odb, final InputStream src) {
+ objectDatabase = odb.newCachedDatabase();
+ in = src;
+
+ inflater = new InflaterStream();
+ readCurs = objectDatabase.newReader();
+ buf = new byte[BUFFER_SIZE];
+ tempBuffer = new byte[BUFFER_SIZE];
+ hdrBuf = new byte[64];
+ objectDigest = Constants.newMessageDigest();
+ tempObjectId = new MutableObjectId();
+ packDigest = Constants.newMessageDigest();
+ }
+
+ /** @return true if a thin pack (missing base objects) is permitted. */
+ public boolean isAllowThin() {
+ return allowThin;
+ }
+
+ /**
+ * Configure this index pack instance to allow a thin pack.
+ * <p>
+ * Thin packs are sometimes used during network transfers to allow a delta
+ * to be sent without a base object. Such packs are not permitted on disk.
+ *
+ * @param allow
+ * true to enable a thin pack.
+ */
+ public void setAllowThin(final boolean allow) {
+ allowThin = allow;
+ }
+
+ /**
+ * Configure this index pack instance to keep track of new objects.
+ * <p>
+ * By default an index pack doesn't save the new objects that were created
+ * when it was instantiated. Setting this flag to {@code true} allows the
+ * caller to use {@link #getNewObjectIds()} to retrieve that list.
+ *
+ * @param b
+ * {@code true} to enable keeping track of new objects.
+ */
+ public void setNeedNewObjectIds(boolean b) {
+ if (b)
+ newObjectIds = new ObjectIdSubclassMap<ObjectId>();
+ else
+ newObjectIds = null;
+ }
+
+ private boolean needNewObjectIds() {
+ return newObjectIds != null;
+ }
+
+ /**
+ * Configure this index pack instance to keep track of the objects assumed
+ * for delta bases.
+ * <p>
+ * By default an index pack doesn't save the objects that were used as delta
+ * bases. Setting this flag to {@code true} will allow the caller to use
+ * {@link #getBaseObjectIds()} to retrieve that list.
+ *
+ * @param b
+ * {@code true} to enable keeping track of delta bases.
+ */
+ public void setNeedBaseObjectIds(boolean b) {
+ this.needBaseObjectIds = b;
+ }
+
+ /** @return the new objects that were sent by the user */
+ public ObjectIdSubclassMap<ObjectId> getNewObjectIds() {
+ if (newObjectIds != null)
+ return newObjectIds;
+ return new ObjectIdSubclassMap<ObjectId>();
+ }
+
+ /** @return set of objects the incoming pack assumed for delta purposes */
+ public ObjectIdSubclassMap<ObjectId> getBaseObjectIds() {
+ if (baseObjectIds != null)
+ return baseObjectIds;
+ return new ObjectIdSubclassMap<ObjectId>();
+ }
+
+ /**
+ * Configure the checker used to validate received objects.
+ * <p>
+ * Usually object checking isn't necessary, as Git implementations only
+ * create valid objects in pack files. However, additional checking may be
+ * useful if processing data from an untrusted source.
+ *
+ * @param oc
+ * the checker instance; null to disable object checking.
+ */
+ public void setObjectChecker(final ObjectChecker oc) {
+ objCheck = oc;
+ }
+
+ /**
+ * Configure the checker used to validate received objects.
+ * <p>
+ * Usually object checking isn't necessary, as Git implementations only
+ * create valid objects in pack files. However, additional checking may be
+ * useful if processing data from an untrusted source.
+ * <p>
+ * This is shorthand for:
+ *
+ * <pre>
+ * setObjectChecker(on ? new ObjectChecker() : null);
+ * </pre>
+ *
+ * @param on
+ * true to enable the default checker; false to disable it.
+ */
+ public void setObjectChecking(final boolean on) {
+ setObjectChecker(on ? new ObjectChecker() : null);
+ }
+
+ /** @return the message to record with the pack lock. */
+ public String getLockMessage() {
+ return lockMessage;
+ }
+
+ /**
+ * Set the lock message for the incoming pack data.
+ *
+ * @param msg
+ * if not null, the message to associate with the incoming data
+ * while it is locked to prevent garbage collection.
+ */
+ public void setLockMessage(String msg) {
+ lockMessage = msg;
+ }
+
+ /**
+ * Get the number of objects in the stream.
+ * <p>
+ * The object count is only available after {@link #parse(ProgressMonitor)}
+ * has returned. The count may have been increased if the stream was a thin
+ * pack, and missing bases objects were appending onto it by the subclass.
+ *
+ * @return number of objects parsed out of the stream.
+ */
+ public int getObjectCount() {
+ return entryCount;
+ }
+
	/**
	 * Get the information about the requested object.
	 * <p>
	 * The object information is only available after
	 * {@link #parse(ProgressMonitor)} has returned.
	 *
	 * @param nth
	 *            index of the object in the stream. Must be between 0 and
	 *            {@link #getObjectCount()}-1.
	 * @return the object information.
	 */
	public PackedObjectInfo getObject(int nth) {
		return entries[nth];
	}
+
+ /**
+ * Get all of the objects, sorted by their name.
+ * <p>
+ * The object information is only available after
+ * {@link #parse(ProgressMonitor)} has returned.
+ * <p>
+ * To maintain lower memory usage and good runtime performance, this method
+ * sorts the objects in-place and therefore impacts the ordering presented
+ * by {@link #getObject(int)}.
+ *
+ * @param cmp
+ * comparison function, if null objects are stored by ObjectId.
+ * @return sorted list of objects in this pack stream.
+ */
+ public List<PackedObjectInfo> getSortedObjectList(
+ Comparator<PackedObjectInfo> cmp) {
+ Arrays.sort(entries, 0, entryCount, cmp);
+ List<PackedObjectInfo> list = Arrays.asList(entries);
+ if (entryCount < entries.length)
+ list = list.subList(0, entryCount);
+ return list;
+ }
+
	/**
	 * Parse the pack stream.
	 *
	 * @param progress
	 *            callback to provide progress feedback during parsing. If null,
	 *            {@link NullProgressMonitor} will be used.
	 * @return the pack lock, if one was requested by setting
	 *         {@link #setLockMessage(String)}.
	 * @throws IOException
	 *             the stream is malformed, or contains corrupt objects.
	 */
	public PackLock parse(ProgressMonitor progress) throws IOException {
		if (progress == null)
			progress = NullProgressMonitor.INSTANCE;
		// Two tasks: receiving objects, then resolving deltas.
		progress.start(2 /* tasks */);
		try {
			readPackHeader();

			entries = new PackedObjectInfo[(int) objectCount];
			baseById = new ObjectIdSubclassMap<DeltaChain>();
			baseByPos = new LongMap<UnresolvedDelta>();
			deferredCheckBlobs = new ArrayList<PackedObjectInfo>();

			progress.beginTask(JGitText.get().receivingObjects,
					(int) objectCount);
			for (int done = 0; done < objectCount; done++) {
				indexOneObject();
				progress.update(1);
				if (progress.isCancelled())
					throw new IOException(JGitText.get().downloadCancelled);
			}
			readPackFooter();
			endInput();
			// Blobs that collided with an existing object are compared
			// byte-by-byte only after the whole stream has been consumed.
			if (!deferredCheckBlobs.isEmpty())
				doDeferredCheckBlobs();
			progress.endTask();
			if (deltaCount > 0) {
				resolveDeltas(progress);
				if (entryCount < objectCount) {
					// Some deltas referenced bases outside the stream;
					// this is only legal if a thin pack was permitted.
					if (!isAllowThin()) {
						throw new IOException(MessageFormat.format(JGitText
								.get().packHasUnresolvedDeltas,
								(objectCount - entryCount)));
					}

					resolveDeltasWithExternalBases(progress);
				}
			}

			// Release parsing state eagerly; only the entry table survives.
			packDigest = null;
			baseById = null;
			baseByPos = null;
		} finally {
			// Cleanup runs even on failure; the nested try/finally blocks
			// guarantee each resource is released regardless of the others.
			try {
				if (readCurs != null)
					readCurs.release();
			} finally {
				readCurs = null;
			}

			try {
				inflater.release();
			} finally {
				inflater = null;
				objectDatabase.close();
			}

			progress.endTask();
		}
		return null; // By default there is no locking.
	}
+
+ private void resolveDeltas(final ProgressMonitor progress)
+ throws IOException {
+ progress.beginTask(JGitText.get().resolvingDeltas, deltaCount);
+ final int last = entryCount;
+ for (int i = 0; i < last; i++) {
+ final int before = entryCount;
+ resolveDeltas(entries[i]);
+ progress.update(entryCount - before);
+ if (progress.isCancelled())
+ throw new IOException(
+ JGitText.get().downloadCancelledDuringIndexing);
+ }
+ progress.endTask();
+ }
+
	// Resolve all deltas that use the given whole object as their base.
	// Does nothing if no delta in the stream referenced this object.
	private void resolveDeltas(final PackedObjectInfo oe) throws IOException {
		UnresolvedDelta children = firstChildOf(oe);
		if (children == null)
			return;

		// The visit acts as the root of an explicit traversal stack,
		// avoiding recursion over potentially deep delta chains.
		DeltaVisit visit = new DeltaVisit();
		visit.nextChild = children;

		ObjectTypeAndSize info = openDatabase(oe, new ObjectTypeAndSize());
		switch (info.type) {
		case Constants.OBJ_COMMIT:
		case Constants.OBJ_TREE:
		case Constants.OBJ_BLOB:
		case Constants.OBJ_TAG:
			// A delta base must be a concrete object type; re-read its data.
			visit.data = inflateAndReturn(Source.DATABASE, info.size);
			visit.id = oe;
			break;
		default:
			throw new IOException(MessageFormat.format(
					JGitText.get().unknownObjectType, info.type));
		}

		// Guard against silent corruption between the first pass and now.
		if (!checkCRC(oe.getCRC())) {
			throw new IOException(MessageFormat.format(
					JGitText.get().corruptionDetectedReReadingAt, oe
							.getOffset()));
		}

		resolveDeltas(visit.next(), info.type, info);
	}
+
	// Iteratively apply deltas along the visit chain. Each iteration applies
	// one delta to its parent's data, names and registers the result, then
	// advances to the next pending child (depth-first, without recursion).
	private void resolveDeltas(DeltaVisit visit, final int type,
			ObjectTypeAndSize info) throws IOException {
		do {
			info = openDatabase(visit.delta, info);
			switch (info.type) {
			case Constants.OBJ_OFS_DELTA:
			case Constants.OBJ_REF_DELTA:
				break;

			default:
				throw new IOException(MessageFormat.format(
						JGitText.get().unknownObjectType, info.type));
			}

			visit.data = BinaryDelta.apply(visit.parent.data, //
					inflateAndReturn(Source.DATABASE, info.size));

			if (!checkCRC(visit.delta.crc))
				throw new IOException(MessageFormat.format(
						JGitText.get().corruptionDetectedReReadingAt,
						visit.delta.position));

			// Compute the canonical object name: "<type> <len>\0<data>".
			// Note the hash uses the resolved type, not the delta type.
			objectDigest.update(Constants.encodedTypeString(type));
			objectDigest.update((byte) ' ');
			objectDigest.update(Constants.encodeASCII(visit.data.length));
			objectDigest.update((byte) 0);
			objectDigest.update(visit.data);
			tempObjectId.fromRaw(objectDigest.digest(), 0);

			verifySafeObject(tempObjectId, type, visit.data);

			PackedObjectInfo oe;
			oe = newInfo(tempObjectId, visit.delta, visit.parent.id);
			oe.setOffset(visit.delta.position);
			addObjectAndTrack(oe);
			visit.id = oe;

			// The freshly resolved object may itself be a delta base.
			visit.nextChild = firstChildOf(oe);
			visit = visit.next();
		} while (visit != null);
	}
+
+ /**
+ * Read the header of the current object.
+ * <p>
+ * After the header has been parsed, this method automatically invokes
+ * {@link #onObjectHeader(Source, byte[], int, int)} to allow the
+ * implementation to update its internal checksums for the bytes read.
+ * <p>
+ * When this method returns the database will be positioned on the first
+ * byte of the deflated data stream.
+ *
+ * @param info
+ * the info object to populate.
+ * @return {@code info}, after populating.
+ * @throws IOException
+ * the size cannot be read.
+ */
+ protected ObjectTypeAndSize readObjectHeader(ObjectTypeAndSize info)
+ throws IOException {
+ int hdrPtr = 0;
+ int c = readFrom(Source.DATABASE);
+ hdrBuf[hdrPtr++] = (byte) c;
+
+ info.type = (c >> 4) & 7;
+ long sz = c & 15;
+ int shift = 4;
+ while ((c & 0x80) != 0) {
+ c = readFrom(Source.DATABASE);
+ hdrBuf[hdrPtr++] = (byte) c;
+ sz += (c & 0x7f) << shift;
+ shift += 7;
+ }
+ info.size = sz;
+
+ switch (info.type) {
+ case Constants.OBJ_COMMIT:
+ case Constants.OBJ_TREE:
+ case Constants.OBJ_BLOB:
+ case Constants.OBJ_TAG:
+ onObjectHeader(Source.DATABASE, hdrBuf, 0, hdrPtr);
+ break;
+
+ case Constants.OBJ_OFS_DELTA:
+ c = readFrom(Source.DATABASE);
+ hdrBuf[hdrPtr++] = (byte) c;
+ while ((c & 128) != 0) {
+ c = readFrom(Source.DATABASE);
+ hdrBuf[hdrPtr++] = (byte) c;
+ }
+ onObjectHeader(Source.DATABASE, hdrBuf, 0, hdrPtr);
+ break;
+
+ case Constants.OBJ_REF_DELTA:
+ System.arraycopy(buf, fill(Source.DATABASE, 20), hdrBuf, hdrPtr, 20);
+ hdrPtr += 20;
+ use(20);
+ onObjectHeader(Source.DATABASE, hdrBuf, 0, hdrPtr);
+ break;
+
+ default:
+ throw new IOException(MessageFormat.format(
+ JGitText.get().unknownObjectType, info.type));
+ }
+ return info;
+ }
+
+ private UnresolvedDelta removeBaseById(final AnyObjectId id) {
+ final DeltaChain d = baseById.get(id);
+ return d != null ? d.remove() : null;
+ }
+
+ private static UnresolvedDelta reverse(UnresolvedDelta c) {
+ UnresolvedDelta tail = null;
+ while (c != null) {
+ final UnresolvedDelta n = c.next;
+ c.next = tail;
+ tail = c;
+ c = n;
+ }
+ return tail;
+ }
+
	// Collect every delta that depends on the given object, merging the
	// by-ObjectId chain and the by-offset chain into a single list ordered
	// by stream position. Both source chains are consumed (removed from
	// their maps) so each delta is resolved exactly once.
	private UnresolvedDelta firstChildOf(PackedObjectInfo oe) {
		// Chains were built by prepending, so reverse to stream order.
		UnresolvedDelta a = reverse(removeBaseById(oe));
		UnresolvedDelta b = reverse(baseByPos.remove(oe.getOffset()));

		if (a == null)
			return b;
		if (b == null)
			return a;

		// Standard merge of two position-sorted linked lists.
		UnresolvedDelta first = null;
		UnresolvedDelta last = null;
		while (a != null || b != null) {
			UnresolvedDelta curr;
			if (b == null || (a != null && a.position < b.position)) {
				curr = a;
				a = a.next;
			} else {
				curr = b;
				b = b.next;
			}
			if (last != null)
				last.next = curr;
			else
				first = curr;
			last = curr;
			curr.next = null;
		}
		return first;
	}
+
	// Resolve deltas whose bases were not in the pack stream (thin pack).
	// Each external base is loaded from the local repository, optionally
	// appended onto the pack by the subclass, and its dependent deltas are
	// then resolved normally.
	private void resolveDeltasWithExternalBases(final ProgressMonitor progress)
			throws IOException {
		growEntries(baseById.size());

		if (needBaseObjectIds)
			baseObjectIds = new ObjectIdSubclassMap<ObjectId>();

		final List<DeltaChain> missing = new ArrayList<DeltaChain>(64);
		for (final DeltaChain baseId : baseById) {
			// A null head means the chain was already consumed because the
			// base appeared later in the stream.
			if (baseId.head == null)
				continue;

			if (needBaseObjectIds)
				baseObjectIds.add(baseId);

			final ObjectLoader ldr;
			try {
				ldr = readCurs.open(baseId);
			} catch (MissingObjectException notFound) {
				// Defer the failure: the base may still resolve via another
				// object appended during this loop.
				missing.add(baseId);
				continue;
			}

			final DeltaVisit visit = new DeltaVisit();
			visit.data = ldr.getCachedBytes(Integer.MAX_VALUE);
			visit.id = baseId;
			final int typeCode = ldr.getType();
			final PackedObjectInfo oe = newInfo(baseId, null, null);

			// Subclass decides whether the appended base joins the index.
			if (onAppendBase(typeCode, visit.data, oe))
				entries[entryCount++] = oe;

			visit.nextChild = firstChildOf(oe);
			resolveDeltas(visit.next(), typeCode, new ObjectTypeAndSize());

			if (progress.isCancelled())
				throw new IOException(
						JGitText.get().downloadCancelledDuringIndexing);
		}

		// Anything still unresolved is a genuinely missing base object.
		for (final DeltaChain base : missing) {
			if (base.head != null)
				throw new MissingObjectException(base, "delta base");
		}

		onEndThinPack();
	}
+
+ private void growEntries(int extraObjects) {
+ final PackedObjectInfo[] ne;
+
+ ne = new PackedObjectInfo[(int) objectCount + extraObjects];
+ System.arraycopy(entries, 0, ne, 0, entryCount);
+ entries = ne;
+ }
+
+ private void readPackHeader() throws IOException {
+ final int hdrln = Constants.PACK_SIGNATURE.length + 4 + 4;
+ final int p = fill(Source.INPUT, hdrln);
+ for (int k = 0; k < Constants.PACK_SIGNATURE.length; k++)
+ if (buf[p + k] != Constants.PACK_SIGNATURE[k])
+ throw new IOException(JGitText.get().notAPACKFile);
+
+ final long vers = NB.decodeUInt32(buf, p + 4);
+ if (vers != 2 && vers != 3)
+ throw new IOException(MessageFormat.format(
+ JGitText.get().unsupportedPackVersion, vers));
+ objectCount = NB.decodeUInt32(buf, p + 8);
+ use(hdrln);
+ }
+
+ private void readPackFooter() throws IOException {
+ sync();
+ final byte[] actHash = packDigest.digest();
+
+ final int c = fill(Source.INPUT, 20);
+ final byte[] srcHash = new byte[20];
+ System.arraycopy(buf, c, srcHash, 0, 20);
+ use(20);
+
+ if (!Arrays.equals(actHash, srcHash))
+ throw new CorruptObjectException(
+ JGitText.get().corruptObjectPackfileChecksumIncorrect);
+
+ onPackFooter(srcHash);
+ }
+
	// Cleanup all resources associated with our input parsing.
	// Dropping the reference lets the stream be garbage collected; any
	// later attempt to read from Source.INPUT would fail fast on the null.
	private void endInput() {
		in = null;
	}
+
+ // Read one entire object or delta from the input.
+ private void indexOneObject() throws IOException {
+ final long streamPosition = streamPosition();
+
+ int hdrPtr = 0;
+ int c = readFrom(Source.INPUT);
+ hdrBuf[hdrPtr++] = (byte) c;
+
+ final int typeCode = (c >> 4) & 7;
+ long sz = c & 15;
+ int shift = 4;
+ while ((c & 0x80) != 0) {
+ c = readFrom(Source.INPUT);
+ hdrBuf[hdrPtr++] = (byte) c;
+ sz += (c & 0x7f) << shift;
+ shift += 7;
+ }
+
+ switch (typeCode) {
+ case Constants.OBJ_COMMIT:
+ case Constants.OBJ_TREE:
+ case Constants.OBJ_BLOB:
+ case Constants.OBJ_TAG:
+ onBeginWholeObject(streamPosition, typeCode, sz);
+ onObjectHeader(Source.INPUT, hdrBuf, 0, hdrPtr);
+ whole(streamPosition, typeCode, sz);
+ break;
+
+ case Constants.OBJ_OFS_DELTA: {
+ c = readFrom(Source.INPUT);
+ hdrBuf[hdrPtr++] = (byte) c;
+ long ofs = c & 127;
+ while ((c & 128) != 0) {
+ ofs += 1;
+ c = readFrom(Source.INPUT);
+ hdrBuf[hdrPtr++] = (byte) c;
+ ofs <<= 7;
+ ofs += (c & 127);
+ }
+ final long base = streamPosition - ofs;
+ onBeginOfsDelta(streamPosition, base, sz);
+ onObjectHeader(Source.INPUT, hdrBuf, 0, hdrPtr);
+ inflateAndSkip(Source.INPUT, sz);
+ UnresolvedDelta n = onEndDelta();
+ n.position = streamPosition;
+ n.next = baseByPos.put(base, n);
+ deltaCount++;
+ break;
+ }
+
+ case Constants.OBJ_REF_DELTA: {
+ c = fill(Source.INPUT, 20);
+ final ObjectId base = ObjectId.fromRaw(buf, c);
+ System.arraycopy(buf, c, hdrBuf, hdrPtr, 20);
+ hdrPtr += 20;
+ use(20);
+ DeltaChain r = baseById.get(base);
+ if (r == null) {
+ r = new DeltaChain(base);
+ baseById.add(r);
+ }
+ onBeginRefDelta(streamPosition, base, sz);
+ onObjectHeader(Source.INPUT, hdrBuf, 0, hdrPtr);
+ inflateAndSkip(Source.INPUT, sz);
+ UnresolvedDelta n = onEndDelta();
+ n.position = streamPosition;
+ r.add(n);
+ deltaCount++;
+ break;
+ }
+
+ default:
+ throw new IOException(MessageFormat.format(
+ JGitText.get().unknownObjectType, typeCode));
+ }
+ }
+
	// Hash and register one whole (non-delta) object from the input.
	// Blobs are streamed through the digest to avoid holding large content
	// in memory; all other types are small enough to inflate fully.
	private void whole(final long pos, final int type, final long sz)
			throws IOException {
		// Canonical object header: "<type> <size>\0".
		objectDigest.update(Constants.encodedTypeString(type));
		objectDigest.update((byte) ' ');
		objectDigest.update(Constants.encodeASCII(sz));
		objectDigest.update((byte) 0);

		boolean checkContentLater = false;
		if (type == Constants.OBJ_BLOB) {
			byte[] readBuffer = buffer();
			InputStream inf = inflate(Source.INPUT, sz);
			long cnt = 0;
			while (cnt < sz) {
				int r = inf.read(readBuffer);
				if (r <= 0)
					break;
				objectDigest.update(readBuffer, 0, r);
				cnt += r;
			}
			inf.close();
			tempObjectId.fromRaw(objectDigest.digest(), 0);
			// If an object of this name already exists locally we must
			// compare contents, but the blob data is gone now; defer the
			// byte-wise collision check until the stream is fully read.
			checkContentLater = readCurs.has(tempObjectId);

		} else {
			final byte[] data = inflateAndReturn(Source.INPUT, sz);
			objectDigest.update(data);
			tempObjectId.fromRaw(objectDigest.digest(), 0);
			verifySafeObject(tempObjectId, type, data);
		}

		PackedObjectInfo obj = newInfo(tempObjectId, null, null);
		obj.setOffset(pos);
		onEndWholeObject(obj);
		addObjectAndTrack(obj);
		if (checkContentLater)
			deferredCheckBlobs.add(obj);
	}
+
+ private void verifySafeObject(final AnyObjectId id, final int type,
+ final byte[] data) throws IOException {
+ if (objCheck != null) {
+ try {
+ objCheck.check(type, data);
+ } catch (CorruptObjectException e) {
+ throw new IOException(MessageFormat.format(
+ JGitText.get().invalidObject, Constants
+ .typeString(type), id.name(), e.getMessage()));
+ }
+ }
+
+ try {
+ final ObjectLoader ldr = readCurs.open(id, type);
+ final byte[] existingData = ldr.getCachedBytes(data.length);
+ if (!Arrays.equals(data, existingData)) {
+ throw new IOException(MessageFormat.format(
+ JGitText.get().collisionOn, id.name()));
+ }
+ } catch (MissingObjectException notLocal) {
+ // This is OK, we don't have a copy of the object locally
+ // but the API throws when we try to read it as usually its
+ // an error to read something that doesn't exist.
+ }
+ }
+
	// Byte-wise compare each deferred blob against the local object of the
	// same name. Blobs land here when whole() detected a name collision but
	// had already streamed the content through the digest (see whole()).
	private void doDeferredCheckBlobs() throws IOException {
		final byte[] readBuffer = buffer();
		final byte[] curBuffer = new byte[readBuffer.length];
		ObjectTypeAndSize info = new ObjectTypeAndSize();

		for (PackedObjectInfo obj : deferredCheckBlobs) {
			// Re-read the blob's data from the pack we just stored.
			info = openDatabase(obj, info);

			if (info.type != Constants.OBJ_BLOB)
				throw new IOException(MessageFormat.format(
						JGitText.get().unknownObjectType, info.type));

			ObjectStream cur = readCurs.open(obj, info.type).openStream();
			try {
				long sz = info.size;
				// A size mismatch is already proof of a collision.
				if (cur.getSize() != sz)
					throw new IOException(MessageFormat.format(
							JGitText.get().collisionOn, obj.name()));
				InputStream pck = inflate(Source.DATABASE, sz);
				while (0 < sz) {
					int n = (int) Math.min(readBuffer.length, sz);
					IO.readFully(cur, curBuffer, 0, n);
					IO.readFully(pck, readBuffer, 0, n);
					for (int i = 0; i < n; i++) {
						if (curBuffer[i] != readBuffer[i])
							throw new IOException(MessageFormat.format(JGitText
									.get().collisionOn, obj.name()));
					}
					sz -= n;
				}
				pck.close();
			} finally {
				cur.close();
			}
		}
	}
+
+ /** @return current position of the input stream being parsed. */
+ private long streamPosition() {
+ return bBase + bOffset;
+ }
+
+ private ObjectTypeAndSize openDatabase(PackedObjectInfo obj,
+ ObjectTypeAndSize info) throws IOException {
+ bOffset = 0;
+ bAvail = 0;
+ return seekDatabase(obj, info);
+ }
+
+ private ObjectTypeAndSize openDatabase(UnresolvedDelta delta,
+ ObjectTypeAndSize info) throws IOException {
+ bOffset = 0;
+ bAvail = 0;
+ return seekDatabase(delta, info);
+ }
+
+ // Consume exactly one byte from the buffer and return it.
+ private int readFrom(final Source src) throws IOException {
+ if (bAvail == 0)
+ fill(src, 1);
+ bAvail--;
+ return buf[bOffset++] & 0xff;
+ }
+
+ // Consume cnt bytes from the buffer.
+ private void use(final int cnt) {
+ bOffset += cnt;
+ bAvail -= cnt;
+ }
+
	// Ensure at least need bytes are available in {@link #buf}.
	// Returns the offset of the first available byte. When the free tail of
	// the buffer is too small, INPUT data is flushed via sync() (feeding the
	// pack digest), while DATABASE data is simply compacted to the front.
	private int fill(final Source src, final int need) throws IOException {
		while (bAvail < need) {
			int next = bOffset + bAvail;
			int free = buf.length - next;
			if (free + bAvail < need) {
				switch (src) {
				case INPUT:
					sync();
					break;
				case DATABASE:
					if (bAvail > 0)
						System.arraycopy(buf, bOffset, buf, 0, bAvail);
					bOffset = 0;
					break;
				}
				next = bAvail;
				free = buf.length - next;
			}
			switch (src) {
			case INPUT:
				next = in.read(buf, next, free);
				break;
			case DATABASE:
				next = readDatabase(buf, next, free);
				break;
			}
			// Either source returning no data means the pack ended early.
			if (next <= 0)
				throw new EOFException(JGitText.get().packfileIsTruncated);
			bAvail += next;
		}
		return bOffset;
	}
+
	// Flush consumed bytes in {@link #buf} up to {@link #bOffset}: feed them
	// into the pack digest and the subclass's archive, slide any unconsumed
	// bytes to the front, and advance the absolute stream base accordingly.
	private void sync() throws IOException {
		packDigest.update(buf, 0, bOffset);
		onStoreStream(buf, 0, bOffset);
		if (bAvail > 0)
			System.arraycopy(buf, bOffset, buf, 0, bAvail);
		bBase += bOffset;
		bOffset = 0;
	}
+
+ /** @return a temporary byte array for use by the caller. */
+ protected byte[] buffer() {
+ return tempBuffer;
+ }
+
+ /**
+ * Construct a PackedObjectInfo instance for this parser.
+ *
+ * @param id
+ * identity of the object to be tracked.
+ * @param delta
+ * if the object was previously an unresolved delta, this is the
+ * delta object that was tracking it. Otherwise null.
+ * @param deltaBase
+ * if the object was previously an unresolved delta, this is the
+ * ObjectId of the base of the delta. The base may be outside of
+ * the pack stream if the stream was a thin-pack.
+ * @return info object containing this object's data.
+ */
+ protected PackedObjectInfo newInfo(AnyObjectId id, UnresolvedDelta delta,
+ ObjectId deltaBase) {
+ PackedObjectInfo oe = new PackedObjectInfo(id);
+ if (delta != null)
+ oe.setCRC(delta.crc);
+ return oe;
+ }
+
	/**
	 * Store bytes received from the raw stream.
	 * <p>
	 * This method is invoked during {@link #parse(ProgressMonitor)} as data is
	 * consumed from the incoming stream. Implementors may use this event to
	 * archive the raw incoming stream to the destination repository in large
	 * chunks, without paying attention to object boundaries.
	 * <p>
	 * The only component of the pack not supplied to this method is the last 20
	 * bytes of the pack that comprise the trailing SHA-1 checksum. Those are
	 * passed to {@link #onPackFooter(byte[])}.
	 *
	 * @param raw
	 *            buffer to copy data out of.
	 * @param pos
	 *            first offset within the buffer that is valid.
	 * @param len
	 *            number of bytes in the buffer that are valid.
	 * @throws IOException
	 *             the stream cannot be archived.
	 */
	protected abstract void onStoreStream(byte[] raw, int pos, int len)
			throws IOException;

	/**
	 * Store (and/or checksum) an object header.
	 * <p>
	 * Invoked after any of the {@code onBegin()} events. The entire header is
	 * supplied in a single invocation, before any object data is supplied.
	 *
	 * @param src
	 *            where the data came from.
	 * @param raw
	 *            buffer to read data from.
	 * @param pos
	 *            first offset within buffer that is valid.
	 * @param len
	 *            number of bytes in buffer that are valid.
	 * @throws IOException
	 *             the stream cannot be archived.
	 */
	protected abstract void onObjectHeader(Source src, byte[] raw, int pos,
			int len) throws IOException;

	/**
	 * Store (and/or checksum) a portion of an object's data.
	 * <p>
	 * This method may be invoked multiple times per object, depending on the
	 * size of the object, the size of the parser's internal read buffer, and
	 * the alignment of the object relative to the read buffer.
	 * <p>
	 * Invoked after {@link #onObjectHeader(Source, byte[], int, int)}.
	 *
	 * @param src
	 *            where the data came from.
	 * @param raw
	 *            buffer to read data from.
	 * @param pos
	 *            first offset within buffer that is valid.
	 * @param len
	 *            number of bytes in buffer that are valid.
	 * @throws IOException
	 *             the stream cannot be archived.
	 */
	protected abstract void onObjectData(Source src, byte[] raw, int pos,
			int len) throws IOException;

	/**
	 * Provide the implementation with the original stream's pack footer.
	 *
	 * @param hash
	 *            the trailing 20 bytes of the pack, this is a SHA-1 checksum of
	 *            all of the pack data.
	 * @throws IOException
	 *             the stream cannot be archived.
	 */
	protected abstract void onPackFooter(byte[] hash) throws IOException;
+
	/**
	 * Provide the implementation with a base that was outside of the pack.
	 * <p>
	 * This event only occurs on a thin pack for base objects that were outside
	 * of the pack and came from the local repository. Usually an implementation
	 * uses this event to compress the base and append it onto the end of the
	 * pack, so the pack stays self-contained.
	 *
	 * @param typeCode
	 *            type of the base object.
	 * @param data
	 *            complete content of the base object.
	 * @param info
	 *            packed object information for this base. Implementors must
	 *            populate the CRC and offset members if returning true.
	 * @return true if the {@code info} should be included in the object list
	 *         returned by {@link #getSortedObjectList(Comparator)}, false if it
	 *         should not be included.
	 * @throws IOException
	 *             the base could not be included into the pack.
	 */
	protected abstract boolean onAppendBase(int typeCode, byte[] data,
			PackedObjectInfo info) throws IOException;

	/**
	 * Event indicating a thin pack has been completely processed.
	 * <p>
	 * This event is invoked only if a thin pack has delta references to objects
	 * external from the pack. The event is called after all of those deltas
	 * have been resolved.
	 *
	 * @throws IOException
	 *             the pack cannot be archived.
	 */
	protected abstract void onEndThinPack() throws IOException;
+
	/**
	 * Reposition the database to re-read a previously stored object.
	 * <p>
	 * If the database is computing CRC-32 checksums for object data, it should
	 * reset its internal CRC instance during this method call.
	 *
	 * @param obj
	 *            the object position to begin reading from. This is from
	 *            {@link #newInfo(AnyObjectId, UnresolvedDelta, ObjectId)}.
	 * @param info
	 *            object to populate with type and size.
	 * @return the {@code info} object.
	 * @throws IOException
	 *             the database cannot reposition to this location.
	 */
	protected abstract ObjectTypeAndSize seekDatabase(PackedObjectInfo obj,
			ObjectTypeAndSize info) throws IOException;

	/**
	 * Reposition the database to re-read a previously stored object.
	 * <p>
	 * If the database is computing CRC-32 checksums for object data, it should
	 * reset its internal CRC instance during this method call.
	 *
	 * @param delta
	 *            the object position to begin reading from. This is an instance
	 *            previously returned by {@link #onEndDelta()}.
	 * @param info
	 *            object to populate with type and size.
	 * @return the {@code info} object.
	 * @throws IOException
	 *             the database cannot reposition to this location.
	 */
	protected abstract ObjectTypeAndSize seekDatabase(UnresolvedDelta delta,
			ObjectTypeAndSize info) throws IOException;

	/**
	 * Read from the database's current position into the buffer.
	 *
	 * @param dst
	 *            the buffer to copy read data into.
	 * @param pos
	 *            position within {@code dst} to start copying data into.
	 * @param cnt
	 *            ideal target number of bytes to read. Actual read length may
	 *            be shorter.
	 * @return number of bytes stored.
	 * @throws IOException
	 *             the database cannot be accessed.
	 */
	protected abstract int readDatabase(byte[] dst, int pos, int cnt)
			throws IOException;

	/**
	 * Check the current CRC matches the expected value.
	 * <p>
	 * This method is invoked when an object is read back in from the database
	 * and its data is used during delta resolution. The CRC is validated after
	 * the object has been fully read, allowing the parser to verify there was
	 * no silent data corruption.
	 * <p>
	 * Implementations are free to ignore this check by always returning true if
	 * they are performing other data integrity validations at a lower level.
	 *
	 * @param oldCRC
	 *            the prior CRC that was recorded during the first scan of the
	 *            object from the pack stream.
	 * @return true if the CRC matches; false if it does not.
	 */
	protected abstract boolean checkCRC(int oldCRC);
+
	/**
	 * Event notifying the start of an object stored whole (not as a delta).
	 *
	 * @param streamPosition
	 *            position of this object in the incoming stream.
	 * @param type
	 *            type of the object; one of {@link Constants#OBJ_COMMIT},
	 *            {@link Constants#OBJ_TREE}, {@link Constants#OBJ_BLOB}, or
	 *            {@link Constants#OBJ_TAG}.
	 * @param inflatedSize
	 *            size of the object when fully inflated. The size stored within
	 *            the pack may be larger or smaller, and is not yet known.
	 * @throws IOException
	 *             the object cannot be recorded.
	 */
	protected abstract void onBeginWholeObject(long streamPosition, int type,
			long inflatedSize) throws IOException;

	/**
	 * Event notifying the completion of the current whole object.
	 *
	 * @param info
	 *            object information.
	 * @throws IOException
	 *             the object cannot be recorded.
	 */
	protected abstract void onEndWholeObject(PackedObjectInfo info)
			throws IOException;

	/**
	 * Event notifying start of a delta referencing its base by offset.
	 *
	 * @param deltaStreamPosition
	 *            position of this object in the incoming stream.
	 * @param baseStreamPosition
	 *            position of the base object in the incoming stream. The base
	 *            must be before the delta, therefore {@code baseStreamPosition
	 *            < deltaStreamPosition}. This is <b>not</b> the position
	 *            returned by a prior end object event.
	 * @param inflatedSize
	 *            size of the delta when fully inflated. The size stored within
	 *            the pack may be larger or smaller, and is not yet known.
	 * @throws IOException
	 *             the object cannot be recorded.
	 */
	protected abstract void onBeginOfsDelta(long deltaStreamPosition,
			long baseStreamPosition, long inflatedSize) throws IOException;

	/**
	 * Event notifying start of a delta referencing its base by ObjectId.
	 *
	 * @param deltaStreamPosition
	 *            position of this object in the incoming stream.
	 * @param baseId
	 *            name of the base object. This object may be later in the
	 *            stream, or might not appear at all in the stream (in the case
	 *            of a thin-pack).
	 * @param inflatedSize
	 *            size of the delta when fully inflated. The size stored within
	 *            the pack may be larger or smaller, and is not yet known.
	 * @throws IOException
	 *             the object cannot be recorded.
	 */
	protected abstract void onBeginRefDelta(long deltaStreamPosition,
			AnyObjectId baseId, long inflatedSize) throws IOException;

	/**
	 * Event notifying the completion of the current delta.
	 *
	 * @return object information that must be populated with at least the
	 *         offset.
	 * @throws IOException
	 *             the object cannot be recorded.
	 */
	protected UnresolvedDelta onEndDelta() throws IOException {
		return new UnresolvedDelta();
	}
+
	/** Type and size information about an object in the database buffer. */
	public static class ObjectTypeAndSize {
		/** The type of the object. */
		public int type;

		/** The inflated size of the object. */
		public long size;
	}
+
+ private void inflateAndSkip(final Source src, final long inflatedSize)
+ throws IOException {
+ final InputStream inf = inflate(src, inflatedSize);
+ IO.skipFully(inf, inflatedSize);
+ inf.close();
+ }
+
+ private byte[] inflateAndReturn(final Source src, final long inflatedSize)
+ throws IOException {
+ final byte[] dst = new byte[(int) inflatedSize];
+ final InputStream inf = inflate(src, inflatedSize);
+ IO.readFully(inf, dst, 0, dst.length);
+ inf.close();
+ return dst;
+ }
+
+ private InputStream inflate(final Source src, final long inflatedSize)
+ throws IOException {
+ inflater.open(src, inflatedSize);
+ return inflater;
+ }
+
+ private static class DeltaChain extends ObjectId {
+ UnresolvedDelta head;
+
+ DeltaChain(final AnyObjectId id) {
+ super(id);
+ }
+
+ UnresolvedDelta remove() {
+ final UnresolvedDelta r = head;
+ if (r != null)
+ head = null;
+ return r;
+ }
+
+ void add(final UnresolvedDelta d) {
+ d.next = head;
+ head = d;
+ }
+ }
+
	/** Information about an unresolved delta in this pack stream. */
	public static class UnresolvedDelta {
		// Byte offset of the delta within the input stream.
		long position;

		// CRC-32 of the stored delta data, recorded on first scan.
		int crc;

		// Next delta sharing the same base, forming a singly linked list.
		UnresolvedDelta next;

		/** @return offset within the input stream. */
		public long getOffset() {
			return position;
		}

		/** @return the CRC-32 checksum of the stored delta data. */
		public int getCRC() {
			return crc;
		}

		/**
		 * @param crc32
		 *            the CRC-32 checksum of the stored delta data.
		 */
		public void setCRC(int crc32) {
			crc = crc32;
		}
	}
+
	// One frame of the explicit delta-resolution traversal. A chain of
	// DeltaVisit objects (linked through parent) replaces the call stack,
	// so arbitrarily deep delta chains cannot overflow it.
	private static class DeltaVisit {
		// The delta being applied at this frame; null only at the root.
		final UnresolvedDelta delta;

		// Identity of the object produced at this frame.
		ObjectId id;

		// Fully resolved content at this frame; base data for children.
		byte[] data;

		// Frame that supplied the base data for this delta.
		DeltaVisit parent;

		// Remaining deltas that use this frame's object as their base.
		UnresolvedDelta nextChild;

		DeltaVisit() {
			this.delta = null; // At the root of the stack we have a base.
		}

		DeltaVisit(DeltaVisit parent) {
			this.parent = parent;
			this.delta = parent.nextChild;
			// Pop this child from the parent's pending list.
			parent.nextChild = delta.next;
		}

		DeltaVisit next() {
			// If our parent has no more children, discard it.
			if (parent != null && parent.nextChild == null) {
				parent.data = null; // Free the base data promptly.
				parent = parent.parent;
			}

			if (nextChild != null)
				return new DeltaVisit(this);

			// If we have no child ourselves, our parent must (if it exists),
			// due to the discard rule above. With no parent, we are done.
			if (parent != null)
				return new DeltaVisit(parent);
			return null;
		}
	}
+
+ private void addObjectAndTrack(PackedObjectInfo oe) {
+ entries[entryCount++] = oe;
+ if (needNewObjectIds())
+ newObjectIds.add(oe);
+ }
+
	// Shared view over one deflated region of the parser's buffer. A single
	// instance is reused for every object; open() rebinds it to the next
	// region, and close() verifies the exact inflated length was consumed.
	private class InflaterStream extends InputStream {
		private final Inflater inf;

		private final byte[] skipBuffer;

		// Which parser buffer source is feeding the inflater.
		private Source src;

		// Inflated byte count the pack header promised for this region.
		private long expectedSize;

		// Inflated bytes produced so far; must equal expectedSize at close.
		private long actualSize;

		// Offset in the parser buffer of the bytes given to the inflater.
		private int p;

		InflaterStream() {
			// Inflaters are pooled process-wide; borrow one for our lifetime.
			inf = InflaterCache.get();
			skipBuffer = new byte[512];
		}

		void release() {
			inf.reset();
			InflaterCache.release(inf);
		}

		// Rebind this stream to the next deflated region.
		void open(Source source, long inflatedSize) throws IOException {
			src = source;
			expectedSize = inflatedSize;
			actualSize = 0;

			p = fill(src, 1);
			inf.setInput(buf, p, bAvail);
		}

		@Override
		public long skip(long toSkip) throws IOException {
			long n = 0;
			while (n < toSkip) {
				final int cnt = (int) Math.min(skipBuffer.length, toSkip - n);
				final int r = read(skipBuffer, 0, cnt);
				if (r <= 0)
					break;
				n += r;
			}
			return n;
		}

		@Override
		public int read() throws IOException {
			int n = read(skipBuffer, 0, 1);
			return n == 1 ? skipBuffer[0] & 0xff : -1;
		}

		@Override
		public int read(byte[] dst, int pos, int cnt) throws IOException {
			try {
				int n = 0;
				while (n < cnt) {
					int r = inf.inflate(dst, pos + n, cnt - n);
					if (r == 0) {
						if (inf.finished())
							break;
						if (inf.needsInput()) {
							// The inflater drained its window: forward the
							// consumed bytes to the subclass, then refill.
							onObjectData(src, buf, p, bAvail);
							use(bAvail);

							p = fill(src, 1);
							inf.setInput(buf, p, bAvail);
						} else {
							throw new CorruptObjectException(
									MessageFormat
											.format(
													JGitText.get().packfileCorruptionDetected,
													JGitText.get().unknownZlibError));
						}
					} else {
						n += r;
					}
				}
				actualSize += n;
				return 0 < n ? n : -1;
			} catch (DataFormatException dfe) {
				throw new CorruptObjectException(MessageFormat.format(JGitText
						.get().packfileCorruptionDetected, dfe.getMessage()));
			}
		}

		@Override
		public void close() throws IOException {
			// We need to read here to enter the loop above and pump the
			// trailing checksum into the Inflater. It should return -1 as the
			// caller was supposed to consume all content.
			//
			if (read(skipBuffer) != -1 || actualSize != expectedSize) {
				throw new CorruptObjectException(MessageFormat.format(JGitText
						.get().packfileCorruptionDetected,
						JGitText.get().wrongDecompressedLength));
			}

			// Account for deflate bytes the inflater consumed but the
			// parser buffer has not yet marked as used.
			int used = bAvail - inf.getRemaining();
			if (0 < used) {
				onObjectData(src, buf, p, used);
				use(used);
			}

			inf.reset();
		}
	}
+}
import org.eclipse.jgit.errors.PackProtocolException;
import org.eclipse.jgit.errors.UnpackException;
import org.eclipse.jgit.lib.Config;
+import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdSubclassMap;
+import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.Repository;
-import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.revwalk.ObjectWalk;
import org.eclipse.jgit.revwalk.RevBlob;
import org.eclipse.jgit.revwalk.RevCommit;
private Writer msgs;
- private IndexPack ip;
+ private PackParser parser;
/** The refs we advertised as existing at the start of the connection. */
private Map<String, Ref> refs;
receivePack();
if (needCheckConnectivity())
checkConnectivity();
- ip = null;
+ parser = null;
unpackError = null;
} catch (IOException err) {
unpackError = err;
if (timeoutIn != null)
timeoutIn.setTimeout(10 * timeout * 1000);
- ip = IndexPack.create(db, rawIn);
- ip.setFixThin(true);
- ip.setNeedNewObjectIds(checkReferencedIsReachable);
- ip.setNeedBaseObjectIds(checkReferencedIsReachable);
- ip.setObjectChecking(isCheckReceivedObjects());
- ip.index(NullProgressMonitor.INSTANCE);
-
- String lockMsg = "jgit receive-pack";
- if (getRefLogIdent() != null)
- lockMsg += " from " + getRefLogIdent().toExternalString();
- packLock = ip.renameAndOpenPack(lockMsg);
+ ObjectInserter ins = db.newObjectInserter();
+ try {
+ String lockMsg = "jgit receive-pack";
+ if (getRefLogIdent() != null)
+ lockMsg += " from " + getRefLogIdent().toExternalString();
+
+ parser = ins.newPackParser(rawIn);
+ parser.setAllowThin(true);
+ parser.setNeedNewObjectIds(checkReferencedIsReachable);
+ parser.setNeedBaseObjectIds(checkReferencedIsReachable);
+ parser.setObjectChecking(isCheckReceivedObjects());
+ parser.setLockMessage(lockMsg);
+ packLock = parser.parse(NullProgressMonitor.INSTANCE);
+ ins.flush();
+ } finally {
+ ins.release();
+ }
if (timeoutIn != null)
timeoutIn.setTimeout(timeout * 1000);
ObjectIdSubclassMap<ObjectId> providedObjects = null;
if (checkReferencedIsReachable) {
- baseObjects = ip.getBaseObjectIds();
- providedObjects = ip.getNewObjectIds();
+ baseObjects = parser.getBaseObjectIds();
+ providedObjects = parser.getNewObjectIds();
}
- ip = null;
+ parser = null;
final ObjectWalk ow = new ObjectWalk(db);
ow.setRetainBody(false);
}
void downloadPack(final ProgressMonitor monitor) throws IOException {
- final WalkRemoteObjectDatabase.FileStream s;
- final IndexPack ip;
-
- s = connection.open("pack/" + packName);
- ip = IndexPack.create(local, s.in);
- ip.setFixThin(false);
- ip.setObjectChecker(objCheck);
- ip.index(monitor);
- final PackLock keep = ip.renameAndOpenPack(lockMessage);
- if (keep != null)
- packLocks.add(keep);
+ // Stream the remote pack through the repository's ObjectInserter
+ // instead of the legacy IndexPack helper.
+ // NOTE(review): s.in does not appear to be closed on this path --
+ // confirm FileStream/PackParser takes ownership of the stream.
+ String name = "pack/" + packName;
+ WalkRemoteObjectDatabase.FileStream s = connection.open(name);
+ PackParser parser = inserter.newPackParser(s.in);
+ // Walk-based fetch downloads whole packs, never thin packs.
+ parser.setAllowThin(false);
+ parser.setObjectChecker(objCheck);
+ parser.setLockMessage(lockMessage);
+ PackLock lock = parser.parse(monitor);
+ if (lock != null)
+ packLocks.add(lock);
+ // Publish the newly parsed objects to readers.
+ inserter.flush();
}
}
}