--- /dev/null
+/*
+ * Copyright (C) 2010, Stefan Lay <stefan.lay@sap.com>
+ * Copyright (C) 2010, Christian Halstrick <christian.halstrick@sap.com>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.api;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import org.eclipse.jgit.dircache.DirCache;
+import org.eclipse.jgit.dircache.DirCacheBuilder;
+import org.eclipse.jgit.dircache.DirCacheEntry;
+import org.eclipse.jgit.lib.FileMode;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectWriter;
+import org.eclipse.jgit.lib.RepositoryTestCase;
+
+public class AddCommandTest extends RepositoryTestCase {
+
+ public void testAddNothing() {
+ Git git = new Git(db);
+
+ try {
+ git.add().call();
+			fail("Expected NoFilepatternException");
+ } catch (NoFilepatternException e) {
+ // expected
+ }
+
+ }
+
+ public void testAddNonExistingSingleFile() throws NoFilepatternException {
+ Git git = new Git(db);
+
+ DirCache dc = git.add().addFilepattern("a.txt").call();
+ assertEquals(0, dc.getEntryCount());
+
+ }
+
+ public void testAddExistingSingleFile() throws IOException, NoFilepatternException {
-		File file = new File(db.getWorkDir(), "a.txt");
++		File file = new File(db.getWorkTree(), "a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
+ Git git = new Git(db);
+
+ DirCache dc = git.add().addFilepattern("a.txt").call();
+
+ assertEquals(1, dc.getEntryCount());
+ assertEquals("a.txt", dc.getEntry(0).getPathString());
+ assertNotNull(dc.getEntry(0).getObjectId());
+ assertEquals(file.lastModified(), dc.getEntry(0).getLastModified());
+ assertEquals(file.length(), dc.getEntry(0).getLength());
+ assertEquals(FileMode.REGULAR_FILE, dc.getEntry(0).getFileMode());
+ assertEquals(0, dc.getEntry(0).getStage());
+ }
+
+ public void testAddExistingSingleFileInSubDir() throws IOException, NoFilepatternException {
-		new File(db.getWorkDir(), "sub").mkdir();
-		File file = new File(db.getWorkDir(), "sub/a.txt");
++		new File(db.getWorkTree(), "sub").mkdir();
++ File file = new File(db.getWorkTree(), "sub/a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
+ Git git = new Git(db);
+
+ DirCache dc = git.add().addFilepattern("sub/a.txt").call();
+
+ assertEquals(1, dc.getEntryCount());
+ assertEquals("sub/a.txt", dc.getEntry(0).getPathString());
+ assertNotNull(dc.getEntry(0).getObjectId());
+ assertEquals(file.lastModified(), dc.getEntry(0).getLastModified());
+ assertEquals(file.length(), dc.getEntry(0).getLength());
+ assertEquals(FileMode.REGULAR_FILE, dc.getEntry(0).getFileMode());
+ assertEquals(0, dc.getEntry(0).getStage());
+ }
+
+ public void testAddExistingSingleFileTwice() throws IOException, NoFilepatternException {
-		File file = new File(db.getWorkDir(), "a.txt");
++		File file = new File(db.getWorkTree(), "a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
+ Git git = new Git(db);
+ DirCache dc = git.add().addFilepattern("a.txt").call();
+
+ ObjectId id1 = dc.getEntry(0).getObjectId();
+
+ writer = new PrintWriter(file);
+ writer.print("other content");
+ writer.close();
+
+ dc = git.add().addFilepattern("a.txt").call();
+
+ assertEquals(1, dc.getEntryCount());
+ assertEquals("a.txt", dc.getEntry(0).getPathString());
+ assertNotSame(id1, dc.getEntry(0).getObjectId());
+ assertEquals(0, dc.getEntry(0).getStage());
+ }
+
+ public void testAddExistingSingleFileTwiceWithCommit() throws Exception {
-		File file = new File(db.getWorkDir(), "a.txt");
++		File file = new File(db.getWorkTree(), "a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
+ Git git = new Git(db);
+ DirCache dc = git.add().addFilepattern("a.txt").call();
+
+ ObjectId id1 = dc.getEntry(0).getObjectId();
+
+ git.commit().setMessage("commit a.txt").call();
+
+ writer = new PrintWriter(file);
+ writer.print("other content");
+ writer.close();
+
+ dc = git.add().addFilepattern("a.txt").call();
+
+ assertEquals(1, dc.getEntryCount());
+ assertEquals("a.txt", dc.getEntry(0).getPathString());
+ assertNotSame(id1, dc.getEntry(0).getObjectId());
+ assertEquals(0, dc.getEntry(0).getStage());
+ }
+
+ public void testAddRemovedFile() throws Exception {
-		File file = new File(db.getWorkDir(), "a.txt");
++		File file = new File(db.getWorkTree(), "a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
+ Git git = new Git(db);
+ DirCache dc = git.add().addFilepattern("a.txt").call();
+
+ ObjectId id1 = dc.getEntry(0).getObjectId();
+ file.delete();
+
+ // is supposed to do nothing
+ dc = git.add().addFilepattern("a.txt").call();
+
+ assertEquals(1, dc.getEntryCount());
+ assertEquals("a.txt", dc.getEntry(0).getPathString());
+ assertEquals(id1, dc.getEntry(0).getObjectId());
+ assertEquals(0, dc.getEntry(0).getStage());
+ }
+
+ public void testAddRemovedCommittedFile() throws Exception {
-		File file = new File(db.getWorkDir(), "a.txt");
++		File file = new File(db.getWorkTree(), "a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
+ Git git = new Git(db);
+ DirCache dc = git.add().addFilepattern("a.txt").call();
+
+ git.commit().setMessage("commit a.txt").call();
+
+ ObjectId id1 = dc.getEntry(0).getObjectId();
+ file.delete();
+
+ // is supposed to do nothing
+ dc = git.add().addFilepattern("a.txt").call();
+
+ assertEquals(1, dc.getEntryCount());
+ assertEquals("a.txt", dc.getEntry(0).getPathString());
+ assertEquals(id1, dc.getEntry(0).getObjectId());
+ assertEquals(0, dc.getEntry(0).getStage());
+ }
+
+ public void testAddWithConflicts() throws Exception {
+ // prepare conflict
+
-		File file = new File(db.getWorkDir(), "a.txt");
++		File file = new File(db.getWorkTree(), "a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
-		File file2 = new File(db.getWorkDir(), "b.txt");
++		File file2 = new File(db.getWorkTree(), "b.txt");
+ file2.createNewFile();
+ writer = new PrintWriter(file2);
+ writer.print("content b");
+ writer.close();
+
+ ObjectWriter ow = new ObjectWriter(db);
-		DirCache dc = DirCache.lock(db);
++		DirCache dc = db.lockDirCache();
+ DirCacheBuilder builder = dc.builder();
+
+ addEntryToBuilder("b.txt", file2, ow, builder, 0);
+ addEntryToBuilder("a.txt", file, ow, builder, 1);
+
+ writer = new PrintWriter(file);
+ writer.print("other content");
+ writer.close();
+ addEntryToBuilder("a.txt", file, ow, builder, 3);
+
+ writer = new PrintWriter(file);
+ writer.print("our content");
+ writer.close();
+ ObjectId id1 = addEntryToBuilder("a.txt", file, ow, builder, 2)
+ .getObjectId();
+
+ builder.commit();
+
+ assertEquals(4, dc.getEntryCount());
+
+ // now the test begins
+
+ Git git = new Git(db);
+ dc = git.add().addFilepattern("a.txt").call();
+
+ assertEquals(2, dc.getEntryCount());
+ assertEquals("a.txt", dc.getEntry("a.txt").getPathString());
+ assertEquals(id1, dc.getEntry("a.txt").getObjectId());
+ assertEquals(0, dc.getEntry("a.txt").getStage());
+ assertEquals(0, dc.getEntry("b.txt").getStage());
+ }
+
+ public void testAddTwoFiles() throws Exception {
-		File file = new File(db.getWorkDir(), "a.txt");
++		File file = new File(db.getWorkTree(), "a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
-		File file2 = new File(db.getWorkDir(), "b.txt");
++		File file2 = new File(db.getWorkTree(), "b.txt");
+ file2.createNewFile();
+ writer = new PrintWriter(file2);
+ writer.print("content b");
+ writer.close();
+
+ Git git = new Git(db);
+ DirCache dc = git.add().addFilepattern("a.txt").addFilepattern("b.txt").call();
+ assertEquals("a.txt", dc.getEntry("a.txt").getPathString());
+ assertEquals("b.txt", dc.getEntry("b.txt").getPathString());
+ assertNotNull(dc.getEntry("a.txt").getObjectId());
+ assertNotNull(dc.getEntry("b.txt").getObjectId());
+ assertEquals(0, dc.getEntry("a.txt").getStage());
+ assertEquals(0, dc.getEntry("b.txt").getStage());
+ }
+
+ public void testAddFolder() throws Exception {
-		new File(db.getWorkDir(), "sub").mkdir();
-		File file = new File(db.getWorkDir(), "sub/a.txt");
++		new File(db.getWorkTree(), "sub").mkdir();
++ File file = new File(db.getWorkTree(), "sub/a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
-		File file2 = new File(db.getWorkDir(), "sub/b.txt");
++		File file2 = new File(db.getWorkTree(), "sub/b.txt");
+ file2.createNewFile();
+ writer = new PrintWriter(file2);
+ writer.print("content b");
+ writer.close();
+
+ Git git = new Git(db);
+ DirCache dc = git.add().addFilepattern("sub").call();
+ assertEquals("sub/a.txt", dc.getEntry("sub/a.txt").getPathString());
+ assertEquals("sub/b.txt", dc.getEntry("sub/b.txt").getPathString());
+ assertNotNull(dc.getEntry("sub/a.txt").getObjectId());
+ assertNotNull(dc.getEntry("sub/b.txt").getObjectId());
+ assertEquals(0, dc.getEntry("sub/a.txt").getStage());
+ assertEquals(0, dc.getEntry("sub/b.txt").getStage());
+ }
+
+ public void testAddIgnoredFile() throws Exception {
-		new File(db.getWorkDir(), "sub").mkdir();
-		File file = new File(db.getWorkDir(), "sub/a.txt");
++		new File(db.getWorkTree(), "sub").mkdir();
++ File file = new File(db.getWorkTree(), "sub/a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
-		File ignoreFile = new File(db.getWorkDir(), ".gitignore");
++		File ignoreFile = new File(db.getWorkTree(), ".gitignore");
+ ignoreFile.createNewFile();
+ writer = new PrintWriter(ignoreFile);
+ writer.print("sub/b.txt");
+ writer.close();
+
-		File file2 = new File(db.getWorkDir(), "sub/b.txt");
++		File file2 = new File(db.getWorkTree(), "sub/b.txt");
+ file2.createNewFile();
+ writer = new PrintWriter(file2);
+ writer.print("content b");
+ writer.close();
+
+ Git git = new Git(db);
+ DirCache dc = git.add().addFilepattern("sub").call();
+ assertEquals("sub/a.txt", dc.getEntry("sub/a.txt").getPathString());
+ assertNull(dc.getEntry("sub/b.txt"));
+ assertNotNull(dc.getEntry("sub/a.txt").getObjectId());
+ assertEquals(0, dc.getEntry("sub/a.txt").getStage());
+ }
+
+ public void testAddWholeRepo() throws Exception {
-		new File(db.getWorkDir(), "sub").mkdir();
-		File file = new File(db.getWorkDir(), "sub/a.txt");
++		new File(db.getWorkTree(), "sub").mkdir();
++ File file = new File(db.getWorkTree(), "sub/a.txt");
+ file.createNewFile();
+ PrintWriter writer = new PrintWriter(file);
+ writer.print("content");
+ writer.close();
+
-		File file2 = new File(db.getWorkDir(), "sub/b.txt");
++		File file2 = new File(db.getWorkTree(), "sub/b.txt");
+ file2.createNewFile();
+ writer = new PrintWriter(file2);
+ writer.print("content b");
+ writer.close();
+
+ Git git = new Git(db);
+ DirCache dc = git.add().addFilepattern(".").call();
+ assertEquals("sub/a.txt", dc.getEntry("sub/a.txt").getPathString());
+ assertEquals("sub/b.txt", dc.getEntry("sub/b.txt").getPathString());
+ }
+
+ private DirCacheEntry addEntryToBuilder(String path, File file,
+ ObjectWriter ow, DirCacheBuilder builder, int stage)
+ throws IOException {
+ ObjectId id = ow.writeBlob(file);
+ DirCacheEntry entry = new DirCacheEntry(path, stage);
+ entry.setObjectId(id);
+ entry.setFileMode(FileMode.REGULAR_FILE);
+ entry.setLastModified(file.lastModified());
+ entry.setLength((int) file.length());
+
+ builder.add(entry);
+ return entry;
+ }
+
+}
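The fixtures above repeat the same create-and-write sequence for every test file. A hypothetical helper (illustrative only, not part of the patch; it would live inside this RepositoryTestCase subclass) could collapse that setup to a single call such as File file = writeFile("sub/a.txt", "content"):

	// Hypothetical helper: creates a file under the work tree and writes its content.
	private File writeFile(String path, String content) throws IOException {
		File file = new File(db.getWorkTree(), path);
		file.getParentFile().mkdirs();
		file.createNewFile();
		PrintWriter writer = new PrintWriter(file);
		writer.print(content);
		writer.close();
		return file;
	}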
--- /dev/null
+/*
+ * Copyright (C) 2010, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.diff;
+
++import java.io.ByteArrayInputStream;
++import java.io.IOException;
++
+import junit.framework.TestCase;
+
+import org.eclipse.jgit.lib.Constants;
+
+public class SimilarityIndexTest extends TestCase {
-	public void testIndexing() {
++	public void testIndexingSmallObject() {
+ SimilarityIndex si = hash("" //
+ + "A\n" //
+ + "B\n" //
+ + "D\n" //
+ + "B\n" //
+ );
+
+ int key_A = keyFor("A\n");
+ int key_B = keyFor("B\n");
+ int key_D = keyFor("D\n");
+ assertTrue(key_A != key_B && key_A != key_D && key_B != key_D);
+
+ assertEquals(3, si.size());
+ assertEquals(2, si.count(si.findIndex(key_A)));
+ assertEquals(4, si.count(si.findIndex(key_B)));
+ assertEquals(2, si.count(si.findIndex(key_D)));
+ }
+
++ public void testIndexingLargeObject() throws IOException {
++ byte[] in = ("" //
++ + "A\n" //
++ + "B\n" //
++ + "B\n" //
++ + "B\n").getBytes("UTF-8");
++ SimilarityIndex si = new SimilarityIndex();
++ si.hash(new ByteArrayInputStream(in), in.length);
++ assertEquals(2, si.size());
++ }
++
+ public void testCommonScore_SameFiles() {
+ String text = "" //
+ + "A\n" //
+ + "B\n" //
+ + "D\n" //
+ + "B\n";
+ SimilarityIndex src = hash(text);
+ SimilarityIndex dst = hash(text);
+ assertEquals(8, src.common(dst));
+ assertEquals(8, dst.common(src));
+
+ assertEquals(100, src.score(dst, 100));
+ assertEquals(100, dst.score(src, 100));
+ }
+
+ public void testCommonScore_EmptyFiles() {
+ SimilarityIndex src = hash("");
+ SimilarityIndex dst = hash("");
+ assertEquals(0, src.common(dst));
+ assertEquals(0, dst.common(src));
+ }
+
+ public void testCommonScore_TotallyDifferentFiles() {
+ SimilarityIndex src = hash("A\n");
+ SimilarityIndex dst = hash("D\n");
+ assertEquals(0, src.common(dst));
+ assertEquals(0, dst.common(src));
+ }
+
+ public void testCommonScore_SimiliarBy75() {
+ SimilarityIndex src = hash("A\nB\nC\nD\n");
+ SimilarityIndex dst = hash("A\nB\nC\nQ\n");
+ assertEquals(6, src.common(dst));
+ assertEquals(6, dst.common(src));
+
+ assertEquals(75, src.score(dst, 100));
+ assertEquals(75, dst.score(src, 100));
+ }
+
+ private static SimilarityIndex hash(String text) {
+ SimilarityIndex src = new SimilarityIndex() {
+ @Override
+ void hash(byte[] raw, int ptr, final int end) {
+ while (ptr < end) {
+ int hash = raw[ptr] & 0xff;
+ int start = ptr;
+ do {
+ int c = raw[ptr++] & 0xff;
+ if (c == '\n')
+ break;
+ } while (ptr < end && ptr - start < 64);
+ add(hash, ptr - start);
+ }
+ }
+ };
+ byte[] raw = Constants.encode(text);
+ src.setFileSize(raw.length);
+ src.hash(raw, 0, raw.length);
+ src.sort();
+ return src;
+ }
+
+ private static int keyFor(String line) {
+ SimilarityIndex si = hash(line);
+ assertEquals("single line scored", 1, si.size());
+ return si.key(0);
+ }
+}
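The 75% result above follows directly from the scoring arithmetic: both files hash to 8 bytes of line content and share 6 of them ("A\n", "B\n", "C\n"). A worked sketch of that arithmetic, assuming the score is computed as the common byte count scaled by the larger file size:

	// Worked example for the 75% case, assuming score = common * maxScore / max(sizeA, sizeB).
	public void testScoreArithmeticSketch() {
		int sizeA = "A\nB\nC\nD\n".length(); // 8 bytes
		int sizeB = "A\nB\nC\nQ\n".length(); // 8 bytes
		int common = 6;                      // "A\n" + "B\n" + "C\n" occur in both files
		assertEquals(75, common * 100 / Math.max(sizeA, sizeB));
	}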
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-
package org.eclipse.jgit.lib;
- import java.io.ByteArrayInputStream;
import java.io.File;
+import java.io.FileInputStream;
import java.io.IOException;
- import java.io.InputStream;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
import org.eclipse.jgit.errors.CheckoutConflictException;
void checkout() throws IOException;
}
- /**
- * Return the current implementation of the {@link Checkout} interface.
- * <p>
- * May be overridden by subclasses which would inherit all tests but can
- * specify their own implementation of a Checkout
- *
- * @param head
- * @param index
- * @param merge
- * @return the current implementation of {@link Checkout}
- */
- protected Checkout getCheckoutImpl(Tree head, GitIndex index,
- Tree merge) {
- return new WorkdirCheckoutImpl(head, index, merge);
- }
-
- /**
- * An implementation of the {@link Checkout} interface which uses {@link WorkDirCheckout}
- */
- class WorkdirCheckoutImpl extends WorkDirCheckout implements Checkout {
- public WorkdirCheckoutImpl(Tree head, GitIndex index,
- Tree merge) {
- super(db, trash, head, index, merge);
+ public void assertWorkDir(HashMap<String, String> i)
+ throws CorruptObjectException, IOException {
+ TreeWalk walk = new TreeWalk(db);
+ walk.reset();
+ walk.setRecursive(true);
+ walk.addTree(new FileTreeIterator(db));
+ String expectedValue;
+ String path;
+ int nrFiles = 0;
+ FileTreeIterator ft;
+ while (walk.next()) {
+ ft = walk.getTree(0, FileTreeIterator.class);
+ path = ft.getEntryPathString();
+ expectedValue = i.get(path);
+ assertNotNull("found unexpected file for path "
+ + path + " in workdir", expectedValue);
- File file = new File(db.getWorkDir(), path);
++ File file = new File(db.getWorkTree(), path);
+ assertTrue(file.exists());
+ if (file.isFile()) {
+ FileInputStream is = new FileInputStream(file);
+ byte[] buffer = new byte[(int) file.length()];
+ int offset = 0;
+ int numRead = 0;
+ while (offset < buffer.length
+ && (numRead = is.read(buffer, offset, buffer.length
+ - offset)) >= 0) {
+ offset += numRead;
+ }
+ is.close();
+ assertTrue("unexpected content for path " + path
+ + " in workDir. Expected: <" + expectedValue + ">",
+ Arrays.equals(buffer, i.get(path).getBytes()));
+ nrFiles++;
+ }
}
-
- public HashMap<String, ObjectId> updated() {
- return updated;
- }
-
- public ArrayList<String> conflicts() {
- return conflicts;
- }
-
- public ArrayList<String> removed() {
- return removed;
- }
-
- public void prescanTwoTrees() throws IOException {
- super.prescanTwoTrees();
+		assertEquals("WorkDir does not have the expected size.", i.size(), nrFiles);
+ }
+
+
+ public void assertIndex(HashMap<String, String> i)
+ throws CorruptObjectException, IOException {
+ String expectedValue;
+ String path;
+ GitIndex theIndex=db.getIndex();
+		assertEquals("Index does not have the expected size.", i.size(),
+ theIndex.getMembers().length);
+ for (int j = 0; j < theIndex.getMembers().length; j++) {
+ path = theIndex.getMembers()[j].getName();
+ expectedValue = i.get(path);
+ assertNotNull("found unexpected entry for path " + path
+ + " in index", expectedValue);
+ assertTrue("unexpected content for path " + path
+ + " in index. Expected: <" + expectedValue + ">",
+ Arrays.equals(
- db.openBlob(theIndex.getMembers()[j].getObjectId())
- .getBytes(), i.get(path).getBytes()));
++ db.open(theIndex.getMembers()[j].getObjectId())
++ .getCachedBytes(), i.get(path).getBytes()));
}
}
+
+ public abstract void prescanTwoTrees(Tree head, Tree merge) throws IllegalStateException, IOException;
+ public abstract void checkout() throws IOException;
+ public abstract ArrayList<String> getRemoved();
+ public abstract HashMap<String, ObjectId> getUpdated();
+ public abstract ArrayList<String> getConflicts();
}
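The FileInputStream read loop in assertWorkDir could also be written with the IO utility that the DiffFormatter changes later in this patch already import; a sketch, assuming org.eclipse.jgit.util.IO.readFully(File) reads the whole file into a byte array:

	// Sketch of the same content check without the manual read loop
	// (assumes the readFully(File) overload is available).
	byte[] buffer = IO.readFully(file);
	assertTrue("unexpected content for path " + path
			+ " in workDir. Expected: <" + expectedValue + ">",
			Arrays.equals(buffer, expectedValue.getBytes()));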
--- /dev/null
+/*
+ * Copyright (C) 2010, Christian Halstrick <christian.halstrick@sap.com>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.lib;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+/**
+ * Test cases for ReadTree operations as implemented in WorkDirCheckout
+ */
+public class WorkDirCheckout_ReadTreeTest extends ReadTreeTest {
+ private WorkDirCheckout wdc;
+ public void prescanTwoTrees(Tree head, Tree merge) throws IllegalStateException, IOException {
-		wdc = new WorkDirCheckout(db, db.getWorkDir(), head, db.getIndex(), merge);
++		wdc = new WorkDirCheckout(db, db.getWorkTree(), head, db.getIndex(), merge);
+ wdc.prescanTwoTrees();
+ }
+
+ public void checkout() throws IOException {
-		wdc = new WorkDirCheckout(db, db.getWorkDir(), theHead, db.getIndex(), theMerge);
++		wdc = new WorkDirCheckout(db, db.getWorkTree(), theHead, db.getIndex(), theMerge);
+ wdc.checkout();
+ }
+
+ public ArrayList<String> getRemoved() {
+ return wdc.getRemoved();
+ }
+
+ public HashMap<String, ObjectId> getUpdated() {
+ return wdc.updated;
+ }
+
+ public ArrayList<String> getConflicts() {
+ return wdc.getConflicts();
+ }
+}
+
org.eclipse.jgit.api;version="0.9.0",
org.eclipse.jgit.diff;version="0.9.0",
org.eclipse.jgit.dircache;version="0.9.0",
+ org.eclipse.jgit.events;version="0.9.0",
org.eclipse.jgit.errors;version="0.9.0",
org.eclipse.jgit.fnmatch;version="0.9.0",
+ org.eclipse.jgit.ignore;version="0.9.0",
org.eclipse.jgit.lib;version="0.9.0",
org.eclipse.jgit.merge;version="0.9.0",
org.eclipse.jgit.nls;version="0.9.0",
remoteDoesNotSupportSmartHTTPPush=remote does not support smart HTTP push
remoteHungUpUnexpectedly=remote hung up unexpectedly
remoteNameCantBeNull=Remote name can't be null.
+renamesAlreadyFound=Renames have already been found.
+renamesFindingByContent=Finding renames by content similarity
+renamesFindingExact=Finding exact renames
repositoryAlreadyExists=Repository already exists: {0}
+ repositoryConfigFileInvalid=Repository config file {0} invalid {1}
+repositoryIsRequired=Repository is required.
repositoryNotFound=repository not found: {0}
+repositoryState_applyMailbox=Apply mailbox
+repositoryState_bisecting=Bisecting
+repositoryState_conflicts=Conflicts
+repositoryState_merged=Merged
+repositoryState_normal=Normal
+repositoryState_rebase=Rebase
+repositoryState_rebaseInteractive=Rebase interactive
+repositoryState_rebaseOrApplyMailbox=Rebase/Apply mailbox
+repositoryState_rebaseWithMerge=Rebase w/merge
requiredHashFunctionNotAvailable=Required hash function {0} not available.
resolvingDeltas=Resolving deltas
serviceNotPermitted={0} not permitted
/***/ public String remoteDoesNotSupportSmartHTTPPush;
/***/ public String remoteHungUpUnexpectedly;
/***/ public String remoteNameCantBeNull;
+ /***/ public String renamesAlreadyFound;
+ /***/ public String renamesFindingByContent;
+ /***/ public String renamesFindingExact;
/***/ public String repositoryAlreadyExists;
+ /***/ public String repositoryConfigFileInvalid;
+ /***/ public String repositoryIsRequired;
/***/ public String repositoryNotFound;
+ /***/ public String repositoryState_applyMailbox;
+ /***/ public String repositoryState_bisecting;
+ /***/ public String repositoryState_conflicts;
+ /***/ public String repositoryState_merged;
+ /***/ public String repositoryState_normal;
+ /***/ public String repositoryState_rebase;
+ /***/ public String repositoryState_rebaseInteractive;
+ /***/ public String repositoryState_rebaseOrApplyMailbox;
+ /***/ public String repositoryState_rebaseWithMerge;
/***/ public String requiredHashFunctionNotAvailable;
/***/ public String resolvingDeltas;
/***/ public String serviceNotPermitted;
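Each field added above must have a matching key in the JGitText.properties additions shown earlier; a short sketch of how such a message is consumed (pm is a ProgressMonitor and totalWork a placeholder count, as in the RenameDetector changes later in this patch):

	// Sketch: translated strings are looked up through JGitText.get(),
	// e.g. the new rename-detection progress message.
	pm.beginTask(JGitText.get().renamesFindingExact, totalWork);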
--- /dev/null
+/*
+ * Copyright (C) 2010, Christian Halstrick <christian.halstrick@sap.com>
+ * Copyright (C) 2010, Stefan Lay <stefan.lay@sap.com>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.api;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.LinkedList;
+
+import org.eclipse.jgit.JGitText;
+import org.eclipse.jgit.dircache.DirCache;
+import org.eclipse.jgit.dircache.DirCacheBuildIterator;
+import org.eclipse.jgit.dircache.DirCacheBuilder;
+import org.eclipse.jgit.dircache.DirCacheEntry;
+import org.eclipse.jgit.dircache.DirCacheIterator;
+import org.eclipse.jgit.lib.ObjectWriter;
+import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.treewalk.FileTreeIterator;
+import org.eclipse.jgit.treewalk.TreeWalk;
+import org.eclipse.jgit.treewalk.WorkingTreeIterator;
+import org.eclipse.jgit.treewalk.filter.PathFilterGroup;
+
+/**
+ * A class used to execute a {@code Add} command. It has setters for all
+ * supported options and arguments of this command and a {@link #call()} method
+ * to finally execute the command. Each instance of this class should only be
+ * used for one invocation of the command (that is, one call to {@link #call()}).
+ *
+ * @see <a href="http://www.kernel.org/pub/software/scm/git/docs/git-add.html"
+ * >Git documentation about Add</a>
+ */
+public class AddCommand extends GitCommand<DirCache> {
+
+ private Collection<String> filepatterns;
+
+ private WorkingTreeIterator workingTreeIterator;
+
+ /**
+	 * @param repo
+	 *            the repository this command should work on
+ */
+ public AddCommand(Repository repo) {
+ super(repo);
+ filepatterns = new LinkedList<String>();
+ }
+
+ /**
+ * @param filepattern
+ * File to add content from. Also a leading directory name (e.g.
+ * dir to add dir/file1 and dir/file2) can be given to add all
+ * files in the directory, recursively. Fileglobs (e.g. *.c) are
+ * not yet supported.
+ * @return {@code this}
+ */
+ public AddCommand addFilepattern(String filepattern) {
+ checkCallable();
+ filepatterns.add(filepattern);
+ return this;
+ }
+
+ /**
+	 * Allow clients to provide their own implementation of a FileTreeIterator
+	 *
+	 * @param f
+	 *            the working tree iterator to use for this add operation
+	 * @return {@code this}
+ */
+ public AddCommand setWorkingTreeIterator(WorkingTreeIterator f) {
+ workingTreeIterator = f;
+ return this;
+ }
+
+ /**
+ * Executes the {@code Add} command. Each instance of this class should only
+ * be used for one invocation of the command. Don't call this method twice
+ * on an instance.
+ *
+ * @return the DirCache after Add
+ */
+ public DirCache call() throws NoFilepatternException {
+
+ if (filepatterns.isEmpty())
+ throw new NoFilepatternException(JGitText.get().atLeastOnePatternIsRequired);
+ checkCallable();
+ DirCache dc = null;
+ boolean addAll = false;
+ if (filepatterns.contains("."))
+ addAll = true;
+
+ try {
-			dc = DirCache.lock(repo);
++			dc = repo.lockDirCache();
+ ObjectWriter ow = new ObjectWriter(repo);
+ DirCacheIterator c;
+
+ DirCacheBuilder builder = dc.builder();
+ final TreeWalk tw = new TreeWalk(repo);
+ tw.reset();
+ tw.addTree(new DirCacheBuildIterator(builder));
+ if (workingTreeIterator == null)
+ workingTreeIterator = new FileTreeIterator(repo);
+ tw.addTree(workingTreeIterator);
+ tw.setRecursive(true);
+ if (!addAll)
+ tw.setFilter(PathFilterGroup.createFromStrings(filepatterns));
+
+ String lastAddedFile = null;
+
+ while (tw.next()) {
+ String path = tw.getPathString();
+
-				final File file = new File(repo.getWorkDir(), path);
++				final File file = new File(repo.getWorkTree(), path);
+ WorkingTreeIterator f = tw.getTree(1, WorkingTreeIterator.class);
+ if (tw.getTree(0, DirCacheIterator.class) == null &&
+ f != null && f.isEntryIgnored()) {
+ // file is not in index but is ignored, do nothing
+ }
+ // In case of an existing merge conflict the
+ // DirCacheBuildIterator iterates over all stages of
+ // this path, we however want to add only one
+ // new DirCacheEntry per path.
+ else if (!(path.equals(lastAddedFile))) {
+ if (f != null) { // the file exists
+ DirCacheEntry entry = new DirCacheEntry(path);
+ entry.setLength((int)f.getEntryLength());
+ entry.setLastModified(f.getEntryLastModified());
+ entry.setFileMode(f.getEntryFileMode());
+ entry.setObjectId(ow.writeBlob(file));
+
+ builder.add(entry);
+ lastAddedFile = path;
+ } else {
+ c = tw.getTree(0, DirCacheIterator.class);
+ builder.add(c.getDirCacheEntry());
+ }
+ }
+ }
+ builder.commit();
+ setCallable(false);
+ } catch (IOException e) {
+ throw new JGitInternalException(
+ JGitText.get().exceptionCaughtDuringExecutionOfAddCommand, e);
+ } finally {
+ if (dc != null)
+ dc.unlock();
+ }
+
+ return dc;
+ }
+
+}
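A minimal usage sketch of the command defined above (assumes repo is an existing Repository with a work tree):

	// Stage a single file plus a directory, then inspect the resulting index.
	Git git = new Git(repo);
	DirCache index = git.add()
			.addFilepattern("a.txt") // one file
			.addFilepattern("sub")   // a directory, added recursively
			.call();
	System.out.println(index.getEntryCount() + " entries staged");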
package org.eclipse.jgit.diff;
+import static org.eclipse.jgit.lib.Constants.encode;
import static org.eclipse.jgit.lib.Constants.encodeASCII;
+import static org.eclipse.jgit.lib.FileMode.GITLINK;
+import java.io.ByteArrayOutputStream;
import java.io.IOException;
++import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import org.eclipse.jgit.JGitText;
+import org.eclipse.jgit.errors.CorruptObjectException;
++import org.eclipse.jgit.errors.LargeObjectException;
+import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.lib.AbbreviatedObjectId;
+import org.eclipse.jgit.lib.Constants;
++import org.eclipse.jgit.lib.CoreConfig;
+import org.eclipse.jgit.lib.FileMode;
+import org.eclipse.jgit.lib.ObjectLoader;
+import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.patch.FileHeader;
+import org.eclipse.jgit.patch.HunkHeader;
+import org.eclipse.jgit.patch.FileHeader.PatchType;
++import org.eclipse.jgit.util.IO;
+import org.eclipse.jgit.util.QuotedString;
+import org.eclipse.jgit.util.io.DisabledOutputStream;
/**
* Format an {@link EditList} as a Git style unified patch script.
public class DiffFormatter {
private static final byte[] noNewLine = encodeASCII("\\ No newline at end of file\n");
+ private final OutputStream out;
+
+ private Repository db;
+
private int context;
- /** Create a new formatter with a default level of context. */
- public DiffFormatter() {
+ private int abbreviationLength;
+
+ private RawText.Factory rawTextFactory = RawText.FACTORY;
+
++ private long bigFileThreshold = 50 * 1024 * 1024;
++
+ /**
+ * Create a new formatter with a default level of context.
+ *
+ * @param out
+ * the stream the formatter will write line data to. This stream
+ * should have buffering arranged by the caller, as many small
+ * writes are performed to it.
+ */
+ public DiffFormatter(OutputStream out) {
+ this.out = out;
setContext(3);
+ setAbbreviationLength(8);
+ }
+
+ /** @return the stream we are outputting data to. */
+ protected OutputStream getOutputStream() {
+ return out;
+ }
+
+ /**
+ * Set the repository the formatter can load object contents from.
+ *
+ * @param repository
+ * source repository holding referenced objects.
+ */
+ public void setRepository(Repository repository) {
+ db = repository;
++
++ CoreConfig cfg = db.getConfig().get(CoreConfig.KEY);
++ bigFileThreshold = cfg.getStreamFileThreshold();
}
/**
context = lineCount;
}
+ /**
+ * Change the number of digits to show in an ObjectId.
+ *
+ * @param count
+ * number of digits to show in an ObjectId.
+ */
+ public void setAbbreviationLength(final int count) {
+ if (count < 0)
+ throw new IllegalArgumentException(
+ JGitText.get().abbreviationLengthMustBeNonNegative);
+ abbreviationLength = count;
+ }
+
+ /**
+ * Set the helper that constructs difference output.
+ *
+ * @param type
+ * the factory to create different output. Different types of
+ * factories can produce different whitespace behavior, for
+ * example.
+ * @see RawText#FACTORY
+ * @see RawTextIgnoreAllWhitespace#FACTORY
+ * @see RawTextIgnoreLeadingWhitespace#FACTORY
+ * @see RawTextIgnoreTrailingWhitespace#FACTORY
+ * @see RawTextIgnoreWhitespaceChange#FACTORY
+ */
+ public void setRawTextFactory(RawText.Factory type) {
+ rawTextFactory = type;
+ }
+
++ /**
++ * Set the maximum file size that should be considered for diff output.
++ * <p>
++ * Text files that are larger than this size will not have a difference
++ * generated during output.
++ *
++ * @param bigFileThreshold
++ * the limit, in bytes.
++ */
++ public void setBigFileThreshold(long bigFileThreshold) {
++ this.bigFileThreshold = bigFileThreshold;
++ }
++
+ /**
+ * Flush the underlying output stream of this formatter.
+ *
+ * @throws IOException
+ * the stream's own flush method threw an exception.
+ */
+ public void flush() throws IOException {
+ out.flush();
+ }
+
+ /**
+ * Format a patch script from a list of difference entries.
+ *
+ * @param entries
+ * entries describing the affected files.
+ * @throws IOException
+ * a file's content cannot be read, or the output stream cannot
+ * be written to.
+ */
+ public void format(List<? extends DiffEntry> entries) throws IOException {
+ for (DiffEntry ent : entries)
+ format(ent);
+ }
+
+ /**
+ * Format a patch script for one file entry.
+ *
+ * @param ent
+ * the entry to be formatted.
+ * @throws IOException
+ * a file's content cannot be read, or the output stream cannot
+ * be written to.
+ */
+ public void format(DiffEntry ent) throws IOException {
+ writeDiffHeader(out, ent);
+
+ if (ent.getOldMode() == GITLINK || ent.getNewMode() == GITLINK) {
+ writeGitLinkDiffText(out, ent);
+ } else {
+ byte[] aRaw = open(ent.getOldMode(), ent.getOldId());
+ byte[] bRaw = open(ent.getNewMode(), ent.getNewId());
+
+ if (RawText.isBinary(aRaw) || RawText.isBinary(bRaw)) {
+ out.write(encodeASCII("Binary files differ\n"));
+
+ } else {
+ RawText a = rawTextFactory.create(aRaw);
+ RawText b = rawTextFactory.create(bRaw);
+ formatEdits(a, b, new MyersDiff(a, b).getEdits());
+ }
+ }
+ }
+
+ private void writeGitLinkDiffText(OutputStream o, DiffEntry ent)
+ throws IOException {
+ if (ent.getOldMode() == GITLINK) {
+ o.write(encodeASCII("-Subproject commit " + ent.getOldId().name()
+ + "\n"));
+ }
+ if (ent.getNewMode() == GITLINK) {
+ o.write(encodeASCII("+Subproject commit " + ent.getNewId().name()
+ + "\n"));
+ }
+ }
+
+ private void writeDiffHeader(OutputStream o, DiffEntry ent)
+ throws IOException {
+ String oldName = quotePath("a/" + ent.getOldName());
+ String newName = quotePath("b/" + ent.getNewName());
+ o.write(encode("diff --git " + oldName + " " + newName + "\n"));
+
+ switch (ent.getChangeType()) {
+ case ADD:
+ o.write(encodeASCII("new file mode "));
+ ent.getNewMode().copyTo(o);
+ o.write('\n');
+ break;
+
+ case DELETE:
+ o.write(encodeASCII("deleted file mode "));
+ ent.getOldMode().copyTo(o);
+ o.write('\n');
+ break;
+
+ case RENAME:
+ o.write(encodeASCII("similarity index " + ent.getScore() + "%"));
+ o.write('\n');
+
+ o.write(encode("rename from " + quotePath(ent.getOldName())));
+ o.write('\n');
+
+ o.write(encode("rename to " + quotePath(ent.getNewName())));
+ o.write('\n');
+ break;
+
+ case COPY:
+ o.write(encodeASCII("similarity index " + ent.getScore() + "%"));
+ o.write('\n');
+
+ o.write(encode("copy from " + quotePath(ent.getOldName())));
+ o.write('\n');
+
+ o.write(encode("copy to " + quotePath(ent.getNewName())));
+ o.write('\n');
+
+ if (!ent.getOldMode().equals(ent.getNewMode())) {
+ o.write(encodeASCII("new file mode "));
+ ent.getNewMode().copyTo(o);
+ o.write('\n');
+ }
+ break;
+ }
+
+ switch (ent.getChangeType()) {
+ case RENAME:
+ case MODIFY:
+ if (!ent.getOldMode().equals(ent.getNewMode())) {
+ o.write(encodeASCII("old mode "));
+ ent.getOldMode().copyTo(o);
+ o.write('\n');
+
+ o.write(encodeASCII("new mode "));
+ ent.getNewMode().copyTo(o);
+ o.write('\n');
+ }
+ }
+
+ o.write(encodeASCII("index " //
+ + format(ent.getOldId()) //
+ + ".." //
+ + format(ent.getNewId())));
+ if (ent.getOldMode().equals(ent.getNewMode())) {
+ o.write(' ');
+ ent.getNewMode().copyTo(o);
+ }
+ o.write('\n');
+ o.write(encode("--- " + oldName + '\n'));
+ o.write(encode("+++ " + newName + '\n'));
+ }
+
+ private String format(AbbreviatedObjectId oldId) {
+ if (oldId.isComplete() && db != null)
+ oldId = oldId.toObjectId().abbreviate(db, abbreviationLength);
+ return oldId.name();
+ }
+
+ private static String quotePath(String name) {
+ String q = QuotedString.GIT_PATH.quote(name);
+ return ('"' + name + '"').equals(q) ? name : q;
+ }
+
+ private byte[] open(FileMode mode, AbbreviatedObjectId id)
+ throws IOException {
+ if (mode == FileMode.MISSING)
+ return new byte[] {};
+
+ if (mode.getObjectType() != Constants.OBJ_BLOB)
+ return new byte[] {};
+
+ if (db == null)
+ throw new IllegalStateException(JGitText.get().repositoryIsRequired);
++
+ if (id.isComplete()) {
-			ObjectLoader ldr = db.openObject(id.toObjectId());
-			return ldr.getCachedBytes();
++			ObjectLoader ldr = db.open(id.toObjectId());
++ if (!ldr.isLarge())
++ return ldr.getCachedBytes();
++
++ long sz = ldr.getSize();
++ if (sz < bigFileThreshold && sz < Integer.MAX_VALUE) {
++ byte[] buf;
++ try {
++ buf = new byte[(int) sz];
++ } catch (OutOfMemoryError noMemory) {
++ LargeObjectException e;
++
++ e = new LargeObjectException(id.toObjectId());
++ e.initCause(noMemory);
++ throw e;
++ }
++ InputStream in = ldr.openStream();
++ try {
++ IO.readFully(in, buf, 0, buf.length);
++ } finally {
++ in.close();
++ }
++ return buf;
++ }
+ }
+
+ return new byte[] {};
+ }
+
/**
* Format a patch script, reusing a previously parsed FileHeader.
* <p>
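Driving the extended formatter takes only a few calls; a sketch, assuming repo is a Repository, entries is a List<DiffEntry> (for example from the RenameDetector below), and the code runs inside a method declared to throw IOException:

	// Sketch: write a unified diff for a list of entries to a buffer.
	ByteArrayOutputStream buf = new ByteArrayOutputStream();
	DiffFormatter fmt = new DiffFormatter(buf);
	fmt.setRepository(repo);      // required so blob contents can be loaded
	fmt.setAbbreviationLength(8); // matches the constructor default above
	fmt.format(entries);
	fmt.flush();
	System.out.print(buf.toString("UTF-8"));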
--- /dev/null
+/*
+ * Copyright (C) 2010, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.diff;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+
+import org.eclipse.jgit.JGitText;
+import org.eclipse.jgit.diff.DiffEntry.ChangeType;
+import org.eclipse.jgit.lib.AbbreviatedObjectId;
+import org.eclipse.jgit.lib.FileMode;
+import org.eclipse.jgit.lib.NullProgressMonitor;
++import org.eclipse.jgit.lib.ObjectReader;
+import org.eclipse.jgit.lib.ProgressMonitor;
+import org.eclipse.jgit.lib.Repository;
+
+/** Detect and resolve object renames. */
+public class RenameDetector {
+ private static final int EXACT_RENAME_SCORE = 100;
+
+ private static final Comparator<DiffEntry> DIFF_COMPARATOR = new Comparator<DiffEntry>() {
+ public int compare(DiffEntry a, DiffEntry b) {
+ int cmp = nameOf(a).compareTo(nameOf(b));
+ if (cmp == 0)
+ cmp = sortOf(a.getChangeType()) - sortOf(b.getChangeType());
+ return cmp;
+ }
+
+ private String nameOf(DiffEntry ent) {
+ // Sort by the new name, unless the change is a delete. On
+ // deletes the new name is /dev/null, so we sort instead by
+ // the old name.
+ //
+ if (ent.changeType == ChangeType.DELETE)
+ return ent.oldName;
+ return ent.newName;
+ }
+
+ private int sortOf(ChangeType changeType) {
+ // Sort deletes before adds so that a major type change for
+ // a file path (such as symlink to regular file) will first
+ // remove the path, then add it back with the new type.
+ //
+ switch (changeType) {
+ case DELETE:
+ return 1;
+ case ADD:
+ return 2;
+ default:
+ return 10;
+ }
+ }
+ };
+
+ private final List<DiffEntry> entries = new ArrayList<DiffEntry>();
+
+ private List<DiffEntry> deleted = new ArrayList<DiffEntry>();
+
+ private List<DiffEntry> added = new ArrayList<DiffEntry>();
+
+ private boolean done;
+
+ private final Repository repo;
+
+ /** Similarity score required to pair an add/delete as a rename. */
+ private int renameScore = 60;
+
+ /** Limit in the number of files to consider for renames. */
+	/** Limit on the number of files to consider for renames. */
+
+ /** Set if the number of adds or deletes was over the limit. */
+ private boolean overRenameLimit;
+
+ /**
+ * Create a new rename detector for the given repository
+ *
+ * @param repo
+ * the repository to use for rename detection
+ */
+ public RenameDetector(Repository repo) {
+ this.repo = repo;
+
+ DiffConfig cfg = repo.getConfig().get(DiffConfig.KEY);
+ renameLimit = cfg.getRenameLimit();
+ }
+
+ /**
+	 * @return the minimum score required to pair an add/delete as a rename. The
+	 *         score is within the range [0, 100].
+ */
+ public int getRenameScore() {
+ return renameScore;
+ }
+
+ /**
+ * Set the minimum score required to pair an add/delete as a rename.
+ * <p>
+ * When comparing two files together their score must be greater than or
+ * equal to the rename score for them to be considered a rename match. The
+ * score is computed based on content similarity, so a score of 60 implies
+ * that approximately 60% of the bytes in the files are identical.
+ *
+ * @param score
+ * new rename score, must be within [0, 100].
+ * @throws IllegalArgumentException
+ * the score was not within [0, 100].
+ */
+ public void setRenameScore(int score) {
+ if (score < 0 || score > 100)
+ throw new IllegalArgumentException(
+ JGitText.get().similarityScoreMustBeWithinBounds);
+ renameScore = score;
+ }
+
+ /** @return limit on number of paths to perform inexact rename detection. */
+ public int getRenameLimit() {
+ return renameLimit;
+ }
+
+ /**
+ * Set the limit on the number of files to perform inexact rename detection.
+ * <p>
+ * The rename detector has to build a square matrix of the rename limit on
+ * each side, then perform that many file compares to determine similarity.
+ * If 1000 files are added, and 1000 files are deleted, a 1000*1000 matrix
+ * must be allocated, and 1,000,000 file compares may need to be performed.
+ *
+ * @param limit
+ * new file limit.
+ */
+ public void setRenameLimit(int limit) {
+ renameLimit = limit;
+ }
+
+ /**
+ * Check if the detector is over the rename limit.
+ * <p>
+	 * This method can be invoked either before or after {@code compute} has
+ * been used to perform rename detection.
+ *
+ * @return true if the detector has more file additions or removals than the
+ * rename limit is currently set to. In such configurations the
+ * detector will skip expensive computation.
+ */
+ public boolean isOverRenameLimit() {
+ if (done)
+ return overRenameLimit;
+ int cnt = Math.max(added.size(), deleted.size());
+ return getRenameLimit() != 0 && getRenameLimit() < cnt;
+ }
+
+ /**
+ * Add entries to be considered for rename detection.
+ *
+ * @param entriesToAdd
+ * one or more entries to add.
+ * @throws IllegalStateException
+	 *             if {@code compute} was already invoked.
+ */
+ public void addAll(Collection<DiffEntry> entriesToAdd) {
+ if (done)
+ throw new IllegalStateException(JGitText.get().renamesAlreadyFound);
+
+ for (DiffEntry entry : entriesToAdd) {
+ switch (entry.getChangeType()) {
+ case ADD:
+ added.add(entry);
+ break;
+
+ case DELETE:
+ deleted.add(entry);
+ break;
+
+ case MODIFY:
+ if (sameType(entry.getOldMode(), entry.getNewMode()))
+ entries.add(entry);
+ else
+ entries.addAll(DiffEntry.breakModify(entry));
+ break;
+
+ case COPY:
+ case RENAME:
+ default:
+				entries.add(entry);
+ }
+ }
+ }
+
+ /**
+ * Add an entry to be considered for rename detection.
+ *
+ * @param entry
+ * to add.
+ * @throws IllegalStateException
+	 *             if {@code compute} was already invoked.
+ */
+ public void add(DiffEntry entry) {
+ addAll(Collections.singletonList(entry));
+ }
+
+ /**
+ * Detect renames in the current file set.
+ * <p>
+ * This convenience function runs without a progress monitor.
+ *
+ * @return an unmodifiable list of {@link DiffEntry}s representing all files
+ * that have been changed.
+ * @throws IOException
+ * file contents cannot be read from the repository.
+ */
+ public List<DiffEntry> compute() throws IOException {
+ return compute(NullProgressMonitor.INSTANCE);
+ }
+
+ /**
+ * Detect renames in the current file set.
+ *
+ * @param pm
+ * report progress during the detection phases.
+ * @return an unmodifiable list of {@link DiffEntry}s representing all files
+ * that have been changed.
+ * @throws IOException
+ * file contents cannot be read from the repository.
+ */
+ public List<DiffEntry> compute(ProgressMonitor pm) throws IOException {
+ if (!done) {
+ done = true;
+
+ if (pm == null)
+ pm = NullProgressMonitor.INSTANCE;
+ findExactRenames(pm);
+ findContentRenames(pm);
+
+ entries.addAll(added);
+ added = null;
+
+ entries.addAll(deleted);
+ deleted = null;
+
+ Collections.sort(entries, DIFF_COMPARATOR);
+ }
+ return Collections.unmodifiableList(entries);
+ }
+
+ private void findContentRenames(ProgressMonitor pm) throws IOException {
+ int cnt = Math.max(added.size(), deleted.size());
+ if (cnt == 0)
+ return;
+
+ if (getRenameLimit() == 0 || cnt <= getRenameLimit()) {
-			SimilarityRenameDetector d;
-
-			d = new SimilarityRenameDetector(repo, deleted, added);
-			d.setRenameScore(getRenameScore());
-			d.compute(pm);
-			deleted = d.getLeftOverSources();
-			added = d.getLeftOverDestinations();
-			entries.addAll(d.getMatches());
++			ObjectReader reader = repo.newObjectReader();
++ try {
++ SimilarityRenameDetector d;
++
++ d = new SimilarityRenameDetector(reader, deleted, added);
++ d.setRenameScore(getRenameScore());
++ d.compute(pm);
++ deleted = d.getLeftOverSources();
++ added = d.getLeftOverDestinations();
++ entries.addAll(d.getMatches());
++ } finally {
++ reader.release();
++ }
+ } else {
+ overRenameLimit = true;
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void findExactRenames(ProgressMonitor pm) {
+ if (added.isEmpty() || deleted.isEmpty())
+ return;
+
+ pm.beginTask(JGitText.get().renamesFindingExact, //
+ added.size() + added.size() + deleted.size()
+ + added.size() * deleted.size());
+
+ HashMap<AbbreviatedObjectId, Object> deletedMap = populateMap(deleted, pm);
+ HashMap<AbbreviatedObjectId, Object> addedMap = populateMap(added, pm);
+
+ ArrayList<DiffEntry> uniqueAdds = new ArrayList<DiffEntry>(added.size());
+ ArrayList<List<DiffEntry>> nonUniqueAdds = new ArrayList<List<DiffEntry>>();
+
+ for (Object o : addedMap.values()) {
+ if (o instanceof DiffEntry)
+ uniqueAdds.add((DiffEntry) o);
+ else
+ nonUniqueAdds.add((List<DiffEntry>) o);
+ }
+
+ ArrayList<DiffEntry> left = new ArrayList<DiffEntry>(added.size());
+
+ for (DiffEntry a : uniqueAdds) {
+ Object del = deletedMap.get(a.newId);
+ if (del instanceof DiffEntry) {
+ // We have one add to one delete: pair them if they are the same
+ // type
+ DiffEntry e = (DiffEntry) del;
+ if (sameType(e.oldMode, a.newMode)) {
+ e.changeType = ChangeType.RENAME;
+ entries.add(exactRename(e, a));
+ } else {
+ left.add(a);
+ }
+ } else if (del != null) {
+ // We have one add to many deletes: find the delete with the
+ // same type and closest name to the add, then pair them
+ List<DiffEntry> list = (List<DiffEntry>) del;
+ DiffEntry best = bestPathMatch(a, list);
+ if (best != null) {
+ best.changeType = ChangeType.RENAME;
+ entries.add(exactRename(best, a));
+ } else {
+ left.add(a);
+ }
+ } else {
+ left.add(a);
+ }
+ pm.update(1);
+ }
+
+ for (List<DiffEntry> adds : nonUniqueAdds) {
+ Object o = deletedMap.get(adds.get(0).newId);
+ if (o instanceof DiffEntry) {
+ // We have many adds to one delete: find the add with the same
+ // type and closest name to the delete, then pair them. Mark the
+ // rest as copies of the delete.
+ DiffEntry d = (DiffEntry) o;
+ DiffEntry best = bestPathMatch(d, adds);
+ if (best != null) {
+ d.changeType = ChangeType.RENAME;
+ entries.add(exactRename(d, best));
+ for (DiffEntry a : adds) {
+ if (a != best) {
+ if (sameType(d.oldMode, a.newMode)) {
+ entries.add(exactCopy(d, a));
+ } else {
+ left.add(a);
+ }
+ }
+ }
+ } else {
+ left.addAll(adds);
+ }
+ } else if (o != null) {
+ // We have many adds to many deletes: score all the adds against
+ // all the deletes by path name, take the best matches, pair
+ // them as renames, then call the rest copies
+ List<DiffEntry> dels = (List<DiffEntry>) o;
+ long[] matrix = new long[dels.size() * adds.size()];
+ int mNext = 0;
+ for (int addIdx = 0; addIdx < adds.size(); addIdx++) {
+ String addedName = adds.get(addIdx).newName;
+
+ for (int delIdx = 0; delIdx < dels.size(); delIdx++) {
+ String deletedName = dels.get(delIdx).oldName;
+
+ int score = SimilarityRenameDetector.nameScore(addedName, deletedName);
+ matrix[mNext] = SimilarityRenameDetector.encode(score, addIdx, delIdx);
+ mNext++;
+ }
+ }
+
+ Arrays.sort(matrix);
+
+ for (--mNext; mNext >= 0; mNext--) {
+ long ent = matrix[mNext];
+ int delIdx = SimilarityRenameDetector.srcFile(ent);
+ int addIdx = SimilarityRenameDetector.dstFile(ent);
+ DiffEntry d = dels.get(delIdx);
+ DiffEntry a = adds.get(addIdx);
+
+ if (a == null) {
+ pm.update(1);
+ continue; // was already matched earlier
+ }
+
+ ChangeType type;
+ if (d.changeType == ChangeType.DELETE) {
+ // First use of this source file. Tag it as a rename so we
+					// later know it has already been used as a rename; other
+ // matches (if any) will claim themselves as copies instead.
+ //
+ d.changeType = ChangeType.RENAME;
+ type = ChangeType.RENAME;
+ } else {
+ type = ChangeType.COPY;
+ }
+
+ entries.add(DiffEntry.pair(type, d, a, 100));
+ adds.set(addIdx, null); // Claim the destination was matched.
+ pm.update(1);
+ }
+ } else {
+ left.addAll(adds);
+ }
+ }
+ added = left;
+
+ deleted = new ArrayList<DiffEntry>(deletedMap.size());
+ for (Object o : deletedMap.values()) {
+ if (o instanceof DiffEntry) {
+ DiffEntry e = (DiffEntry) o;
+ if (e.changeType == ChangeType.DELETE)
+ deleted.add(e);
+ } else {
+ List<DiffEntry> list = (List<DiffEntry>) o;
+ for (DiffEntry e : list) {
+ if (e.changeType == ChangeType.DELETE)
+ deleted.add(e);
+ }
+ }
+ }
+ pm.endTask();
+ }
+
+ /**
+ * Find the best match by file path for a given DiffEntry from a list of
+ * DiffEntries. The returned DiffEntry will be of the same type as
+ * {@code src}. If no DiffEntry with the same type can be found, this
+ * method will return null.
+ *
+ * @param src
+ * the DiffEntry to try to find a match for
+ * @param list
+ * a list of DiffEntries to search through
+ * @return the DiffEntry from {@code list} whose file path best matches
+ * {@code src}
+ */
+ private static DiffEntry bestPathMatch(DiffEntry src, List<DiffEntry> list) {
+ DiffEntry best = null;
+ int score = -1;
+
+ for (DiffEntry d : list) {
+ if (sameType(mode(d), mode(src))) {
+ int tmp = SimilarityRenameDetector
+ .nameScore(path(d), path(src));
+ if (tmp > score) {
+ best = d;
+ score = tmp;
+ }
+ }
+ }
+
+ return best;
+ }
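+ // For illustration (hypothetical paths): if src is a delete of
+ // "src/Foo.java" and list holds adds of "docs/Foo.java" and
+ // "src/main/Foo.java", the add with the higher
+ // SimilarityRenameDetector.nameScore() against "src/Foo.java" is returned,
+ // provided its file mode is of the same type as the delete's.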
+
+ @SuppressWarnings("unchecked")
+ private HashMap<AbbreviatedObjectId, Object> populateMap(
+ List<DiffEntry> diffEntries, ProgressMonitor pm) {
+ HashMap<AbbreviatedObjectId, Object> map = new HashMap<AbbreviatedObjectId, Object>();
+ for (DiffEntry de : diffEntries) {
+ Object old = map.put(id(de), de);
+ if (old instanceof DiffEntry) {
+ ArrayList<DiffEntry> list = new ArrayList<DiffEntry>(2);
+ list.add((DiffEntry) old);
+ list.add(de);
+ map.put(id(de), list);
+ } else if (old != null) {
+ // Must be a list of DiffEntries
+ ((List<DiffEntry>) old).add(de);
+ map.put(id(de), old);
+ }
+ pm.update(1);
+ }
+ return map;
+ }
+
+ private static String path(DiffEntry de) {
+ return de.changeType == ChangeType.DELETE ? de.oldName : de.newName;
+ }
+
+ private static FileMode mode(DiffEntry de) {
+ return de.changeType == ChangeType.DELETE ? de.oldMode : de.newMode;
+ }
+
+ private static AbbreviatedObjectId id(DiffEntry de) {
+ return de.changeType == ChangeType.DELETE ? de.oldId : de.newId;
+ }
+
+ static boolean sameType(FileMode a, FileMode b) {
+ // Files have to be of the same type in order to rename them.
+ // We would never want to rename a file to a gitlink, or a
+ // symlink to a file.
+ //
+ int aType = a.getBits() & FileMode.TYPE_MASK;
+ int bType = b.getBits() & FileMode.TYPE_MASK;
+ return aType == bType;
+ }
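+ // For example, FileMode.REGULAR_FILE and FileMode.EXECUTABLE_FILE are the
+ // same type (both TYPE_FILE), so a chmod combined with a rename still
+ // pairs; FileMode.REGULAR_FILE and FileMode.SYMLINK are not, so a file is
+ // never paired with a symlink.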
+
+ private static DiffEntry exactRename(DiffEntry src, DiffEntry dst) {
+ return DiffEntry.pair(ChangeType.RENAME, src, dst, EXACT_RENAME_SCORE);
+ }
+
+ private static DiffEntry exactCopy(DiffEntry src, DiffEntry dst) {
+ return DiffEntry.pair(ChangeType.COPY, src, dst, EXACT_RENAME_SCORE);
+ }
+}
--- /dev/null
- void hash(ObjectLoader obj) {
- byte[] raw = obj.getCachedBytes();
- setFileSize(raw.length);
- hash(raw, 0, raw.length);
+/*
+ * Copyright (C) 2010, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.diff;
+
++import java.io.EOFException;
++import java.io.IOException;
++import java.io.InputStream;
+import java.util.Arrays;
+
++import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.lib.ObjectLoader;
++import org.eclipse.jgit.lib.ObjectStream;
+
+/**
+ * Index structure of lines/blocks in one file.
+ * <p>
+ * This structure can be used to compute an approximation of the similarity
+ * between two files. The index is used by {@link SimilarityRenameDetector} to
+ * compute scores between files.
+ * <p>
+ * To save space in memory, this index uses a space efficient encoding which
+ * will not exceed 1 MiB per instance. The index starts out at a smaller size
+ * (closer to 2 KiB), but may grow as more distinct blocks within the scanned
+ * file are discovered.
+ */
+class SimilarityIndex {
+ /** The {@link #idHash} table stops growing at {@code 1 << MAX_HASH_BITS}. */
+ private static final int MAX_HASH_BITS = 17;
+
+ /** The {@link #idHash} table will not grow bigger than this, ever. */
+ private static final int MAX_HASH_SIZE = 1 << MAX_HASH_BITS;
+
+ /** Prime just before {@link #MAX_HASH_SIZE}. */
+ private static final int P = 131071;
+
+ /**
+ * Shift to apply before storing a key.
+ * <p>
+ * Within the 64 bit table record space, we leave the highest bit unset so
+ * all values are positive, and we need {@link #MAX_HASH_BITS} bits for the
+ * keys. The lower 32 bits are used to count bytes impacted.
+ */
+ private static final int KEY_SHIFT = 64 - 1 - MAX_HASH_BITS;
+
+ /** Total size of the file we hashed into the structure. */
+ private long fileSize;
+
+ /** Number of non-zero entries in {@link #idHash}. */
+ private int idSize;
+
+ /**
+ * Pairings of content keys and counters.
+ * <p>
+ * Slots in the table are actually two ints wedged into a single long. The
+ * upper {@link #MAX_HASH_BITS} bits store the content key, and the
+ * remaining lower bits store the number of bytes associated with that key.
+ * Empty slots are denoted by 0, which never clashes with a used slot
+ * because the count of a used slot is never 0. Values can only be
+ * positive, which we enforce during key addition.
+ */
+ private long[] idHash;
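+ // Packing sketch (hypothetical values): key 0x1ABCD with a byte count of
+ // 42 is stored as (((long) 0x1ABCD) << KEY_SHIFT) | 42; keyOf() recovers
+ // the key by shifting right, countOf() recovers the count from the lower
+ // 32 bits.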
+
+ SimilarityIndex() {
+ idHash = new long[256];
+ }
+
+ long getFileSize() {
+ return fileSize;
+ }
+
+ void setFileSize(long size) {
+ fileSize = size;
+ }
+
++ void hash(ObjectLoader obj) throws MissingObjectException, IOException {
++ if (obj.isLarge()) {
++ ObjectStream in = obj.openStream();
++ try {
++ setFileSize(in.getSize());
++ hash(in, fileSize);
++ } finally {
++ in.close();
++ }
++ } else {
++ byte[] raw = obj.getCachedBytes();
++ setFileSize(raw.length);
++ hash(raw, 0, raw.length);
++ }
+ }
+
+ void hash(byte[] raw, int ptr, final int end) {
+ while (ptr < end) {
+ int hash = 5381;
+ int start = ptr;
+
+ // Hash one line, or one block, whichever occurs first.
+ do {
+ int c = raw[ptr++] & 0xff;
+ if (c == '\n')
+ break;
+ hash = (hash << 5) ^ c;
+ } while (ptr < end && ptr - start < 64);
+ add(hash, ptr - start);
+ }
+ }
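+ // For example (hypothetical input): hashing the bytes of "a\nb\n" records
+ // two blocks, one per line, each counting 2 bytes; lines longer than 64
+ // bytes are hashed as blocks of at most 64 bytes each.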
+
++ void hash(InputStream in, long remaining) throws IOException {
++ byte[] buf = new byte[4096];
++ int ptr = 0;
++ int cnt = 0;
++
++ while (0 < remaining) {
++ int hash = 5381;
++
++ // Hash one line, or one block, whichever occurs first.
++ int n = 0;
++ do {
++ if (ptr == cnt) {
++ ptr = 0;
++ cnt = in.read(buf, 0, buf.length);
++ if (cnt <= 0)
++ throw new EOFException();
++ }
++
++ n++;
++ int c = buf[ptr++] & 0xff;
++ if (c == '\n')
++ break;
++ hash = (hash << 5) ^ c;
++ } while (n < 64 && n < remaining);
++ add(hash, n);
++ remaining -= n;
++ }
++ }
++
+ /**
+ * Sort the internal table so it can be used for efficient scoring.
+ * <p>
+ * Once sorted, additional lines/blocks cannot be added to the index.
+ */
+ void sort() {
+ // Sort the array. All of the empty space will wind up at the front,
+ // because we forced all of the keys to always be positive. Later
+ // we only work with the back half of the array.
+ //
+ Arrays.sort(idHash);
+ }
+
+ int score(SimilarityIndex dst, int maxScore) {
+ long max = Math.max(fileSize, dst.fileSize);
+ if (max == 0)
+ return maxScore;
+ return (int) ((common(dst) * maxScore) / max);
+ }
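+ // Minimal usage sketch (assuming raw contents a and b are already loaded):
+ //   SimilarityIndex src = new SimilarityIndex();
+ //   src.setFileSize(a.length);
+ //   src.hash(a, 0, a.length);
+ //   src.sort();
+ //   ... build dst from b in the same way ...
+ //   int similarity = src.score(dst, 100); // 0 = nothing shared, 100 = identical blocks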
+
+ int common(SimilarityIndex dst) {
+ return common(this, dst);
+ }
+
+ private static int common(SimilarityIndex src, SimilarityIndex dst) {
+ int srcIdx = src.packedIndex(0);
+ int dstIdx = dst.packedIndex(0);
+ long[] srcHash = src.idHash;
+ long[] dstHash = dst.idHash;
+ return common(srcHash, srcIdx, dstHash, dstIdx);
+ }
+
+ private static int common(long[] srcHash, int srcIdx, //
+ long[] dstHash, int dstIdx) {
+ if (srcIdx == srcHash.length || dstIdx == dstHash.length)
+ return 0;
+
+ int common = 0;
+ int srcKey = keyOf(srcHash[srcIdx]);
+ int dstKey = keyOf(dstHash[dstIdx]);
+
+ for (;;) {
+ if (srcKey == dstKey) {
+ common += countOf(dstHash[dstIdx]);
+
+ if (++srcIdx == srcHash.length)
+ break;
+ srcKey = keyOf(srcHash[srcIdx]);
+
+ if (++dstIdx == dstHash.length)
+ break;
+ dstKey = keyOf(dstHash[dstIdx]);
+
+ } else if (srcKey < dstKey) {
+ // Regions of src which do not appear in dst.
+ if (++srcIdx == srcHash.length)
+ break;
+ srcKey = keyOf(srcHash[srcIdx]);
+
+ } else /* if (srcKey > dstKey) */{
+ // Regions of dst which do not appear in src.
+ if (++dstIdx == dstHash.length)
+ break;
+ dstKey = keyOf(dstHash[dstIdx]);
+ }
+ }
+
+ return common;
+ }
+
+ // Testing only
+ int size() {
+ return idSize;
+ }
+
+ // Testing only
+ int key(int idx) {
+ return keyOf(idHash[packedIndex(idx)]);
+ }
+
+ // Testing only
+ long count(int idx) {
+ return countOf(idHash[packedIndex(idx)]);
+ }
+
+ // Brute force approach only for testing.
+ int findIndex(int key) {
+ for (int i = 0; i < idSize; i++)
+ if (key(i) == key)
+ return i;
+ return -1;
+ }
+
+ private int packedIndex(int idx) {
+ return (idHash.length - idSize) + idx;
+ }
+
+ void add(int key, int cnt) {
+ key = hash(key);
+ int j = slot(key);
+ for (;;) {
+ long v = idHash[j];
+ if (v == 0) {
+ // Empty slot in the table, store here.
+ if (shouldGrow()) {
+ grow();
+ j = slot(key);
+ continue;
+ }
+ idHash[j] = (((long) key) << KEY_SHIFT) | cnt;
+ idSize++;
+ return;
+
+ } else if (keyOf(v) == key) {
+ // Same key, increment the counter.
+ idHash[j] = v + cnt;
+ return;
+
+ } else if (++j >= idHash.length) {
+ j = 0;
+ }
+ }
+ }
+
+ private static int hash(int key) {
+ // Make the key fit into our table. Since we have a maximum size
+ // that we cap the table at, all keys get squashed before going
+ // into the table. This prevents overflow.
+ //
+ return (key >>> 1) % P;
+ }
+
+ private int slot(int key) {
+ return key % idHash.length;
+ }
+
+ private boolean shouldGrow() {
+ int n = idHash.length;
+ return n < MAX_HASH_SIZE && n <= idSize * 2;
+ }
+
+ private void grow() {
+ long[] oldHash = idHash;
+ int oldSize = idHash.length;
+
+ idHash = new long[2 * oldSize];
+ for (int i = 0; i < oldSize; i++) {
+ long v = oldHash[i];
+ if (v != 0) {
+ int j = slot(keyOf(v));
+ while (idHash[j] != 0)
+ if (++j >= idHash.length)
+ j = 0;
+ idHash[j] = v;
+ }
+ }
+ }
+
+ private static int keyOf(long v) {
+ return (int) (v >>> KEY_SHIFT);
+ }
+
+ private static int countOf(long v) {
+ return (int) v;
+ }
+}
--- /dev/null
- import org.eclipse.jgit.lib.Repository;
+/*
+ * Copyright (C) 2010, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.diff;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.eclipse.jgit.JGitText;
+import org.eclipse.jgit.diff.DiffEntry.ChangeType;
++import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.FileMode;
+import org.eclipse.jgit.lib.NullProgressMonitor;
+import org.eclipse.jgit.lib.ObjectId;
++import org.eclipse.jgit.lib.ObjectReader;
+import org.eclipse.jgit.lib.ProgressMonitor;
- private final Repository repo;
+
+class SimilarityRenameDetector {
+ /**
+ * Number of bits we need to express an index into src or dst list.
+ * <p>
+ * This must be 28, giving us a limit of 2^28 entries in either list, which
+ * is an insane limit of 536,870,912 file names being considered in a single
+ * rename pass. The other 8 bits are used to store the score, which is kept
+ * at or below 127 so the long doesn't go negative.
+ */
+ private static final int BITS_PER_INDEX = 28;
+
+ private static final int INDEX_MASK = (1 << BITS_PER_INDEX) - 1;
+
+ private static final int SCORE_SHIFT = 2 * BITS_PER_INDEX;
+
- SimilarityRenameDetector(Repository repo, List<DiffEntry> srcs,
++ private ObjectReader reader;
+
+ /**
+ * All sources to consider for copies or renames.
+ * <p>
+ * A source is typically a {@link ChangeType#DELETE} change, but could be
+ * another type when trying to perform copy detection concurrently with
+ * rename detection.
+ */
+ private List<DiffEntry> srcs;
+
+ /**
+ * All destinations to consider looking for a rename.
+ * <p>
+ * A destination is typically an {@link ChangeType#ADD}, as the name has
+ * just come into existence, and we want to discover where its initial
+ * content came from.
+ */
+ private List<DiffEntry> dsts;
+
+ /**
+ * Matrix of all examined file pairs, and their scores.
+ * <p>
+ * The upper 8 bits of each long store the score, but the score is bounded
+ * to the range [0, 128) so that the highest bit is never set, and all
+ * entries are therefore positive.
+ * <p>
+ * List indexes to an element of {@link #srcs} and {@link #dsts} are encoded
+ * as the lower two groups of 28 bits, respectively, but the encoding is
+ * inverted, so that 0 is expressed as {@code (1 << 28) - 1}. This sorts
+ * lower list indices later in the matrix, giving precedence to files whose
+ * names sort earlier in the tree.
+ */
+ private long[] matrix;
+
+ /** Score a pair must exceed to be considered a rename. */
+ private int renameScore = 60;
+
+ private List<DiffEntry> out;
+
- this.repo = repo;
++ SimilarityRenameDetector(ObjectReader reader, List<DiffEntry> srcs,
+ List<DiffEntry> dsts) {
- r.hash(repo.openObject(objectId));
++ this.reader = reader;
+ this.srcs = srcs;
+ this.dsts = dsts;
+ }
+
+ void setRenameScore(int score) {
+ renameScore = score;
+ }
+
+ void compute(ProgressMonitor pm) throws IOException {
+ if (pm == null)
+ pm = NullProgressMonitor.INSTANCE;
+
+ pm.beginTask(JGitText.get().renamesFindingByContent, //
+ 2 * srcs.size() * dsts.size());
+
+ int mNext = buildMatrix(pm);
+ out = new ArrayList<DiffEntry>(Math.min(mNext, dsts.size()));
+
+ // Match rename pairs on a first come, first serve basis until
+ // we have looked at everything that is above our minimum score.
+ //
+ for (--mNext; mNext >= 0; mNext--) {
+ long ent = matrix[mNext];
+ int sIdx = srcFile(ent);
+ int dIdx = dstFile(ent);
+ DiffEntry s = srcs.get(sIdx);
+ DiffEntry d = dsts.get(dIdx);
+
+ if (d == null) {
+ pm.update(1);
+ continue; // was already matched earlier
+ }
+
+ ChangeType type;
+ if (s.changeType == ChangeType.DELETE) {
+ // First use of this source file. Tag it as a rename so we
+ // later know it has already been used as a rename; other
+ // matches (if any) will claim themselves as copies instead.
+ //
+ s.changeType = ChangeType.RENAME;
+ type = ChangeType.RENAME;
+ } else {
+ type = ChangeType.COPY;
+ }
+
+ out.add(DiffEntry.pair(type, s, d, score(ent)));
+ dsts.set(dIdx, null); // Claim the destination was matched.
+ pm.update(1);
+ }
+
+ srcs = compactSrcList(srcs);
+ dsts = compactDstList(dsts);
+ pm.endTask();
+ }
+
+ List<DiffEntry> getMatches() {
+ return out;
+ }
+
+ List<DiffEntry> getLeftOverSources() {
+ return srcs;
+ }
+
+ List<DiffEntry> getLeftOverDestinations() {
+ return dsts;
+ }
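+ // Minimal usage sketch (assuming an ObjectReader and the DELETE/ADD lists
+ // are already in hand):
+ //   SimilarityRenameDetector srd =
+ //       new SimilarityRenameDetector(reader, deletes, adds);
+ //   srd.setRenameScore(60); // minimum similarity, 0-100
+ //   srd.compute(NullProgressMonitor.INSTANCE);
+ //   List<DiffEntry> renames = srd.getMatches();
+ //   deletes = srd.getLeftOverSources();
+ //   adds = srd.getLeftOverDestinations();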
+
+ private static List<DiffEntry> compactSrcList(List<DiffEntry> in) {
+ ArrayList<DiffEntry> r = new ArrayList<DiffEntry>(in.size());
+ for (DiffEntry e : in) {
+ if (e.changeType == ChangeType.DELETE)
+ r.add(e);
+ }
+ return r;
+ }
+
+ private static List<DiffEntry> compactDstList(List<DiffEntry> in) {
+ ArrayList<DiffEntry> r = new ArrayList<DiffEntry>(in.size());
+ for (DiffEntry e : in) {
+ if (e != null)
+ r.add(e);
+ }
+ return r;
+ }
+
+ private int buildMatrix(ProgressMonitor pm) throws IOException {
+ // Allocate for the worst-case scenario where every pair has a
+ // score that we need to consider. We might not need that many.
+ //
+ matrix = new long[srcs.size() * dsts.size()];
+
+ long[] srcSizes = new long[srcs.size()];
+ long[] dstSizes = new long[dsts.size()];
+
+ // Initialize the size arrays to a value that indicates we haven't
+ // calculated the size yet. Since sizes cannot be negative, -1 will work.
+ Arrays.fill(srcSizes, -1);
+ Arrays.fill(dstSizes, -1);
+
+ // Consider each pair of files; if the score is above the minimum
+ // threshold, record that score in the matrix so we can later find
+ // the best matches.
+ //
+ int mNext = 0;
+ for (int srcIdx = 0; srcIdx < srcs.size(); srcIdx++) {
+ DiffEntry srcEnt = srcs.get(srcIdx);
+ if (!isFile(srcEnt.oldMode)) {
+ pm.update(dsts.size());
+ continue;
+ }
+
+ SimilarityIndex s = hash(srcEnt.oldId.toObjectId());
+ for (int dstIdx = 0; dstIdx < dsts.size(); dstIdx++) {
+ DiffEntry dstEnt = dsts.get(dstIdx);
+
+ if (!isFile(dstEnt.newMode)) {
+ pm.update(1);
+ continue;
+ }
+
+ if (!RenameDetector.sameType(srcEnt.oldMode, dstEnt.newMode)) {
+ pm.update(1);
+ continue;
+ }
+
+ long srcSize = srcSizes[srcIdx];
+ if (srcSize < 0) {
+ srcSize = size(srcEnt.oldId.toObjectId());
+ srcSizes[srcIdx] = srcSize;
+ }
+
+ long dstSize = dstSizes[dstIdx];
+ if (dstSize < 0) {
+ dstSize = size(dstEnt.newId.toObjectId());
+ dstSizes[dstIdx] = dstSize;
+ }
+
+ long max = Math.max(srcSize, dstSize);
+ long min = Math.min(srcSize, dstSize);
+ if (min * 100 / max < renameScore) {
+ // Cannot possibly match, as the file sizes are so different
+ pm.update(1);
+ continue;
+ }
+
+ SimilarityIndex d = hash(dstEnt.newId.toObjectId());
+ int contentScore = s.score(d, 10000);
+
+ // nameScore returns a value between 0 and 100, but we want it in
+ // the same range as the content score (0 to 10000) so it can be
+ // dropped into the weighted formula for the final score.
+ int nameScore = nameScore(srcEnt.oldName, dstEnt.newName) * 100;
+
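+ // For example (hypothetical values): contentScore = 7000 and a perfect
+ // name match (nameScore = 10000) give (7000 * 99 + 10000 * 1) / 10000 = 70.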
+ int score = (contentScore * 99 + nameScore * 1) / 10000;
+
+ if (score < renameScore) {
+ pm.update(1);
+ continue;
+ }
+
+ matrix[mNext++] = encode(score, srcIdx, dstIdx);
+ pm.update(1);
+ }
+ }
+
+ // Sort everything in the range we populated, which might be the
+ // entire matrix, or just a smaller slice if we had some bad low
+ // scoring pairs.
+ //
+ Arrays.sort(matrix, 0, mNext);
+ return mNext;
+ }
+
+ static int nameScore(String a, String b) {
+ int aDirLen = a.lastIndexOf("/") + 1;
+ int bDirLen = b.lastIndexOf("/") + 1;
+
+ int dirMin = Math.min(aDirLen, bDirLen);
+ int dirMax = Math.max(aDirLen, bDirLen);
+
+ final int dirScoreLtr;
+ final int dirScoreRtl;
+
+ if (dirMax == 0) {
+ dirScoreLtr = 100;
+ dirScoreRtl = 100;
+ } else {
+ int dirSim = 0;
+ for (; dirSim < dirMin; dirSim++) {
+ if (a.charAt(dirSim) != b.charAt(dirSim))
+ break;
+ }
+ dirScoreLtr = (dirSim * 100) / dirMax;
+
+ if (dirScoreLtr == 100) {
+ dirScoreRtl = 100;
+ } else {
+ for (dirSim = 0; dirSim < dirMin; dirSim++) {
+ if (a.charAt(aDirLen - 1 - dirSim) != b.charAt(bDirLen - 1
+ - dirSim))
+ break;
+ }
+ dirScoreRtl = (dirSim * 100) / dirMax;
+ }
+ }
+
+ int fileMin = Math.min(a.length() - aDirLen, b.length() - bDirLen);
+ int fileMax = Math.max(a.length() - aDirLen, b.length() - bDirLen);
+
+ int fileSim = 0;
+ for (; fileSim < fileMin; fileSim++) {
+ if (a.charAt(a.length() - 1 - fileSim) != b.charAt(b.length() - 1
+ - fileSim))
+ break;
+ }
+ int fileScore = (fileSim * 100) / fileMax;
+
+ return (((dirScoreLtr + dirScoreRtl) * 25) + (fileScore * 50)) / 100;
+ }
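+ // For example (hypothetical paths): nameScore("a/b/Foo.java", "a/c/Foo.java")
+ // is 68; the identical file names contribute the full 50 file-name points,
+ // the partially matching directories the remaining 18.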
+
+ private SimilarityIndex hash(ObjectId objectId) throws IOException {
+ SimilarityIndex r = new SimilarityIndex();
- return repo.openObject(objectId).getSize();
++ r.hash(reader.open(objectId));
+ r.sort();
+ return r;
+ }
+
+ private long size(ObjectId objectId) throws IOException {
++ return reader.getObjectSize(objectId, Constants.OBJ_BLOB);
+ }
+
+ private static int score(long value) {
+ return (int) (value >>> SCORE_SHIFT);
+ }
+
+ static int srcFile(long value) {
+ return decodeFile(((int) (value >>> BITS_PER_INDEX)) & INDEX_MASK);
+ }
+
+ static int dstFile(long value) {
+ return decodeFile(((int) value) & INDEX_MASK);
+ }
+
+ static long encode(int score, int srcIdx, int dstIdx) {
+ return (((long) score) << SCORE_SHIFT) //
+ | (encodeFile(srcIdx) << BITS_PER_INDEX) //
+ | encodeFile(dstIdx);
+ }
+
+ private static long encodeFile(int idx) {
+ // We invert the index so that the first file in the list sorts
+ // later in the table. This permits us to break ties favoring
+ // earlier names over later ones.
+ //
+ return INDEX_MASK - idx;
+ }
+
+ private static int decodeFile(int v) {
+ return INDEX_MASK - v;
+ }
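+ // For example (hypothetical indices): encode(75, 0, 3) puts the score 75 in
+ // the top 8 bits and the inverted indices in the two 28-bit groups, and
+ // srcFile()/dstFile() recover 0 and 3. Because the indices are inverted,
+ // equal-score pairs with smaller indices sort later in the matrix and are
+ // claimed first by the descending scan in compute().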
+
+ private static boolean isFile(FileMode mode) {
+ return (mode.getBits() & FileMode.TYPE_MASK) == FileMode.TYPE_FILE;
+ }
+}
* @param refName optional, only relevant for simple tags
* @return The Git object if found or null
* @throws IOException
- * To read a blob, open it with {@link #openObject(AnyObjectId)}.
+ * @deprecated Use {@link org.eclipse.jgit.revwalk.RevWalk#parseCommit(AnyObjectId)},
+ * or {@link org.eclipse.jgit.revwalk.RevWalk#parseTag(AnyObjectId)}.
+ * To read a tree, use {@link org.eclipse.jgit.treewalk.TreeWalk#addTree(AnyObjectId)}.
++ * To read a blob, open it with {@link #open(AnyObjectId)}.
*/
+ @Deprecated
public Object mapObject(final ObjectId id, final String refName) throws IOException {
- final ObjectLoader or = openObject(id);
- if (or == null)
+ final ObjectLoader or;
+ try {
+ or = open(id);
+ } catch (MissingObjectException notFound) {
return null;
- final byte[] raw = or.getBytes();
+ }
+ final byte[] raw = or.getCachedBytes();
switch (or.getType()) {
case Constants.OBJ_TREE:
- return makeTree(id, raw);
+ return new Tree(this, id, raw);
case Constants.OBJ_COMMIT:
- return makeCommit(id, raw);
+ return new Commit(this, id, raw);
case Constants.OBJ_TAG:
- return makeTag(id, refName, raw);
+ return new Tag(this, id, refName, raw);
case Constants.OBJ_BLOB:
return raw;
* @param id
* @return Commit or null
* @throws IOException for I/O error or unexpected object type.
+ * @deprecated Use {@link org.eclipse.jgit.revwalk.RevWalk#parseCommit(AnyObjectId)}.
*/
+ @Deprecated
public Commit mapCommit(final ObjectId id) throws IOException {
- final ObjectLoader or = openObject(id);
- if (or == null)
+ final ObjectLoader or;
+ try {
+ or = open(id, Constants.OBJ_COMMIT);
+ } catch (MissingObjectException notFound) {
return null;
- final byte[] raw = or.getBytes();
- if (Constants.OBJ_COMMIT == or.getType())
- return new Commit(this, id, raw);
- throw new IncorrectObjectTypeException(id, Constants.TYPE_COMMIT);
- }
-
- private Commit makeCommit(final ObjectId id, final byte[] raw) {
- Commit ret = new Commit(this, id, raw);
- return ret;
+ }
+ return new Commit(this, id, or.getCachedBytes());
}
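+ // Migration sketch (illustrative): instead of mapCommit(id), callers can
+ // use the RevWalk API, e.g.:
+ //   RevWalk rw = new RevWalk(this);
+ //   RevCommit commit = rw.parseCommit(id);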
/**
* @param id
* @return Tree or null
* @throws IOException for I/O error or unexpected object type.
+ * @deprecated Use {@link org.eclipse.jgit.treewalk.TreeWalk#addTree(AnyObjectId)}.
*/
+ @Deprecated
public Tree mapTree(final ObjectId id) throws IOException {
- final ObjectLoader or = openObject(id);
- if (or == null)
+ final ObjectLoader or;
+ try {
+ or = open(id);
+ } catch (MissingObjectException notFound) {
return null;
- final byte[] raw = or.getBytes();
+ }
+ final byte[] raw = or.getCachedBytes();
switch (or.getType()) {
case Constants.OBJ_TREE:
return new Tree(this, id, raw);
* @param id
* @return Commit or null
* @throws IOException for I/O error or unexpected object type.
+ * @deprecated Use {@link org.eclipse.jgit.revwalk.RevWalk#parseTag(AnyObjectId)}.
*/
+ @Deprecated
public Tag mapTag(final String refName, final ObjectId id) throws IOException {
- final ObjectLoader or = openObject(id);
- if (or == null)
+ final ObjectLoader or;
+ try {
+ or = open(id);
+ } catch (MissingObjectException notFound) {
return null;
- final byte[] raw = or.getBytes();
- if (Constants.OBJ_TAG == or.getType())
- return new Tag(this, id, refName, raw);
+ }
+ if (or.getType() == Constants.OBJ_TAG)
+ return new Tag(this, id, refName, or.getCachedBytes());
return new Tag(this, id, refName, null);
}
private final TreeWalk pathFilter;
- private final Repository repo;
++ private final Repository repository;
+
RewriteTreeFilter(final RevWalk walker, final TreeFilter t) {
- repo = walker.db;
- pathFilter = new TreeWalk(repo);
++ repository = walker.repository;
+ pathFilter = new TreeWalk(walker.reader);
pathFilter.setFilter(t);
pathFilter.setRecursive(t.shouldBeRecursive());
}
c.flags |= REWRITE;
return false;
}
- RenameDetector rd = new RenameDetector(repo);
+
+ private void updateFollowFilter(ObjectId[] trees)
+ throws MissingObjectException, IncorrectObjectTypeException,
+ CorruptObjectException, IOException {
+ TreeWalk tw = pathFilter;
+ FollowFilter oldFilter = (FollowFilter) tw.getFilter();
+ tw.setFilter(TreeFilter.ANY_DIFF);
+ tw.reset(trees);
+
+ List<DiffEntry> files = DiffEntry.scan(tw);
++ RenameDetector rd = new RenameDetector(repository);
+ rd.addAll(files);
+ files = rd.compute();
+
+ TreeFilter newFilter = oldFilter;
+ for (DiffEntry ent : files) {
+ if (isRename(ent) && ent.getNewName().equals(oldFilter.getPath())) {
+ newFilter = FollowFilter.create(ent.getOldName());
+ break;
+ }
+ }
+ tw.setFilter(newFilter);
+ }
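+ // For example (illustrative): when following "new/Name.java" and the rename
+ // detector reports old/Name.java -> new/Name.java between the two trees, the
+ // path filter is swapped to follow "old/Name.java" for the older commits.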
+
+ private static boolean isRename(DiffEntry ent) {
+ return ent.getChangeType() == ChangeType.RENAME
+ || ent.getChangeType() == ChangeType.COPY;
+ }
}
objectDatabase = db.getObjectDatabase().newCachedDatabase();
in = src;
inflater = InflaterCache.get();
- readCurs = new WindowCursor();
+ readCurs = objectDatabase.newReader();
buf = new byte[BUFFER_SIZE];
- objectData = new byte[BUFFER_SIZE];
+ skipBuffer = new byte[512];
objectDigest = Constants.newMessageDigest();
tempObjectId = new MutableObjectId();
packDigest = Constants.newMessageDigest();
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.FileMode;
+ import org.eclipse.jgit.lib.ObjectReader;
+import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.util.FS;
/**
* specified working directory as part of a {@link TreeWalk}.
*/
public class FileTreeIterator extends WorkingTreeIterator {
- private final File directory;
- private final FS fs;
+ /**
+ * the starting directory. This directory should correspond to
+ * the root of the repository.
+ */
+ protected final File directory;
+
+ /**
+ * the file system abstraction which will be necessary to
+ * perform certain file system operations.
+ */
+ protected final FS fs;
+
+ /**
+ * Create a new iterator to traverse the work tree and its children.
+ *
+ * @param repo
+ * the repository whose working tree will be scanned.
+ */
+ public FileTreeIterator(Repository repo) {
- this(repo.getWorkDir(), repo.getFS());
++ this(repo.getWorkTree(), repo.getFS());
+ initRootIterator(repo);
+ }
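+ // Illustrative use with a TreeWalk (assuming a Repository repo):
+ //   TreeWalk tw = new TreeWalk(repo);
+ //   tw.addTree(new FileTreeIterator(repo));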
/**
* Create a new iterator to traverse the given directory and its children.