import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.errors.NoMergeBaseException.MergeBaseFailureReason;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.junit.TestRepository;
+import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
+import org.eclipse.jgit.lib.ObjectLoader;
+import org.eclipse.jgit.lib.ObjectReader;
+import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.merge.ResolveMerger.MergeFailureReason;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject;
}
}
+
+ /**
+ * Merging a change involving large binary files should short-circuit reads.
+ *
+ * @param strategy
+ * the merge strategy under test
+ * @throws Exception
+ * if the repository setup or the merge fails
+ */
+ @Theory
+ public void checkContentMergeLargeBinaries(MergeStrategy strategy) throws Exception {
+ Git git = Git.wrap(db);
+ final int LINELEN = 72;
+
+ // Set up a merge that would succeed if it were not for the stray '\0'
+ // near the start of the file, which makes the blob look binary.
+ byte[] binary = new byte[LINELEN * 2000];
+ for (int i = 0; i < binary.length; i++) {
+ binary[i] = (byte)((i % LINELEN) == 0 ? '\n' : 'x');
+ }
+ binary[50] = '\0';
+
+ writeTrashFile("file", new String(binary, StandardCharsets.UTF_8));
+ git.add().addFilepattern("file").call();
+ RevCommit first = git.commit().setMessage("added file").call();
+
+ // Generate an edit in a single line.
+ int idx = LINELEN * 1200 + 1;
+ byte save = binary[idx];
+ binary[idx] = '@';
+ writeTrashFile("file", new String(binary, StandardCharsets.UTF_8));
+
+ binary[idx] = save;
+ git.add().addFilepattern("file").call();
+ RevCommit masterCommit = git.commit().setAll(true)
+ .setMessage("modified file l 1200").call();
+
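+ // Branch off the first commit and edit a different line on "side".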
+ git.checkout().setCreateBranch(true).setStartPoint(first).setName("side").call();
+ binary[LINELEN * 1500 + 1] = '!';
+ writeTrashFile("file", new String(binary, StandardCharsets.UTF_8));
+ git.add().addFilepattern("file").call();
+ RevCommit sideCommit = git.commit().setAll(true)
+ .setMessage("modified file l 1500").call();
+
+ try (ObjectInserter ins = db.newObjectInserter()) {
+ // Check that we don't read the large blobs.
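+ // Each blob is LINELEN * 2000 = 144000 bytes, far above the 8000 byte
+ // limit enforced below, so fully reading any version would trip the guard.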
+ ObjectInserter forbidInserter = new ObjectInserter.Filter() {
+ @Override
+ protected ObjectInserter delegate() {
+ return ins;
+ }
+
+ @Override
+ public ObjectReader newReader() {
+ return new BigReadForbiddenReader(super.newReader(), 8000);
+ }
+ };
+
+ ResolveMerger merger =
+ (ResolveMerger) strategy.newMerger(forbidInserter, db.getConfig());
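+ // The merge must report a conflict for the binary file without reading
+ // the blobs in full; a full read would throw IllegalStateException instead.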
+ boolean noProblems = merger.merge(masterCommit, sideCommit);
+ assertFalse(noProblems);
+ }
+ }
+
+ /**
+ * An ObjectStream wrapper that throws IllegalStateException once more than
+ * {@code limit} bytes have been read or skipped.
+ */
+ class BigReadForbiddenStream extends ObjectStream.Filter {
+ int limit;
+
+ BigReadForbiddenStream(ObjectStream orig, int limit) {
+ super(orig.getType(), orig.getSize(), orig);
+ this.limit = limit;
+ }
+
+ @Override
+ public long skip(long n) throws IOException {
+ // Charge the limit with the bytes actually skipped, not the requested count.
+ long skipped = super.skip(n);
+ limit -= skipped;
+ if (limit < 0) {
+ throw new IllegalStateException();
+ }
+ return skipped;
+ }
+
+ @Override
+ public int read() throws IOException {
+ int r = super.read();
+ // Do not count the end-of-stream marker against the limit.
+ if (r != -1) {
+ limit--;
+ }
+ if (limit < 0) {
+ throw new IllegalStateException();
+ }
+ return r;
+ }
+
+ @Override
+ public int read(byte[] b, int off, int len) throws IOException {
+ int n = super.read(b, off, len);
+ // n is -1 at end of stream; only count bytes that were actually read.
+ if (n > 0) {
+ limit -= n;
+ }
+ if (limit < 0) {
+ throw new IllegalStateException();
+ }
+ return n;
+ }
+ }
+
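+ /**
+ * An ObjectReader whose blob streams refuse to serve more than
+ * {@code limit} bytes, via {@link BigReadForbiddenStream}.
+ */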
+ class BigReadForbiddenReader extends ObjectReader.Filter {
+ ObjectReader delegate;
+ int limit;
+
+ @Override
+ protected ObjectReader delegate() {
+ return delegate;
+ }
+
+ BigReadForbiddenReader(ObjectReader delegate, int limit) {
+ this.delegate = delegate;
+ this.limit = limit;
+ }
+
+ @Override
+ public ObjectLoader open(AnyObjectId objectId, int typeHint) throws IOException {
+ ObjectLoader orig = super.open(objectId, typeHint);
+ return new ObjectLoader.Filter() {
+ @Override
+ protected ObjectLoader delegate() {
+ return orig;
+ }
+
+ @Override
+ public ObjectStream openStream() throws IOException {
+ ObjectStream os = orig.openStream();
+ return new BigReadForbiddenStream(os, limit);
+ }
+ };
+ }
+ }
+
@Theory
public void checkContentMergeConflict(MergeStrategy strategy)
throws Exception {
import org.eclipse.jgit.dircache.DirCacheBuilder;
import org.eclipse.jgit.dircache.DirCacheCheckout;
import org.eclipse.jgit.dircache.DirCacheEntry;
+import org.eclipse.jgit.errors.BinaryBlobException;
import org.eclipse.jgit.errors.CorruptObjectException;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.IndexWriteException;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
+import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevTree;
+import org.eclipse.jgit.storage.pack.PackConfig;
import org.eclipse.jgit.treewalk.AbstractTreeIterator;
import org.eclipse.jgit.treewalk.CanonicalTreeParser;
import org.eclipse.jgit.treewalk.NameConflictTreeWalk;
// OURS or THEIRS has been deleted
if (((modeO != 0 && !tw.idEqual(T_BASE, T_OURS)) || (modeT != 0 && !tw
.idEqual(T_BASE, T_THEIRS)))) {
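+ // Run the content merge first so the same result can be recorded
+ // for this path in mergeResults below.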
+ MergeResult<RawText> result = contentMerge(base, ours, theirs);
add(tw.getRawPath(), base, DirCacheEntry.STAGE_1, 0, 0);
add(tw.getRawPath(), ours, DirCacheEntry.STAGE_2, 0, 0);
unmergedPaths.add(tw.getPathString());
// generate a MergeResult for the deleted file
- mergeResults.put(tw.getPathString(),
- contentMerge(base, ours, theirs));
+ mergeResults.put(tw.getPathString(), result);
}
}
return true;
private MergeResult<RawText> contentMerge(CanonicalTreeParser base,
CanonicalTreeParser ours, CanonicalTreeParser theirs)
throws IOException {
- RawText baseText = base == null ? RawText.EMPTY_TEXT : getRawText(
+ RawText baseText;
+ RawText ourText;
+ RawText theirsText;
+
+ try {
+ baseText = base == null ? RawText.EMPTY_TEXT : getRawText(
base.getEntryObjectId(), reader);
- RawText ourText = ours == null ? RawText.EMPTY_TEXT : getRawText(
+ ourText = ours == null ? RawText.EMPTY_TEXT : getRawText(
ours.getEntryObjectId(), reader);
- RawText theirsText = theirs == null ? RawText.EMPTY_TEXT : getRawText(
+ theirsText = theirs == null ? RawText.EMPTY_TEXT : getRawText(
theirs.getEntryObjectId(), reader);
+ } catch (BinaryBlobException e) {
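+ // At least one side is binary or too large to load as text; mark the
+ // path as conflicting instead of attempting a text merge.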
+ MergeResult<RawText> r = new MergeResult<>(Collections.<RawText>emptyList());
+ r.setContainsConflicts(true);
+ return r;
+ }
return (mergeAlgorithm.merge(RawTextComparator.DEFAULT, baseText,
ourText, theirsText));
}
}
private static RawText getRawText(ObjectId id, ObjectReader reader)
- throws IOException {
+ throws IOException, BinaryBlobException {
if (id.equals(ObjectId.zeroId()))
return new RawText(new byte[] {});
- return new RawText(reader.open(id, OBJ_BLOB).getCachedBytes());
+
+ ObjectLoader loader = reader.open(id, OBJ_BLOB);
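+ // RawText.load throws BinaryBlobException for blobs that are binary or
+ // exceed the threshold, so huge files are never read fully into memory.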
+ int threshold = PackConfig.DEFAULT_BIG_FILE_THRESHOLD;
+ return RawText.load(loader, threshold);
}
private static boolean nonTree(final int mode) {