Diffstat (limited to 'org.eclipse.jgit')
24 files changed, 1536 insertions, 261 deletions
diff --git a/org.eclipse.jgit/.settings/.api_filters b/org.eclipse.jgit/.settings/.api_filters new file mode 100644 index 0000000000..33331fbab7 --- /dev/null +++ b/org.eclipse.jgit/.settings/.api_filters @@ -0,0 +1,11 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<component id="org.eclipse.jgit" version="2"> + <resource path="src/org/eclipse/jgit/transport/SshConstants.java" type="org.eclipse.jgit.transport.SshConstants"> + <filter id="1142947843"> + <message_arguments> + <message_argument value="5.11.1"/> + <message_argument value="PUBKEY_ACCEPTED_ALGORITHMS"/> + </message_arguments> + </filter> + </resource> +</component> diff --git a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties index 2fa8713daa..962324e0f7 100644 --- a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties +++ b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties @@ -13,6 +13,9 @@ ambiguousObjectAbbreviation=Object abbreviation {0} is ambiguous aNewObjectIdIsRequired=A NewObjectId is required. anExceptionOccurredWhileTryingToAddTheIdOfHEAD=An exception occurred while trying to add the Id of HEAD anSSHSessionHasBeenAlreadyCreated=An SSH session has been already created +applyBinaryBaseOidWrong=Cannot apply binary patch; OID for file {0} does not match +applyBinaryOidTooShort=Binary patch for file {0} does not have full IDs +applyBinaryResultOidWrong=Result of binary patch for file {0} has wrong OID. applyingCommit=Applying {0} archiveFormatAlreadyAbsent=Archive format already absent: {0} archiveFormatAlreadyRegistered=Archive format already registered with different implementation: {0} @@ -37,7 +40,19 @@ badRef=Bad ref: {0}: {1} badSectionEntry=Bad section entry: {0} badShallowLine=Bad shallow line: {0} bareRepositoryNoWorkdirAndIndex=Bare Repository has neither a working tree, nor an index +base85invalidChar=Invalid base-85 character: 0x{0} +base85length=Base-85 encoded data must have a length that is a multiple of 5 +base85overflow=Base-85 value overflow, does not fit into 32 bits: 0x{0} +base85tooLong=Extra base-85 encoded data for output size of {0} bytes +base85tooShort=Base-85 data decoded into less than {0} bytes baseLengthIncorrect=base length incorrect +binaryDeltaBaseLengthMismatch=Binary delta base length does not match, expected {0}, got {1} +binaryDeltaInvalidOffset=Binary delta offset + length too large: {0} + {1} +binaryDeltaInvalidResultLength=Binary delta expected result length is negative +binaryHunkDecodeError=Binary hunk, line {0}: invalid input +binaryHunkInvalidLength=Binary hunk, line {0}: input corrupt; expected length byte, got 0x{1} +binaryHunkLineTooShort=Binary hunk, line {0}: input ended prematurely +binaryHunkMissingNewline=Binary hunk, line {0}: input line not terminated by newline bitmapMissingObject=Bitmap at {0} is missing {1}. bitmapsMustBePrepared=Bitmaps must be prepared before they may be written. 
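A note on the new base-85 messages above: git's binary patches encode every group of 4 bytes as 5 base-85 characters, so well-formed input always has a length that is a multiple of 5 (base85length), and each 5-character group must decode to a 32-bit value. The largest 5-digit base-85 value is 85^5 - 1 = 4,437,053,124, which is larger than the 32-bit maximum of 4,294,967,295, so a decoder has to check for overflow explicitly (base85overflow).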
blameNotCommittedYet=Not Committed Yet diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java index e228e8276a..583767af3f 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2011, 2020 IBM Corporation and others + * Copyright (C) 2011, 2021 IBM Corporation and others * * This program and the accompanying materials are made available under the * terms of the Eclipse Distribution License v. 1.0 which is available at @@ -9,29 +9,68 @@ */ package org.eclipse.jgit.api; +import java.io.BufferedInputStream; +import java.io.ByteArrayInputStream; import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.Writer; +import java.io.OutputStream; +import java.nio.ByteBuffer; import java.nio.file.Files; import java.nio.file.StandardCopyOption; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.zip.InflaterInputStream; +import org.eclipse.jgit.api.errors.FilterFailedException; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.api.errors.PatchApplyException; import org.eclipse.jgit.api.errors.PatchFormatException; +import org.eclipse.jgit.attributes.FilterCommand; +import org.eclipse.jgit.attributes.FilterCommandRegistry; import org.eclipse.jgit.diff.DiffEntry.ChangeType; import org.eclipse.jgit.diff.RawText; +import org.eclipse.jgit.dircache.DirCache; +import org.eclipse.jgit.dircache.DirCacheCheckout; +import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata; +import org.eclipse.jgit.dircache.DirCacheIterator; +import org.eclipse.jgit.errors.LargeObjectException; +import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.internal.JGitText; +import org.eclipse.jgit.lib.Constants; +import org.eclipse.jgit.lib.CoreConfig.EolStreamType; import org.eclipse.jgit.lib.FileMode; +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.ObjectLoader; +import org.eclipse.jgit.lib.ObjectStream; import org.eclipse.jgit.lib.Repository; +import org.eclipse.jgit.patch.BinaryHunk; import org.eclipse.jgit.patch.FileHeader; +import org.eclipse.jgit.patch.FileHeader.PatchType; import org.eclipse.jgit.patch.HunkHeader; import org.eclipse.jgit.patch.Patch; +import org.eclipse.jgit.treewalk.FileTreeIterator; +import org.eclipse.jgit.treewalk.TreeWalk; +import org.eclipse.jgit.treewalk.TreeWalk.OperationType; +import org.eclipse.jgit.treewalk.filter.AndTreeFilter; +import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter; +import org.eclipse.jgit.treewalk.filter.PathFilterGroup; +import org.eclipse.jgit.util.FS; +import org.eclipse.jgit.util.FS.ExecutionResult; import org.eclipse.jgit.util.FileUtils; +import org.eclipse.jgit.util.IO; +import org.eclipse.jgit.util.RawParseUtils; +import org.eclipse.jgit.util.StringUtils; +import org.eclipse.jgit.util.TemporaryBuffer; +import org.eclipse.jgit.util.TemporaryBuffer.LocalFile; +import org.eclipse.jgit.util.io.BinaryDeltaInputStream; +import org.eclipse.jgit.util.io.BinaryHunkInputStream; +import org.eclipse.jgit.util.io.EolStreamTypeUtil; +import org.eclipse.jgit.util.sha1.SHA1; /** * Apply a patch to files and/or to the index. 
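The ApplyCommand rework below routes results through the index/work-tree filters and adds git binary patch support. For orientation, a minimal sketch of driving the command through the porcelain API; the repository path and patch file name are placeholders, and error handling is omitted:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.InputStream;
    import org.eclipse.jgit.api.ApplyResult;
    import org.eclipse.jgit.api.Git;

    public class ApplyPatchExample {
        public static void main(String[] args) throws Exception {
            try (Git git = Git.open(new File("/path/to/repo"));
                    InputStream patch = new FileInputStream("changes.patch")) {
                // Parses the patch, applies text and binary hunks, and reports
                // the files it touched.
                ApplyResult result = git.apply().setPatch(patch).call();
                result.getUpdatedFiles()
                        .forEach(f -> System.out.println("patched " + f));
            }
        }
    }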
@@ -45,7 +84,7 @@ public class ApplyCommand extends GitCommand<ApplyResult> { private InputStream in; /** - * Constructs the command if the patch is to be applied to the index. + * Constructs the command. * * @param repo */ @@ -79,6 +118,7 @@ public class ApplyCommand extends GitCommand<ApplyResult> { public ApplyResult call() throws GitAPIException, PatchFormatException, PatchApplyException { checkCallable(); + setCallable(false); ApplyResult r = new ApplyResult(); try { final Patch p = new Patch(); @@ -87,19 +127,22 @@ public class ApplyCommand extends GitCommand<ApplyResult> { } finally { in.close(); } - if (!p.getErrors().isEmpty()) + if (!p.getErrors().isEmpty()) { throw new PatchFormatException(p.getErrors()); + } + Repository repository = getRepository(); + DirCache cache = repository.readDirCache(); for (FileHeader fh : p.getFiles()) { ChangeType type = fh.getChangeType(); File f = null; switch (type) { case ADD: f = getFile(fh.getNewPath(), true); - apply(f, fh); + apply(repository, fh.getNewPath(), cache, f, fh); break; case MODIFY: f = getFile(fh.getOldPath(), false); - apply(f, fh); + apply(repository, fh.getOldPath(), cache, f, fh); break; case DELETE: f = getFile(fh.getOldPath(), false); @@ -118,14 +161,14 @@ public class ApplyCommand extends GitCommand<ApplyResult> { throw new PatchApplyException(MessageFormat.format( JGitText.get().renameFileFailed, f, dest), e); } - apply(dest, fh); + apply(repository, fh.getOldPath(), cache, dest, fh); break; case COPY: f = getFile(fh.getOldPath(), false); File target = getFile(fh.getNewPath(), false); FileUtils.mkdirs(target.getParentFile(), true); Files.copy(f.toPath(), target.toPath()); - apply(target, fh); + apply(repository, fh.getOldPath(), cache, target, fh); } r.addUpdatedFile(f); } @@ -133,14 +176,13 @@ public class ApplyCommand extends GitCommand<ApplyResult> { throw new PatchApplyException(MessageFormat.format( JGitText.get().patchApplyException, e.getMessage()), e); } - setCallable(false); return r; } private File getFile(String path, boolean create) throws PatchApplyException { File f = new File(getRepository().getWorkTree(), path); - if (create) + if (create) { try { File parent = f.getParentFile(); FileUtils.mkdirs(parent, true); @@ -149,22 +191,366 @@ public class ApplyCommand extends GitCommand<ApplyResult> { throw new PatchApplyException(MessageFormat.format( JGitText.get().createNewFileFailed, f), e); } + } return f; } + private void apply(Repository repository, String path, DirCache cache, + File f, FileHeader fh) throws IOException, PatchApplyException { + if (PatchType.BINARY.equals(fh.getPatchType())) { + return; + } + boolean convertCrLf = needsCrLfConversion(f, fh); + // Use a TreeWalk with a DirCacheIterator to pick up the correct + // clean/smudge filters. CR-LF handling is completely determined by + // whether the file or the patch have CR-LF line endings. + try (TreeWalk walk = new TreeWalk(repository)) { + walk.setOperationType(OperationType.CHECKIN_OP); + FileTreeIterator files = new FileTreeIterator(repository); + int fileIdx = walk.addTree(files); + int cacheIdx = walk.addTree(new DirCacheIterator(cache)); + files.setDirCacheIterator(walk, cacheIdx); + walk.setFilter(AndTreeFilter.create( + PathFilterGroup.createFromStrings(path), + new NotIgnoredFilter(fileIdx))); + walk.setRecursive(true); + if (walk.next()) { + // If the file on disk has no newline characters, convertCrLf + // will be false. In that case we want to honor the normal git + // settings. + EolStreamType streamType = convertCrLf ? 
EolStreamType.TEXT_CRLF + : walk.getEolStreamType(OperationType.CHECKOUT_OP); + String command = walk.getFilterCommand( + Constants.ATTR_FILTER_TYPE_SMUDGE); + CheckoutMetadata checkOut = new CheckoutMetadata(streamType, command); + FileTreeIterator file = walk.getTree(fileIdx, + FileTreeIterator.class); + if (file != null) { + if (PatchType.GIT_BINARY.equals(fh.getPatchType())) { + applyBinary(repository, path, f, fh, + file::openEntryStream, file.getEntryObjectId(), + checkOut); + } else { + command = walk.getFilterCommand( + Constants.ATTR_FILTER_TYPE_CLEAN); + RawText raw; + // Can't use file.openEntryStream() as it would do CR-LF + // conversion as usual, not as wanted by us. + try (InputStream input = filterClean(repository, path, + new FileInputStream(f), convertCrLf, command)) { + raw = new RawText( + IO.readWholeStream(input, 0).array()); + } + applyText(repository, path, raw, f, fh, checkOut); + } + return; + } + } + } + // File ignored? + RawText raw; + CheckoutMetadata checkOut; + if (PatchType.GIT_BINARY.equals(fh.getPatchType())) { + checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null); + applyBinary(repository, path, f, fh, () -> new FileInputStream(f), + null, checkOut); + } else { + if (convertCrLf) { + try (InputStream input = EolStreamTypeUtil.wrapInputStream( + new FileInputStream(f), EolStreamType.TEXT_LF)) { + raw = new RawText(IO.readWholeStream(input, 0).array()); + } + checkOut = new CheckoutMetadata(EolStreamType.TEXT_CRLF, null); + } else { + raw = new RawText(f); + checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null); + } + applyText(repository, path, raw, f, fh, checkOut); + } + } + + private boolean needsCrLfConversion(File f, FileHeader fileHeader) + throws IOException { + if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) { + return false; + } + if (!hasCrLf(fileHeader)) { + try (InputStream input = new FileInputStream(f)) { + return RawText.isCrLfText(input); + } + } + return false; + } + + private static boolean hasCrLf(FileHeader fileHeader) { + if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) { + return false; + } + for (HunkHeader header : fileHeader.getHunks()) { + byte[] buf = header.getBuffer(); + int hunkEnd = header.getEndOffset(); + int lineStart = header.getStartOffset(); + while (lineStart < hunkEnd) { + int nextLineStart = RawParseUtils.nextLF(buf, lineStart); + if (nextLineStart > hunkEnd) { + nextLineStart = hunkEnd; + } + if (nextLineStart <= lineStart) { + break; + } + if (nextLineStart - lineStart > 1) { + char first = (char) (buf[lineStart] & 0xFF); + if (first == ' ' || first == '-') { + // It's an old line. Does it end in CR-LF? 
+ if (buf[nextLineStart - 2] == '\r') { + return true; + } + } + } + lineStart = nextLineStart; + } + } + return false; + } + + private InputStream filterClean(Repository repository, String path, + InputStream fromFile, boolean convertCrLf, String filterCommand) + throws IOException { + InputStream input = fromFile; + if (convertCrLf) { + input = EolStreamTypeUtil.wrapInputStream(input, + EolStreamType.TEXT_LF); + } + if (StringUtils.isEmptyOrNull(filterCommand)) { + return input; + } + if (FilterCommandRegistry.isRegistered(filterCommand)) { + LocalFile buffer = new TemporaryBuffer.LocalFile(null); + FilterCommand command = FilterCommandRegistry.createFilterCommand( + filterCommand, repository, input, buffer); + while (command.run() != -1) { + // loop as long as command.run() tells there is work to do + } + return buffer.openInputStreamWithAutoDestroy(); + } + FS fs = repository.getFS(); + ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand, + new String[0]); + filterProcessBuilder.directory(repository.getWorkTree()); + filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY, + repository.getDirectory().getAbsolutePath()); + ExecutionResult result; + try { + result = fs.execute(filterProcessBuilder, in); + } catch (IOException | InterruptedException e) { + throw new IOException( + new FilterFailedException(e, filterCommand, path)); + } + int rc = result.getRc(); + if (rc != 0) { + throw new IOException(new FilterFailedException(rc, filterCommand, + path, result.getStdout().toByteArray(4096), RawParseUtils + .decode(result.getStderr().toByteArray(4096)))); + } + return result.getStdout().openInputStreamWithAutoDestroy(); + } + + /** + * Something that can supply an {@link InputStream}. + */ + private interface StreamSupplier { + InputStream load() throws IOException; + } + /** - * @param f - * @param fh - * @throws IOException - * @throws PatchApplyException + * We write the patch result to a {@link TemporaryBuffer} and then use + * {@link DirCacheCheckout}.getContent() to run the result through the CR-LF + * and smudge filters. DirCacheCheckout needs an ObjectLoader, not a + * TemporaryBuffer, so this class bridges between the two, making any Stream + * provided by a {@link StreamSupplier} look like an ordinary git blob to + * DirCacheCheckout. 
*/ - private void apply(File f, FileHeader fh) + private static class StreamLoader extends ObjectLoader { + + private StreamSupplier data; + + private long size; + + StreamLoader(StreamSupplier data, long length) { + this.data = data; + this.size = length; + } + + @Override + public int getType() { + return Constants.OBJ_BLOB; + } + + @Override + public long getSize() { + return size; + } + + @Override + public boolean isLarge() { + return true; + } + + @Override + public byte[] getCachedBytes() throws LargeObjectException { + throw new LargeObjectException(); + } + + @Override + public ObjectStream openStream() + throws MissingObjectException, IOException { + return new ObjectStream.Filter(getType(), getSize(), + new BufferedInputStream(data.load())); + } + } + + private void initHash(SHA1 hash, long size) { + hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB)); + hash.update((byte) ' '); + hash.update(Constants.encodeASCII(size)); + hash.update((byte) 0); + } + + private ObjectId hash(File f) throws IOException { + SHA1 hash = SHA1.newInstance(); + initHash(hash, f.length()); + try (InputStream input = new FileInputStream(f)) { + byte[] buf = new byte[8192]; + int n; + while ((n = input.read(buf)) >= 0) { + hash.update(buf, 0, n); + } + } + return hash.toObjectId(); + } + + private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f, + String path) + throws PatchApplyException, IOException { + boolean hashOk = false; + if (id != null) { + hashOk = baseId.equals(id); + if (!hashOk && ChangeType.ADD.equals(type) + && ObjectId.zeroId().equals(baseId)) { + // We create the file first. The OID of an empty file is not the + // zero id! + hashOk = Constants.EMPTY_BLOB_ID.equals(id); + } + } else { + if (ObjectId.zeroId().equals(baseId)) { + // File empty is OK. + hashOk = !f.exists() || f.length() == 0; + } else { + hashOk = baseId.equals(hash(f)); + } + } + if (!hashOk) { + throw new PatchApplyException(MessageFormat + .format(JGitText.get().applyBinaryBaseOidWrong, path)); + } + } + + private void applyBinary(Repository repository, String path, File f, + FileHeader fh, StreamSupplier loader, ObjectId id, + CheckoutMetadata checkOut) + throws PatchApplyException, IOException { + if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) { + throw new PatchApplyException(MessageFormat + .format(JGitText.get().applyBinaryOidTooShort, path)); + } + BinaryHunk hunk = fh.getForwardBinaryHunk(); + // A BinaryHunk has the start at the "literal" or "delta" token. Data + // starts on the next line. + int start = RawParseUtils.nextLF(hunk.getBuffer(), + hunk.getStartOffset()); + int length = hunk.getEndOffset() - start; + SHA1 hash = SHA1.newInstance(); + // Write to a buffer and copy to the file only if everything was fine + TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null); + try { + switch (hunk.getType()) { + case LITERAL_DEFLATED: + // This just overwrites the file. We need to check the hash of + // the base. 
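The initHash/hash helpers above build the standard git blob hash, i.e. SHA-1 over the header "blob <size>\0" followed by the content, which is what checkOid compares against the ids recorded in the patch's index line. A sketch of the same check written against the public API; the class, method name, and arguments are illustrative only:

    import org.eclipse.jgit.lib.Constants;
    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.lib.ObjectInserter;

    class BlobIdCheck {
        // data: current file content; expectedId: e.g. fh.getOldId().toObjectId().
        static boolean baseMatches(byte[] data, ObjectId expectedId) {
            ObjectId actual = new ObjectInserter.Formatter()
                    .idFor(Constants.OBJ_BLOB, data);
            return actual.equals(expectedId);
        }
    }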
+ checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f, + path); + initHash(hash, hunk.getSize()); + try (OutputStream out = buffer; + InputStream inflated = new SHA1InputStream(hash, + new InflaterInputStream( + new BinaryHunkInputStream( + new ByteArrayInputStream( + hunk.getBuffer(), start, + length))))) { + DirCacheCheckout.getContent(repository, path, checkOut, + new StreamLoader(() -> inflated, hunk.getSize()), + null, out); + if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) { + throw new PatchApplyException(MessageFormat.format( + JGitText.get().applyBinaryResultOidWrong, + path)); + } + } + try (InputStream bufIn = buffer.openInputStream()) { + Files.copy(bufIn, f.toPath(), + StandardCopyOption.REPLACE_EXISTING); + } + break; + case DELTA_DEFLATED: + // Unfortunately delta application needs random access to the + // base to construct the result. + byte[] base; + try (InputStream input = loader.load()) { + base = IO.readWholeStream(input, 0).array(); + } + // At least stream the result! + try (BinaryDeltaInputStream input = new BinaryDeltaInputStream( + base, + new InflaterInputStream(new BinaryHunkInputStream( + new ByteArrayInputStream(hunk.getBuffer(), + start, length))))) { + long finalSize = input.getExpectedResultSize(); + initHash(hash, finalSize); + try (OutputStream out = buffer; + SHA1InputStream hashed = new SHA1InputStream(hash, + input)) { + DirCacheCheckout.getContent(repository, path, checkOut, + new StreamLoader(() -> hashed, finalSize), null, + out); + if (!fh.getNewId().toObjectId() + .equals(hash.toObjectId())) { + throw new PatchApplyException(MessageFormat.format( + JGitText.get().applyBinaryResultOidWrong, + path)); + } + } + } + try (InputStream bufIn = buffer.openInputStream()) { + Files.copy(bufIn, f.toPath(), + StandardCopyOption.REPLACE_EXISTING); + } + break; + default: + break; + } + } finally { + buffer.destroy(); + } + } + + private void applyText(Repository repository, String path, RawText rt, + File f, FileHeader fh, CheckoutMetadata checkOut) throws IOException, PatchApplyException { - RawText rt = new RawText(f); - List<String> oldLines = new ArrayList<>(rt.size()); - for (int i = 0; i < rt.size(); i++) - oldLines.add(rt.getString(i)); - List<String> newLines = new ArrayList<>(oldLines); + List<ByteBuffer> oldLines = new ArrayList<>(rt.size()); + for (int i = 0; i < rt.size(); i++) { + oldLines.add(rt.getRawString(i)); + } + List<ByteBuffer> newLines = new ArrayList<>(oldLines); int afterLastHunk = 0; int lineNumberShift = 0; int lastHunkNewLine = -1; @@ -182,9 +568,9 @@ public class ApplyCommand extends GitCommand<ApplyResult> { b.length); RawText hrt = new RawText(b); - List<String> hunkLines = new ArrayList<>(hrt.size()); + List<ByteBuffer> hunkLines = new ArrayList<>(hrt.size()); for (int i = 0; i < hrt.size(); i++) { - hunkLines.add(hrt.getString(i)); + hunkLines.add(hrt.getRawString(i)); } if (hh.getNewStartLine() == 0) { @@ -253,8 +639,13 @@ public class ApplyCommand extends GitCommand<ApplyResult> { lineNumberShift = applyAt - hh.getNewStartLine() + 1; int sz = hunkLines.size(); for (int j = 1; j < sz; j++) { - String hunkLine = hunkLines.get(j); - switch (hunkLine.charAt(0)) { + ByteBuffer hunkLine = hunkLines.get(j); + if (!hunkLine.hasRemaining()) { + // Completely empty line; accept as empty context line + applyAt++; + continue; + } + switch (hunkLine.array()[hunkLine.position()]) { case ' ': applyAt++; break; @@ -262,7 +653,7 @@ public class ApplyCommand extends GitCommand<ApplyResult> { newLines.remove(applyAt); 
break; case '+': - newLines.add(applyAt++, hunkLine.substring(1)); + newLines.add(applyAt++, slice(hunkLine, 1)); break; default: break; @@ -271,39 +662,64 @@ public class ApplyCommand extends GitCommand<ApplyResult> { afterLastHunk = applyAt; } if (!isNoNewlineAtEndOfFile(fh)) { - newLines.add(""); //$NON-NLS-1$ + newLines.add(null); } if (!rt.isMissingNewlineAtEnd()) { - oldLines.add(""); //$NON-NLS-1$ - } - if (!isChanged(oldLines, newLines)) { - return; // Don't touch the file - } - try (Writer fw = Files.newBufferedWriter(f.toPath())) { - for (Iterator<String> l = newLines.iterator(); l.hasNext();) { - fw.write(l.next()); - if (l.hasNext()) { - // Don't bother handling line endings - if it was Windows, - // the \r is still there! - fw.write('\n'); + oldLines.add(null); + } + if (oldLines.equals(newLines)) { + return; // Unchanged; don't touch the file + } + + TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null); + try { + try (OutputStream out = buffer) { + for (Iterator<ByteBuffer> l = newLines.iterator(); l + .hasNext();) { + ByteBuffer line = l.next(); + if (line == null) { + // Must be the marker for the final newline + break; + } + out.write(line.array(), line.position(), line.remaining()); + if (l.hasNext()) { + out.write('\n'); + } } } + try (OutputStream output = new FileOutputStream(f)) { + DirCacheCheckout.getContent(repository, path, checkOut, + new StreamLoader(buffer::openInputStream, + buffer.length()), + null, output); + } + } finally { + buffer.destroy(); } - getRepository().getFS().setExecute(f, fh.getNewMode() == FileMode.EXECUTABLE_FILE); + repository.getFS().setExecute(f, + fh.getNewMode() == FileMode.EXECUTABLE_FILE); } - private boolean canApplyAt(List<String> hunkLines, List<String> newLines, - int line) { + private boolean canApplyAt(List<ByteBuffer> hunkLines, + List<ByteBuffer> newLines, int line) { int sz = hunkLines.size(); int limit = newLines.size(); int pos = line; for (int j = 1; j < sz; j++) { - String hunkLine = hunkLines.get(j); - switch (hunkLine.charAt(0)) { + ByteBuffer hunkLine = hunkLines.get(j); + if (!hunkLine.hasRemaining()) { + // Empty line. Accept as empty context line. 
+ if (pos >= limit || newLines.get(pos).hasRemaining()) { + return false; + } + pos++; + continue; + } + switch (hunkLine.array()[hunkLine.position()]) { case ' ': case '-': if (pos >= limit - || !newLines.get(pos).equals(hunkLine.substring(1))) { + || !newLines.get(pos).equals(slice(hunkLine, 1))) { return false; } pos++; @@ -315,13 +731,9 @@ public class ApplyCommand extends GitCommand<ApplyResult> { return true; } - private static boolean isChanged(List<String> ol, List<String> nl) { - if (ol.size() != nl.size()) - return true; - for (int i = 0; i < ol.size(); i++) - if (!ol.get(i).equals(nl.get(i))) - return true; - return false; + private ByteBuffer slice(ByteBuffer b, int off) { + int newOffset = b.position() + off; + return ByteBuffer.wrap(b.array(), newOffset, b.limit() - newOffset); } private boolean isNoNewlineAtEndOfFile(FileHeader fh) { @@ -330,8 +742,51 @@ public class ApplyCommand extends GitCommand<ApplyResult> { return false; } HunkHeader lastHunk = hunks.get(hunks.size() - 1); - RawText lhrt = new RawText(lastHunk.getBuffer()); + byte[] buf = new byte[lastHunk.getEndOffset() + - lastHunk.getStartOffset()]; + System.arraycopy(lastHunk.getBuffer(), lastHunk.getStartOffset(), buf, + 0, buf.length); + RawText lhrt = new RawText(buf); return lhrt.getString(lhrt.size() - 1) .equals("\\ No newline at end of file"); //$NON-NLS-1$ } + + /** + * An {@link InputStream} that updates a {@link SHA1} on every byte read. + * The hash is supposed to have been initialized before reading starts. + */ + private static class SHA1InputStream extends InputStream { + + private final SHA1 hash; + + private final InputStream in; + + SHA1InputStream(SHA1 hash, InputStream in) { + this.hash = hash; + this.in = in; + } + + @Override + public int read() throws IOException { + int b = in.read(); + if (b >= 0) { + hash.update((byte) b); + } + return b; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + int n = in.read(b, off, len); + if (n > 0) { + hash.update(b, off, n); + } + return n; + } + + @Override + public void close() throws IOException { + in.close(); + } + } } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java index c611f915ae..ef56d802c8 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java @@ -87,6 +87,20 @@ public class MergeCommand extends GitCommand<MergeResult> { private ProgressMonitor monitor = NullProgressMonitor.INSTANCE; /** + * Values for the "merge.conflictStyle" git config. + * + * @since 5.12 + */ + public enum ConflictStyle { + + /** "merge" style: only ours/theirs. This is the default. */ + MERGE, + + /** "diff3" style: ours/base/theirs. */ + DIFF3 + } + + /** * The modes available for fast forward merges corresponding to the * <code>--ff</code>, <code>--no-ff</code> and <code>--ff-only</code> * options under <code>branch.<name>.mergeoptions</code>. diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/diff/RawText.java b/org.eclipse.jgit/src/org/eclipse/jgit/diff/RawText.java index 9f4b1fa493..d09da019dd 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/diff/RawText.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/diff/RawText.java @@ -1,6 +1,6 @@ /* * Copyright (C) 2009, Google Inc. - * Copyright (C) 2008-2009, Johannes E. Schindelin <johannes.schindelin@gmx.de> and others + * Copyright (C) 2008-2021, Johannes E. 
Schindelin <johannes.schindelin@gmx.de> and others * * This program and the accompanying materials are made available under the * terms of the Eclipse Distribution License v. 1.0 which is available at @@ -16,6 +16,7 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.ByteBuffer; import org.eclipse.jgit.errors.BinaryBlobException; import org.eclipse.jgit.errors.LargeObjectException; @@ -165,6 +166,27 @@ public class RawText extends Sequence { } /** + * Get the raw text for a single line. + * + * @param i + * index of the line to extract. Note this is 0-based, so line + * number 1 is actually index 0. + * @return the text for the line, without a trailing LF, as a + * {@link ByteBuffer} that is backed by a slice of the + * {@link #getRawContent() raw content}, with the buffer's position + * on the start of the line and the limit at the end. + * @since 5.12 + */ + public ByteBuffer getRawString(int i) { + int s = getStart(i); + int e = getEnd(i); + if (e > 0 && content[e - 1] == '\n') { + e--; + } + return ByteBuffer.wrap(content, s, e - s); + } + + /** * Get the text for a region of lines. * * @param begin diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/diff/RenameDetector.java b/org.eclipse.jgit/src/org/eclipse/jgit/diff/RenameDetector.java index 75784c2556..ba1f63b680 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/diff/RenameDetector.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/diff/RenameDetector.java @@ -104,6 +104,13 @@ public class RenameDetector { */ private int bigFileThreshold = DEFAULT_BIG_FILE_THRESHOLD; + /** + * Skip detecting content renames for binary files. Content renames are + * those that are not exact, that is with a slight content modification + * between the two files. + */ + private boolean skipContentRenamesForBinaryFiles = false; + /** Set if the number of adds or deletes was over the limit. */ private boolean overRenameLimit; @@ -236,6 +243,26 @@ public class RenameDetector { } /** + * Get skipping detecting content renames for binary files. + * + * @return true if content renames should be skipped for binary files, false otherwise. + * @since 5.12 + */ + public boolean getSkipContentRenamesForBinaryFiles() { + return skipContentRenamesForBinaryFiles; + } + + /** + * Sets skipping detecting content renames for binary files. + * + * @param value true if content renames should be skipped for binary files, false otherwise. + * @since 5.12 + */ + public void setSkipContentRenamesForBinaryFiles(boolean value) { + this.skipContentRenamesForBinaryFiles = value; + } + + /** * Check if the detector is over the rename limit. 
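The new skipContentRenamesForBinaryFiles option only suppresses the similarity (content) pass; exact renames of binary files are still found by the hash-based pass. A usage sketch, assuming the caller already has a Repository and a list of DiffEntry objects:

    import java.util.List;
    import org.eclipse.jgit.diff.DiffEntry;
    import org.eclipse.jgit.diff.RenameDetector;
    import org.eclipse.jgit.lib.Repository;

    class RenameExample {
        static List<DiffEntry> resolveRenames(Repository repository,
                List<DiffEntry> changes) throws Exception {
            RenameDetector detector = new RenameDetector(repository);
            detector.addAll(changes);
            // New in this change: skip similarity scoring for binary files.
            detector.setSkipContentRenamesForBinaryFiles(true);
            return detector.compute();
        }
    }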
* <p> * This method can be invoked either before or after {@code getEntries} has @@ -521,6 +548,7 @@ public class RenameDetector { d = new SimilarityRenameDetector(reader, deleted, added); d.setRenameScore(getRenameScore()); d.setBigFileThreshold(getBigFileThreshold()); + d.setSkipBinaryFiles(getSkipContentRenamesForBinaryFiles()); d.compute(pm); overRenameLimit |= d.isTableOverflow(); deleted = d.getLeftOverSources(); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/diff/SimilarityIndex.java b/org.eclipse.jgit/src/org/eclipse/jgit/diff/SimilarityIndex.java index fb6e5df589..661369b86a 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/diff/SimilarityIndex.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/diff/SimilarityIndex.java @@ -102,6 +102,15 @@ public class SimilarityIndex { idGrowAt = growAt(idHashBits); } + static boolean isBinary(ObjectLoader obj) throws IOException { + if (obj.isLarge()) { + try (ObjectStream in1 = obj.openStream()) { + return RawText.isBinary(in1); + } + } + return RawText.isBinary(obj.getCachedBytes()); + } + void hash(ObjectLoader obj) throws MissingObjectException, IOException, TableFullException { if (obj.isLarge()) { @@ -115,9 +124,7 @@ public class SimilarityIndex { private void hashLargeObject(ObjectLoader obj) throws IOException, TableFullException { boolean text; - try (ObjectStream in1 = obj.openStream()) { - text = !RawText.isBinary(in1); - } + text = !isBinary(obj); try (ObjectStream in2 = obj.openStream()) { hash(in2, in2.getSize(), text); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/diff/SimilarityRenameDetector.java b/org.eclipse.jgit/src/org/eclipse/jgit/diff/SimilarityRenameDetector.java index 082f31d178..5871b4aeea 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/diff/SimilarityRenameDetector.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/diff/SimilarityRenameDetector.java @@ -26,6 +26,7 @@ import org.eclipse.jgit.errors.CancelledException; import org.eclipse.jgit.internal.JGitText; import org.eclipse.jgit.lib.FileMode; import org.eclipse.jgit.lib.NullProgressMonitor; +import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.ProgressMonitor; class SimilarityRenameDetector { @@ -87,6 +88,9 @@ class SimilarityRenameDetector { */ private int bigFileThreshold = DEFAULT_BIG_FILE_THRESHOLD; + /** Skip content renames for binary files. */ + private boolean skipBinaryFiles = false; + /** Set if any {@link SimilarityIndex.TableFullException} occurs. 
*/ private boolean tableOverflow; @@ -107,6 +111,10 @@ class SimilarityRenameDetector { bigFileThreshold = threshold; } + void setSkipBinaryFiles(boolean value) { + skipBinaryFiles = value; + } + void compute(ProgressMonitor pm) throws IOException, CancelledException { if (pm == null) pm = NullProgressMonitor.INSTANCE; @@ -271,7 +279,12 @@ class SimilarityRenameDetector { if (s == null) { try { - s = hash(OLD, srcEnt); + ObjectLoader loader = reader.open(OLD, srcEnt); + if (skipBinaryFiles && SimilarityIndex.isBinary(loader)) { + pm.update(1); + continue SRC; + } + s = hash(loader); } catch (TableFullException tableFull) { tableOverflow = true; continue SRC; @@ -280,7 +293,12 @@ class SimilarityRenameDetector { SimilarityIndex d; try { - d = hash(NEW, dstEnt); + ObjectLoader loader = reader.open(NEW, dstEnt); + if (skipBinaryFiles && SimilarityIndex.isBinary(loader)) { + pm.update(1); + continue; + } + d = hash(loader); } catch (TableFullException tableFull) { if (dstTooLarge == null) dstTooLarge = new BitSet(dsts.size()); @@ -364,10 +382,10 @@ class SimilarityRenameDetector { return (((dirScoreLtr + dirScoreRtl) * 25) + (fileScore * 50)) / 100; } - private SimilarityIndex hash(DiffEntry.Side side, DiffEntry ent) + private SimilarityIndex hash(ObjectLoader objectLoader) throws IOException, TableFullException { SimilarityIndex r = new SimilarityIndex(); - r.hash(reader.open(side, ent)); + r.hash(objectLoader); r.sort(); return r; } diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java index c039aaffa9..552315d43a 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java @@ -12,6 +12,7 @@ package org.eclipse.jgit.gitrepo; import static java.nio.charset.StandardCharsets.UTF_8; import static org.eclipse.jgit.lib.Constants.DEFAULT_REMOTE_NAME; import static org.eclipse.jgit.lib.Constants.R_REMOTES; +import static org.eclipse.jgit.lib.Constants.R_TAGS; import java.io.File; import java.io.FileInputStream; @@ -79,6 +80,13 @@ import org.eclipse.jgit.util.FileUtils; * @since 3.4 */ public class RepoCommand extends GitCommand<RevCommit> { + private static final int LOCK_FAILURE_MAX_RETRIES = 5; + + // Retry exponentially with delays in this range + private static final int LOCK_FAILURE_MIN_RETRY_DELAY_MILLIS = 50; + + private static final int LOCK_FAILURE_MAX_RETRY_DELAY_MILLIS = 5000; + private String manifestPath; private String baseUri; private URI targetUri; @@ -587,8 +595,11 @@ public class RepoCommand extends GitCommand<RevCommit> { throw new RemoteUnavailableException(url); } if (recordRemoteBranch) { - // can be branch or tag - cfg.setString("submodule", name, "branch", //$NON-NLS-1$ //$NON-NLS-2$ + // "branch" field is only for non-tag references. + // Keep tags in "ref" field as hint for other tools. + String field = proj.getRevision().startsWith( + R_TAGS) ? "ref" : "branch"; //$NON-NLS-1$ //$NON-NLS-2$ + cfg.setString("submodule", name, field, //$NON-NLS-1$ proj.getRevision()); } @@ -682,50 +693,22 @@ public class RepoCommand extends GitCommand<RevCommit> { builder.finish(); ObjectId treeId = index.writeTree(inserter); - // Create a Commit object, populate it and write it - ObjectId headId = repo.resolve(targetBranch + "^{commit}"); //$NON-NLS-1$ - if (headId != null && rw.parseCommit(headId).getTree().getId().equals(treeId)) { - // No change. Do nothing. 
- return rw.parseCommit(headId); - } - - CommitBuilder commit = new CommitBuilder(); - commit.setTreeId(treeId); - if (headId != null) - commit.setParentIds(headId); - commit.setAuthor(author); - commit.setCommitter(author); - commit.setMessage(RepoText.get().repoCommitMessage); - - ObjectId commitId = inserter.insert(commit); - inserter.flush(); - - RefUpdate ru = repo.updateRef(targetBranch); - ru.setNewObjectId(commitId); - ru.setExpectedOldObjectId(headId != null ? headId : ObjectId.zeroId()); - Result rc = ru.update(rw); - - switch (rc) { - case NEW: - case FORCED: - case FAST_FORWARD: - // Successful. Do nothing. - break; - case REJECTED: - case LOCK_FAILURE: - throw new ConcurrentRefUpdateException( - MessageFormat.format( - JGitText.get().cannotLock, targetBranch), - ru.getRef(), - rc); - default: - throw new JGitInternalException(MessageFormat.format( - JGitText.get().updatingRefFailed, - targetBranch, commitId.name(), rc)); + long prevDelay = 0; + for (int i = 0; i < LOCK_FAILURE_MAX_RETRIES - 1; i++) { + try { + return commitTreeOnCurrentTip( + inserter, rw, treeId); + } catch (ConcurrentRefUpdateException e) { + prevDelay = FileUtils.delay(prevDelay, + LOCK_FAILURE_MIN_RETRY_DELAY_MILLIS, + LOCK_FAILURE_MAX_RETRY_DELAY_MILLIS); + Thread.sleep(prevDelay); + repo.getRefDatabase().refresh(); + } } - - return rw.parseCommit(commitId); - } catch (GitAPIException | IOException e) { + // In the last try, just propagate the exceptions + return commitTreeOnCurrentTip(inserter, rw, treeId); + } catch (GitAPIException | IOException | InterruptedException e) { throw new ManifestErrorException(e); } } @@ -742,6 +725,51 @@ public class RepoCommand extends GitCommand<RevCommit> { } } + + private RevCommit commitTreeOnCurrentTip(ObjectInserter inserter, + RevWalk rw, ObjectId treeId) + throws IOException, ConcurrentRefUpdateException { + ObjectId headId = repo.resolve(targetBranch + "^{commit}"); //$NON-NLS-1$ + if (headId != null && rw.parseCommit(headId).getTree().getId().equals(treeId)) { + // No change. Do nothing. + return rw.parseCommit(headId); + } + + CommitBuilder commit = new CommitBuilder(); + commit.setTreeId(treeId); + if (headId != null) + commit.setParentIds(headId); + commit.setAuthor(author); + commit.setCommitter(author); + commit.setMessage(RepoText.get().repoCommitMessage); + + ObjectId commitId = inserter.insert(commit); + inserter.flush(); + + RefUpdate ru = repo.updateRef(targetBranch); + ru.setNewObjectId(commitId); + ru.setExpectedOldObjectId(headId != null ? headId : ObjectId.zeroId()); + Result rc = ru.update(rw); + switch (rc) { + case NEW: + case FORCED: + case FAST_FORWARD: + // Successful. Do nothing. 
+ break; + case REJECTED: + case LOCK_FAILURE: + throw new ConcurrentRefUpdateException(MessageFormat + .format(JGitText.get().cannotLock, targetBranch), + ru.getRef(), rc); + default: + throw new JGitInternalException(MessageFormat.format( + JGitText.get().updatingRefFailed, + targetBranch, commitId.name(), rc)); + } + + return rw.parseCommit(commitId); + } + private void addSubmodule(String name, String url, String path, String revision, List<CopyFile> copyfiles, List<LinkFile> linkfiles, Git git) throws GitAPIException, IOException { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java index ab9fc5c9bb..fd54986f94 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java @@ -41,6 +41,9 @@ public class JGitText extends TranslationBundle { /***/ public String aNewObjectIdIsRequired; /***/ public String anExceptionOccurredWhileTryingToAddTheIdOfHEAD; /***/ public String anSSHSessionHasBeenAlreadyCreated; + /***/ public String applyBinaryBaseOidWrong; + /***/ public String applyBinaryOidTooShort; + /***/ public String applyBinaryResultOidWrong; /***/ public String applyingCommit; /***/ public String archiveFormatAlreadyAbsent; /***/ public String archiveFormatAlreadyRegistered; @@ -65,7 +68,19 @@ public class JGitText extends TranslationBundle { /***/ public String badSectionEntry; /***/ public String badShallowLine; /***/ public String bareRepositoryNoWorkdirAndIndex; + /***/ public String base85invalidChar; + /***/ public String base85length; + /***/ public String base85overflow; + /***/ public String base85tooLong; + /***/ public String base85tooShort; /***/ public String baseLengthIncorrect; + /***/ public String binaryDeltaBaseLengthMismatch; + /***/ public String binaryDeltaInvalidOffset; + /***/ public String binaryDeltaInvalidResultLength; + /***/ public String binaryHunkDecodeError; + /***/ public String binaryHunkInvalidLength; + /***/ public String binaryHunkLineTooShort; + /***/ public String binaryHunkMissingNewline; /***/ public String bitmapMissingObject; /***/ public String bitmapsMustBePrepared; /***/ public String blameNotCommittedYet; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java index 7d108feae9..17a910008c 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java @@ -244,47 +244,18 @@ public class RefDirectory extends RefDatabase { /** {@inheritDoc} */ @Override public boolean isNameConflicting(String name) throws IOException { - RefList<Ref> packed = getPackedRefs(); - RefList<LooseRef> loose = getLooseRefs(); - // Cannot be nested within an existing reference. int lastSlash = name.lastIndexOf('/'); while (0 < lastSlash) { String needle = name.substring(0, lastSlash); - if (loose.contains(needle) || packed.contains(needle)) + if (exactRef(needle) != null) { return true; + } lastSlash = name.lastIndexOf('/', lastSlash - 1); } // Cannot be the container of an existing reference. 
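The rewritten isNameConflicting answers the same two questions as before, but through the generic ref lookup API instead of scanning the packed and loose lists itself. Roughly, and checking only a single parent level here for brevity (the real code loops over all of them):

    import java.io.IOException;
    import org.eclipse.jgit.lib.RefDatabase;
    import org.eclipse.jgit.lib.Repository;

    class RefNameCheck {
        static boolean wouldConflict(Repository repository, String name)
                throws IOException {
            RefDatabase refdb = repository.getRefDatabase();
            // Is the name nested inside an existing ref (one level shown)?
            int slash = name.lastIndexOf('/');
            boolean nestedInExisting = slash > 0
                    && refdb.exactRef(name.substring(0, slash)) != null;
            // Does any existing ref live underneath the name?
            boolean containsExisting = !refdb.getRefsByPrefix(name + "/").isEmpty();
            return nestedInExisting || containsExisting;
        }
    }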
- String prefix = name + '/'; - int idx; - - idx = -(packed.find(prefix) + 1); - if (idx < packed.size() && packed.get(idx).getName().startsWith(prefix)) - return true; - - idx = -(loose.find(prefix) + 1); - if (idx < loose.size() && loose.get(idx).getName().startsWith(prefix)) - return true; - - return false; - } - - private RefList<LooseRef> getLooseRefs() { - final RefList<LooseRef> oldLoose = looseRefs.get(); - - LooseScanner scan = new LooseScanner(oldLoose); - scan.scan(ALL); - - RefList<LooseRef> loose; - if (scan.newLoose != null) { - loose = scan.newLoose.toRefList(); - if (looseRefs.compareAndSet(oldLoose, loose)) - modCnt.incrementAndGet(); - } else - loose = oldLoose; - return loose; + return !getRefsByPrefix(name + '/').isEmpty(); } @Nullable diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/transport/ssh/OpenSshConfigFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/transport/ssh/OpenSshConfigFile.java index de6a346cb2..228c25f0a5 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/transport/ssh/OpenSshConfigFile.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/transport/ssh/OpenSshConfigFile.java @@ -1,6 +1,6 @@ /* * Copyright (C) 2008, 2017, Google Inc. - * Copyright (C) 2017, 2018, Thomas Wolf <thomas.wolf@paranor.ch> and others + * Copyright (C) 2017, 2021, Thomas Wolf <thomas.wolf@paranor.ch> and others * * This program and the accompanying materials are made available under the * terms of the Eclipse Distribution License v. 1.0 which is available at @@ -21,7 +21,8 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.LinkedHashMap; +import java.util.Iterator; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; @@ -82,12 +83,6 @@ import org.eclipse.jgit.util.SystemReader; */ public class OpenSshConfigFile implements SshConfigStore { - /** - * "Host" name of the HostEntry for the default options before the first - * host block in a config file. - */ - private static final String DEFAULT_NAME = ""; //$NON-NLS-1$ - /** The user's home directory, as key files may be relative to here. */ private final File home; @@ -105,11 +100,9 @@ public class OpenSshConfigFile implements SshConfigStore { * fully resolved entries created from that. */ private static class State { - // Keyed by pattern; if a "Host" line has multiple patterns, we generate - // duplicate HostEntry objects - Map<String, HostEntry> entries = new LinkedHashMap<>(); + List<HostEntry> entries = new LinkedList<>(); - // Keyed by user@hostname:port + // Previous lookups, keyed by user@hostname:port Map<String, HostEntry> hosts = new HashMap<>(); @Override @@ -165,14 +158,16 @@ public class OpenSshConfigFile implements SshConfigStore { return h; } HostEntry fullConfig = new HostEntry(); - // Initialize with default entries at the top of the file, before the - // first Host block. 
- fullConfig.merge(cache.entries.get(DEFAULT_NAME)); - for (Map.Entry<String, HostEntry> e : cache.entries.entrySet()) { - String pattern = e.getKey(); - if (isHostMatch(pattern, hostName)) { - fullConfig.merge(e.getValue()); - } + Iterator<HostEntry> entries = cache.entries.iterator(); + if (entries.hasNext()) { + // Should always have at least the first top entry containing + // key-value pairs before the first Host block + fullConfig.merge(entries.next()); + entries.forEachRemaining(entry -> { + if (entry.matches(hostName)) { + fullConfig.merge(entry); + } + }); } fullConfig.substitute(hostName, port, userName, localUserName, home); cache.hosts.put(cacheKey, fullConfig); @@ -208,20 +203,19 @@ public class OpenSshConfigFile implements SshConfigStore { return state; } - private Map<String, HostEntry> parse(BufferedReader reader) + private List<HostEntry> parse(BufferedReader reader) throws IOException { - final Map<String, HostEntry> entries = new LinkedHashMap<>(); - final List<HostEntry> current = new ArrayList<>(4); - String line; + final List<HostEntry> entries = new LinkedList<>(); // The man page doesn't say so, but the openssh parser (readconf.c) // starts out in active mode and thus always applies any lines that // occur before the first host block. We gather those options in a // HostEntry for DEFAULT_NAME. HostEntry defaults = new HostEntry(); - current.add(defaults); - entries.put(DEFAULT_NAME, defaults); + HostEntry current = defaults; + entries.add(defaults); + String line; while ((line = reader.readLine()) != null) { // OpenSsh ignores trailing comments on a line. Anything after the // first # on a line is trimmed away (yes, even if the hash is @@ -246,38 +240,17 @@ public class OpenSshConfigFile implements SshConfigStore { String argValue = parts.length > 1 ? parts[1].trim() : ""; //$NON-NLS-1$ if (StringUtils.equalsIgnoreCase(SshConstants.HOST, keyword)) { - current.clear(); - for (String name : parseList(argValue)) { - if (name == null || name.isEmpty()) { - // null should not occur, but better be safe than sorry. - continue; - } - HostEntry c = entries.get(name); - if (c == null) { - c = new HostEntry(); - entries.put(name, c); - } - current.add(c); - } - continue; - } - - if (current.isEmpty()) { - // We received an option outside of a Host block. We - // don't know who this should match against, so skip. 
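With the list-based parsing below, lookup walks the entries in file order: the implicit entry collecting options before the first Host line, then every Host block whose patterns (including negated ones) match, merged in order. A sketch of resolving a host through the public type; the file locations, user names, and the (home, config file, local user name) constructor arguments are assumptions of this example:

    import java.io.File;
    import org.eclipse.jgit.internal.transport.ssh.OpenSshConfigFile;
    import org.eclipse.jgit.transport.SshConfigStore;
    import org.eclipse.jgit.transport.SshConstants;

    class SshConfigLookup {
        static String identityFor(File home, String host) {
            SshConfigStore store = new OpenSshConfigFile(home,
                    new File(new File(home, ".ssh"), "config"), "localuser");
            // Resolved entry for this host, port 22, remote user "git".
            SshConfigStore.HostConfig entry = store.lookup(host, 22, "git");
            return entry.getValue(SshConstants.IDENTITY_FILE);
        }
    }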
+ current = new HostEntry(parseList(argValue)); + entries.add(current); continue; } if (HostEntry.isListKey(keyword)) { List<String> args = validate(keyword, parseList(argValue)); - for (HostEntry entry : current) { - entry.setValue(keyword, args); - } + current.setValue(keyword, args); } else if (!argValue.isEmpty()) { argValue = validate(keyword, dequote(argValue)); - for (HostEntry entry : current) { - entry.setValue(keyword, argValue); - } + current.setValue(keyword, argValue); } } @@ -300,7 +273,7 @@ public class OpenSshConfigFile implements SshConfigStore { int length = argument.length(); while (start < length) { // Skip whitespace - if (Character.isSpaceChar(argument.charAt(start))) { + if (Character.isWhitespace(argument.charAt(start))) { start++; continue; } @@ -315,7 +288,7 @@ public class OpenSshConfigFile implements SshConfigStore { } else { int stop = start + 1; while (stop < length - && !Character.isSpaceChar(argument.charAt(stop))) { + && !Character.isWhitespace(argument.charAt(stop))) { stop++; } result.add(argument.substring(start, stop)); @@ -358,13 +331,6 @@ public class OpenSshConfigFile implements SshConfigStore { return value; } - private static boolean isHostMatch(String pattern, String name) { - if (pattern.startsWith("!")) { //$NON-NLS-1$ - return !patternMatchesHost(pattern.substring(1), name); - } - return patternMatchesHost(pattern, name); - } - private static boolean patternMatchesHost(String pattern, String name) { if (pattern.indexOf('*') >= 0 || pattern.indexOf('?') >= 0) { final FileNameMatcher fn; @@ -389,9 +355,12 @@ public class OpenSshConfigFile implements SshConfigStore { private static String stripWhitespace(String value) { final StringBuilder b = new StringBuilder(); - for (int i = 0; i < value.length(); i++) { - if (!Character.isSpaceChar(value.charAt(i))) - b.append(value.charAt(i)); + int length = value.length(); + for (int i = 0; i < length; i++) { + char ch = value.charAt(i); + if (!Character.isWhitespace(ch)) { + b.append(ch); + } } return b.toString(); } @@ -511,6 +480,38 @@ public class OpenSshConfigFile implements SshConfigStore { private Map<String, List<String>> listOptions; + private final List<String> patterns; + + /** + * Constructor used to build the merged entry; never matches anything + */ + public HostEntry() { + this.patterns = Collections.emptyList(); + } + + /** + * @param patterns + * to be used in matching against host name. + */ + public HostEntry(List<String> patterns) { + this.patterns = patterns; + } + + boolean matches(String hostName) { + boolean doesMatch = false; + for (String pattern : patterns) { + if (pattern.startsWith("!")) { //$NON-NLS-1$ + if (patternMatchesHost(pattern.substring(1), hostName)) { + return false; + } + } else if (!doesMatch + && patternMatchesHost(pattern, hostName)) { + doesMatch = true; + } + } + return doesMatch; + } + private static String toKey(String key) { String k = ALIASES.get(key); return k != null ? 
k : key; @@ -886,8 +887,8 @@ public class OpenSshConfigFile implements SshConfigStore { public String substitute(String input, String allowed, boolean withEnv) { if (input == null || input.length() <= 1 - || input.indexOf('%') < 0 - && (!withEnv || input.indexOf("${") < 0)) { //$NON-NLS-1$ + || (input.indexOf('%') < 0 + && (!withEnv || input.indexOf("${") < 0))) { //$NON-NLS-1$ return input; } StringBuilder builder = new StringBuilder(); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java index 06009f885d..ef1379a238 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java @@ -13,7 +13,6 @@ package org.eclipse.jgit.lib; import static org.eclipse.jgit.transport.ReceiveCommand.Result.NOT_ATTEMPTED; import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_OTHER_REASON; -import static java.util.stream.Collectors.toCollection; import java.io.IOException; import java.text.MessageFormat; @@ -29,7 +28,6 @@ import java.util.concurrent.TimeoutException; import org.eclipse.jgit.annotations.Nullable; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.internal.JGitText; -import org.eclipse.jgit.lib.RefUpdate.Result; import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.transport.PushCertificate; import org.eclipse.jgit.transport.ReceiveCommand; @@ -495,42 +493,24 @@ public class BatchRefUpdate { } } if (!commands2.isEmpty()) { - // What part of the name space is already taken - Collection<String> takenNames = refdb.getRefs().stream() - .map(Ref::getName) - .collect(toCollection(HashSet::new)); - Collection<String> takenPrefixes = getTakenPrefixes(takenNames); - - // Now to the update that may require more room in the name space + // Perform updates that may require more room in the name space for (ReceiveCommand cmd : commands2) { try { if (cmd.getResult() == NOT_ATTEMPTED) { cmd.updateType(walk); RefUpdate ru = newUpdate(cmd); - SWITCH: switch (cmd.getType()) { - case DELETE: - // Performed in the first phase - break; - case UPDATE: - case UPDATE_NONFASTFORWARD: - RefUpdate ruu = newUpdate(cmd); - cmd.setResult(ruu.update(walk)); - break; - case CREATE: - for (String prefix : getPrefixes(cmd.getRefName())) { - if (takenNames.contains(prefix)) { - cmd.setResult(Result.LOCK_FAILURE); - break SWITCH; - } - } - if (takenPrefixes.contains(cmd.getRefName())) { - cmd.setResult(Result.LOCK_FAILURE); - break SWITCH; - } - ru.setCheckConflicting(false); - takenPrefixes.addAll(getPrefixes(cmd.getRefName())); - takenNames.add(cmd.getRefName()); - cmd.setResult(ru.update(walk)); + switch (cmd.getType()) { + case DELETE: + // Performed in the first phase + break; + case UPDATE: + case UPDATE_NONFASTFORWARD: + RefUpdate ruu = newUpdate(cmd); + cmd.setResult(ruu.update(walk)); + break; + case CREATE: + cmd.setResult(ru.update(walk)); + break; } } } catch (IOException err) { @@ -602,14 +582,6 @@ public class BatchRefUpdate { execute(walk, monitor, null); } - private static Collection<String> getTakenPrefixes(Collection<String> names) { - Collection<String> ref = new HashSet<>(); - for (String name : names) { - addPrefixesTo(name, ref); - } - return ref; - } - /** * Get all path prefixes of a ref name. 
* diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java index 03c1ef904c..3e3d9b5694 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java @@ -398,7 +398,15 @@ public final class ConfigConstants { public static final String CONFIG_KEY_FF = "ff"; /** + * The "conflictStyle" key. + * + * @since 5.12 + */ + public static final String CONFIG_KEY_CONFLICTSTYLE = "conflictStyle"; + + /** * The "checkstat" key + * * @since 3.0 */ public static final String CONFIG_KEY_CHECKSTAT = "checkstat"; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/FetchV2Request.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/FetchV2Request.java index ea639332ea..50fb9d2262 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/FetchV2Request.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/FetchV2Request.java @@ -36,6 +36,8 @@ public final class FetchV2Request extends FetchRequest { private final boolean doneReceived; + private final boolean waitForDone; + @NonNull private final List<String> serverOptions; @@ -50,7 +52,8 @@ public final class FetchV2Request extends FetchRequest { @NonNull Set<ObjectId> clientShallowCommits, int deepenSince, @NonNull List<String> deepenNotRefs, int depth, @NonNull FilterSpec filterSpec, - boolean doneReceived, @NonNull Set<String> clientCapabilities, + boolean doneReceived, boolean waitForDone, + @NonNull Set<String> clientCapabilities, @Nullable String agent, @NonNull List<String> serverOptions, boolean sidebandAll, @NonNull List<String> packfileUriProtocols) { super(wantIds, depth, clientShallowCommits, filterSpec, @@ -59,6 +62,7 @@ public final class FetchV2Request extends FetchRequest { this.peerHas = requireNonNull(peerHas); this.wantedRefs = requireNonNull(wantedRefs); this.doneReceived = doneReceived; + this.waitForDone = waitForDone; this.serverOptions = requireNonNull(serverOptions); this.sidebandAll = sidebandAll; this.packfileUriProtocols = packfileUriProtocols; @@ -90,7 +94,14 @@ public final class FetchV2Request extends FetchRequest { } /** - * Options received in server-option lines. The caller can choose to act on + * @return true if the request had a "wait-for-done" line + */ + boolean wasWaitForDoneReceived() { + return waitForDone; + } + + /** + * Options received in server-option lines. The caller can choose to act on * these in an application-specific way * * @return Immutable list of server options received in the request @@ -141,6 +152,8 @@ public final class FetchV2Request extends FetchRequest { boolean doneReceived; + boolean waitForDone; + @Nullable String agent; @@ -280,6 +293,16 @@ public final class FetchV2Request extends FetchRequest { } /** + * Mark that the "wait-for-done" line has been received. + * + * @return this builder + */ + Builder setWaitForDone() { + waitForDone = true; + return this; + } + + /** * Value of an agent line received after the command and before the * arguments. E.g. "agent=a.b.c/1.0" should set "a.b.c/1.0". 
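The new merge.conflictStyle key (CONFIG_KEY_CONFLICTSTYLE above, paired with the ConflictStyle enum added to MergeCommand) reads like any other enum-valued option. A minimal sketch, assuming callers fall back to MERGE, the enum's documented default:

    import org.eclipse.jgit.api.MergeCommand;
    import org.eclipse.jgit.lib.ConfigConstants;
    import org.eclipse.jgit.lib.Repository;

    class ConflictStyleExample {
        static MergeCommand.ConflictStyle styleOf(Repository repository) {
            return repository.getConfig().getEnum(
                    "merge", null, // section name; no dedicated constant assumed here
                    ConfigConstants.CONFIG_KEY_CONFLICTSTYLE,
                    MergeCommand.ConflictStyle.MERGE);
        }
    }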
* @@ -328,7 +351,7 @@ public final class FetchV2Request extends FetchRequest { FetchV2Request build() { return new FetchV2Request(peerHas, wantedRefs, wantIds, clientShallowCommits, deepenSince, deepenNotRefs, - depth, filterSpec, doneReceived, clientCapabilities, + depth, filterSpec, doneReceived, waitForDone, clientCapabilities, agent, Collections.unmodifiableList(serverOptions), sidebandAll, Collections.unmodifiableList(packfileUriProtocols)); diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/GitProtocolConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/GitProtocolConstants.java index 36fce7a3f5..c5e52bef98 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/GitProtocolConstants.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/GitProtocolConstants.java @@ -149,6 +149,13 @@ public final class GitProtocolConstants { public static final String OPTION_SIDEBAND_ALL = "sideband-all"; //$NON-NLS-1$ /** + * The server waits for client to send "done" before sending any packs back. + * + * @since 5.13 + */ + public static final String OPTION_WAIT_FOR_DONE = "wait-for-done"; //$NON-NLS-1$ + + /** * The client supports atomic pushes. If this option is used, the server * will update all refs within one atomic transaction. * diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/ProtocolV2Parser.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/ProtocolV2Parser.java index faccc25185..92f0133f5a 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/ProtocolV2Parser.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/ProtocolV2Parser.java @@ -19,6 +19,7 @@ import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_SERVER_OPTI import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_SIDEBAND_ALL; import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_SIDE_BAND_64K; import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_THIN_PACK; +import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_WAIT_FOR_DONE; import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_WANT_REF; import java.io.IOException; @@ -123,6 +124,8 @@ final class ProtocolV2Parser { reqBuilder.addPeerHas(ObjectId.fromString(line2.substring(5))); } else if (line2.equals("done")) { //$NON-NLS-1$ reqBuilder.setDoneReceived(); + } else if (line2.equals(OPTION_WAIT_FOR_DONE)) { + reqBuilder.setWaitForDone(); } else if (line2.equals(OPTION_THIN_PACK)) { reqBuilder.addClientCapability(OPTION_THIN_PACK); } else if (line2.equals(OPTION_NO_PROGRESS)) { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshConstants.java index be55cd1b81..5cd5b334ab 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshConstants.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshConstants.java @@ -118,7 +118,7 @@ public final class SshConstants { * Key in an ssh config file; defines signature algorithms for public key * authentication as a comma-separated list. 
* - * @since 5.11 + * @since 5.11.1 */ public static final String PUBKEY_ACCEPTED_ALGORITHMS = "PubkeyAcceptedAlgorithms"; diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java index 83ffd4123a..da97f1e580 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java @@ -120,7 +120,10 @@ public class TransferConfig { private final boolean allowReachableSha1InWant; private final boolean allowFilter; private final boolean allowSidebandAll; + private final boolean advertiseSidebandAll; + + private final boolean advertiseWaitForDone; + final @Nullable ProtocolVersion protocolVersion; final String[] hideRefs; @@ -206,6 +209,8 @@ public class TransferConfig { "uploadpack", "allowsidebandall", false); advertiseSidebandAll = rc.getBoolean("uploadpack", "advertisesidebandall", false); + advertiseWaitForDone = rc.getBoolean("uploadpack", + "advertisewaitfordone", false); } /** @@ -305,6 +310,14 @@ public class TransferConfig { } /** + * @return true to advertise wait-for-done to the clients + * @since 5.13 + */ + public boolean isAdvertiseWaitForDone() { + return advertiseWaitForDone; + } + + /** * Get {@link org.eclipse.jgit.transport.RefFilter} respecting configured * hidden refs. * diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java index 7f1ddaab2e..ecf1751932 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java @@ -33,6 +33,7 @@ import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_SIDEBAND_AL import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_SIDE_BAND; import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_SIDE_BAND_64K; import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_THIN_PACK; +import static org.eclipse.jgit.transport.GitProtocolConstants.OPTION_WAIT_FOR_DONE; import static org.eclipse.jgit.transport.GitProtocolConstants.VERSION_2_REQUEST; import static org.eclipse.jgit.util.RefMap.toRefMap; @@ -1192,9 +1193,10 @@ public class UploadPack { walk.assumeShallow(req.getClientShallowCommits()); if (req.wasDoneReceived()) { - processHaveLines(req.getPeerHas(), ObjectId.zeroId(), + processHaveLines( + req.getPeerHas(), ObjectId.zeroId(), new PacketLineOut(NullOutputStream.INSTANCE, false), - accumulator); + accumulator, req.wasWaitForDoneReceived() ? Option.WAIT_FOR_DONE : Option.NONE); } else { pckOut.writeString( GitProtocolConstants.SECTION_ACKNOWLEDGMENTS + '\n'); @@ -1205,8 +1207,8 @@ public class UploadPack { } processHaveLines(req.getPeerHas(), ObjectId.zeroId(), new PacketLineOut(NullOutputStream.INSTANCE, false), - accumulator); - if (okToGiveUp()) { + accumulator, Option.NONE); + if (!req.wasWaitForDoneReceived() && okToGiveUp()) { pckOut.writeString("ready\n"); //$NON-NLS-1$ } else if (commonBase.isEmpty()) { pckOut.writeString("NAK\n"); //$NON-NLS-1$ @@ -1214,7 +1216,7 @@ public class UploadPack { sectionSent = true; } - if (req.wasDoneReceived() || okToGiveUp()) { + if (req.wasDoneReceived() || (!req.wasWaitForDoneReceived() && okToGiveUp())) { if (mayHaveShallow) { if (sectionSent) pckOut.writeDelim(); @@ -1312,6 +1314,9 @@ public class UploadPack { ? OPTION_SIDEBAND_ALL + ' ' : "") + (cachedPackUriProvider != null ? 
"packfile-uris " : "") + + (transferConfig.isAdvertiseWaitForDone() + ? OPTION_WAIT_FOR_DONE + ' ' + : "") + OPTION_SHALLOW); caps.add(CAPABILITY_SERVER_OPTION); return caps; @@ -1656,7 +1661,7 @@ public class UploadPack { } if (PacketLineIn.isEnd(line)) { - last = processHaveLines(peerHas, last, pckOut, accumulator); + last = processHaveLines(peerHas, last, pckOut, accumulator, Option.NONE); if (commonBase.isEmpty() || multiAck != MultiAck.OFF) pckOut.writeString("NAK\n"); //$NON-NLS-1$ if (noDone && sentReady) { @@ -1671,7 +1676,7 @@ public class UploadPack { peerHas.add(ObjectId.fromString(line.substring(5))); accumulator.haves++; } else if (line.equals("done")) { //$NON-NLS-1$ - last = processHaveLines(peerHas, last, pckOut, accumulator); + last = processHaveLines(peerHas, last, pckOut, accumulator, Option.NONE); if (commonBase.isEmpty()) pckOut.writeString("NAK\n"); //$NON-NLS-1$ @@ -1687,8 +1692,14 @@ public class UploadPack { } } + private enum Option { + WAIT_FOR_DONE, + NONE; + } + private ObjectId processHaveLines(List<ObjectId> peerHas, ObjectId last, - PacketLineOut out, PackStatistics.Accumulator accumulator) + PacketLineOut out, PackStatistics.Accumulator accumulator, + Option option) throws IOException { preUploadHook.onBeginNegotiateRound(this, wantIds, peerHas.size()); if (wantAll.isEmpty() && !wantIds.isEmpty()) @@ -1754,6 +1765,18 @@ public class UploadPack { // create a pack at this point, let the client know so it stops // telling us about its history. // + if (option != Option.WAIT_FOR_DONE) { + sentReady = shouldGiveUp(peerHas, out, missCnt); + } + + preUploadHook.onEndNegotiateRound(this, wantAll, haveCnt, missCnt, sentReady); + peerHas.clear(); + return last; + } + + private boolean shouldGiveUp(List<ObjectId> peerHas, PacketLineOut out, int missCnt) + throws IOException { + boolean sentReady = false; boolean didOkToGiveUp = false; if (0 < missCnt) { for (int i = peerHas.size() - 1; i >= 0; i--) { @@ -1765,10 +1788,12 @@ public class UploadPack { case OFF: break; case CONTINUE: - out.writeString("ACK " + id.name() + " continue\n"); //$NON-NLS-1$ //$NON-NLS-2$ + out.writeString( + "ACK " + id.name() + " continue\n"); //$NON-NLS-1$ //$NON-NLS-2$ break; case DETAILED: - out.writeString("ACK " + id.name() + " ready\n"); //$NON-NLS-1$ //$NON-NLS-2$ + out.writeString( + "ACK " + id.name() + " ready\n"); //$NON-NLS-1$ //$NON-NLS-2$ sentReady = true; break; } @@ -1778,15 +1803,14 @@ public class UploadPack { } } - if (multiAck == MultiAck.DETAILED && !didOkToGiveUp && okToGiveUp()) { + if (multiAck == MultiAck.DETAILED && !didOkToGiveUp + && okToGiveUp()) { ObjectId id = peerHas.get(peerHas.size() - 1); out.writeString("ACK " + id.name() + " ready\n"); //$NON-NLS-1$ //$NON-NLS-2$ sentReady = true; } - preUploadHook.onEndNegotiateRound(this, wantAll, haveCnt, missCnt, sentReady); - peerHas.clear(); - return last; + return sentReady; } private void parseWants(PackStatistics.Accumulator accumulator) throws IOException { diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/Base85.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/Base85.java new file mode 100644 index 0000000000..54b7cfcaa7 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/Base85.java @@ -0,0 +1,195 @@ +/* + * Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.util; + +import java.nio.charset.StandardCharsets; +import java.text.MessageFormat; +import java.util.Arrays; + +import org.eclipse.jgit.internal.JGitText; + +/** + * Base-85 encoder/decoder. + * + * @since 5.12 + */ +public final class Base85 { + + private static final byte[] ENCODE = ("0123456789" //$NON-NLS-1$ + + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" //$NON-NLS-1$ + + "abcdefghijklmnopqrstuvwxyz" //$NON-NLS-1$ + + "!#$%&()*+-;<=>?@^_`{|}~") //$NON-NLS-1$ + .getBytes(StandardCharsets.US_ASCII); + + private static final int[] DECODE = new int[256]; + + static { + Arrays.fill(DECODE, -1); + for (int i = 0; i < ENCODE.length; i++) { + DECODE[ENCODE[i]] = i; + } + } + + private Base85() { + // No instantiation + } + + /** + * Determines the length of the base-85 encoding for {@code rawLength} + * bytes. + * + * @param rawLength + * number of bytes to encode + * @return number of bytes needed for the base-85 encoding of + * {@code rawLength} bytes + */ + public static int encodedLength(int rawLength) { + return (rawLength + 3) / 4 * 5; + } + + /** + * Encodes the given {@code data} in Base-85. + * + * @param data + * to encode + * @return encoded data + */ + public static byte[] encode(byte[] data) { + return encode(data, 0, data.length); + } + + /** + * Encodes {@code length} bytes of {@code data} in Base-85, beginning at the + * {@code start} index. + * + * @param data + * to encode + * @param start + * index of the first byte to encode + * @param length + * number of bytes to encode + * @return encoded data + */ + public static byte[] encode(byte[] data, int start, int length) { + byte[] result = new byte[encodedLength(length)]; + int end = start + length; + int in = start; + int out = 0; + while (in < end) { + // Accumulate remaining bytes MSB first as a 32bit value + long accumulator = ((long) (data[in++] & 0xFF)) << 24; + if (in < end) { + accumulator |= (data[in++] & 0xFF) << 16; + if (in < end) { + accumulator |= (data[in++] & 0xFF) << 8; + if (in < end) { + accumulator |= (data[in++] & 0xFF); + } + } + } + // Write the 32bit value in base-85 encoding, also MSB first + for (int i = 4; i >= 0; i--) { + result[out + i] = ENCODE[(int) (accumulator % 85)]; + accumulator /= 85; + } + out += 5; + } + return result; + } + + /** + * Decodes the Base-85 {@code encoded} data into a byte array of + * {@code expectedSize} bytes. + * + * @param encoded + * Base-85 encoded data + * @param expectedSize + * of the result + * @return the decoded bytes + * @throws IllegalArgumentException + * if expectedSize doesn't match, the encoded data has a length + * that is not a multiple of 5, or there are invalid characters + * in the encoded data + */ + public static byte[] decode(byte[] encoded, int expectedSize) { + return decode(encoded, 0, encoded.length, expectedSize); + } + + /** + * Decodes {@code length} bytes of Base-85 {@code encoded} data, beginning + * at the {@code start} index, into a byte array of {@code expectedSize} + * bytes. 
+ * + * @param encoded + * Base-85 encoded data + * @param start + * index at which the data to decode starts in {@code encoded} + * @param length + * of the Base-85 encoded data + * @param expectedSize + * of the result + * @return the decoded bytes + * @throws IllegalArgumentException + * if expectedSize doesn't match, {@code length} is not a + * multiple of 5, or there are invalid characters in the encoded + * data + */ + public static byte[] decode(byte[] encoded, int start, int length, + int expectedSize) { + if (length % 5 != 0) { + throw new IllegalArgumentException(JGitText.get().base85length); + } + byte[] result = new byte[expectedSize]; + int end = start + length; + int in = start; + int out = 0; + while (in < end && out < expectedSize) { + // Accumulate 5 bytes, "MSB" first + long accumulator = 0; + for (int i = 4; i >= 0; i--) { + int val = DECODE[encoded[in++] & 0xFF]; + if (val < 0) { + throw new IllegalArgumentException(MessageFormat.format( + JGitText.get().base85invalidChar, + Integer.toHexString(encoded[in - 1] & 0xFF))); + } + accumulator = accumulator * 85 + val; + } + if (accumulator > 0xFFFF_FFFFL) { + throw new IllegalArgumentException( + MessageFormat.format(JGitText.get().base85overflow, + Long.toHexString(accumulator))); + } + // Write remaining bytes, MSB first + result[out++] = (byte) (accumulator >>> 24); + if (out < expectedSize) { + result[out++] = (byte) (accumulator >>> 16); + if (out < expectedSize) { + result[out++] = (byte) (accumulator >>> 8); + if (out < expectedSize) { + result[out++] = (byte) accumulator; + } + } + } + } + // Should have exhausted 'in' and filled 'out' completely + if (in < end) { + throw new IllegalArgumentException( + MessageFormat.format(JGitText.get().base85tooLong, + Integer.valueOf(expectedSize))); + } + if (out < expectedSize) { + throw new IllegalArgumentException( + MessageFormat.format(JGitText.get().base85tooShort, + Integer.valueOf(expectedSize))); + } + return result; + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/BinaryDeltaInputStream.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/BinaryDeltaInputStream.java new file mode 100644 index 0000000000..9eceeb8117 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/BinaryDeltaInputStream.java @@ -0,0 +1,211 @@ +/* + * Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.util.io; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.io.StreamCorruptedException; +import java.text.MessageFormat; + +import org.eclipse.jgit.internal.JGitText; + +/** + * An {@link InputStream} that applies a binary delta to a base on the fly. + * <p> + * Delta application to a base needs random access to the base data. The delta + * is expressed as a sequence of copy and insert instructions. A copy + * instruction has the form "COPY fromOffset length" and says "copy length bytes + * from the base, starting at offset fromOffset, to the result". An insert + * instruction has the form "INSERT length" followed by length bytes and says + * "copy the next length bytes from the delta to the result". 
+ * </p> + * <p> + * These instructions are generated using a content-defined chunking algorithm + * (currently C git uses the standard Rabin variant; but there are others that + * could be used) that identifies equal chunks. It is entirely possible that a + * later copy instruction has a fromOffset that is before the fromOffset of an + * earlier copy instruction. + * </p> + * <p> + * This makes it impossible to stream the base. + * </p> + * <p> + * JGit is limited to 2GB maximum size for the base since array indices are + * signed 32bit values. + * + * @since 5.12 + */ +public class BinaryDeltaInputStream extends InputStream { + + private final byte[] base; + + private final InputStream delta; + + private long resultLength; + + private long toDeliver = -1; + + private int fromBase; + + private int fromDelta; + + private int baseOffset = -1; + + /** + * Creates a new {@link BinaryDeltaInputStream} that applies {@code delta} + * to {@code base}. + * + * @param base + * data to apply the delta to + * @param delta + * {@link InputStream} delivering the delta to apply + */ + public BinaryDeltaInputStream(byte[] base, InputStream delta) { + this.base = base; + this.delta = delta; + } + + @Override + public int read() throws IOException { + int b = readNext(); + if (b >= 0) { + toDeliver--; + } + return b; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + return super.read(b, off, len); + } + + private void initialize() throws IOException { + long baseSize = readVarInt(delta); + if (baseSize > Integer.MAX_VALUE || baseSize < 0 + || (int) baseSize != base.length) { + throw new IOException(MessageFormat.format( + JGitText.get().binaryDeltaBaseLengthMismatch, + Integer.valueOf(base.length), Long.valueOf(baseSize))); + } + resultLength = readVarInt(delta); + if (resultLength < 0) { + throw new StreamCorruptedException( + JGitText.get().binaryDeltaInvalidResultLength); + } + toDeliver = resultLength; + baseOffset = 0; + } + + private int readNext() throws IOException { + if (baseOffset < 0) { + initialize(); + } + if (fromBase > 0) { + fromBase--; + return base[baseOffset++] & 0xFF; + } else if (fromDelta > 0) { + fromDelta--; + return delta.read(); + } + int command = delta.read(); + if (command < 0) { + return -1; + } + if ((command & 0x80) != 0) { + // Decode offset and length to read from base + long copyOffset = 0; + for (int i = 1, shift = 0; i < 0x10; i *= 2, shift += 8) { + if ((command & i) != 0) { + copyOffset |= ((long) next(delta)) << shift; + } + } + int copySize = 0; + for (int i = 0x10, shift = 0; i < 0x80; i *= 2, shift += 8) { + if ((command & i) != 0) { + copySize |= next(delta) << shift; + } + } + if (copySize == 0) { + copySize = 0x10000; + } + if (copyOffset > base.length - copySize) { + throw new StreamCorruptedException(MessageFormat.format( + JGitText.get().binaryDeltaInvalidOffset, + Long.valueOf(copyOffset), Integer.valueOf(copySize))); + } + baseOffset = (int) copyOffset; + fromBase = copySize; + return readNext(); + } else if (command != 0) { + // The next 'command' bytes come from the delta + fromDelta = command - 1; + return delta.read(); + } else { + // Zero is reserved + throw new StreamCorruptedException( + JGitText.get().unsupportedCommand0); + } + } + + private int next(InputStream in) throws IOException { + int b = in.read(); + if (b < 0) { + throw new EOFException(); + } + return b; + } + + private long readVarInt(InputStream in) throws IOException { + long val = 0; + int shift = 0; + int b; + do { + b = next(in); + val |= 
((long) (b & 0x7f)) << shift; + shift += 7; + } while ((b & 0x80) != 0); + return val; + } + + /** + * Tells the expected size of the final result. + * + * @return the size + * @throws IOException + * if the size cannot be determined from {@code delta} + */ + public long getExpectedResultSize() throws IOException { + if (baseOffset < 0) { + initialize(); + } + return resultLength; + } + + /** + * Tells whether the delta has been fully consumed, and the expected number + * of bytes for the combined result have been read from this + * {@link BinaryDeltaInputStream}. + * + * @return whether delta application was successful + */ + public boolean isFullyConsumed() { + try { + return toDeliver == 0 && delta.read() < 0; + } catch (IOException e) { + return toDeliver == 0; + } + } + + @Override + public void close() throws IOException { + delta.close(); + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/BinaryHunkInputStream.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/BinaryHunkInputStream.java new file mode 100644 index 0000000000..4f940d77a0 --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/BinaryHunkInputStream.java @@ -0,0 +1,118 @@ +/* + * Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.util.io; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.io.StreamCorruptedException; +import java.text.MessageFormat; + +import org.eclipse.jgit.internal.JGitText; +import org.eclipse.jgit.util.Base85; + +/** + * A stream that decodes git binary patch data on the fly. + * + * @since 5.12 + */ +public class BinaryHunkInputStream extends InputStream { + + private final InputStream in; + + private int lineNumber; + + private byte[] buffer; + + private int pos = 0; + + /** + * Creates a new {@link BinaryHunkInputStream}. 
+ * + * @param in + * {@link InputStream} to read the base-85 encoded patch data + * from + */ + public BinaryHunkInputStream(InputStream in) { + this.in = in; + } + + @Override + public int read() throws IOException { + if (pos < 0) { + return -1; + } + if (buffer == null || pos == buffer.length) { + fillBuffer(); + } + if (pos >= 0) { + return buffer[pos++] & 0xFF; + } + return -1; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + return super.read(b, off, len); + } + + @Override + public void close() throws IOException { + in.close(); + buffer = null; + } + + private void fillBuffer() throws IOException { + int length = in.read(); + if (length < 0) { + pos = length; + buffer = null; + return; + } + lineNumber++; + // Length is encoded with characters, A..Z for 1..26 and a..z for 27..52 + if ('A' <= length && length <= 'Z') { + length = length - 'A' + 1; + } else if ('a' <= length && length <= 'z') { + length = length - 'a' + 27; + } else { + throw new StreamCorruptedException(MessageFormat.format( + JGitText.get().binaryHunkInvalidLength, + Integer.valueOf(lineNumber), Integer.toHexString(length))); + } + byte[] encoded = new byte[Base85.encodedLength(length)]; + for (int i = 0; i < encoded.length; i++) { + int b = in.read(); + if (b < 0 || b == '\n') { + throw new EOFException(MessageFormat.format( + JGitText.get().binaryHunkLineTooShort, + Integer.valueOf(lineNumber))); + } + encoded[i] = (byte) b; + } + // Must be followed by a newline; tolerate EOF. + int b = in.read(); + if (b >= 0 && b != '\n') { + throw new StreamCorruptedException(MessageFormat.format( + JGitText.get().binaryHunkMissingNewline, + Integer.valueOf(lineNumber))); + } + try { + buffer = Base85.decode(encoded, length); + } catch (IllegalArgumentException e) { + StreamCorruptedException ex = new StreamCorruptedException( + MessageFormat.format(JGitText.get().binaryHunkDecodeError, + Integer.valueOf(lineNumber))); + ex.initCause(e); + throw ex; + } + pos = 0; + } +} diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/BinaryHunkOutputStream.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/BinaryHunkOutputStream.java new file mode 100644 index 0000000000..30551c09fd --- /dev/null +++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/BinaryHunkOutputStream.java @@ -0,0 +1,116 @@ +/* + * Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Distribution License v. 1.0 which is available at + * https://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + */ +package org.eclipse.jgit.util.io; + +import java.io.IOException; +import java.io.OutputStream; + +import org.eclipse.jgit.util.Base85; + +/** + * An {@link OutputStream} that encodes data for a git binary patch. + * + * @since 5.12 + */ +public class BinaryHunkOutputStream extends OutputStream { + + private static final int MAX_BYTES = 52; + + private final OutputStream out; + + private final byte[] buffer = new byte[MAX_BYTES]; + + private int pos; + + /** + * Creates a new {@link BinaryHunkOutputStream}. + * + * @param out + * {@link OutputStream} to write the encoded data to + */ + public BinaryHunkOutputStream(OutputStream out) { + this.out = out; + } + + /** + * Flushes and closes this stream, and closes the underlying + * {@link OutputStream}. 
+ */ + @Override + public void close() throws IOException { + flush(); + out.close(); + } + + /** + * Writes any buffered output as a binary patch line to the underlying + * {@link OutputStream} and flushes that stream, too. + */ + @Override + public void flush() throws IOException { + if (pos > 0) { + encode(buffer, 0, pos); + pos = 0; + } + out.flush(); + } + + @Override + public void write(int b) throws IOException { + buffer[pos++] = (byte) b; + if (pos == buffer.length) { + encode(buffer, 0, pos); + pos = 0; + } + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + if (len == 0) { + return; + } + int toCopy = len; + int in = off; + if (pos > 0) { + // Fill the buffer + int chunk = Math.min(toCopy, buffer.length - pos); + System.arraycopy(b, in, buffer, pos, chunk); + in += chunk; + pos += chunk; + toCopy -= chunk; + if (pos == buffer.length) { + encode(buffer, 0, pos); + pos = 0; + } + if (toCopy == 0) { + return; + } + } + while (toCopy >= MAX_BYTES) { + encode(b, in, MAX_BYTES); + toCopy -= MAX_BYTES; + in += MAX_BYTES; + } + if (toCopy > 0) { + System.arraycopy(b, in, buffer, 0, toCopy); + pos = toCopy; + } + } + + private void encode(byte[] data, int off, int length) throws IOException { + if (length <= 26) { + out.write('A' + length - 1); + } else { + out.write('a' + length - 27); + } + out.write(Base85.encode(data, off, length)); + out.write('\n'); + } +} |
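
The wait-for-done capability introduced above (GitProtocolConstants, ProtocolV2Parser, TransferConfig, UploadPack) is only advertised when the server-side repository sets uploadpack.advertisewaitfordone. The sketch below shows one way an embedding application might enable it; the example class name, the passed-in Repository, and the StoredConfig usage are illustrative assumptions, not part of this change.

import java.io.IOException;

import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.StoredConfig;

// Illustrative sketch only; not part of the commit above.
public class WaitForDoneConfigExample {

	// Enables advertising of the protocol V2 "wait-for-done" capability
	// for the given (assumed server-side) repository.
	public static void enableWaitForDone(Repository repo) throws IOException {
		StoredConfig cfg = repo.getConfig();
		// TransferConfig reads this as rc.getBoolean("uploadpack",
		// "advertisewaitfordone", false); see the diff above.
		cfg.setBoolean("uploadpack", null, "advertisewaitfordone", true);
		cfg.save();
	}
}

With this set, UploadPack includes wait-for-done in the advertised fetch capabilities, and a client that sends the option only receives "ready"/packfile data after it has explicitly sent "done".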
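
Base85 is a plain encode/decode helper: encodedLength(n) is (n + 3) / 4 * 5, and decode must be told the expected raw size because the base-85 text itself carries no length information. A minimal round trip (class name and sample data are illustrative only):

import java.nio.charset.StandardCharsets;

import org.eclipse.jgit.util.Base85;

// Illustrative sketch only; not part of the commit above.
public class Base85Example {

	public static void main(String[] args) {
		byte[] raw = "binary patch data".getBytes(StandardCharsets.UTF_8);
		// 17 raw bytes -> (17 + 3) / 4 * 5 = 25 encoded bytes
		byte[] encoded = Base85.encode(raw);
		System.out.println(new String(encoded, StandardCharsets.US_ASCII));
		// The decoder needs the expected size of the result.
		byte[] decoded = Base85.decode(encoded, raw.length);
		System.out.println(new String(decoded, StandardCharsets.UTF_8));
	}
}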
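
The BinaryDeltaInputStream class comment describes deltas as a sequence of COPY and INSERT instructions. The hand-assembled delta below is only an illustration of that format (varint base size, varint result size, one COPY of 7 bytes from offset 0, one INSERT of 5 literal bytes); it is not data taken from the commit.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.eclipse.jgit.util.io.BinaryDeltaInputStream;

// Illustrative sketch only; not part of the commit above.
public class BinaryDeltaExample {

	public static void main(String[] args) throws IOException {
		byte[] base = "Hello, world!".getBytes(StandardCharsets.US_ASCII);
		// Hand-assembled delta: base size 13, result size 12,
		// COPY offset 0 length 7 ("Hello, "), INSERT 5 bytes ("JGit!").
		byte[] delta = { 0x0D, 0x0C, (byte) 0x90, 0x07, 0x05,
				'J', 'G', 'i', 't', '!' };
		try (BinaryDeltaInputStream in = new BinaryDeltaInputStream(base,
				new ByteArrayInputStream(delta))) {
			ByteArrayOutputStream result = new ByteArrayOutputStream();
			for (int b; (b = in.read()) >= 0;) {
				result.write(b);
			}
			System.out.println(result.toString("US-ASCII")); // "Hello, JGit!"
			System.out.println(in.isFullyConsumed()); // true
		}
	}
}

isFullyConsumed() returning true confirms that the whole delta was read and exactly the announced number of result bytes was produced.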
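
BinaryHunkOutputStream and BinaryHunkInputStream are symmetric: every output line is a length character (A..Z for 1..26 bytes, a..z for 27..52), followed by the base-85 encoding of those bytes and a newline. A small round trip (class name and sample data are illustrative assumptions):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.eclipse.jgit.util.io.BinaryHunkInputStream;
import org.eclipse.jgit.util.io.BinaryHunkOutputStream;

// Illustrative sketch only; not part of the commit above.
public class BinaryHunkRoundTrip {

	public static void main(String[] args) throws IOException {
		byte[] data = new byte[60]; // yields a 52-byte line and an 8-byte line
		for (int i = 0; i < data.length; i++) {
			data[i] = (byte) i;
		}
		ByteArrayOutputStream encoded = new ByteArrayOutputStream();
		try (BinaryHunkOutputStream out = new BinaryHunkOutputStream(encoded)) {
			out.write(data);
		}
		// First line starts with 'z' (52 bytes), second with 'H' (8 bytes).
		System.out.println(encoded.toString("US-ASCII"));
		ByteArrayOutputStream decoded = new ByteArrayOutputStream();
		try (BinaryHunkInputStream in = new BinaryHunkInputStream(
				new ByteArrayInputStream(encoded.toByteArray()))) {
			for (int b; (b = in.read()) >= 0;) {
				decoded.write(b);
			}
		}
		System.out.println(decoded.size() == data.length); // true
	}
}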