diff options
author | Nitzan Gur-Furman <nitzan@google.com> | 2022-08-31 19:26:13 +0200 |
---|---|---|
committer | Nitzan Gur-Furman <nitzan@google.com> | 2022-09-15 09:15:55 +0200 |
commit | acde6c8f5b538f900cfede9035584fd2ed654154 (patch) | |
tree | 192bbd25325464bbe62d01fa050ab86f2038f0bd /org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java | |
parent | 57087e2b92dda967f6e5e0e0a017afae2f782a80 (diff) | |
download | jgit-acde6c8f5b538f900cfede9035584fd2ed654154.tar.gz jgit-acde6c8f5b538f900cfede9035584fd2ed654154.zip |
Split out ApplyCommand logic to PatchApplier class
PatchApplier now routes updates through the index. This has two
results:
* we can now execute patches in-memory.
* the JGit apply command will now always update the
index to match the working tree.
Change-Id: Id60a88232f05d0367787d038d2518c670cdb543f
Co-authored-by: Han-Wen Nienhuys <hanwen@google.com>
Co-authored-by: Nitzan Gur-Furman <nitzan@google.com>
Diffstat (limited to 'org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java')
-rw-r--r-- | org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java | 679 |
1 file changed, 14 insertions, 665 deletions
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java index e7f40d811b..49f225f319 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java @@ -9,62 +9,13 @@ */ package org.eclipse.jgit.api; -import java.io.ByteArrayInputStream; import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.nio.file.Files; -import java.nio.file.StandardCopyOption; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.zip.InflaterInputStream; -import org.eclipse.jgit.api.errors.FilterFailedException; import org.eclipse.jgit.api.errors.GitAPIException; -import org.eclipse.jgit.api.errors.PatchApplyException; -import org.eclipse.jgit.api.errors.PatchFormatException; -import org.eclipse.jgit.attributes.FilterCommand; -import org.eclipse.jgit.attributes.FilterCommandRegistry; -import org.eclipse.jgit.diff.DiffEntry.ChangeType; -import org.eclipse.jgit.diff.RawText; -import org.eclipse.jgit.dircache.DirCache; -import org.eclipse.jgit.dircache.DirCacheCheckout; -import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata; -import org.eclipse.jgit.dircache.DirCacheIterator; import org.eclipse.jgit.internal.JGitText; -import org.eclipse.jgit.lib.Constants; -import org.eclipse.jgit.lib.CoreConfig.EolStreamType; -import org.eclipse.jgit.lib.FileMode; -import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.Repository; -import org.eclipse.jgit.patch.BinaryHunk; -import org.eclipse.jgit.patch.FileHeader; -import org.eclipse.jgit.patch.FileHeader.PatchType; -import org.eclipse.jgit.patch.HunkHeader; -import org.eclipse.jgit.patch.Patch; -import org.eclipse.jgit.treewalk.FileTreeIterator; 
-import org.eclipse.jgit.treewalk.TreeWalk; -import org.eclipse.jgit.treewalk.TreeWalk.OperationType; -import org.eclipse.jgit.treewalk.filter.AndTreeFilter; -import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter; -import org.eclipse.jgit.treewalk.filter.PathFilterGroup; -import org.eclipse.jgit.util.FS; -import org.eclipse.jgit.util.FS.ExecutionResult; -import org.eclipse.jgit.util.FileUtils; -import org.eclipse.jgit.util.IO; -import org.eclipse.jgit.util.RawParseUtils; -import org.eclipse.jgit.util.StringUtils; -import org.eclipse.jgit.util.TemporaryBuffer; -import org.eclipse.jgit.util.TemporaryBuffer.LocalFile; -import org.eclipse.jgit.util.io.BinaryDeltaInputStream; -import org.eclipse.jgit.util.io.BinaryHunkInputStream; -import org.eclipse.jgit.util.io.EolStreamTypeUtil; -import org.eclipse.jgit.util.sha1.SHA1; +import org.eclipse.jgit.patch.PatchApplier; +import org.eclipse.jgit.patch.PatchApplier.Result; /** * Apply a patch to files and/or to the index. @@ -80,10 +31,13 @@ public class ApplyCommand extends GitCommand<ApplyResult> { /** * Constructs the command. * - * @param repo + * @param local */ - ApplyCommand(Repository repo) { - super(repo); + ApplyCommand(Repository local) { + super(local); + if (local == null) { + throw new NullPointerException(JGitText.get().repositoryIsRequired); + } } /** @@ -101,6 +55,7 @@ public class ApplyCommand extends GitCommand<ApplyResult> { /** * {@inheritDoc} + * * <p> * Executes the {@code ApplyCommand} command with all the options and * parameters collected by the setter methods (e.g. @@ -109,621 +64,15 @@ public class ApplyCommand extends GitCommand<ApplyResult> { * method twice on an instance. 
*/ @Override - public ApplyResult call() throws GitAPIException, PatchFormatException, - PatchApplyException { + public ApplyResult call() throws GitAPIException { checkCallable(); setCallable(false); ApplyResult r = new ApplyResult(); - try { - final Patch p = new Patch(); - try { - p.parse(in); - } finally { - in.close(); - } - if (!p.getErrors().isEmpty()) { - throw new PatchFormatException(p.getErrors()); - } - Repository repository = getRepository(); - DirCache cache = repository.readDirCache(); - for (FileHeader fh : p.getFiles()) { - ChangeType type = fh.getChangeType(); - File f = null; - switch (type) { - case ADD: - f = getFile(fh.getNewPath(), true); - apply(repository, fh.getNewPath(), cache, f, fh); - break; - case MODIFY: - f = getFile(fh.getOldPath(), false); - apply(repository, fh.getOldPath(), cache, f, fh); - break; - case DELETE: - f = getFile(fh.getOldPath(), false); - if (!f.delete()) - throw new PatchApplyException(MessageFormat.format( - JGitText.get().cannotDeleteFile, f)); - break; - case RENAME: - f = getFile(fh.getOldPath(), false); - File dest = getFile(fh.getNewPath(), false); - try { - FileUtils.mkdirs(dest.getParentFile(), true); - FileUtils.rename(f, dest, - StandardCopyOption.ATOMIC_MOVE); - } catch (IOException e) { - throw new PatchApplyException(MessageFormat.format( - JGitText.get().renameFileFailed, f, dest), e); - } - apply(repository, fh.getOldPath(), cache, dest, fh); - r.addUpdatedFile(dest); - break; - case COPY: - File src = getFile(fh.getOldPath(), false); - f = getFile(fh.getNewPath(), false); - FileUtils.mkdirs(f.getParentFile(), true); - Files.copy(src.toPath(), f.toPath()); - apply(repository, fh.getOldPath(), cache, f, fh); - } - r.addUpdatedFile(f); - } - } catch (IOException e) { - throw new PatchApplyException(MessageFormat.format( - JGitText.get().patchApplyException, e.getMessage()), e); + PatchApplier patchApplier = new PatchApplier(repo); + Result applyResult = patchApplier.applyPatch(in); + for (String p : 
applyResult.getPaths()) { + r.addUpdatedFile(new File(repo.getWorkTree(), p)); } return r; } - - private File getFile(String path, boolean create) - throws PatchApplyException { - File f = new File(getRepository().getWorkTree(), path); - if (create) { - try { - File parent = f.getParentFile(); - FileUtils.mkdirs(parent, true); - FileUtils.createNewFile(f); - } catch (IOException e) { - throw new PatchApplyException(MessageFormat.format( - JGitText.get().createNewFileFailed, f), e); - } - } - return f; - } - - private void apply(Repository repository, String path, DirCache cache, - File f, FileHeader fh) throws IOException, PatchApplyException { - if (PatchType.BINARY.equals(fh.getPatchType())) { - return; - } - boolean convertCrLf = needsCrLfConversion(f, fh); - // Use a TreeWalk with a DirCacheIterator to pick up the correct - // clean/smudge filters. CR-LF handling is completely determined by - // whether the file or the patch have CR-LF line endings. - try (TreeWalk walk = new TreeWalk(repository)) { - walk.setOperationType(OperationType.CHECKIN_OP); - FileTreeIterator files = new FileTreeIterator(repository); - int fileIdx = walk.addTree(files); - int cacheIdx = walk.addTree(new DirCacheIterator(cache)); - files.setDirCacheIterator(walk, cacheIdx); - walk.setFilter(AndTreeFilter.create( - PathFilterGroup.createFromStrings(path), - new NotIgnoredFilter(fileIdx))); - walk.setRecursive(true); - if (walk.next()) { - // If the file on disk has no newline characters, convertCrLf - // will be false. In that case we want to honor the normal git - // settings. - EolStreamType streamType = convertCrLf ? 
EolStreamType.TEXT_CRLF - : walk.getEolStreamType(OperationType.CHECKOUT_OP); - String command = walk.getFilterCommand( - Constants.ATTR_FILTER_TYPE_SMUDGE); - CheckoutMetadata checkOut = new CheckoutMetadata(streamType, command); - FileTreeIterator file = walk.getTree(fileIdx, - FileTreeIterator.class); - if (file != null) { - if (PatchType.GIT_BINARY.equals(fh.getPatchType())) { - applyBinary(repository, path, f, fh, - file::openEntryStream, file.getEntryObjectId(), - checkOut); - } else { - command = walk.getFilterCommand( - Constants.ATTR_FILTER_TYPE_CLEAN); - RawText raw; - // Can't use file.openEntryStream() as it would do CR-LF - // conversion as usual, not as wanted by us. - try (InputStream input = filterClean(repository, path, - new FileInputStream(f), convertCrLf, command)) { - raw = new RawText( - IO.readWholeStream(input, 0).array()); - } - applyText(repository, path, raw, f, fh, checkOut); - } - return; - } - } - } - // File ignored? - RawText raw; - CheckoutMetadata checkOut; - if (PatchType.GIT_BINARY.equals(fh.getPatchType())) { - checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null); - applyBinary(repository, path, f, fh, () -> new FileInputStream(f), - null, checkOut); - } else { - if (convertCrLf) { - try (InputStream input = EolStreamTypeUtil.wrapInputStream( - new FileInputStream(f), EolStreamType.TEXT_LF)) { - raw = new RawText(IO.readWholeStream(input, 0).array()); - } - checkOut = new CheckoutMetadata(EolStreamType.TEXT_CRLF, null); - } else { - raw = new RawText(f); - checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null); - } - applyText(repository, path, raw, f, fh, checkOut); - } - } - - private boolean needsCrLfConversion(File f, FileHeader fileHeader) - throws IOException { - if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) { - return false; - } - if (!hasCrLf(fileHeader)) { - try (InputStream input = new FileInputStream(f)) { - return RawText.isCrLfText(input); - } - } - return false; - } - - private static 
boolean hasCrLf(FileHeader fileHeader) { - if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) { - return false; - } - for (HunkHeader header : fileHeader.getHunks()) { - byte[] buf = header.getBuffer(); - int hunkEnd = header.getEndOffset(); - int lineStart = header.getStartOffset(); - while (lineStart < hunkEnd) { - int nextLineStart = RawParseUtils.nextLF(buf, lineStart); - if (nextLineStart > hunkEnd) { - nextLineStart = hunkEnd; - } - if (nextLineStart <= lineStart) { - break; - } - if (nextLineStart - lineStart > 1) { - char first = (char) (buf[lineStart] & 0xFF); - if (first == ' ' || first == '-') { - // It's an old line. Does it end in CR-LF? - if (buf[nextLineStart - 2] == '\r') { - return true; - } - } - } - lineStart = nextLineStart; - } - } - return false; - } - - private InputStream filterClean(Repository repository, String path, - InputStream fromFile, boolean convertCrLf, String filterCommand) - throws IOException { - InputStream input = fromFile; - if (convertCrLf) { - input = EolStreamTypeUtil.wrapInputStream(input, - EolStreamType.TEXT_LF); - } - if (StringUtils.isEmptyOrNull(filterCommand)) { - return input; - } - if (FilterCommandRegistry.isRegistered(filterCommand)) { - LocalFile buffer = new TemporaryBuffer.LocalFile(null); - FilterCommand command = FilterCommandRegistry.createFilterCommand( - filterCommand, repository, input, buffer); - while (command.run() != -1) { - // loop as long as command.run() tells there is work to do - } - return buffer.openInputStreamWithAutoDestroy(); - } - FS fs = repository.getFS(); - ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand, - new String[0]); - filterProcessBuilder.directory(repository.getWorkTree()); - filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY, - repository.getDirectory().getAbsolutePath()); - ExecutionResult result; - try { - result = fs.execute(filterProcessBuilder, in); - } catch (IOException | InterruptedException e) { - throw new IOException( - new 
FilterFailedException(e, filterCommand, path)); - } - int rc = result.getRc(); - if (rc != 0) { - throw new IOException(new FilterFailedException(rc, filterCommand, - path, result.getStdout().toByteArray(4096), RawParseUtils - .decode(result.getStderr().toByteArray(4096)))); - } - return result.getStdout().openInputStreamWithAutoDestroy(); - } - - private void initHash(SHA1 hash, long size) { - hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB)); - hash.update((byte) ' '); - hash.update(Constants.encodeASCII(size)); - hash.update((byte) 0); - } - - private ObjectId hash(File f) throws IOException { - SHA1 hash = SHA1.newInstance(); - initHash(hash, f.length()); - try (InputStream input = new FileInputStream(f)) { - byte[] buf = new byte[8192]; - int n; - while ((n = input.read(buf)) >= 0) { - hash.update(buf, 0, n); - } - } - return hash.toObjectId(); - } - - private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f, - String path) - throws PatchApplyException, IOException { - boolean hashOk = false; - if (id != null) { - hashOk = baseId.equals(id); - if (!hashOk && ChangeType.ADD.equals(type) - && ObjectId.zeroId().equals(baseId)) { - // We create the file first. The OID of an empty file is not the - // zero id! - hashOk = Constants.EMPTY_BLOB_ID.equals(id); - } - } else { - if (ObjectId.zeroId().equals(baseId)) { - // File empty is OK. 
- hashOk = !f.exists() || f.length() == 0; - } else { - hashOk = baseId.equals(hash(f)); - } - } - if (!hashOk) { - throw new PatchApplyException(MessageFormat - .format(JGitText.get().applyBinaryBaseOidWrong, path)); - } - } - - private void applyBinary(Repository repository, String path, File f, - FileHeader fh, DirCacheCheckout.StreamSupplier loader, ObjectId id, - CheckoutMetadata checkOut) - throws PatchApplyException, IOException { - if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) { - throw new PatchApplyException(MessageFormat - .format(JGitText.get().applyBinaryOidTooShort, path)); - } - BinaryHunk hunk = fh.getForwardBinaryHunk(); - // A BinaryHunk has the start at the "literal" or "delta" token. Data - // starts on the next line. - int start = RawParseUtils.nextLF(hunk.getBuffer(), - hunk.getStartOffset()); - int length = hunk.getEndOffset() - start; - SHA1 hash = SHA1.newInstance(); - // Write to a buffer and copy to the file only if everything was fine - TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null); - try { - switch (hunk.getType()) { - case LITERAL_DEFLATED: - // This just overwrites the file. We need to check the hash of - // the base. 
- checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f, - path); - initHash(hash, hunk.getSize()); - try (OutputStream out = buffer; - InputStream inflated = new SHA1InputStream(hash, - new InflaterInputStream( - new BinaryHunkInputStream( - new ByteArrayInputStream( - hunk.getBuffer(), start, - length))))) { - DirCacheCheckout.getContent(repository, path, checkOut, - () -> inflated, null, out); - if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) { - throw new PatchApplyException(MessageFormat.format( - JGitText.get().applyBinaryResultOidWrong, - path)); - } - } - try (InputStream bufIn = buffer.openInputStream()) { - Files.copy(bufIn, f.toPath(), - StandardCopyOption.REPLACE_EXISTING); - } - break; - case DELTA_DEFLATED: - // Unfortunately delta application needs random access to the - // base to construct the result. - byte[] base; - try (InputStream input = loader.load()) { - base = IO.readWholeStream(input, 0).array(); - } - // At least stream the result! - try (BinaryDeltaInputStream input = new BinaryDeltaInputStream( - base, - new InflaterInputStream(new BinaryHunkInputStream( - new ByteArrayInputStream(hunk.getBuffer(), - start, length))))) { - long finalSize = input.getExpectedResultSize(); - initHash(hash, finalSize); - try (OutputStream out = buffer; - SHA1InputStream hashed = new SHA1InputStream(hash, - input)) { - DirCacheCheckout.getContent(repository, path, checkOut, - () -> hashed, null, out); - if (!fh.getNewId().toObjectId() - .equals(hash.toObjectId())) { - throw new PatchApplyException(MessageFormat.format( - JGitText.get().applyBinaryResultOidWrong, - path)); - } - } - } - try (InputStream bufIn = buffer.openInputStream()) { - Files.copy(bufIn, f.toPath(), - StandardCopyOption.REPLACE_EXISTING); - } - break; - default: - break; - } - } finally { - buffer.destroy(); - } - } - - private void applyText(Repository repository, String path, RawText rt, - File f, FileHeader fh, CheckoutMetadata checkOut) - throws IOException, 
PatchApplyException { - List<ByteBuffer> oldLines = new ArrayList<>(rt.size()); - for (int i = 0; i < rt.size(); i++) { - oldLines.add(rt.getRawString(i)); - } - List<ByteBuffer> newLines = new ArrayList<>(oldLines); - int afterLastHunk = 0; - int lineNumberShift = 0; - int lastHunkNewLine = -1; - for (HunkHeader hh : fh.getHunks()) { - - // We assume hunks to be ordered - if (hh.getNewStartLine() <= lastHunkNewLine) { - throw new PatchApplyException(MessageFormat - .format(JGitText.get().patchApplyException, hh)); - } - lastHunkNewLine = hh.getNewStartLine(); - - byte[] b = new byte[hh.getEndOffset() - hh.getStartOffset()]; - System.arraycopy(hh.getBuffer(), hh.getStartOffset(), b, 0, - b.length); - RawText hrt = new RawText(b); - - List<ByteBuffer> hunkLines = new ArrayList<>(hrt.size()); - for (int i = 0; i < hrt.size(); i++) { - hunkLines.add(hrt.getRawString(i)); - } - - if (hh.getNewStartLine() == 0) { - // Must be the single hunk for clearing all content - if (fh.getHunks().size() == 1 - && canApplyAt(hunkLines, newLines, 0)) { - newLines.clear(); - break; - } - throw new PatchApplyException(MessageFormat - .format(JGitText.get().patchApplyException, hh)); - } - // Hunk lines as reported by the hunk may be off, so don't rely on - // them. - int applyAt = hh.getNewStartLine() - 1 + lineNumberShift; - // But they definitely should not go backwards. - if (applyAt < afterLastHunk && lineNumberShift < 0) { - applyAt = hh.getNewStartLine() - 1; - lineNumberShift = 0; - } - if (applyAt < afterLastHunk) { - throw new PatchApplyException(MessageFormat - .format(JGitText.get().patchApplyException, hh)); - } - boolean applies = false; - int oldLinesInHunk = hh.getLinesContext() - + hh.getOldImage().getLinesDeleted(); - if (oldLinesInHunk <= 1) { - // Don't shift hunks without context lines. Just try the - // position corrected by the current lineNumberShift, and if - // that fails, the position recorded in the hunk header. 
- applies = canApplyAt(hunkLines, newLines, applyAt); - if (!applies && lineNumberShift != 0) { - applyAt = hh.getNewStartLine() - 1; - applies = applyAt >= afterLastHunk - && canApplyAt(hunkLines, newLines, applyAt); - } - } else { - int maxShift = applyAt - afterLastHunk; - for (int shift = 0; shift <= maxShift; shift++) { - if (canApplyAt(hunkLines, newLines, applyAt - shift)) { - applies = true; - applyAt -= shift; - break; - } - } - if (!applies) { - // Try shifting the hunk downwards - applyAt = hh.getNewStartLine() - 1 + lineNumberShift; - maxShift = newLines.size() - applyAt - oldLinesInHunk; - for (int shift = 1; shift <= maxShift; shift++) { - if (canApplyAt(hunkLines, newLines, applyAt + shift)) { - applies = true; - applyAt += shift; - break; - } - } - } - } - if (!applies) { - throw new PatchApplyException(MessageFormat - .format(JGitText.get().patchApplyException, hh)); - } - // Hunk applies at applyAt. Apply it, and update afterLastHunk and - // lineNumberShift - lineNumberShift = applyAt - hh.getNewStartLine() + 1; - int sz = hunkLines.size(); - for (int j = 1; j < sz; j++) { - ByteBuffer hunkLine = hunkLines.get(j); - if (!hunkLine.hasRemaining()) { - // Completely empty line; accept as empty context line - applyAt++; - continue; - } - switch (hunkLine.array()[hunkLine.position()]) { - case ' ': - applyAt++; - break; - case '-': - newLines.remove(applyAt); - break; - case '+': - newLines.add(applyAt++, slice(hunkLine, 1)); - break; - default: - break; - } - } - afterLastHunk = applyAt; - } - if (!isNoNewlineAtEndOfFile(fh)) { - newLines.add(null); - } - if (!rt.isMissingNewlineAtEnd()) { - oldLines.add(null); - } - if (oldLines.equals(newLines)) { - return; // Unchanged; don't touch the file - } - - TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null); - try { - try (OutputStream out = buffer) { - for (Iterator<ByteBuffer> l = newLines.iterator(); l - .hasNext();) { - ByteBuffer line = l.next(); - if (line == null) { - // Must be the marker 
for the final newline - break; - } - out.write(line.array(), line.position(), line.remaining()); - if (l.hasNext()) { - out.write('\n'); - } - } - } - try (OutputStream output = new FileOutputStream(f)) { - DirCacheCheckout.getContent(repository, path, checkOut, - buffer::openInputStream, null, output); - } - } finally { - buffer.destroy(); - } - repository.getFS().setExecute(f, - fh.getNewMode() == FileMode.EXECUTABLE_FILE); - } - - private boolean canApplyAt(List<ByteBuffer> hunkLines, - List<ByteBuffer> newLines, int line) { - int sz = hunkLines.size(); - int limit = newLines.size(); - int pos = line; - for (int j = 1; j < sz; j++) { - ByteBuffer hunkLine = hunkLines.get(j); - if (!hunkLine.hasRemaining()) { - // Empty line. Accept as empty context line. - if (pos >= limit || newLines.get(pos).hasRemaining()) { - return false; - } - pos++; - continue; - } - switch (hunkLine.array()[hunkLine.position()]) { - case ' ': - case '-': - if (pos >= limit - || !newLines.get(pos).equals(slice(hunkLine, 1))) { - return false; - } - pos++; - break; - default: - break; - } - } - return true; - } - - private ByteBuffer slice(ByteBuffer b, int off) { - int newOffset = b.position() + off; - return ByteBuffer.wrap(b.array(), newOffset, b.limit() - newOffset); - } - - private boolean isNoNewlineAtEndOfFile(FileHeader fh) { - List<? extends HunkHeader> hunks = fh.getHunks(); - if (hunks == null || hunks.isEmpty()) { - return false; - } - HunkHeader lastHunk = hunks.get(hunks.size() - 1); - byte[] buf = new byte[lastHunk.getEndOffset() - - lastHunk.getStartOffset()]; - System.arraycopy(lastHunk.getBuffer(), lastHunk.getStartOffset(), buf, - 0, buf.length); - RawText lhrt = new RawText(buf); - return lhrt.getString(lhrt.size() - 1) - .equals("\\ No newline at end of file"); //$NON-NLS-1$ - } - - /** - * An {@link InputStream} that updates a {@link SHA1} on every byte read. - * The hash is supposed to have been initialized before reading starts. 
- */ - private static class SHA1InputStream extends InputStream { - - private final SHA1 hash; - - private final InputStream in; - - SHA1InputStream(SHA1 hash, InputStream in) { - this.hash = hash; - this.in = in; - } - - @Override - public int read() throws IOException { - int b = in.read(); - if (b >= 0) { - hash.update((byte) b); - } - return b; - } - - @Override - public int read(byte[] b, int off, int len) throws IOException { - int n = in.read(b, off, len); - if (n > 0) { - hash.update(b, off, n); - } - return n; - } - - @Override - public void close() throws IOException { - in.close(); - } - } } |