Diffstat (limited to 'org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java')
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java | 559
1 file changed, 507 insertions, 52 deletions
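
For context, this commit reworks ApplyCommand so that patched files are run through the repository's CR-LF handling and clean/smudge filters, and so that git binary patches (literal and delta hunks) can be applied. The command itself is normally driven through the porcelain API; the following is a minimal usage sketch, assuming the pre-existing setPatch(InputStream) setter and ApplyResult#getUpdatedFiles() accessor (neither is changed by this diff) and purely illustrative paths:

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

import org.eclipse.jgit.api.ApplyResult;
import org.eclipse.jgit.api.Git;

public class ApplyPatchExample {
	public static void main(String[] args) throws Exception {
		// Open an existing work tree and apply a unified-diff patch to it.
		try (Git git = Git.open(new File("/path/to/repo"));
				InputStream patch = new FileInputStream("/path/to/change.patch")) {
			ApplyResult result = git.apply().setPatch(patch).call();
			for (File f : result.getUpdatedFiles()) {
				System.out.println("patched " + f);
			}
		}
	}
}
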
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java index e228e8276a..583767af3f 100644 --- a/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java +++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/ApplyCommand.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2011, 2020 IBM Corporation and others + * Copyright (C) 2011, 2021 IBM Corporation and others * * This program and the accompanying materials are made available under the * terms of the Eclipse Distribution License v. 1.0 which is available at @@ -9,29 +9,68 @@ */ package org.eclipse.jgit.api; +import java.io.BufferedInputStream; +import java.io.ByteArrayInputStream; import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.Writer; +import java.io.OutputStream; +import java.nio.ByteBuffer; import java.nio.file.Files; import java.nio.file.StandardCopyOption; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.zip.InflaterInputStream; +import org.eclipse.jgit.api.errors.FilterFailedException; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.api.errors.PatchApplyException; import org.eclipse.jgit.api.errors.PatchFormatException; +import org.eclipse.jgit.attributes.FilterCommand; +import org.eclipse.jgit.attributes.FilterCommandRegistry; import org.eclipse.jgit.diff.DiffEntry.ChangeType; import org.eclipse.jgit.diff.RawText; +import org.eclipse.jgit.dircache.DirCache; +import org.eclipse.jgit.dircache.DirCacheCheckout; +import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata; +import org.eclipse.jgit.dircache.DirCacheIterator; +import org.eclipse.jgit.errors.LargeObjectException; +import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.internal.JGitText; +import org.eclipse.jgit.lib.Constants; +import org.eclipse.jgit.lib.CoreConfig.EolStreamType; import org.eclipse.jgit.lib.FileMode; +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.ObjectLoader; +import org.eclipse.jgit.lib.ObjectStream; import org.eclipse.jgit.lib.Repository; +import org.eclipse.jgit.patch.BinaryHunk; import org.eclipse.jgit.patch.FileHeader; +import org.eclipse.jgit.patch.FileHeader.PatchType; import org.eclipse.jgit.patch.HunkHeader; import org.eclipse.jgit.patch.Patch; +import org.eclipse.jgit.treewalk.FileTreeIterator; +import org.eclipse.jgit.treewalk.TreeWalk; +import org.eclipse.jgit.treewalk.TreeWalk.OperationType; +import org.eclipse.jgit.treewalk.filter.AndTreeFilter; +import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter; +import org.eclipse.jgit.treewalk.filter.PathFilterGroup; +import org.eclipse.jgit.util.FS; +import org.eclipse.jgit.util.FS.ExecutionResult; import org.eclipse.jgit.util.FileUtils; +import org.eclipse.jgit.util.IO; +import org.eclipse.jgit.util.RawParseUtils; +import org.eclipse.jgit.util.StringUtils; +import org.eclipse.jgit.util.TemporaryBuffer; +import org.eclipse.jgit.util.TemporaryBuffer.LocalFile; +import org.eclipse.jgit.util.io.BinaryDeltaInputStream; +import org.eclipse.jgit.util.io.BinaryHunkInputStream; +import org.eclipse.jgit.util.io.EolStreamTypeUtil; +import org.eclipse.jgit.util.sha1.SHA1; /** * Apply a patch to files and/or to the index. 
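
The imports added in the hunk above pull in the EOL and filter plumbing (CoreConfig.EolStreamType, EolStreamTypeUtil, FilterCommandRegistry) alongside the binary-patch and hashing utilities used further down. As a minimal sketch of the line-ending normalization those imports enable, assuming only the wrapInputStream and readWholeStream helpers that this patch itself calls:

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;

public class EolNormalizeExample {
	public static void main(String[] args) throws Exception {
		byte[] crlf = "a\r\nb\r\n".getBytes(StandardCharsets.UTF_8);
		// Wrap the raw stream so CR-LF becomes LF on read; ApplyCommand does the
		// same before comparing file content against the patch's context lines.
		try (InputStream in = EolStreamTypeUtil.wrapInputStream(
				new ByteArrayInputStream(crlf), EolStreamType.TEXT_LF)) {
			ByteBuffer buf = IO.readWholeStream(in, 0);
			String lf = new String(buf.array(), buf.position(), buf.remaining(),
					StandardCharsets.UTF_8);
			System.out.println(lf.contains("\r")); // prints false
		}
	}
}
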
@@ -45,7 +84,7 @@ public class ApplyCommand extends GitCommand<ApplyResult> { private InputStream in; /** - * Constructs the command if the patch is to be applied to the index. + * Constructs the command. * * @param repo */ @@ -79,6 +118,7 @@ public class ApplyCommand extends GitCommand<ApplyResult> { public ApplyResult call() throws GitAPIException, PatchFormatException, PatchApplyException { checkCallable(); + setCallable(false); ApplyResult r = new ApplyResult(); try { final Patch p = new Patch(); @@ -87,19 +127,22 @@ public class ApplyCommand extends GitCommand<ApplyResult> { } finally { in.close(); } - if (!p.getErrors().isEmpty()) + if (!p.getErrors().isEmpty()) { throw new PatchFormatException(p.getErrors()); + } + Repository repository = getRepository(); + DirCache cache = repository.readDirCache(); for (FileHeader fh : p.getFiles()) { ChangeType type = fh.getChangeType(); File f = null; switch (type) { case ADD: f = getFile(fh.getNewPath(), true); - apply(f, fh); + apply(repository, fh.getNewPath(), cache, f, fh); break; case MODIFY: f = getFile(fh.getOldPath(), false); - apply(f, fh); + apply(repository, fh.getOldPath(), cache, f, fh); break; case DELETE: f = getFile(fh.getOldPath(), false); @@ -118,14 +161,14 @@ public class ApplyCommand extends GitCommand<ApplyResult> { throw new PatchApplyException(MessageFormat.format( JGitText.get().renameFileFailed, f, dest), e); } - apply(dest, fh); + apply(repository, fh.getOldPath(), cache, dest, fh); break; case COPY: f = getFile(fh.getOldPath(), false); File target = getFile(fh.getNewPath(), false); FileUtils.mkdirs(target.getParentFile(), true); Files.copy(f.toPath(), target.toPath()); - apply(target, fh); + apply(repository, fh.getOldPath(), cache, target, fh); } r.addUpdatedFile(f); } @@ -133,14 +176,13 @@ public class ApplyCommand extends GitCommand<ApplyResult> { throw new PatchApplyException(MessageFormat.format( JGitText.get().patchApplyException, e.getMessage()), e); } - setCallable(false); return r; } private File getFile(String path, boolean create) throws PatchApplyException { File f = new File(getRepository().getWorkTree(), path); - if (create) + if (create) { try { File parent = f.getParentFile(); FileUtils.mkdirs(parent, true); @@ -149,22 +191,366 @@ public class ApplyCommand extends GitCommand<ApplyResult> { throw new PatchApplyException(MessageFormat.format( JGitText.get().createNewFileFailed, f), e); } + } return f; } + private void apply(Repository repository, String path, DirCache cache, + File f, FileHeader fh) throws IOException, PatchApplyException { + if (PatchType.BINARY.equals(fh.getPatchType())) { + return; + } + boolean convertCrLf = needsCrLfConversion(f, fh); + // Use a TreeWalk with a DirCacheIterator to pick up the correct + // clean/smudge filters. CR-LF handling is completely determined by + // whether the file or the patch have CR-LF line endings. + try (TreeWalk walk = new TreeWalk(repository)) { + walk.setOperationType(OperationType.CHECKIN_OP); + FileTreeIterator files = new FileTreeIterator(repository); + int fileIdx = walk.addTree(files); + int cacheIdx = walk.addTree(new DirCacheIterator(cache)); + files.setDirCacheIterator(walk, cacheIdx); + walk.setFilter(AndTreeFilter.create( + PathFilterGroup.createFromStrings(path), + new NotIgnoredFilter(fileIdx))); + walk.setRecursive(true); + if (walk.next()) { + // If the file on disk has no newline characters, convertCrLf + // will be false. In that case we want to honor the normal git + // settings. + EolStreamType streamType = convertCrLf ? 
EolStreamType.TEXT_CRLF + : walk.getEolStreamType(OperationType.CHECKOUT_OP); + String command = walk.getFilterCommand( + Constants.ATTR_FILTER_TYPE_SMUDGE); + CheckoutMetadata checkOut = new CheckoutMetadata(streamType, command); + FileTreeIterator file = walk.getTree(fileIdx, + FileTreeIterator.class); + if (file != null) { + if (PatchType.GIT_BINARY.equals(fh.getPatchType())) { + applyBinary(repository, path, f, fh, + file::openEntryStream, file.getEntryObjectId(), + checkOut); + } else { + command = walk.getFilterCommand( + Constants.ATTR_FILTER_TYPE_CLEAN); + RawText raw; + // Can't use file.openEntryStream() as it would do CR-LF + // conversion as usual, not as wanted by us. + try (InputStream input = filterClean(repository, path, + new FileInputStream(f), convertCrLf, command)) { + raw = new RawText( + IO.readWholeStream(input, 0).array()); + } + applyText(repository, path, raw, f, fh, checkOut); + } + return; + } + } + } + // File ignored? + RawText raw; + CheckoutMetadata checkOut; + if (PatchType.GIT_BINARY.equals(fh.getPatchType())) { + checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null); + applyBinary(repository, path, f, fh, () -> new FileInputStream(f), + null, checkOut); + } else { + if (convertCrLf) { + try (InputStream input = EolStreamTypeUtil.wrapInputStream( + new FileInputStream(f), EolStreamType.TEXT_LF)) { + raw = new RawText(IO.readWholeStream(input, 0).array()); + } + checkOut = new CheckoutMetadata(EolStreamType.TEXT_CRLF, null); + } else { + raw = new RawText(f); + checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null); + } + applyText(repository, path, raw, f, fh, checkOut); + } + } + + private boolean needsCrLfConversion(File f, FileHeader fileHeader) + throws IOException { + if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) { + return false; + } + if (!hasCrLf(fileHeader)) { + try (InputStream input = new FileInputStream(f)) { + return RawText.isCrLfText(input); + } + } + return false; + } + + private static boolean hasCrLf(FileHeader fileHeader) { + if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) { + return false; + } + for (HunkHeader header : fileHeader.getHunks()) { + byte[] buf = header.getBuffer(); + int hunkEnd = header.getEndOffset(); + int lineStart = header.getStartOffset(); + while (lineStart < hunkEnd) { + int nextLineStart = RawParseUtils.nextLF(buf, lineStart); + if (nextLineStart > hunkEnd) { + nextLineStart = hunkEnd; + } + if (nextLineStart <= lineStart) { + break; + } + if (nextLineStart - lineStart > 1) { + char first = (char) (buf[lineStart] & 0xFF); + if (first == ' ' || first == '-') { + // It's an old line. Does it end in CR-LF? 
+ if (buf[nextLineStart - 2] == '\r') { + return true; + } + } + } + lineStart = nextLineStart; + } + } + return false; + } + + private InputStream filterClean(Repository repository, String path, + InputStream fromFile, boolean convertCrLf, String filterCommand) + throws IOException { + InputStream input = fromFile; + if (convertCrLf) { + input = EolStreamTypeUtil.wrapInputStream(input, + EolStreamType.TEXT_LF); + } + if (StringUtils.isEmptyOrNull(filterCommand)) { + return input; + } + if (FilterCommandRegistry.isRegistered(filterCommand)) { + LocalFile buffer = new TemporaryBuffer.LocalFile(null); + FilterCommand command = FilterCommandRegistry.createFilterCommand( + filterCommand, repository, input, buffer); + while (command.run() != -1) { + // loop as long as command.run() tells there is work to do + } + return buffer.openInputStreamWithAutoDestroy(); + } + FS fs = repository.getFS(); + ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand, + new String[0]); + filterProcessBuilder.directory(repository.getWorkTree()); + filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY, + repository.getDirectory().getAbsolutePath()); + ExecutionResult result; + try { + result = fs.execute(filterProcessBuilder, in); + } catch (IOException | InterruptedException e) { + throw new IOException( + new FilterFailedException(e, filterCommand, path)); + } + int rc = result.getRc(); + if (rc != 0) { + throw new IOException(new FilterFailedException(rc, filterCommand, + path, result.getStdout().toByteArray(4096), RawParseUtils + .decode(result.getStderr().toByteArray(4096)))); + } + return result.getStdout().openInputStreamWithAutoDestroy(); + } + + /** + * Something that can supply an {@link InputStream}. + */ + private interface StreamSupplier { + InputStream load() throws IOException; + } + /** - * @param f - * @param fh - * @throws IOException - * @throws PatchApplyException + * We write the patch result to a {@link TemporaryBuffer} and then use + * {@link DirCacheCheckout}.getContent() to run the result through the CR-LF + * and smudge filters. DirCacheCheckout needs an ObjectLoader, not a + * TemporaryBuffer, so this class bridges between the two, making any Stream + * provided by a {@link StreamSupplier} look like an ordinary git blob to + * DirCacheCheckout. 
*/ - private void apply(File f, FileHeader fh) + private static class StreamLoader extends ObjectLoader { + + private StreamSupplier data; + + private long size; + + StreamLoader(StreamSupplier data, long length) { + this.data = data; + this.size = length; + } + + @Override + public int getType() { + return Constants.OBJ_BLOB; + } + + @Override + public long getSize() { + return size; + } + + @Override + public boolean isLarge() { + return true; + } + + @Override + public byte[] getCachedBytes() throws LargeObjectException { + throw new LargeObjectException(); + } + + @Override + public ObjectStream openStream() + throws MissingObjectException, IOException { + return new ObjectStream.Filter(getType(), getSize(), + new BufferedInputStream(data.load())); + } + } + + private void initHash(SHA1 hash, long size) { + hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB)); + hash.update((byte) ' '); + hash.update(Constants.encodeASCII(size)); + hash.update((byte) 0); + } + + private ObjectId hash(File f) throws IOException { + SHA1 hash = SHA1.newInstance(); + initHash(hash, f.length()); + try (InputStream input = new FileInputStream(f)) { + byte[] buf = new byte[8192]; + int n; + while ((n = input.read(buf)) >= 0) { + hash.update(buf, 0, n); + } + } + return hash.toObjectId(); + } + + private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f, + String path) + throws PatchApplyException, IOException { + boolean hashOk = false; + if (id != null) { + hashOk = baseId.equals(id); + if (!hashOk && ChangeType.ADD.equals(type) + && ObjectId.zeroId().equals(baseId)) { + // We create the file first. The OID of an empty file is not the + // zero id! + hashOk = Constants.EMPTY_BLOB_ID.equals(id); + } + } else { + if (ObjectId.zeroId().equals(baseId)) { + // File empty is OK. + hashOk = !f.exists() || f.length() == 0; + } else { + hashOk = baseId.equals(hash(f)); + } + } + if (!hashOk) { + throw new PatchApplyException(MessageFormat + .format(JGitText.get().applyBinaryBaseOidWrong, path)); + } + } + + private void applyBinary(Repository repository, String path, File f, + FileHeader fh, StreamSupplier loader, ObjectId id, + CheckoutMetadata checkOut) + throws PatchApplyException, IOException { + if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) { + throw new PatchApplyException(MessageFormat + .format(JGitText.get().applyBinaryOidTooShort, path)); + } + BinaryHunk hunk = fh.getForwardBinaryHunk(); + // A BinaryHunk has the start at the "literal" or "delta" token. Data + // starts on the next line. + int start = RawParseUtils.nextLF(hunk.getBuffer(), + hunk.getStartOffset()); + int length = hunk.getEndOffset() - start; + SHA1 hash = SHA1.newInstance(); + // Write to a buffer and copy to the file only if everything was fine + TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null); + try { + switch (hunk.getType()) { + case LITERAL_DEFLATED: + // This just overwrites the file. We need to check the hash of + // the base. 
+ checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f, + path); + initHash(hash, hunk.getSize()); + try (OutputStream out = buffer; + InputStream inflated = new SHA1InputStream(hash, + new InflaterInputStream( + new BinaryHunkInputStream( + new ByteArrayInputStream( + hunk.getBuffer(), start, + length))))) { + DirCacheCheckout.getContent(repository, path, checkOut, + new StreamLoader(() -> inflated, hunk.getSize()), + null, out); + if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) { + throw new PatchApplyException(MessageFormat.format( + JGitText.get().applyBinaryResultOidWrong, + path)); + } + } + try (InputStream bufIn = buffer.openInputStream()) { + Files.copy(bufIn, f.toPath(), + StandardCopyOption.REPLACE_EXISTING); + } + break; + case DELTA_DEFLATED: + // Unfortunately delta application needs random access to the + // base to construct the result. + byte[] base; + try (InputStream input = loader.load()) { + base = IO.readWholeStream(input, 0).array(); + } + // At least stream the result! + try (BinaryDeltaInputStream input = new BinaryDeltaInputStream( + base, + new InflaterInputStream(new BinaryHunkInputStream( + new ByteArrayInputStream(hunk.getBuffer(), + start, length))))) { + long finalSize = input.getExpectedResultSize(); + initHash(hash, finalSize); + try (OutputStream out = buffer; + SHA1InputStream hashed = new SHA1InputStream(hash, + input)) { + DirCacheCheckout.getContent(repository, path, checkOut, + new StreamLoader(() -> hashed, finalSize), null, + out); + if (!fh.getNewId().toObjectId() + .equals(hash.toObjectId())) { + throw new PatchApplyException(MessageFormat.format( + JGitText.get().applyBinaryResultOidWrong, + path)); + } + } + } + try (InputStream bufIn = buffer.openInputStream()) { + Files.copy(bufIn, f.toPath(), + StandardCopyOption.REPLACE_EXISTING); + } + break; + default: + break; + } + } finally { + buffer.destroy(); + } + } + + private void applyText(Repository repository, String path, RawText rt, + File f, FileHeader fh, CheckoutMetadata checkOut) throws IOException, PatchApplyException { - RawText rt = new RawText(f); - List<String> oldLines = new ArrayList<>(rt.size()); - for (int i = 0; i < rt.size(); i++) - oldLines.add(rt.getString(i)); - List<String> newLines = new ArrayList<>(oldLines); + List<ByteBuffer> oldLines = new ArrayList<>(rt.size()); + for (int i = 0; i < rt.size(); i++) { + oldLines.add(rt.getRawString(i)); + } + List<ByteBuffer> newLines = new ArrayList<>(oldLines); int afterLastHunk = 0; int lineNumberShift = 0; int lastHunkNewLine = -1; @@ -182,9 +568,9 @@ public class ApplyCommand extends GitCommand<ApplyResult> { b.length); RawText hrt = new RawText(b); - List<String> hunkLines = new ArrayList<>(hrt.size()); + List<ByteBuffer> hunkLines = new ArrayList<>(hrt.size()); for (int i = 0; i < hrt.size(); i++) { - hunkLines.add(hrt.getString(i)); + hunkLines.add(hrt.getRawString(i)); } if (hh.getNewStartLine() == 0) { @@ -253,8 +639,13 @@ public class ApplyCommand extends GitCommand<ApplyResult> { lineNumberShift = applyAt - hh.getNewStartLine() + 1; int sz = hunkLines.size(); for (int j = 1; j < sz; j++) { - String hunkLine = hunkLines.get(j); - switch (hunkLine.charAt(0)) { + ByteBuffer hunkLine = hunkLines.get(j); + if (!hunkLine.hasRemaining()) { + // Completely empty line; accept as empty context line + applyAt++; + continue; + } + switch (hunkLine.array()[hunkLine.position()]) { case ' ': applyAt++; break; @@ -262,7 +653,7 @@ public class ApplyCommand extends GitCommand<ApplyResult> { newLines.remove(applyAt); 
break; case '+': - newLines.add(applyAt++, hunkLine.substring(1)); + newLines.add(applyAt++, slice(hunkLine, 1)); break; default: break; @@ -271,39 +662,64 @@ public class ApplyCommand extends GitCommand<ApplyResult> { afterLastHunk = applyAt; } if (!isNoNewlineAtEndOfFile(fh)) { - newLines.add(""); //$NON-NLS-1$ + newLines.add(null); } if (!rt.isMissingNewlineAtEnd()) { - oldLines.add(""); //$NON-NLS-1$ - } - if (!isChanged(oldLines, newLines)) { - return; // Don't touch the file - } - try (Writer fw = Files.newBufferedWriter(f.toPath())) { - for (Iterator<String> l = newLines.iterator(); l.hasNext();) { - fw.write(l.next()); - if (l.hasNext()) { - // Don't bother handling line endings - if it was Windows, - // the \r is still there! - fw.write('\n'); + oldLines.add(null); + } + if (oldLines.equals(newLines)) { + return; // Unchanged; don't touch the file + } + + TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null); + try { + try (OutputStream out = buffer) { + for (Iterator<ByteBuffer> l = newLines.iterator(); l + .hasNext();) { + ByteBuffer line = l.next(); + if (line == null) { + // Must be the marker for the final newline + break; + } + out.write(line.array(), line.position(), line.remaining()); + if (l.hasNext()) { + out.write('\n'); + } } } + try (OutputStream output = new FileOutputStream(f)) { + DirCacheCheckout.getContent(repository, path, checkOut, + new StreamLoader(buffer::openInputStream, + buffer.length()), + null, output); + } + } finally { + buffer.destroy(); } - getRepository().getFS().setExecute(f, fh.getNewMode() == FileMode.EXECUTABLE_FILE); + repository.getFS().setExecute(f, + fh.getNewMode() == FileMode.EXECUTABLE_FILE); } - private boolean canApplyAt(List<String> hunkLines, List<String> newLines, - int line) { + private boolean canApplyAt(List<ByteBuffer> hunkLines, + List<ByteBuffer> newLines, int line) { int sz = hunkLines.size(); int limit = newLines.size(); int pos = line; for (int j = 1; j < sz; j++) { - String hunkLine = hunkLines.get(j); - switch (hunkLine.charAt(0)) { + ByteBuffer hunkLine = hunkLines.get(j); + if (!hunkLine.hasRemaining()) { + // Empty line. Accept as empty context line. 
+ if (pos >= limit || newLines.get(pos).hasRemaining()) { + return false; + } + pos++; + continue; + } + switch (hunkLine.array()[hunkLine.position()]) { case ' ': case '-': if (pos >= limit - || !newLines.get(pos).equals(hunkLine.substring(1))) { + || !newLines.get(pos).equals(slice(hunkLine, 1))) { return false; } pos++; @@ -315,13 +731,9 @@ public class ApplyCommand extends GitCommand<ApplyResult> { return true; } - private static boolean isChanged(List<String> ol, List<String> nl) { - if (ol.size() != nl.size()) - return true; - for (int i = 0; i < ol.size(); i++) - if (!ol.get(i).equals(nl.get(i))) - return true; - return false; + private ByteBuffer slice(ByteBuffer b, int off) { + int newOffset = b.position() + off; + return ByteBuffer.wrap(b.array(), newOffset, b.limit() - newOffset); } private boolean isNoNewlineAtEndOfFile(FileHeader fh) { @@ -330,8 +742,51 @@ public class ApplyCommand extends GitCommand<ApplyResult> { return false; } HunkHeader lastHunk = hunks.get(hunks.size() - 1); - RawText lhrt = new RawText(lastHunk.getBuffer()); + byte[] buf = new byte[lastHunk.getEndOffset() + - lastHunk.getStartOffset()]; + System.arraycopy(lastHunk.getBuffer(), lastHunk.getStartOffset(), buf, + 0, buf.length); + RawText lhrt = new RawText(buf); return lhrt.getString(lhrt.size() - 1) .equals("\\ No newline at end of file"); //$NON-NLS-1$ } + + /** + * An {@link InputStream} that updates a {@link SHA1} on every byte read. + * The hash is supposed to have been initialized before reading starts. + */ + private static class SHA1InputStream extends InputStream { + + private final SHA1 hash; + + private final InputStream in; + + SHA1InputStream(SHA1 hash, InputStream in) { + this.hash = hash; + this.in = in; + } + + @Override + public int read() throws IOException { + int b = in.read(); + if (b >= 0) { + hash.update((byte) b); + } + return b; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + int n = in.read(b, off, len); + if (n > 0) { + hash.update(b, off, n); + } + return n; + } + + @Override + public void close() throws IOException { + in.close(); + } + } } |
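
The initHash(...)/hash(File) helpers and SHA1InputStream above reproduce git's object hashing: a blob's object ID is the SHA-1 of the header "blob <size>\0" followed by the content, which is how checkOid verifies the base object and applyBinary verifies the result. Below is a standalone sketch of that format using only the JDK; the class and method names are illustrative and not part of JGit:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class BlobIdExample {
	// Computes a git blob ID: SHA-1 over "blob <size>\0" followed by the content.
	static String blobId(byte[] content) throws Exception {
		MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
		sha1.update(("blob " + content.length + "\0")
				.getBytes(StandardCharsets.US_ASCII));
		sha1.update(content);
		StringBuilder hex = new StringBuilder();
		for (byte b : sha1.digest()) {
			hex.append(String.format("%02x", b & 0xff));
		}
		return hex.toString();
	}

	public static void main(String[] args) throws Exception {
		// The empty blob hashes to the well-known
		// e69de29bb2d1d6434b8b29ae775ad8c2e48c5391, which is why checkOid()
		// special-cases Constants.EMPTY_BLOB_ID for newly added files.
		System.out.println(blobId(new byte[0]));
	}
}

Hashing the stream while it is being inflated (via SHA1InputStream) lets the command verify the result OID without buffering the inflated content a second time.
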