import java.io.ByteArrayOutputStream;
import java.io.IOException;
-import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import org.eclipse.jgit.JGitText;
import org.eclipse.jgit.errors.CorruptObjectException;
-import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.AbbreviatedObjectId;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.patch.FileHeader;
import org.eclipse.jgit.patch.HunkHeader;
import org.eclipse.jgit.patch.FileHeader.PatchType;
-import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.QuotedString;
import org.eclipse.jgit.util.io.DisabledOutputStream;
private RawText.Factory rawTextFactory = RawText.FACTORY;
- private long bigFileThreshold = 50 * 1024 * 1024;
+ private int bigFileThreshold = 50 * 1024 * 1024;
/**
 * Set the maximum blob size, in bytes, that the formatter will load
 * into memory.
 *
 * @param bigFileThreshold
 *            the limit, in bytes.
 */
- public void setBigFileThreshold(long bigFileThreshold) {
+ public void setBigFileThreshold(int bigFileThreshold) {
this.bigFileThreshold = bigFileThreshold;
}
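
A note on the type change: getCachedBytes(int) fills a single Java byte[], whose length can never exceed Integer.MAX_VALUE, so narrowing the threshold to int loses nothing. A minimal usage sketch, assuming an already-opened Repository named repo; the 10 MiB figure and class name are illustrative, not part of this change:

import org.eclipse.jgit.diff.DiffFormatter;
import org.eclipse.jgit.lib.Repository;

class ThresholdSetup {
	// Sketch: cap the formatter at 10 MiB per blob. Larger blobs now
	// surface as LargeObjectException from getCachedBytes(int) instead
	// of silently diffing an empty byte[].
	static DiffFormatter newFormatter(Repository repo) {
		DiffFormatter fmt = new DiffFormatter(System.out);
		fmt.setRepository(repo);
		fmt.setBigFileThreshold(10 * 1024 * 1024); // bytes
		return fmt;
	}
}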
if (db == null)
throw new IllegalStateException(JGitText.get().repositoryIsRequired);
- if (id.isComplete()) {
- ObjectLoader ldr = db.open(id.toObjectId());
- if (!ldr.isLarge())
- return ldr.getCachedBytes();
-
- long sz = ldr.getSize();
- if (sz < bigFileThreshold && sz < Integer.MAX_VALUE) {
- byte[] buf;
- try {
- buf = new byte[(int) sz];
- } catch (OutOfMemoryError noMemory) {
- LargeObjectException e;
-
- e = new LargeObjectException(id.toObjectId());
- e.initCause(noMemory);
- throw e;
- }
- InputStream in = ldr.openStream();
- try {
- IO.readFully(in, buf, 0, buf.length);
- } finally {
- in.close();
- }
- return buf;
- }
- }
-
- return new byte[] {};
+ ObjectLoader ldr = db.open(id.toObjectId());
+ return ldr.getCachedBytes(bigFileThreshold);
}
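
Callers that previously got an empty byte[] back for oversized blobs now see a LargeObjectException from getCachedBytes(int). A sketch of one recovery strategy, falling back to streaming; the helper name dump and the counting loop are illustrative:

import java.io.IOException;
import java.io.InputStream;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.Repository;

class CachedBytesExample {
	// Sketch: try the bounded in-memory path first; fall back to
	// streaming when the loader reports the object as too large.
	static void dump(Repository repo, ObjectId blob) throws IOException {
		ObjectLoader ldr = repo.open(blob);
		try {
			byte[] buf = ldr.getCachedBytes(50 * 1024 * 1024);
			System.out.println(buf.length + " bytes in memory");
		} catch (LargeObjectException tooBig) {
			InputStream in = ldr.openStream();
			try {
				// stream the content instead of buffering it whole
				byte[] tmp = new byte[8192];
				long n = 0;
				for (int r; (r = in.read(tmp)) > 0;)
					n += r;
				System.out.println(n + " bytes streamed");
			} finally {
				in.close();
			}
		}
	}
}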
/**
import org.eclipse.jgit.revwalk.RevTree;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.treewalk.TreeWalk;
-import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
/**
throws MissingObjectException, IncorrectObjectTypeException,
IOException {
ObjectLoader loader = or.open(blobId, Constants.OBJ_BLOB);
- if (loader.isLarge()) {
- ObjectStream in = loader.openStream();
- try {
- byte[] buf = new byte[(int) in.getSize()];
- IO.readFully(in, buf, 0, buf.length);
- return buf;
- } finally {
- in.close();
- }
- }
- return loader.getCachedBytes();
+ return loader.getCachedBytes(Integer.MAX_VALUE);
}
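
Passing Integer.MAX_VALUE keeps this call site's old behavior: the removed path allocated a buffer for the full stream size unconditionally, so the blob is still loaded whole whenever it can fit in a single byte array, with getCachedBytes left to raise LargeObjectException when it cannot.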
/**
/**
* Default big file threshold: {@value}
*
- * @see #setBigFileThreshold(long)
+ * @see #setBigFileThreshold(int)
*/
- public static final long DEFAULT_BIG_FILE_THRESHOLD = 50 * 1024 * 1024;
+ public static final int DEFAULT_BIG_FILE_THRESHOLD = 50 * 1024 * 1024;
/**
* Default delta cache size: {@value}
private int deltaCacheLimit = DEFAULT_DELTA_CACHE_LIMIT;
- private long bigFileThreshold = DEFAULT_BIG_FILE_THRESHOLD;
+ private int bigFileThreshold = DEFAULT_BIG_FILE_THRESHOLD;
private int threads;
*
* @return the configured big file threshold.
*/
- public long getBigFileThreshold() {
+ public int getBigFileThreshold() {
return bigFileThreshold;
}
* @param bigFileThreshold
* the limit, in bytes.
*/
- public void setBigFileThreshold(long bigFileThreshold) {
+ public void setBigFileThreshold(int bigFileThreshold) {
this.bigFileThreshold = bigFileThreshold;
}
setCompressionLevel(rc.getInt("pack", "compression",
rc.getInt("core", "compression", getCompressionLevel())));
setIndexVersion(rc.getInt("pack", "indexversion", getIndexVersion()));
- setBigFileThreshold(rc.getLong("core", "bigfilethreshold", getBigFileThreshold()));
+ setBigFileThreshold(rc.getInt("core", "bigfilethreshold",
+ getBigFileThreshold()));
setThreads(rc.getInt("pack", "threads", getThreads()));
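
With the field now an int, the config read switches to getInt so the parsed value matches the field type directly. A small round-trip sketch; the literal config text and class name are illustrative:

import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.storage.pack.PackConfig;

class PackConfigFromText {
	// Sketch: core.bigfilethreshold is parsed as an int and lands in
	// PackConfig.bigFileThreshold via fromConfig(Config).
	static PackConfig parse() throws ConfigInvalidException {
		Config rc = new Config();
		rc.fromText("[core]\n\tbigfilethreshold = 1048576\n");
		PackConfig pc = new PackConfig();
		pc.fromConfig(rc);
		if (pc.getBigFileThreshold() != 1024 * 1024)
			throw new IllegalStateException("unexpected threshold");
		return pc;
	}
}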
// These variables aren't standardized
import static org.eclipse.jgit.storage.pack.StoredObjectRepresentation.PACK_WHOLE;
import java.io.IOException;
-import java.io.InputStream;
import java.io.OutputStream;
import java.security.MessageDigest;
import java.util.ArrayList;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevSort;
import org.eclipse.jgit.storage.file.PackIndexWriter;
-import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.TemporaryBuffer;
/**
static byte[] buffer(PackConfig config, ObjectReader or, AnyObjectId objId)
throws IOException {
- ObjectLoader ldr = or.open(objId);
- if (!ldr.isLarge())
- return ldr.getCachedBytes();
-
// PackWriter should have already pruned objects that
// are above the big file threshold, so our chances of
// the object being below it are very good. We really
// shouldn't be here, unless the implementation is odd.
- // If it really is too big to work with, abort out now.
- //
- long sz = ldr.getSize();
- if (config.getBigFileThreshold() <= sz || Integer.MAX_VALUE < sz)
- throw new LargeObjectException(objId.copy());
-
- // Its considered to be large by the loader, but we really
- // want it in byte array format. Try to make it happen.
- //
- byte[] buf;
- try {
- buf = new byte[(int) sz];
- } catch (OutOfMemoryError noMemory) {
- LargeObjectException e;
-
- e = new LargeObjectException(objId.copy());
- e.initCause(noMemory);
- throw e;
- }
- InputStream in = ldr.openStream();
- try {
- IO.readFully(in, buf, 0, buf.length);
- } finally {
- in.close();
- }
- return buf;
+ return or.open(objId).getCachedBytes(config.getBigFileThreshold());
}
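
For reference, a sketch of the contract this one-line replacement relies on, reconstructed from the duplicated logic the change deletes; the real ObjectLoader.getCachedBytes(int) may differ in detail:

import java.io.IOException;
import java.io.InputStream;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.util.IO;

class CachedBytesSketch {
	// Equivalent of the removed call-site code: bound the size, convert
	// OutOfMemoryError into LargeObjectException, and read the stream
	// fully into one array.
	static byte[] load(ObjectLoader ldr, AnyObjectId id, int limit)
			throws IOException {
		if (!ldr.isLarge())
			return ldr.getCachedBytes();
		long sz = ldr.getSize();
		if (limit <= sz || Integer.MAX_VALUE < sz)
			throw new LargeObjectException(id.copy());
		byte[] buf;
		try {
			buf = new byte[(int) sz];
		} catch (OutOfMemoryError noMemory) {
			LargeObjectException e = new LargeObjectException(id.copy());
			e.initCause(noMemory);
			throw e;
		}
		InputStream in = ldr.openStream();
		try {
			IO.readFully(in, buf, 0, buf.length);
		} finally {
			in.close();
		}
		return buf;
	}
}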
private Deflater deflater() {