Diffstat (limited to 'org.eclipse.jgit.storage.dht/src/org')
37 files changed, 981 insertions, 2396 deletions
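This commit deletes the hand-rolled TinyProtobuf encoders and decoders (the
fromBytes()/asBytes() switch loops removed below) and stores the same records
as messages generated from GitStore.proto in the
org.eclipse.jgit.generated.storage.dht.proto package. A minimal sketch of the
resulting round-trip, using only builder calls that appear in the hunks below
(compare ChunkFormatter.end()); toByteArray() and parseFrom() are the standard
protobuf-java serializers, the class and parameter names are hypothetical, and
the sketch assumes the message's remaining fields are optional:

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.eclipse.jgit.generated.storage.dht.proto.GitStore;

    class ChunkInfoRoundTrip {
        // Encode with the generated builder in place of TinyProtobuf.Encoder
        // field tags; object counts live in a nested sub-builder.
        static byte[] encode(int chunkSize, int objectsTotal) {
            GitStore.ChunkInfo.Builder b = GitStore.ChunkInfo.newBuilder();
            b.setSource(GitStore.ChunkInfo.Source.RECEIVE);
            b.setChunkSize(chunkSize);
            b.getObjectCountsBuilder().setTotal(objectsTotal);
            return b.build().toByteArray();
        }

        // The generated parser replaces the old fromBytes() loop that
        // dispatched on raw field numbers.
        static GitStore.ChunkInfo decode(byte[] raw)
                throws InvalidProtocolBufferException {
            return GitStore.ChunkInfo.parseFrom(raw);
        }
    }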
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackInfo.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackInfo.java deleted file mode 100644 index 95a5857f1a..0000000000 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackInfo.java +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Copyright (C) 2011, Google Inc. - * and other copyright owners as documented in the project's IP log. - * - * This program and the accompanying materials are made available - * under the terms of the Eclipse Distribution License v1.0 which - * accompanies this distribution, is reproduced below, and is - * available at http://www.eclipse.org/org/documents/edl-v10.php - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials provided - * with the distribution. - * - * - Neither the name of the Eclipse Foundation, Inc. nor the - * names of its contributors may be used to endorse or promote - * products derived from this software without specific prior - * written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND - * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, - * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR - * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.eclipse.jgit.storage.dht; - -import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.SortedSet; -import java.util.TreeSet; - -import org.eclipse.jgit.lib.ObjectId; - -/** - * Summary information about a cached pack owned by a repository. - */ -public class CachedPackInfo { - /** - * Parse info from the storage system. - * - * @param raw - * the raw encoding of the info. - * @return the info object. - */ - public static CachedPackInfo fromBytes(byte[] raw) { - return fromBytes(TinyProtobuf.decode(raw)); - } - - /** - * Parse info from the storage system. - * - * @param d - * decoder for the message buffer. - * @return the info object. 
- */ - public static CachedPackInfo fromBytes(TinyProtobuf.Decoder d) { - CachedPackInfo info = new CachedPackInfo(); - PARSE: for (;;) { - switch (d.next()) { - case 0: - break PARSE; - case 1: - info.name = d.stringObjectId(); - continue; - case 2: - info.version = d.stringObjectId(); - continue; - case 3: - info.objectsTotal = d.int64(); - continue; - case 4: - info.objectsDelta = d.int64(); - continue; - case 5: - info.bytesTotal = d.int64(); - continue; - case 6: { - TinyProtobuf.Decoder m = d.message(); - for (;;) { - switch (m.next()) { - case 0: - continue PARSE; - case 1: - info.tips.add(m.stringObjectId()); - continue; - default: - m.skip(); - continue; - } - } - } - case 7: { - TinyProtobuf.Decoder m = d.message(); - for (;;) { - switch (m.next()) { - case 0: - continue PARSE; - case 1: - info.chunks.add(ChunkKey.fromBytes(m)); - continue; - default: - m.skip(); - continue; - } - } - } - default: - d.skip(); - continue; - } - } - return info; - } - - private static byte[] asBytes(CachedPackInfo info) { - int tipSize = (2 + OBJECT_ID_STRING_LENGTH) * info.tips.size(); - TinyProtobuf.Encoder tipList = TinyProtobuf.encode(tipSize); - for (ObjectId tip : info.tips) - tipList.string(1, tip); - - int chunkSize = (2 + ChunkKey.KEYLEN) * info.chunks.size(); - TinyProtobuf.Encoder chunkList = TinyProtobuf.encode(chunkSize); - for (ChunkKey key : info.chunks) - chunkList.bytes(1, key.asBytes()); - - TinyProtobuf.Encoder e = TinyProtobuf.encode(1024); - e.string(1, info.name); - e.string(2, info.version); - e.int64(3, info.objectsTotal); - e.int64IfNotZero(4, info.objectsDelta); - e.int64IfNotZero(5, info.bytesTotal); - e.message(6, tipList); - e.message(7, chunkList); - return e.asByteArray(); - } - - ObjectId name; - - ObjectId version; - - SortedSet<ObjectId> tips = new TreeSet<ObjectId>(); - - long objectsTotal; - - long objectsDelta; - - long bytesTotal; - - List<ChunkKey> chunks = new ArrayList<ChunkKey>(); - - /** @return name of the information object. */ - public CachedPackKey getRowKey() { - return new CachedPackKey(name, version); - } - - /** @return number of objects stored in the cached pack. */ - public long getObjectsTotal() { - return objectsTotal; - } - - /** @return number of objects stored in delta format. */ - public long getObjectsDelta() { - return objectsDelta; - } - - /** @return number of bytes in the cached pack. */ - public long getTotalBytes() { - return bytesTotal; - } - - /** @return list of all chunks that make up this pack, in order. */ - public List<ChunkKey> getChunkKeys() { - return Collections.unmodifiableList(chunks); - } - - /** - * Convert this information into a byte array for storage. - * - * @return the data, encoded as a byte array. This does not include the key, - * callers must store that separately. 
- */ - public byte[] asBytes() { - return asBytes(this); - } - - @Override - public String toString() { - return getRowKey().toString(); - } -} diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackKey.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackKey.java index 0fc14f9e23..274cc68d87 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackKey.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackKey.java @@ -47,6 +47,7 @@ import static org.eclipse.jgit.util.RawParseUtils.decode; import java.text.MessageFormat; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo; import org.eclipse.jgit.lib.ObjectId; /** Unique identifier of a {@link CachedPackInfo} in the DHT. */ @@ -62,18 +63,6 @@ public final class CachedPackKey implements RowKey { } /** - * @param d - * decoder to read key from current field from. - * @return the key - */ - public static CachedPackKey fromBytes(TinyProtobuf.Decoder d) { - int len = d.bytesLength(); - int ptr = d.bytesOffset(); - byte[] buf = d.bytesArray(); - return fromBytes(buf, ptr, len); - } - - /** * @param key * @param ptr * @param len @@ -100,6 +89,16 @@ public final class CachedPackKey implements RowKey { return new CachedPackKey(name, vers); } + /** + * @param info + * @return the key + */ + public static CachedPackKey fromInfo(CachedPackInfo info) { + ObjectId name = ObjectId.fromString(info.getName()); + ObjectId vers = ObjectId.fromString(info.getVersion()); + return new CachedPackKey(name, vers); + } + private final ObjectId name; private final ObjectId version; diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkFormatter.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkFormatter.java index 27c520bc9c..011cfb06ec 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkFormatter.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkFormatter.java @@ -52,10 +52,13 @@ import java.util.List; import java.util.Map; import java.util.zip.Deflater; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType; import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.ObjectId; -import org.eclipse.jgit.storage.dht.ChunkMeta.BaseChunk; import org.eclipse.jgit.storage.dht.spi.Database; import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.transport.PackedObjectInfo; @@ -75,8 +78,6 @@ class ChunkFormatter { private final byte[] varIntBuf; - private final ChunkInfo info; - private final int maxObjects; private Map<ChunkKey, BaseChunkInfo> baseChunks; @@ -95,25 +96,35 @@ class ChunkFormatter { private PackChunk.Members builder; + private GitStore.ChunkInfo.Source source; + + private boolean fragment; + + private int objectType; + + private int objectsTotal, objectsWhole, objectsRefDelta, objectsOfsDelta; + + private ChunkInfo chunkInfo; + ChunkFormatter(RepositoryKey repo, DhtInserterOptions options) { this.repo = repo; this.options = options; this.varIntBuf = new byte[32]; - this.info = new ChunkInfo(); this.chunkData = new byte[options.getChunkSize()]; this.maxObjects = options.getMaxObjectCount(); + this.objectType 
= -1; } - void setSource(ChunkInfo.Source src) { - info.source = src; + void setSource(GitStore.ChunkInfo.Source src) { + source = src; } void setObjectType(int type) { - info.objectType = type; + objectType = type; } void setFragment() { - info.fragment = true; + fragment = true; } ChunkKey getChunkKey() { @@ -121,7 +132,7 @@ class ChunkFormatter { } ChunkInfo getChunkInfo() { - return info; + return chunkInfo; } ChunkMeta getChunkMeta() { @@ -150,37 +161,58 @@ class ChunkFormatter { ptr += 4; md.update(chunkData, 0, ptr); - info.chunkKey = ChunkKey.create(repo, ObjectId.fromRaw(md.digest())); - info.chunkSize = chunkData.length; + ChunkKey key = ChunkKey.create(repo, ObjectId.fromRaw(md.digest())); + + GitStore.ChunkInfo.Builder info = GitStore.ChunkInfo.newBuilder(); + info.setSource(source); + info.setObjectType(GitStore.ChunkInfo.ObjectType.valueOf(objectType)); + if (fragment) + info.setIsFragment(true); + info.setChunkSize(chunkData.length); + + GitStore.ChunkInfo.ObjectCounts.Builder cnts = info.getObjectCountsBuilder(); + cnts.setTotal(objectsTotal); + if (objectsWhole > 0) + cnts.setWhole(objectsWhole); + if (objectsRefDelta > 0) + cnts.setRefDelta(objectsRefDelta); + if (objectsOfsDelta > 0) + cnts.setOfsDelta(objectsOfsDelta); builder = new PackChunk.Members(); - builder.setChunkKey(info.chunkKey); + builder.setChunkKey(key); builder.setChunkData(chunkData); - ChunkMeta meta = new ChunkMeta(info.chunkKey); if (baseChunks != null) { - meta.baseChunks = new ArrayList<BaseChunk>(baseChunks.size()); + List<BaseChunk> list = new ArrayList<BaseChunk>(baseChunks.size()); for (BaseChunkInfo b : baseChunks.values()) { - if (0 < b.useCount) - meta.baseChunks.add(new BaseChunk(b.relativeStart, b.key)); + if (0 < b.useCount) { + BaseChunk.Builder c = BaseChunk.newBuilder(); + c.setRelativeStart(b.relativeStart); + c.setChunkKey(b.key.asString()); + list.add(c.build()); + } } - Collections.sort(meta.baseChunks, new Comparator<BaseChunk>() { + Collections.sort(list, new Comparator<BaseChunk>() { public int compare(BaseChunk a, BaseChunk b) { - return Long.signum(a.relativeStart - b.relativeStart); + return Long.signum(a.getRelativeStart() + - b.getRelativeStart()); } }); - } - if (!meta.isEmpty()) { + ChunkMeta.Builder b = ChunkMeta.newBuilder(); + b.addAllBaseChunk(list); + ChunkMeta meta = b.build(); builder.setMeta(meta); - info.metaSize = meta.asBytes().length; + info.setMetaSize(meta.getSerializedSize()); } if (objectList != null && !objectList.isEmpty()) { byte[] index = ChunkIndex.create(objectList); builder.setChunkIndex(index); - info.indexSize = index.length; + info.setIndexSize(index.length); } + chunkInfo = new ChunkInfo(key, info.build()); return getChunkKey(); } @@ -198,7 +230,7 @@ class ChunkFormatter { void safePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException { WriteBuffer chunkBuf = db.newWriteBuffer(); - db.repository().put(repo, info, chunkBuf); + db.repository().put(repo, getChunkInfo(), chunkBuf); chunkBuf.flush(); db.chunk().put(builder, chunkBuf); @@ -208,7 +240,7 @@ class ChunkFormatter { } void unsafePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException { - db.repository().put(repo, info, dbWriteBuffer); + db.repository().put(repo, getChunkInfo(), dbWriteBuffer); db.chunk().put(builder, dbWriteBuffer); linkObjects(db, dbWriteBuffer); } @@ -225,11 +257,11 @@ class ChunkFormatter { boolean whole(Deflater def, int type, byte[] data, int off, final int size, ObjectId objId) { - if (free() < 10 || maxObjects <= info.objectsTotal) + if (free() < 10 
|| maxObjects <= objectsTotal) return false; header(type, size); - info.objectsWhole++; + objectsWhole++; currentObjectType = type; int endOfHeader = ptr; @@ -257,20 +289,20 @@ class ChunkFormatter { final int packedSize = ptr - endOfHeader; objectList.add(new StoredObject(objId, type, mark, packedSize, size)); - if (info.objectType < 0) - info.objectType = type; - else if (info.objectType != type) - info.objectType = ChunkInfo.OBJ_MIXED; + if (objectType < 0) + objectType = type; + else if (objectType != type) + objectType = ChunkInfo.OBJ_MIXED; return true; } boolean whole(int type, long inflatedSize) { - if (free() < 10 || maxObjects <= info.objectsTotal) + if (free() < 10 || maxObjects <= objectsTotal) return false; header(type, inflatedSize); - info.objectsWhole++; + objectsWhole++; currentObjectType = type; return true; } @@ -278,11 +310,11 @@ class ChunkFormatter { boolean ofsDelta(long inflatedSize, long negativeOffset) { final int ofsPtr = encodeVarInt(negativeOffset); final int ofsLen = varIntBuf.length - ofsPtr; - if (free() < 10 + ofsLen || maxObjects <= info.objectsTotal) + if (free() < 10 + ofsLen || maxObjects <= objectsTotal) return false; header(Constants.OBJ_OFS_DELTA, inflatedSize); - info.objectsOfsDelta++; + objectsOfsDelta++; currentObjectType = Constants.OBJ_OFS_DELTA; currentObjectBase = null; @@ -294,11 +326,11 @@ class ChunkFormatter { } boolean refDelta(long inflatedSize, AnyObjectId baseId) { - if (free() < 30 || maxObjects <= info.objectsTotal) + if (free() < 30 || maxObjects <= objectsTotal) return false; header(Constants.OBJ_REF_DELTA, inflatedSize); - info.objectsRefDelta++; + objectsRefDelta++; currentObjectType = Constants.OBJ_REF_DELTA; baseId.copyRawTo(chunkData, ptr); @@ -345,7 +377,7 @@ class ChunkFormatter { } int getObjectCount() { - return info.objectsTotal; + return objectsTotal; } int position() { @@ -374,32 +406,32 @@ class ChunkFormatter { } void adjustObjectCount(int delta, int type) { - info.objectsTotal += delta; + objectsTotal += delta; switch (type) { case Constants.OBJ_COMMIT: case Constants.OBJ_TREE: case Constants.OBJ_BLOB: case Constants.OBJ_TAG: - info.objectsWhole += delta; + objectsWhole += delta; break; case Constants.OBJ_OFS_DELTA: - info.objectsOfsDelta += delta; + objectsOfsDelta += delta; if (currentObjectBase != null && --currentObjectBase.useCount == 0) baseChunks.remove(currentObjectBase.key); currentObjectBase = null; break; case Constants.OBJ_REF_DELTA: - info.objectsRefDelta += delta; + objectsRefDelta += delta; break; } } private void header(int type, long inflatedSize) { mark = ptr; - info.objectsTotal++; + objectsTotal++; long nextLength = inflatedSize >>> 4; chunkData[ptr++] = (byte) ((nextLength > 0 ? 
0x80 : 0x00) | (type << 4) | (inflatedSize & 0x0F)); @@ -454,8 +486,12 @@ class ChunkFormatter { } ObjectInfo link(ChunkKey key) { - final int ptr = (int) getOffset(); - return new ObjectInfo(key, -1, type, ptr, packed, inflated, null, false); + GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder(); + b.setObjectType(ObjectType.valueOf(type)); + b.setOffset((int) getOffset()); + b.setPackedSize(packed); + b.setInflatedSize(inflated); + return new ObjectInfo(key, b.build()); } } } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkInfo.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkInfo.java index 5282a1d4ee..2c156c8a68 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkInfo.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkInfo.java @@ -43,150 +43,32 @@ package org.eclipse.jgit.storage.dht; -import org.eclipse.jgit.lib.Constants; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore; /** * Summary information about a chunk owned by a repository. */ public class ChunkInfo { - /** Source the chunk (what code path created it). */ - public static enum Source implements TinyProtobuf.Enum { - /** Came in over the network from an external source */ - RECEIVE(1), - /** Created in this repository (e.g. a merge). */ - INSERT(2), - /** Generated during a repack of this repository. */ - REPACK(3); - - private final int value; - - Source(int val) { - this.value = val; - } - - public int value() { - return value; - } - } - /** Mixed objects are stored in the chunk (instead of single type). */ public static final int OBJ_MIXED = 0; + private final ChunkKey chunkKey; + + private final GitStore.ChunkInfo data; + /** - * Parse info from the storage system. + * Wrap a ChunkInfo message. * - * @param chunkKey - * the chunk the link points to. - * @param raw - * the raw encoding of the info. - * @return the info object. + * @param key + * associated chunk key. + * @param data + * data. 
*/ - public static ChunkInfo fromBytes(ChunkKey chunkKey, byte[] raw) { - ChunkInfo info = new ChunkInfo(); - info.chunkKey = chunkKey; - - TinyProtobuf.Decoder d = TinyProtobuf.decode(raw); - PARSE: for (;;) { - switch (d.next()) { - case 0: - break PARSE; - case 1: - info.source = d.intEnum(Source.values()); - continue; - case 2: - info.objectType = d.int32(); - continue; - case 3: - info.fragment = d.bool(); - continue; - case 4: - info.cachedPack = CachedPackKey.fromBytes(d); - continue; - - case 5: { - TinyProtobuf.Decoder m = d.message(); - for (;;) { - switch (m.next()) { - case 0: - continue PARSE; - case 1: - info.objectsTotal = m.int32(); - continue; - case 2: - info.objectsWhole = m.int32(); - continue; - case 3: - info.objectsOfsDelta = m.int32(); - continue; - case 4: - info.objectsRefDelta = m.int32(); - continue; - default: - m.skip(); - continue; - } - } - } - case 6: - info.chunkSize = d.int32(); - continue; - case 7: - info.indexSize = d.int32(); - continue; - case 8: - info.metaSize = d.int32(); - continue; - default: - d.skip(); - continue; - } - } - return info; - } - - private static byte[] asBytes(ChunkInfo info) { - TinyProtobuf.Encoder objects = TinyProtobuf.encode(48); - objects.int32IfNotZero(1, info.objectsTotal); - objects.int32IfNotZero(2, info.objectsWhole); - objects.int32IfNotZero(3, info.objectsOfsDelta); - objects.int32IfNotZero(4, info.objectsRefDelta); - - TinyProtobuf.Encoder e = TinyProtobuf.encode(128); - e.intEnum(1, info.source); - e.int32IfNotNegative(2, info.objectType); - e.boolIfTrue(3, info.fragment); - e.string(4, info.cachedPack); - e.message(5, objects); - e.int32IfNotZero(6, info.chunkSize); - e.int32IfNotZero(7, info.indexSize); - e.int32IfNotZero(8, info.metaSize); - return e.asByteArray(); + public ChunkInfo(ChunkKey key, GitStore.ChunkInfo data) { + this.chunkKey = key; + this.data = data; } - ChunkKey chunkKey; - - Source source; - - int objectType = -1; - - boolean fragment; - - CachedPackKey cachedPack; - - int objectsTotal; - - int objectsWhole; - - int objectsOfsDelta; - - int objectsRefDelta; - - int chunkSize; - - int indexSize; - - int metaSize; - /** @return the repository that contains the chunk. */ public RepositoryKey getRepositoryKey() { return chunkKey.getRepositoryKey(); @@ -197,69 +79,9 @@ public class ChunkInfo { return chunkKey; } - /** @return source of this chunk. */ - public Source getSource() { - return source; - } - - /** @return type of object in the chunk, or {@link #OBJ_MIXED}. */ - public int getObjectType() { - return objectType; - } - - /** @return true if this chunk is part of a large fragmented object. */ - public boolean isFragment() { - return fragment; - } - - /** @return cached pack this is a member of, or null. */ - public CachedPackKey getCachedPack() { - return cachedPack; - } - - /** @return size of the chunk's compressed data, in bytes. */ - public int getChunkSizeInBytes() { - return chunkSize; - } - - /** @return size of the chunk's index data, in bytes. */ - public int getIndexSizeInBytes() { - return indexSize; - } - - /** @return size of the chunk's meta data, in bytes. */ - public int getMetaSizeInBytes() { - return metaSize; - } - - /** @return number of objects stored in the chunk. */ - public int getObjectsTotal() { - return objectsTotal; - } - - /** @return number of whole objects stored in the chunk. */ - public int getObjectsWhole() { - return objectsWhole; - } - - /** @return number of OFS_DELTA objects stored in the chunk. 
*/ - public int getObjectsOffsetDelta() { - return objectsOfsDelta; - } - - /** @return number of REF_DELTA objects stored in the chunk. */ - public int getObjectsReferenceDelta() { - return objectsRefDelta; - } - - /** - * Convert this link into a byte array for storage. - * - * @return the link data, encoded as a byte array. This does not include the - * ChunkKey, callers must store that separately. - */ - public byte[] asBytes() { - return asBytes(this); + /** @return the underlying message containing all data. */ + public GitStore.ChunkInfo getData() { + return data; } @Override @@ -267,20 +89,8 @@ public class ChunkInfo { StringBuilder b = new StringBuilder(); b.append("ChunkInfo:"); b.append(chunkKey); - b.append(" ["); - if (getSource() != null) - b.append(" ").append(getSource()); - if (isFragment()) - b.append(" fragment"); - if (getObjectType() != 0) - b.append(" ").append(Constants.typeString(getObjectType())); - if (0 < getObjectsTotal()) - b.append(" objects=").append(getObjectsTotal()); - if (0 < getChunkSizeInBytes()) - b.append(" chunk=").append(getChunkSizeInBytes()).append("B"); - if (0 < getIndexSizeInBytes()) - b.append(" index=").append(getIndexSizeInBytes()).append("B"); - b.append(" ]"); + b.append("\n"); + b.append(data); return b.toString(); } } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkKey.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkKey.java index e136df268a..272b5ea173 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkKey.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkKey.java @@ -74,18 +74,6 @@ public final class ChunkKey implements RowKey { } /** - * @param d - * decoder to read key from current field from. - * @return the key - */ - public static ChunkKey fromBytes(TinyProtobuf.Decoder d) { - int len = d.bytesLength(); - int ptr = d.bytesOffset(); - byte[] buf = d.bytesArray(); - return fromBytes(buf, ptr, len); - } - - /** * @param key * @param ptr * @param len diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMeta.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMeta.java deleted file mode 100644 index a02382b5c0..0000000000 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMeta.java +++ /dev/null @@ -1,391 +0,0 @@ -/* - * Copyright (C) 2011, Google Inc. - * and other copyright owners as documented in the project's IP log. - * - * This program and the accompanying materials are made available - * under the terms of the Eclipse Distribution License v1.0 which - * accompanies this distribution, is reproduced below, and is - * available at http://www.eclipse.org/org/documents/edl-v10.php - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials provided - * with the distribution. - * - * - Neither the name of the Eclipse Foundation, Inc. nor the - * names of its contributors may be used to endorse or promote - * products derived from this software without specific prior - * written permission. 
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND - * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, - * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR - * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.eclipse.jgit.storage.dht; - -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -/** Metadata stored inline with each PackChunk. */ -public class ChunkMeta { - /** - * Convert from byte array. - * - * @param key - * the chunk key this meta object sits in. - * @param raw - * the raw byte array. - * @return the chunk meta. - */ - public static ChunkMeta fromBytes(ChunkKey key, byte[] raw) { - return fromBytes(key, TinyProtobuf.decode(raw)); - } - - /** - * Convert from byte array. - * - * @param key - * the chunk key this meta object sits in. - * @param d - * the message decoder. - * @return the chunk meta. - */ - public static ChunkMeta fromBytes(ChunkKey key, TinyProtobuf.Decoder d) { - List<BaseChunk> baseChunk = null; - List<ChunkKey> fragment = null; - PrefetchHint commit = null; - PrefetchHint tree = null; - - PARSE: for (;;) { - switch (d.next()) { - case 0: - break PARSE; - case 1: - if (baseChunk == null) - baseChunk = new ArrayList<BaseChunk>(4); - baseChunk.add(BaseChunk.fromBytes(d.message())); - continue; - case 2: - if (fragment == null) - fragment = new ArrayList<ChunkKey>(4); - fragment.add(ChunkKey.fromBytes(d)); - continue; - case 51: - commit = PrefetchHint.fromBytes(d.message()); - continue; - case 52: - tree = PrefetchHint.fromBytes(d.message()); - continue; - default: - d.skip(); - continue; - } - } - - return new ChunkMeta(key, baseChunk, fragment, commit, tree); - } - - private final ChunkKey chunkKey; - - List<BaseChunk> baseChunks; - - List<ChunkKey> fragments; - - PrefetchHint commitPrefetch; - - PrefetchHint treePrefetch; - - ChunkMeta(ChunkKey key) { - this(key, null, null, null, null); - } - - ChunkMeta(ChunkKey chunkKey, List<BaseChunk> baseChunk, - List<ChunkKey> fragment, PrefetchHint commit, PrefetchHint tree) { - this.chunkKey = chunkKey; - this.baseChunks = baseChunk; - this.fragments = fragment; - this.commitPrefetch = commit; - this.treePrefetch = tree; - } - - /** @return key of the chunk this meta information is for. */ - public ChunkKey getChunkKey() { - return chunkKey; - } - - BaseChunk getBaseChunk(long position) throws DhtException { - // Chunks are sorted by ascending relative_start order. 
- // Thus for a pack sequence of: A B C, we have: - // - // -- C relative_start = 10,000 - // -- B relative_start = 20,000 - // -- A relative_start = 30,000 - // - // Indicating that chunk C starts 10,000 bytes before us, - // chunk B starts 20,000 bytes before us (and 10,000 before C), - // chunk A starts 30,000 bytes before us (and 10,000 before B), - // - // If position falls within: - // - // -- C (10k), then position is between 0..10,000 - // -- B (20k), then position is between 10,000 .. 20,000 - // -- A (30k), then position is between 20,000 .. 30,000 - - int high = baseChunks.size(); - int low = 0; - while (low < high) { - final int mid = (low + high) >>> 1; - final BaseChunk base = baseChunks.get(mid); - - if (position > base.relativeStart) { - low = mid + 1; - - } else if (mid == 0 || position == base.relativeStart) { - return base; - - } else if (baseChunks.get(mid - 1).relativeStart < position) { - return base; - - } else { - high = mid; - } - } - - throw new DhtException(MessageFormat.format( - DhtText.get().missingLongOffsetBase, chunkKey, - Long.valueOf(position))); - } - - /** @return number of fragment chunks that make up the object. */ - public int getFragmentCount() { - return fragments != null ? fragments.size() : 0; - } - - /** - * Get the nth fragment key. - * - * @param nth - * @return the key. - */ - public ChunkKey getFragmentKey(int nth) { - return fragments.get(nth); - } - - /** - * Find the key of the fragment that occurs after this chunk. - * - * @param currentKey - * the current chunk key. - * @return next chunk after this; null if there isn't one. - */ - public ChunkKey getNextFragment(ChunkKey currentKey) { - for (int i = 0; i < fragments.size() - 1; i++) { - if (fragments.get(i).equals(currentKey)) - return fragments.get(i + 1); - } - return null; - } - - /** @return chunks to visit. */ - public PrefetchHint getCommitPrefetch() { - return commitPrefetch; - } - - /** @return chunks to visit. */ - public PrefetchHint getTreePrefetch() { - return treePrefetch; - } - - /** @return true if there is no data in this object worth storing. */ - boolean isEmpty() { - if (baseChunks != null && !baseChunks.isEmpty()) - return false; - if (fragments != null && !fragments.isEmpty()) - return false; - if (commitPrefetch != null && !commitPrefetch.isEmpty()) - return false; - if (treePrefetch != null && !treePrefetch.isEmpty()) - return false; - return true; - } - - /** @return format as byte array for storage. */ - public byte[] asBytes() { - TinyProtobuf.Encoder e = TinyProtobuf.encode(256); - - if (baseChunks != null) { - for (BaseChunk base : baseChunks) - e.message(1, base.asBytes()); - } - - if (fragments != null) { - for (ChunkKey key : fragments) - e.bytes(2, key.asBytes()); - } - - if (commitPrefetch != null) - e.message(51, commitPrefetch.asBytes()); - if (treePrefetch != null) - e.message(52, treePrefetch.asBytes()); - - return e.asByteArray(); - } - - /** Describes other chunks that contain the bases for this chunk's deltas. */ - public static class BaseChunk { - final long relativeStart; - - private final ChunkKey chunk; - - BaseChunk(long relativeStart, ChunkKey chunk) { - this.relativeStart = relativeStart; - this.chunk = chunk; - } - - /** @return bytes backward from current chunk to start of base chunk. */ - public long getRelativeStart() { - return relativeStart; - } - - /** @return unique key of this chunk. 
*/ - public ChunkKey getChunkKey() { - return chunk; - } - - TinyProtobuf.Encoder asBytes() { - int max = 11 + 2 + ChunkKey.KEYLEN; - TinyProtobuf.Encoder e = TinyProtobuf.encode(max); - e.int64(1, relativeStart); - e.bytes(2, chunk.asBytes()); - return e; - } - - static BaseChunk fromBytes(TinyProtobuf.Decoder d) { - long relativeStart = -1; - ChunkKey chunk = null; - - PARSE: for (;;) { - switch (d.next()) { - case 0: - break PARSE; - case 1: - relativeStart = d.int64(); - continue; - case 2: - chunk = ChunkKey.fromBytes(d); - continue; - default: - d.skip(); - continue; - } - } - - return new BaseChunk(relativeStart, chunk); - } - } - - /** Describes the prefetching for a particular object type. */ - public static class PrefetchHint { - private final List<ChunkKey> edge; - - private final List<ChunkKey> sequential; - - PrefetchHint(List<ChunkKey> edge, List<ChunkKey> sequential) { - if (edge == null) - edge = Collections.emptyList(); - else - edge = Collections.unmodifiableList(edge); - - if (sequential == null) - sequential = Collections.emptyList(); - else - sequential = Collections.unmodifiableList(sequential); - - this.edge = edge; - this.sequential = sequential; - } - - /** @return chunks on the edge of this chunk. */ - public List<ChunkKey> getEdge() { - return edge; - } - - /** @return chunks according to sequential ordering. */ - public List<ChunkKey> getSequential() { - return sequential; - } - - boolean isEmpty() { - return edge.isEmpty() && sequential.isEmpty(); - } - - TinyProtobuf.Encoder asBytes() { - int max = 0; - - max += (2 + ChunkKey.KEYLEN) * edge.size(); - max += (2 + ChunkKey.KEYLEN) * sequential.size(); - - TinyProtobuf.Encoder e = TinyProtobuf.encode(max); - for (ChunkKey key : edge) - e.bytes(1, key.asBytes()); - for (ChunkKey key : sequential) - e.bytes(2, key.asBytes()); - return e; - } - - static PrefetchHint fromBytes(TinyProtobuf.Decoder d) { - ArrayList<ChunkKey> edge = null; - ArrayList<ChunkKey> sequential = null; - - PARSE: for (;;) { - switch (d.next()) { - case 0: - break PARSE; - case 1: - if (edge == null) - edge = new ArrayList<ChunkKey>(16); - edge.add(ChunkKey.fromBytes(d)); - continue; - case 2: - if (sequential == null) - sequential = new ArrayList<ChunkKey>(16); - sequential.add(ChunkKey.fromBytes(d)); - continue; - default: - d.skip(); - continue; - } - } - - if (edge != null) - edge.trimToSize(); - - if (sequential != null) - sequential.trimToSize(); - - return new PrefetchHint(edge, sequential); - } - } -} diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMetaUtil.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMetaUtil.java new file mode 100644 index 0000000000..7bc6439172 --- /dev/null +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMetaUtil.java @@ -0,0 +1,111 @@ +/* + * Copyright (C) 2011, Google Inc. + * and other copyright owners as documented in the project's IP log. + * + * This program and the accompanying materials are made available + * under the terms of the Eclipse Distribution License v1.0 which + * accompanies this distribution, is reproduced below, and is + * available at http://www.eclipse.org/org/documents/edl-v10.php + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or + * without modification, are permitted provided that the following + * conditions are met: + * + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. 
+ * + * - Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * + * - Neither the name of the Eclipse Foundation, Inc. nor the + * names of its contributors may be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.eclipse.jgit.storage.dht; + +import java.text.MessageFormat; +import java.util.List; + +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk; + +class ChunkMetaUtil { + static BaseChunk getBaseChunk(ChunkKey chunkKey, ChunkMeta meta, + long position) throws DhtException { + // Chunks are sorted by ascending relative_start order. + // Thus for a pack sequence of: A B C, we have: + // + // -- C relative_start = 10,000 + // -- B relative_start = 20,000 + // -- A relative_start = 30,000 + // + // Indicating that chunk C starts 10,000 bytes before us, + // chunk B starts 20,000 bytes before us (and 10,000 before C), + // chunk A starts 30,000 bytes before us (and 10,000 before B), + // + // If position falls within: + // + // -- C (10k), then position is between 0..10,000 + // -- B (20k), then position is between 10,000 .. 20,000 + // -- A (30k), then position is between 20,000 .. 30,000 + + List<BaseChunk> baseChunks = meta.getBaseChunkList(); + int high = baseChunks.size(); + int low = 0; + while (low < high) { + final int mid = (low + high) >>> 1; + final BaseChunk base = baseChunks.get(mid); + + if (position > base.getRelativeStart()) { + low = mid + 1; + + } else if (mid == 0 || position == base.getRelativeStart()) { + return base; + + } else if (baseChunks.get(mid - 1).getRelativeStart() < position) { + return base; + + } else { + high = mid; + } + } + + throw new DhtException(MessageFormat.format( + DhtText.get().missingLongOffsetBase, chunkKey, + Long.valueOf(position))); + } + + static ChunkKey getNextFragment(ChunkMeta meta, ChunkKey chunkKey) { + int cnt = meta.getFragmentCount(); + for (int i = 0; i < cnt - 1; i++) { + ChunkKey key = ChunkKey.fromString(meta.getFragment(i)); + if (chunkKey.equals(key)) + return ChunkKey.fromString(meta.getFragment(i + 1)); + } + return null; + } + + private ChunkMetaUtil() { + // Static utilities only, do not create instances. 
+ } +} diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtCachedPack.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtCachedPack.java index 13af792e0e..39a76463fb 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtCachedPack.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtCachedPack.java @@ -45,12 +45,16 @@ package org.eclipse.jgit.storage.dht; import java.io.IOException; import java.text.MessageFormat; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.storage.pack.CachedPack; import org.eclipse.jgit.storage.pack.ObjectToPack; @@ -61,7 +65,11 @@ import org.eclipse.jgit.storage.pack.StoredObjectRepresentation; public class DhtCachedPack extends CachedPack { private final CachedPackInfo info; - private Set<ChunkKey> chunkKeySet; + private Set<ObjectId> tips; + + private Set<ChunkKey> keySet; + + private ChunkKey[] keyList; DhtCachedPack(CachedPackInfo info) { this.info = info; @@ -69,7 +77,13 @@ public class DhtCachedPack extends CachedPack { @Override public Set<ObjectId> getTips() { - return Collections.unmodifiableSet(info.tips); + if (tips == null) { + tips = new HashSet<ObjectId>(); + for (String idString : info.getTipList().getObjectNameList()) + tips.add(ObjectId.fromString(idString)); + tips = Collections.unmodifiableSet(tips); + } + return tips; } @Override @@ -90,23 +104,37 @@ public class DhtCachedPack extends CachedPack { @Override public boolean hasObject(ObjectToPack obj, StoredObjectRepresentation rep) { DhtObjectRepresentation objrep = (DhtObjectRepresentation) rep; - if (chunkKeySet == null) - chunkKeySet = new HashSet<ChunkKey>(info.chunks); - return chunkKeySet.contains(objrep.getChunkKey()); + if (keySet == null) + init(); + return keySet.contains(objrep.getChunkKey()); + } + + private void init() { + ChunkList chunkList = info.getChunkList(); + int cnt = chunkList.getChunkKeyCount(); + keySet = new HashSet<ChunkKey>(); + keyList = new ChunkKey[cnt]; + for (int i = 0; i < cnt; i++) { + ChunkKey key = ChunkKey.fromString(chunkList.getChunkKey(i)); + keySet.add(key); + keyList[i] = key; + } } void copyAsIs(PackOutputStream out, boolean validate, DhtReader ctx) throws IOException { + if (keyList == null) + init(); Prefetcher p = new Prefetcher(ctx, 0); - p.push(info.chunks); - copyPack(out, ctx, p, validate); + p.push(Arrays.asList(keyList)); + copyPack(out, p, validate); } - private void copyPack(PackOutputStream out, DhtReader ctx, - Prefetcher prefetcher, boolean validate) throws DhtException, - DhtMissingChunkException, IOException { - Map<ChunkKey, Long> startsAt = new HashMap<ChunkKey, Long>(); - for (ChunkKey key : info.chunks) { + private void copyPack(PackOutputStream out, Prefetcher prefetcher, + boolean validate) throws DhtException, DhtMissingChunkException, + IOException { + Map<String, Long> startsAt = new HashMap<String, Long>(); + for (ChunkKey key : keyList) { PackChunk chunk = prefetcher.get(key); // The prefetcher should always produce the chunk for us, if not @@ -122,29 +150,34 @@ public class DhtCachedPack extends CachedPack { // incorrectly created and 
would confuse the client. // long position = out.length(); - if (chunk.getMeta() != null && chunk.getMeta().baseChunks != null) { - for (ChunkMeta.BaseChunk base : chunk.getMeta().baseChunks) { + ChunkMeta meta = chunk.getMeta(); + if (meta != null && meta.getBaseChunkCount() != 0) { + for (ChunkMeta.BaseChunk base : meta.getBaseChunkList()) { Long act = startsAt.get(base.getChunkKey()); long exp = position - base.getRelativeStart(); if (act == null) { throw new DhtException(MessageFormat.format(DhtText - .get().wrongChunkPositionInCachedPack, info - .getRowKey(), base.getChunkKey(), - "[not written]", key, exp)); + .get().wrongChunkPositionInCachedPack, + rowKey(), base.getChunkKey(), + "[not written]", key, Long.valueOf(exp))); } if (act.longValue() != exp) { throw new DhtException(MessageFormat.format(DhtText - .get().wrongChunkPositionInCachedPack, info - .getRowKey(), base.getChunkKey(), // - act, key, exp)); + .get().wrongChunkPositionInCachedPack, + rowKey(), base.getChunkKey(), + act, key, Long.valueOf(exp))); } } } - startsAt.put(key, Long.valueOf(position)); + startsAt.put(key.asString(), Long.valueOf(position)); chunk.copyEntireChunkAsIs(out, null, validate); } } + + private String rowKey() { + return info.getName() + "." + info.getVersion(); + } } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserter.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserter.java index 997f4b4d21..4ae4cf58ed 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserter.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserter.java @@ -53,6 +53,9 @@ import java.util.Collections; import java.util.LinkedList; import java.util.zip.Deflater; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectInserter; @@ -174,8 +177,13 @@ class DhtInserter extends ObjectInserter { ChunkKey key = chunk.end(chunkDigest); chunk.setChunkIndex(Collections.singletonList(oe)); chunk.safePut(db, dbBuffer()); - ObjectInfo info = new ObjectInfo(key, -1, type, position, - packedSize, inflatedSize, null, false); + + GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder(); + b.setObjectType(ObjectType.valueOf(type)); + b.setOffset(position); + b.setPackedSize(packedSize); + b.setInflatedSize(inflatedSize); + ObjectInfo info = new ObjectInfo(key, b.build()); ObjectIndexKey objKey = ObjectIndexKey.create(repo, objId); db.objectIndex().add(objKey, info, dbBuffer()); return objId; @@ -188,12 +196,15 @@ class DhtInserter extends ObjectInserter { chunk = null; ChunkKey firstChunkKey = fragmentList.get(0); + + ChunkMeta.Builder metaBuilder = ChunkMeta.newBuilder(); + for (ChunkKey k : fragmentList) + metaBuilder.addFragment(k.asString()); + ChunkMeta meta = metaBuilder.build(); + for (ChunkKey key : fragmentList) { PackChunk.Members builder = new PackChunk.Members(); builder.setChunkKey(key); - - ChunkMeta meta = new ChunkMeta(key); - meta.fragments = fragmentList; builder.setMeta(meta); if (firstChunkKey.equals(key)) @@ -202,8 +213,12 @@ class DhtInserter extends ObjectInserter { db.chunk().put(builder, dbBuffer()); } - ObjectInfo info = new ObjectInfo(firstChunkKey, -1, type, position, - packedSize, inflatedSize, null, true); + 
GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder(); + b.setObjectType(ObjectType.valueOf(type)); + b.setOffset(position); + b.setPackedSize(packedSize); + b.setInflatedSize(inflatedSize); + ObjectInfo info = new ObjectInfo(firstChunkKey, b.build()); ObjectIndexKey objKey = ObjectIndexKey.create(repo, objId); db.objectIndex().add(objKey, info, dbBuffer()); @@ -234,12 +249,13 @@ class DhtInserter extends ObjectInserter { // TODO Allow more than one chunk pending at a time, this would // permit batching puts of the ChunkInfo records. - activeChunk.end(digest()); - activeChunk.safePut(db, dbBuffer()); - activeChunk = newChunk(); - - if (activeChunk.whole(deflater(), type, data, off, len, objId)) - return objId; + if (!activeChunk.isEmpty()) { + activeChunk.end(digest()); + activeChunk.safePut(db, dbBuffer()); + activeChunk = newChunk(); + if (activeChunk.whole(deflater(), type, data, off, len, objId)) + return objId; + } return insertStream(type, len, asStream(data, off, len)); } @@ -295,7 +311,7 @@ class DhtInserter extends ObjectInserter { ChunkFormatter fmt; fmt = new ChunkFormatter(repo, options); - fmt.setSource(ChunkInfo.Source.INSERT); + fmt.setSource(GitStore.ChunkInfo.Source.INSERT); return fmt; } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserterOptions.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserterOptions.java index b1b1b5c5f8..56b323bd2b 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserterOptions.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserterOptions.java @@ -49,6 +49,7 @@ import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH; import java.security.SecureRandom; import java.util.zip.Deflater; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.CoreConfig; import org.eclipse.jgit.storage.dht.spi.WriteBuffer; diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectRepresentation.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectRepresentation.java index a5499254e5..f6d55c1a49 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectRepresentation.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectRepresentation.java @@ -76,7 +76,7 @@ final class DhtObjectRepresentation extends StoredObjectRepresentation { @Override public int getFormat() { - if (info.getDeltaBase() != null) + if (info.isDelta()) return PACK_DELTA; return PACK_WHOLE; } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtPackParser.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtPackParser.java index 86078335d3..fdc143621c 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtPackParser.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtPackParser.java @@ -67,10 +67,13 @@ import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; -import java.util.Set; import java.util.Map.Entry; +import java.util.Set; import java.util.concurrent.TimeoutException; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; import org.eclipse.jgit.lib.AnyObjectId; import 
org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.MutableObjectId; @@ -86,6 +89,8 @@ import org.eclipse.jgit.transport.PackedObjectInfo; import org.eclipse.jgit.treewalk.CanonicalTreeParser; import org.eclipse.jgit.util.LongList; +import com.google.protobuf.ByteString; + /** Parses the pack stream into chunks, and indexes the chunks for lookup. */ public class DhtPackParser extends PackParser { private final DhtObjDatabase objdb; @@ -112,7 +117,7 @@ public class DhtPackParser extends PackParser { private Edges[] openEdges; /** Prior chunks that were written, keyed by object type code. */ - private List<ChunkInfo>[] infoByOrder; + private List<ChunkKey>[] chunkByOrder; /** Information on chunks already written out. */ private Map<ChunkKey, ChunkInfo> infoByKey; @@ -199,7 +204,7 @@ public class DhtPackParser extends PackParser { dbWriteBuffer = db.newWriteBuffer(); openChunks = new ChunkFormatter[5]; openEdges = new Edges[5]; - infoByOrder = newListArray(5); + chunkByOrder = newListArray(5); infoByKey = new HashMap<ChunkKey, ChunkInfo>(); dirtyMeta = new HashMap<ChunkKey, ChunkMeta>(); chunkMeta = new HashMap<ChunkKey, ChunkMeta>(); @@ -306,7 +311,7 @@ public class DhtPackParser extends PackParser { if (!success) rollback(); - infoByOrder = null; + chunkByOrder = null; objectListByName = null; objectListByChunk = null; linkIterators = null; @@ -332,54 +337,74 @@ public class DhtPackParser extends PackParser { } private void putCachedPack() throws DhtException { - CachedPackInfo info = new CachedPackInfo(); + CachedPackInfo.Builder info = CachedPackInfo.newBuilder(); for (DhtInfo obj : objectMap) { if (!obj.isInPack()) return; if (!obj.isReferenced()) - info.tips.add(obj.copy()); + info.getTipListBuilder().addObjectName(obj.name()); } MessageDigest version = Constants.newMessageDigest(); - addChunkList(info, version, infoByOrder[OBJ_TAG]); - addChunkList(info, version, infoByOrder[OBJ_COMMIT]); - addChunkList(info, version, infoByOrder[OBJ_TREE]); - addChunkList(info, version, infoByOrder[OBJ_BLOB]); + addChunkList(info, version, chunkByOrder[OBJ_TAG]); + addChunkList(info, version, chunkByOrder[OBJ_COMMIT]); + addChunkList(info, version, chunkByOrder[OBJ_TREE]); + addChunkList(info, version, chunkByOrder[OBJ_BLOB]); - info.name = computePackName(); - info.version = ObjectId.fromRaw(version.digest()); + info.setName(computePackName().name()); + info.setVersion(ObjectId.fromRaw(version.digest()).name()); - cachedPackKey = info.getRowKey(); - for (List<ChunkInfo> list : infoByOrder) { + cachedPackKey = CachedPackKey.fromInfo(info.build()); + for (List<ChunkKey> list : chunkByOrder) { if (list == null) continue; - for (ChunkInfo c : list) { - c.cachedPack = cachedPackKey; - if (c.isFragment()) - db.repository().put(repo, info, dbWriteBuffer); + for (ChunkKey key : list) { + ChunkInfo oldInfo = infoByKey.get(key); + GitStore.ChunkInfo.Builder b = + GitStore.ChunkInfo.newBuilder(oldInfo.getData()); + b.setCachedPackKey(cachedPackKey.asString()); + ChunkInfo newInfo = new ChunkInfo(key, b.build()); + infoByKey.put(key, newInfo); + + // A fragment was already put, and has to be re-put. + // Non-fragments will put later and do not put now. 
+ if (newInfo.getData().getIsFragment()) + db.repository().put(repo, newInfo, dbWriteBuffer); } } - db.repository().put(repo, info, dbWriteBuffer); + db.repository().put(repo, info.build(), dbWriteBuffer); } - private void addChunkList(CachedPackInfo info, MessageDigest version, - List<ChunkInfo> list) { + private void addChunkList(CachedPackInfo.Builder info, + MessageDigest version, List<ChunkKey> list) { if (list == null) return; + + long bytesTotal = info.getBytesTotal(); + long objectsTotal = info.getObjectsTotal(); + long objectsDelta = info.getObjectsDelta(); + byte[] buf = new byte[Constants.OBJECT_ID_LENGTH]; - for (ChunkInfo c : list) { - int len = c.chunkSize - ChunkFormatter.TRAILER_SIZE; - info.bytesTotal += len; - info.objectsTotal += c.objectsTotal; - info.objectsDelta += c.objectsOfsDelta; - info.objectsDelta += c.objectsRefDelta; - info.chunks.add(c.getChunkKey()); - c.getChunkKey().getChunkHash().copyRawTo(buf, 0); + for (ChunkKey key : list) { + ChunkInfo chunkInfo = infoByKey.get(key); + GitStore.ChunkInfo c = chunkInfo.getData(); + int len = c.getChunkSize() - ChunkFormatter.TRAILER_SIZE; + bytesTotal += len; + objectsTotal += c.getObjectCounts().getTotal(); + objectsDelta += c.getObjectCounts().getOfsDelta(); + objectsDelta += c.getObjectCounts().getRefDelta(); + info.getChunkListBuilder().addChunkKey( + chunkInfo.getChunkKey().asString()); + chunkInfo.getChunkKey().getChunkHash().copyRawTo(buf, 0); version.update(buf); } + + info.setBytesTotal(bytesTotal); + info.setObjectsTotal(objectsTotal); + info.setObjectsDelta(objectsDelta); } private ObjectId computePackName() { @@ -420,10 +445,10 @@ public class DhtPackParser extends PackParser { } } - deleteChunks(infoByOrder[OBJ_COMMIT]); - deleteChunks(infoByOrder[OBJ_TREE]); - deleteChunks(infoByOrder[OBJ_BLOB]); - deleteChunks(infoByOrder[OBJ_TAG]); + deleteChunks(chunkByOrder[OBJ_COMMIT]); + deleteChunks(chunkByOrder[OBJ_TREE]); + deleteChunks(chunkByOrder[OBJ_BLOB]); + deleteChunks(chunkByOrder[OBJ_TAG]); dbWriteBuffer.flush(); } catch (Throwable err) { @@ -431,10 +456,9 @@ public class DhtPackParser extends PackParser { } } - private void deleteChunks(List<ChunkInfo> list) throws DhtException { + private void deleteChunks(List<ChunkKey> list) throws DhtException { if (list != null) { - for (ChunkInfo info : list) { - ChunkKey key = info.getChunkKey(); + for (ChunkKey key : list) { db.chunk().remove(key, dbWriteBuffer); db.repository().remove(repo, key, dbWriteBuffer); } @@ -605,60 +629,77 @@ public class DhtPackParser extends PackParser { private void putChunkIndex(List<DhtInfo> objectList, ChunkKey key, int type) throws DhtException { - ChunkInfo info = infoByKey.get(key); - info.objectsTotal = objectList.size(); - info.objectType = type; + ChunkInfo oldInfo = infoByKey.get(key); + GitStore.ChunkInfo.Builder info + = GitStore.ChunkInfo.newBuilder(oldInfo.getData()); PackChunk.Members builder = new PackChunk.Members(); builder.setChunkKey(key); byte[] index = ChunkIndex.create(objectList); - info.indexSize = index.length; + info.setIndexSize(index.length); builder.setChunkIndex(index); ChunkMeta meta = dirtyMeta.remove(key); if (meta == null) meta = chunkMeta.get(key); - if (meta == null) - meta = new ChunkMeta(key); switch (type) { case OBJ_COMMIT: { Edges edges = chunkEdges.get(key); - if (edges != null) { - List<ChunkKey> e = edges.commitEdges; - List<ChunkKey> s = sequentialHint(key, OBJ_COMMIT); - meta.commitPrefetch = new ChunkMeta.PrefetchHint(e, s); + List<ChunkKey> e = edges != null ? 
edges.commitEdges : null; + List<ChunkKey> s = sequentialHint(key, OBJ_COMMIT); + if (e == null) + e = Collections.emptyList(); + if (s == null) + s = Collections.emptyList(); + if (!e.isEmpty() || !s.isEmpty()) { + ChunkMeta.Builder m = edit(meta); + ChunkMeta.PrefetchHint.Builder h = m.getCommitPrefetchBuilder(); + for (ChunkKey k : e) + h.addEdge(k.asString()); + for (ChunkKey k : s) + h.addSequential(k.asString()); + meta = m.build(); } break; } case OBJ_TREE: { List<ChunkKey> s = sequentialHint(key, OBJ_TREE); - meta.treePrefetch = new ChunkMeta.PrefetchHint(null, s); + if (s == null) + s = Collections.emptyList(); + if (!s.isEmpty()) { + ChunkMeta.Builder m = edit(meta); + ChunkMeta.PrefetchHint.Builder h = m.getTreePrefetchBuilder(); + for (ChunkKey k : s) + h.addSequential(k.asString()); + meta = m.build(); + } break; } } - if (meta.isEmpty()) { - info.metaSize = 0; - } else { - info.metaSize = meta.asBytes().length; + if (meta != null) { + info.setMetaSize(meta.getSerializedSize()); builder.setMeta(meta); } - db.repository().put(repo, info, dbWriteBuffer); + ChunkInfo newInfo = new ChunkInfo(key, info.build()); + infoByKey.put(key, newInfo); + db.repository().put(repo, newInfo, dbWriteBuffer); db.chunk().put(builder, dbWriteBuffer); } + private static ChunkMeta.Builder edit(ChunkMeta meta) { + if (meta != null) + return ChunkMeta.newBuilder(meta); + return ChunkMeta.newBuilder(); + } + private List<ChunkKey> sequentialHint(ChunkKey key, int typeCode) { - List<ChunkInfo> infoList = infoByOrder[typeCode]; - if (infoList == null) + List<ChunkKey> all = chunkByOrder[typeCode]; + if (all == null) return null; - - List<ChunkKey> all = new ArrayList<ChunkKey>(infoList.size()); - for (ChunkInfo info : infoList) - all.add(info.getChunkKey()); - int idx = all.indexOf(key); if (0 <= idx) { int max = options.getPrefetchDepth(); @@ -669,10 +710,10 @@ public class DhtPackParser extends PackParser { } private void putDirtyMeta() throws DhtException { - for (ChunkMeta meta : dirtyMeta.values()) { + for (Map.Entry<ChunkKey, ChunkMeta> meta : dirtyMeta.entrySet()) { PackChunk.Members builder = new PackChunk.Members(); - builder.setChunkKey(meta.getChunkKey()); - builder.setMeta(meta); + builder.setChunkKey(meta.getKey()); + builder.setMeta(meta.getValue()); db.chunk().put(builder, dbWriteBuffer); } } @@ -892,15 +933,15 @@ public class DhtPackParser extends PackParser { private boolean longOfsDelta(ChunkFormatter w, long infSize, long basePtr) { final int type = typeOf(basePtr); - final List<ChunkInfo> infoList = infoByOrder[type]; + final List<ChunkKey> infoList = chunkByOrder[type]; final int baseIdx = chunkIdx(basePtr); - final ChunkInfo baseInfo = infoList.get(baseIdx); + final ChunkInfo baseInfo = infoByKey.get(infoList.get(baseIdx)); // Go backwards to the start of the base's chunk. 
long relativeChunkStart = 0; for (int i = infoList.size() - 1; baseIdx <= i; i--) { - ChunkInfo info = infoList.get(i); - int packSize = info.chunkSize - ChunkFormatter.TRAILER_SIZE; + GitStore.ChunkInfo info = infoByKey.get(infoList.get(i)).getData(); + int packSize = info.getChunkSize() - ChunkFormatter.TRAILER_SIZE; relativeChunkStart += packSize; } @@ -940,14 +981,24 @@ public class DhtPackParser extends PackParser { if (lastKey != null) currFragments.add(lastKey); + ChunkMeta.Builder protoBuilder = ChunkMeta.newBuilder(); + for (ChunkKey key : currFragments) + protoBuilder.addFragment(key.asString()); + ChunkMeta protoMeta = protoBuilder.build(); + for (ChunkKey key : currFragments) { - ChunkMeta meta = chunkMeta.get(key); - if (meta == null) { - meta = new ChunkMeta(key); + ChunkMeta oldMeta = chunkMeta.get(key); + if (oldMeta != null) { + ChunkMeta.Builder newMeta = ChunkMeta.newBuilder(oldMeta); + newMeta.clearFragment(); + newMeta.mergeFrom(protoMeta); + ChunkMeta meta = newMeta.build(); + dirtyMeta.put(key, meta); chunkMeta.put(key, meta); + } else { + dirtyMeta.put(key, protoMeta); + chunkMeta.put(key, protoMeta); } - meta.fragments = currFragments; - dirtyMeta.put(key, meta); } currFragments = null; } @@ -1093,7 +1144,7 @@ public class DhtPackParser extends PackParser { if (meta == null) return 0; - ChunkKey next = meta.getNextFragment(dbChunk.getChunkKey()); + ChunkKey next = ChunkMetaUtil.getNextFragment(meta, dbChunk.getChunkKey()); if (next == null) return 0; @@ -1200,7 +1251,7 @@ public class DhtPackParser extends PackParser { ChunkFormatter w = openChunks[typeCode]; if (w == null) { w = new ChunkFormatter(repo, options); - w.setSource(ChunkInfo.Source.RECEIVE); + w.setSource(GitStore.ChunkInfo.Source.RECEIVE); w.setObjectType(typeCode); openChunks[typeCode] = w; } @@ -1221,9 +1272,9 @@ public class DhtPackParser extends PackParser { ChunkKey key = w.end(chunkKeyDigest); ChunkInfo info = w.getChunkInfo(); - if (infoByOrder[typeCode] == null) - infoByOrder[typeCode] = new ArrayList<ChunkInfo>(); - infoByOrder[typeCode].add(info); + if (chunkByOrder[typeCode] == null) + chunkByOrder[typeCode] = new ArrayList<ChunkKey>(); + chunkByOrder[typeCode].add(key); infoByKey.put(key, info); if (w.getChunkMeta() != null) @@ -1260,7 +1311,7 @@ public class DhtPackParser extends PackParser { } private long makeObjectPointer(ChunkFormatter w, int typeCode) { - List<ChunkInfo> list = infoByOrder[typeCode]; + List<ChunkKey> list = chunkByOrder[typeCode]; int idx = list == null ? 
0 : list.size(); int ptr = w.position(); return (((long) typeCode) << 61) | (((long) idx) << 32) | ptr; @@ -1279,14 +1330,14 @@ public class DhtPackParser extends PackParser { } private boolean isInCurrentChunk(long objectPtr) { - List<ChunkInfo> list = infoByOrder[typeOf(objectPtr)]; + List<ChunkKey> list = chunkByOrder[typeOf(objectPtr)]; if (list == null) return chunkIdx(objectPtr) == 0; return chunkIdx(objectPtr) == list.size(); } private ChunkKey chunkOf(long objectPtr) throws DhtException { - List<ChunkInfo> list = infoByOrder[typeOf(objectPtr)]; + List<ChunkKey> list = chunkByOrder[typeOf(objectPtr)]; int idx = chunkIdx(objectPtr); if (list == null || list.size() <= idx) { throw new DhtException(MessageFormat.format( @@ -1295,7 +1346,7 @@ public class DhtPackParser extends PackParser { Integer.valueOf(idx), // Integer.valueOf(offsetOf(objectPtr)))); } - return list.get(idx).getChunkKey(); + return list.get(idx); } private static DhtException panicCannotInsert() { @@ -1349,8 +1400,19 @@ public class DhtPackParser extends PackParser { } ObjectInfo info(ChunkKey chunkKey) { - return new ObjectInfo(chunkKey, -1, getType(), offsetOf(chunkPtr), - packedSize, inflatedSize, base, isFragmented()); + GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder(); + b.setObjectType(GitStore.ObjectInfo.ObjectType.valueOf(getType())); + b.setOffset(offsetOf(chunkPtr)); + b.setPackedSize(packedSize); + b.setInflatedSize(inflatedSize); + if (base != null) { + byte[] t = new byte[Constants.OBJECT_ID_LENGTH]; + base.copyRawTo(t, 0); + b.setDeltaBase(ByteString.copyFrom(t)); + } + if (isFragmented()) + b.setIsFragmented(true); + return new ObjectInfo(chunkKey, b.build()); } } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReader.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReader.java index 05438ab8dd..f9288b9e2e 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReader.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReader.java @@ -63,6 +63,7 @@ import java.util.zip.Inflater; import org.eclipse.jgit.errors.IncorrectObjectTypeException; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo; import org.eclipse.jgit.lib.AbbreviatedObjectId; import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.AsyncObjectLoaderQueue; @@ -76,7 +77,6 @@ import org.eclipse.jgit.revwalk.ObjectWalk; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevObject; import org.eclipse.jgit.revwalk.RevWalk; -import org.eclipse.jgit.storage.dht.RefData.IdWithChunk; import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Database; import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable; @@ -186,7 +186,7 @@ public class DhtReader extends ObjectReader implements ObjectReuseAsIs { @Override public boolean has(AnyObjectId objId, int typeHint) throws IOException { - if (objId instanceof RefData.IdWithChunk) + if (objId instanceof RefDataUtil.IdWithChunk) return true; if (recentChunks.has(repo, objId)) @@ -283,8 +283,8 @@ public class DhtReader extends ObjectReader implements ObjectReuseAsIs { } ChunkKey key; - if (objId instanceof RefData.IdWithChunk) - key = ((RefData.IdWithChunk) objId).getChunkKey(); + if (objId instanceof RefDataUtil.IdWithChunk) + key = ((RefDataUtil.IdWithChunk) 
objId).getChunkKey(); else key = repository.getRefDatabase().findChunk(objId); @@ -331,8 +331,8 @@ public class DhtReader extends ObjectReader implements ObjectReuseAsIs { } ChunkKey findChunk(AnyObjectId objId) throws DhtException { - if (objId instanceof IdWithChunk) - return ((IdWithChunk) objId).getChunkKey(); + if (objId instanceof RefDataUtil.IdWithChunk) + return ((RefDataUtil.IdWithChunk) objId).getChunkKey(); ChunkKey key = repository.getRefDatabase().findChunk(objId); if (key != null) diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefDatabase.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefDatabase.java index 22569b91ee..ef6123a539 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefDatabase.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefDatabase.java @@ -55,6 +55,7 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; import org.eclipse.jgit.errors.MissingObjectException; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData; import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectIdRef.PeeledNonTag; @@ -68,6 +69,7 @@ import org.eclipse.jgit.lib.SymbolicRef; import org.eclipse.jgit.revwalk.RevObject; import org.eclipse.jgit.revwalk.RevTag; import org.eclipse.jgit.revwalk.RevWalk; +import org.eclipse.jgit.storage.dht.RefDataUtil.IdWithChunk; import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Database; import org.eclipse.jgit.util.RefList; @@ -94,7 +96,7 @@ public class DhtRefDatabase extends RefDatabase { ChunkKey findChunk(AnyObjectId id) { RefCache c = cache.get(); if (c != null) { - RefData.IdWithChunk i = c.hints.get(id); + IdWithChunk i = c.hints.get(id); if (i != null) return i.getChunkKey(); } @@ -190,8 +192,8 @@ public class DhtRefDatabase extends RefDatabase { try { RepositoryKey repo = repository.getRepositoryKey(); RefKey key = RefKey.create(repo, newLeaf.getName()); - RefData oldData = RefData.fromRef(oldLeaf); - RefData newData = RefData.fromRef(newLeaf); + RefData oldData = RefDataUtil.fromRef(oldLeaf); + RefData newData = RefDataUtil.fromRef(newLeaf); db.ref().compareAndPut(key, oldData, newData); } catch (TimeoutException e) { // Ignore a timeout here, we were only trying to update @@ -214,13 +216,12 @@ public class DhtRefDatabase extends RefDatabase { ChunkKey key = ctx.findChunk(oId); if (key != null) - oId = new RefData.IdWithChunk(oId, key); + oId = new IdWithChunk(oId, key); if (obj instanceof RevTag) { ObjectId pId = rw.peel(obj); key = ctx.findChunk(pId); - pId = key != null ? new RefData.IdWithChunk(pId, key) : pId - .copy(); + pId = key != null ? 
new IdWithChunk(pId, key) : pId.copy(); return new PeeledTag(leaf.getStorage(), name, oId, pId); } else { return new PeeledNonTag(leaf.getStorage(), name, oId); @@ -353,7 +354,7 @@ public class DhtRefDatabase extends RefDatabase { private RefCache read() throws DhtException, TimeoutException { RefList.Builder<Ref> id = new RefList.Builder<Ref>(); RefList.Builder<Ref> sym = new RefList.Builder<Ref>(); - ObjectIdSubclassMap<RefData.IdWithChunk> hints = new ObjectIdSubclassMap<RefData.IdWithChunk>(); + ObjectIdSubclassMap<IdWithChunk> hints = new ObjectIdSubclassMap<IdWithChunk>(); for (Map.Entry<RefKey, RefData> e : scan()) { Ref ref = fromData(e.getKey().getName(), e.getValue()); @@ -362,12 +363,12 @@ public class DhtRefDatabase extends RefDatabase { sym.add(ref); id.add(ref); - if (ref.getObjectId() instanceof RefData.IdWithChunk + if (ref.getObjectId() instanceof IdWithChunk && !hints.contains(ref.getObjectId())) - hints.add((RefData.IdWithChunk) ref.getObjectId()); - if (ref.getPeeledObjectId() instanceof RefData.IdWithChunk + hints.add((IdWithChunk) ref.getObjectId()); + if (ref.getPeeledObjectId() instanceof IdWithChunk && !hints.contains(ref.getPeeledObjectId())) - hints.add((RefData.IdWithChunk) ref.getPeeledObjectId()); + hints.add((IdWithChunk) ref.getPeeledObjectId()); } id.sort(); @@ -377,40 +378,20 @@ public class DhtRefDatabase extends RefDatabase { } private static Ref fromData(String name, RefData data) { - ObjectId oId = null; - boolean peeled = false; - ObjectId pId = null; - - TinyProtobuf.Decoder d = data.decode(); - DECODE: for (;;) { - switch (d.next()) { - case 0: - break DECODE; - - case RefData.TAG_SYMREF: { - String symref = d.string(); - Ref leaf = new Unpeeled(NEW, symref, null); - return new SymbolicRef(name, leaf); - } - - case RefData.TAG_TARGET: - oId = RefData.IdWithChunk.decode(d.message()); - continue; - case RefData.TAG_IS_PEELED: - peeled = d.bool(); - continue; - case RefData.TAG_PEELED: - pId = RefData.IdWithChunk.decode(d.message()); - continue; - default: - d.skip(); - continue; - } + if (data.hasSymref()) { + Ref leaf = new Unpeeled(NEW, data.getSymref(), null); + return new SymbolicRef(name, leaf); } - if (peeled && pId != null) + if (!data.hasTarget()) + return new Unpeeled(LOOSE, name, null); + + ObjectId oId = IdWithChunk.create(data.getTarget()); + if (data.getIsPeeled() && data.hasPeeled()) { + ObjectId pId = IdWithChunk.create(data.getPeeled()); return new PeeledTag(LOOSE, name, oId, pId); - if (peeled) + } + if (data.getIsPeeled()) return new PeeledNonTag(LOOSE, name, oId); return new Unpeeled(LOOSE, name, oId); } @@ -427,10 +408,10 @@ public class DhtRefDatabase extends RefDatabase { final RefList<Ref> sym; - final ObjectIdSubclassMap<RefData.IdWithChunk> hints; + final ObjectIdSubclassMap<IdWithChunk> hints; RefCache(RefList<Ref> ids, RefList<Ref> sym, - ObjectIdSubclassMap<RefData.IdWithChunk> hints) { + ObjectIdSubclassMap<IdWithChunk> hints) { this.ids = ids; this.sym = sym; this.hints = hints; diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefUpdate.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefUpdate.java index 158b7cf496..f131e2d741 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefUpdate.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefUpdate.java @@ -47,6 +47,7 @@ import java.io.IOException; import java.util.concurrent.TimeoutException; import org.eclipse.jgit.errors.MissingObjectException; +import 
org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.RefUpdate; @@ -106,7 +107,7 @@ class DhtRefUpdate extends RefUpdate { dstRef = dstRef.getLeaf(); refKey = RefKey.create(repo, dstRef.getName()); - oldData = RefData.fromRef(dstRef); + oldData = RefDataUtil.fromRef(dstRef); if (dstRef.isSymbolic()) setOldObjectId(null); @@ -157,7 +158,7 @@ class DhtRefUpdate extends RefUpdate { @Override protected Result doLink(String target) throws IOException { try { - newData = RefData.symbolic(target); + newData = RefDataUtil.symbolic(target); boolean r = db.ref().compareAndPut(refKey, oldData, newData); if (r) { getRefDatabase().stored(dstRef.getName(), newData); @@ -181,19 +182,19 @@ class DhtRefUpdate extends RefUpdate { ChunkKey key = ctx.findChunk(newId); if (key != null) - newId = new RefData.IdWithChunk(newId, key); + newId = new RefDataUtil.IdWithChunk(newId, key); if (obj instanceof RevTag) { ObjectId pId = rw.peel(obj); key = ctx.findChunk(pId); - pId = key != null ? new RefData.IdWithChunk(pId, key) : pId; - return RefData.peeled(newId, pId); + pId = key != null ? new RefDataUtil.IdWithChunk(pId, key) : pId; + return RefDataUtil.peeled(newId, pId); } else if (obj != null) - return RefData.peeled(newId, null); + return RefDataUtil.peeled(newId, null); else - return RefData.id(newId); + return RefDataUtil.id(newId); } catch (MissingObjectException e) { - return RefData.id(newId); + return RefDataUtil.id(newId); } } } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtText.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtText.java index 3c35ad6df3..4fb520be15 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtText.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtText.java @@ -59,9 +59,12 @@ public class DhtText extends TranslationBundle { /***/ public String cycleInDeltaChain; /***/ public String databaseRequired; /***/ public String expectedObjectSizeDuringCopyAsIs; + /***/ public String invalidCachedPackInfo; /***/ public String invalidChunkKey; + /***/ public String invalidChunkMeta; /***/ public String invalidObjectIndexKey; /***/ public String invalidObjectInfo; + /***/ public String invalidRefData; /***/ public String missingChunk; /***/ public String missingLongOffsetBase; /***/ public String nameRequired; @@ -72,12 +75,6 @@ public class DhtText extends TranslationBundle { /***/ public String objectTypeUnknown; /***/ public String packParserInvalidPointer; /***/ public String packParserRollbackFailed; - /***/ public String protobufNegativeValuesNotSupported; - /***/ public String protobufNoArray; - /***/ public String protobufNotBooleanValue; - /***/ public String protobufUnsupportedFieldType; - /***/ public String protobufWrongFieldLength; - /***/ public String protobufWrongFieldType; /***/ public String recordingObjects; /***/ public String repositoryAlreadyExists; /***/ public String repositoryMustBeBare; diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/LargeNonDeltaObject.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/LargeNonDeltaObject.java index aaef431c73..e6afd731fe 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/LargeNonDeltaObject.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/LargeNonDeltaObject.java @@ -50,6 +50,7 @@ import java.util.zip.InflaterInputStream; import 
org.eclipse.jgit.errors.LargeObjectException; import org.eclipse.jgit.errors.MissingObjectException; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.ObjectStream; @@ -102,7 +103,7 @@ class LargeNonDeltaObject extends ObjectLoader { if (pc != null) firstChunk = null; else - pc = ctx.getChunk(meta.getFragmentKey(0)); + pc = ctx.getChunk(ChunkKey.fromString(meta.getFragment(0))); InputStream in = new ChunkInputStream(meta, ctx, pos, pc); in = new BufferedInputStream(new InflaterInputStream(in), 8192); @@ -138,7 +139,8 @@ class LargeNonDeltaObject extends ObjectLoader { if (fragment == meta.getFragmentCount()) return -1; - pc = ctx.getChunk(meta.getFragmentKey(++fragment)); + pc = ctx.getChunk(ChunkKey.fromString( + meta.getFragment(++fragment))); ptr = 0; n = pc.read(ptr, dstbuf, dstptr, dstlen); if (n == 0) diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectInfo.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectInfo.java index 941ed6a6d1..9123a8be8f 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectInfo.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectInfo.java @@ -43,13 +43,12 @@ package org.eclipse.jgit.storage.dht; -import java.text.MessageFormat; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.List; -import org.eclipse.jgit.lib.Constants; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore; import org.eclipse.jgit.lib.ObjectId; /** Connects an object to the chunk it is stored in. */ @@ -71,106 +70,40 @@ public class ObjectInfo { Collections.sort(toSort, BY_TIME); } + private final ChunkKey chunk; + + private final long time; + + private final GitStore.ObjectInfo data; + /** - * Parse an ObjectInfo from the storage system. + * Wrap an ObjectInfo from the storage system. * * @param chunkKey * the chunk the object points to. * @param data * the data of the ObjectInfo. - * @param time - * timestamp of the ObjectInfo. If the implementation does not - * store timestamp data, supply a negative value. - * @return the object's information. */ - public static ObjectInfo fromBytes(ChunkKey chunkKey, byte[] data, long time) { - return fromBytes(chunkKey, TinyProtobuf.decode(data), time); + public ObjectInfo(ChunkKey chunkKey, GitStore.ObjectInfo data) { + this.chunk = chunkKey; + this.time = 0; + this.data = data; } /** - * Parse an ObjectInfo from the storage system. + * Wrap an ObjectInfo from the storage system. * * @param chunkKey * the chunk the object points to. - * @param d - * the data of the ObjectInfo. * @param time - * timestamp of the ObjectInfo. If the implementation does not - * store timestamp data, supply a negative value. - * @return the object's information. + * timestamp of the ObjectInfo. + * @param data + * the data of the ObjectInfo. 
*/ - public static ObjectInfo fromBytes(ChunkKey chunkKey, - TinyProtobuf.Decoder d, long time) { - int typeCode = -1; - int offset = -1; - long packedSize = -1; - long inflatedSize = -1; - ObjectId deltaBase = null; - boolean fragmented = false; - - PARSE: for (;;) { - switch (d.next()) { - case 0: - break PARSE; - case 1: - typeCode = d.int32(); - continue; - case 2: - offset = d.int32(); - continue; - case 3: - packedSize = d.int64(); - continue; - case 4: - inflatedSize = d.int64(); - continue; - case 5: - deltaBase = d.bytesObjectId(); - continue; - case 6: - fragmented = d.bool(); - continue; - default: - d.skip(); - continue; - } - } - - if (typeCode < 0 || offset < 0 || packedSize < 0 || inflatedSize < 0) - throw new IllegalArgumentException(MessageFormat.format( - DhtText.get().invalidObjectInfo, chunkKey)); - - return new ObjectInfo(chunkKey, time, typeCode, offset, // - packedSize, inflatedSize, deltaBase, fragmented); - } - - private final ChunkKey chunk; - - private final long time; - - private final int typeCode; - - private final int offset; - - private final long packedSize; - - private final long inflatedSize; - - private final ObjectId deltaBase; - - private final boolean fragmented; - - ObjectInfo(ChunkKey chunk, long time, int typeCode, int offset, - long packedSize, long inflatedSize, ObjectId base, - boolean fragmented) { - this.chunk = chunk; + public ObjectInfo(ChunkKey chunkKey, long time, GitStore.ObjectInfo data) { + this.chunk = chunkKey; this.time = time < 0 ? 0 : time; - this.typeCode = typeCode; - this.offset = offset; - this.packedSize = packedSize; - this.inflatedSize = inflatedSize; - this.deltaBase = base; - this.fragmented = fragmented; + this.data = data; } /** @return the chunk this link points to. */ @@ -183,54 +116,43 @@ public class ObjectInfo { return time; } + /** @return GitStore.ObjectInfo to embed in the database. */ + public GitStore.ObjectInfo getData() { + return data; + } + /** @return type of the object, in OBJ_* constants. */ public int getType() { - return typeCode; + return data.getObjectType().getNumber(); } /** @return size of the object when fully inflated. */ public long getSize() { - return inflatedSize; + return data.getInflatedSize(); } /** @return true if the object storage uses delta compression. */ public boolean isDelta() { - return getDeltaBase() != null; + return data.hasDeltaBase(); } /** @return true if the object has been fragmented across chunks. */ public boolean isFragmented() { - return fragmented; + return data.getIsFragmented(); } int getOffset() { - return offset; + return data.getOffset(); } long getPackedSize() { - return packedSize; + return data.getPackedSize(); } ObjectId getDeltaBase() { - return deltaBase; - } - - /** - * Convert this ObjectInfo into a byte array for storage. - * - * @return the ObjectInfo data, encoded as a byte array. This does not - * include the ChunkKey, callers must store that separately. 
- */ - public byte[] asBytes() { - TinyProtobuf.Encoder e = TinyProtobuf.encode(256); - e.int32(1, typeCode); - e.int32(2, offset); - e.int64(3, packedSize); - e.int64(4, inflatedSize); - e.bytes(5, deltaBase); - if (fragmented) - e.bool(6, fragmented); - return e.asByteArray(); + if (data.hasDeltaBase()) + return ObjectId.fromRaw(data.getDeltaBase().toByteArray(), 0); + return null; } @Override @@ -238,18 +160,10 @@ public class ObjectInfo { StringBuilder b = new StringBuilder(); b.append("ObjectInfo:"); b.append(chunk); - b.append(" ["); if (0 < time) - b.append(" time=").append(new Date(time)); - b.append(" type=").append(Constants.typeString(typeCode)); - b.append(" offset=").append(offset); - b.append(" packedSize=").append(packedSize); - b.append(" inflatedSize=").append(inflatedSize); - if (deltaBase != null) - b.append(" deltaBase=").append(deltaBase.name()); - if (fragmented) - b.append(" fragmented"); - b.append(" ]"); + b.append(" @ ").append(new Date(time)); + b.append("\n"); + b.append(data.toString()); return b.toString(); } } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectWriter.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectWriter.java index 17e36ab99a..d36b03bdb5 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectWriter.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectWriter.java @@ -43,7 +43,6 @@ package org.eclipse.jgit.storage.dht; -import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -55,6 +54,7 @@ import java.util.Set; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicReference; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.util.BlockList; @@ -136,7 +136,7 @@ final class ObjectWriter { ChunkMeta meta = allMeta.remove(key); if (meta != null) { for (int i = 1; i < meta.getFragmentCount(); i++) - keys.add(meta.getFragmentKey(i)); + keys.add(ChunkKey.fromString(meta.getFragment(i))); } } order = keys; @@ -221,7 +221,7 @@ final class ObjectWriter { throw metaError.get(); } - private class MetaLoader implements AsyncCallback<Collection<ChunkMeta>> { + private class MetaLoader implements AsyncCallback<Map<ChunkKey, ChunkMeta>> { private final Context context; private final Set<ChunkKey> keys; @@ -231,13 +231,11 @@ final class ObjectWriter { this.keys = keys; } - public void onSuccess(Collection<ChunkMeta> result) { + public void onSuccess(Map<ChunkKey, ChunkMeta> result) { try { synchronized (allMeta) { - for (ChunkMeta meta : result) { - allMeta.put(meta.getChunkKey(), meta); - keys.remove(meta.getChunkKey()); - } + allMeta.putAll(result); + keys.removeAll(result.keySet()); } if (context == Context.FAST_MISSING_OK && !keys.isEmpty()) { synchronized (metaMissing) { diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/PackChunk.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/PackChunk.java index c3bedc4ae1..c0684022f3 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/PackChunk.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/PackChunk.java @@ -63,6 +63,7 @@ import java.util.zip.Inflater; import org.eclipse.jgit.errors.CorruptObjectException; import org.eclipse.jgit.errors.LargeObjectException; import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException; 
+import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectLoader; @@ -251,8 +252,6 @@ public final class PackChunk { private volatile Boolean valid; - private volatile ChunkKey nextFragment; - PackChunk(ChunkKey key, byte[] dataBuf, int dataPtr, int dataLen, ChunkIndex index, ChunkMeta meta) { this.key = key; @@ -400,9 +399,12 @@ public final class PackChunk { base = base - pos; ChunkMeta.BaseChunk baseChunk; - baseChunk = pc.meta.getBaseChunk(base); - baseChunkKey = baseChunk.getChunkKey(); - basePosInChunk = (int) (baseChunk.relativeStart - base); + baseChunk = ChunkMetaUtil.getBaseChunk( + pc.key, + pc.meta, + base); + baseChunkKey = ChunkKey.fromString(baseChunk.getChunkKey()); + basePosInChunk = (int) (baseChunk.getRelativeStart() - base); } delta = new Delta(delta, // @@ -559,7 +561,8 @@ public final class PackChunk { if (inf.needsInput()) { if (meta.getFragmentCount() <= nextChunk) break; - pc = reader.getChunk(meta.getFragmentKey(nextChunk++)); + pc = reader.getChunk(ChunkKey.fromString( + meta.getFragment(nextChunk++))); if (meta.getFragmentCount() == nextChunk) bs = pc.dataLen; // Include trailer on last chunk. else @@ -575,7 +578,7 @@ public final class PackChunk { if (dstoff != sz) { throw new DataFormatException(MessageFormat.format( DhtText.get().shortCompressedObject, - meta.getChunkKey(), + ChunkKey.fromString(meta.getFragment(0)), Integer.valueOf(pos))); } return dstbuf; @@ -683,7 +686,8 @@ public final class PackChunk { if (isFragment()) { int cnt = meta.getFragmentCount(); for (int fragId = 1; fragId < cnt; fragId++) { - PackChunk pc = ctx.getChunk(meta.getFragmentKey(fragId)); + PackChunk pc = ctx.getChunk(ChunkKey.fromString( + meta.getFragment(fragId))); pc.copyEntireChunkAsIs(out, obj, validate); } } @@ -728,18 +732,6 @@ public final class PackChunk { return sz; } - ChunkKey getNextFragment() { - if (meta == null) - return null; - - ChunkKey next = nextFragment; - if (next == null) { - next = meta.getNextFragment(getChunkKey()); - nextFragment = next; - } - return next; - } - private static class Delta { /** Child that applies onto this object. 
*/ final Delta next; diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Prefetcher.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Prefetcher.java index b7463db3f9..743f1f5944 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Prefetcher.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Prefetcher.java @@ -59,6 +59,7 @@ import java.util.Set; import java.util.concurrent.TimeoutException; import org.eclipse.jgit.errors.MissingObjectException; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevTree; @@ -218,14 +219,21 @@ class Prefetcher implements StreamingCallback<Collection<PackChunk.Members>> { if (hint != null) { synchronized (this) { - if (followEdgeHints && !hint.getEdge().isEmpty()) - push(hint.getEdge()); + if (followEdgeHints && 0 < hint.getEdgeCount()) + push(hint.getEdgeList()); else - push(hint.getSequential()); + push(hint.getSequentialList()); } } } + private void push(List<String> list) { + List<ChunkKey> keys = new ArrayList<ChunkKey>(list.size()); + for (String keyString : list) + keys.add(ChunkKey.fromString(keyString)); + push(keys); + } + void push(Iterable<ChunkKey> list) { synchronized (this) { for (ChunkKey key : list) { diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/QueueObjectLookup.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/QueueObjectLookup.java index 482caf8917..9cf513d265 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/QueueObjectLookup.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/QueueObjectLookup.java @@ -55,7 +55,6 @@ import java.util.Map; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.lib.AsyncOperation; import org.eclipse.jgit.lib.ObjectId; -import org.eclipse.jgit.storage.dht.RefData.IdWithChunk; import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Database; @@ -119,8 +118,8 @@ class QueueObjectLookup<T extends ObjectId> implements AsyncOperation { RecentInfoCache infoCache = reader.getRecentInfoCache(); List<T> missing = null; for (T obj : objects) { - if (needChunkOnly && obj instanceof IdWithChunk) { - push(obj, ((IdWithChunk) obj).getChunkKey()); + if (needChunkOnly && obj instanceof RefDataUtil.IdWithChunk) { + push(obj, ((RefDataUtil.IdWithChunk) obj).getChunkKey()); continue; } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentChunks.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentChunks.java index f704c1daf5..f75e3bdc82 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentChunks.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentChunks.java @@ -48,7 +48,7 @@ import java.io.IOException; import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.storage.dht.DhtReader.ChunkAndOffset; -import org.eclipse.jgit.storage.dht.RefData.IdWithChunk; +import org.eclipse.jgit.storage.dht.RefDataUtil.IdWithChunk; final class RecentChunks { private final DhtReader reader; diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefData.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefData.java deleted file mode 100644 index e34e9d1c34..0000000000 --- 
a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefData.java +++ /dev/null @@ -1,235 +0,0 @@ -/* - * Copyright (C) 2011, Google Inc. - * and other copyright owners as documented in the project's IP log. - * - * This program and the accompanying materials are made available - * under the terms of the Eclipse Distribution License v1.0 which - * accompanies this distribution, is reproduced below, and is - * available at http://www.eclipse.org/org/documents/edl-v10.php - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials provided - * with the distribution. - * - * - Neither the name of the Eclipse Foundation, Inc. nor the - * names of its contributors may be used to endorse or promote - * products derived from this software without specific prior - * written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND - * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, - * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR - * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.eclipse.jgit.storage.dht; - -import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH; - -import static org.eclipse.jgit.storage.dht.TinyProtobuf.encode; - -import java.util.Arrays; - -import org.eclipse.jgit.lib.AnyObjectId; -import org.eclipse.jgit.lib.ObjectId; -import org.eclipse.jgit.lib.Ref; -import org.eclipse.jgit.storage.dht.TinyProtobuf.Encoder; - -/** - * Describes the current state of a Git reference. - * <p> - * The reference state contains not just the SHA-1 object name that a reference - * points to, but the state also caches its peeled value if it is a tag, and the - * {@link ChunkKey} the object was observed in when the reference was last - * updated. This cached data reduces latency when initially starting to work - * with a repository. - */ -public class RefData { - /** Magic constant meaning does not exist. 
*/ - public static final RefData NONE = new RefData(new byte[0]); - - static final int TAG_SYMREF = 1; - - static final int TAG_TARGET = 2; - - static final int TAG_IS_PEELED = 3; - - static final int TAG_PEELED = 4; - - /** - * @param data - * @return the content - */ - public static RefData fromBytes(byte[] data) { - return new RefData(data); - } - - static RefData symbolic(String target) { - Encoder e = encode(2 + target.length()); - e.string(TAG_SYMREF, target); - return new RefData(e.asByteArray()); - } - - static RefData id(AnyObjectId id) { - Encoder e = encode(4 + OBJECT_ID_STRING_LENGTH + ChunkKey.KEYLEN); - e.message(TAG_TARGET, IdWithChunk.encode(id)); - return new RefData(e.asByteArray()); - } - - static RefData fromRef(Ref ref) { - if (ref.isSymbolic()) - return symbolic(ref.getTarget().getName()); - - if (ref.getObjectId() == null) - return RefData.NONE; - - int max = 8 + 2 * OBJECT_ID_STRING_LENGTH + 2 * ChunkKey.KEYLEN; - Encoder e = encode(max); - e.message(TAG_TARGET, IdWithChunk.encode(ref.getObjectId())); - if (ref.isPeeled()) { - e.bool(TAG_IS_PEELED, true); - if (ref.getPeeledObjectId() != null) - e.message(TAG_PEELED, - IdWithChunk.encode(ref.getPeeledObjectId())); - } - return new RefData(e.asByteArray()); - } - - static RefData peeled(ObjectId targetId, ObjectId peeledId) { - int max = 8 + 2 * OBJECT_ID_STRING_LENGTH + 2 * ChunkKey.KEYLEN; - Encoder e = encode(max); - e.message(TAG_TARGET, IdWithChunk.encode(targetId)); - e.bool(TAG_IS_PEELED, true); - if (peeledId != null) - e.message(TAG_PEELED, IdWithChunk.encode(peeledId)); - return new RefData(e.asByteArray()); - } - - private final byte[] data; - - RefData(byte[] data) { - this.data = data; - } - - TinyProtobuf.Decoder decode() { - return TinyProtobuf.decode(data); - } - - /** @return the contents, encoded as a byte array for storage. */ - public byte[] asBytes() { - return data; - } - - @Override - public int hashCode() { - int hash = 5381; - for (int ptr = 0; ptr < data.length; ptr++) - hash = ((hash << 5) + hash) + (data[ptr] & 0xff); - return hash; - } - - @Override - public boolean equals(Object other) { - if (other instanceof RefData) - return Arrays.equals(data, ((RefData) other).data); - return false; - } - - @Override - public String toString() { - StringBuilder b = new StringBuilder(); - TinyProtobuf.Decoder d = decode(); - for (;;) { - switch (d.next()) { - case 0: - return b.toString().substring(1); - case TAG_SYMREF: - b.append("\nsymref: ").append(d.string()); - continue; - case TAG_TARGET: - b.append("\ntarget: ").append(IdWithChunk.decode(d.message())); - continue; - case TAG_IS_PEELED: - b.append("\nis_peeled: ").append(d.bool()); - continue; - case TAG_PEELED: - b.append("\npeeled: ").append(IdWithChunk.decode(d.message())); - continue; - default: - d.skip(); - continue; - } - } - } - - static class IdWithChunk extends ObjectId { - static ObjectId decode(TinyProtobuf.Decoder d) { - ObjectId id = null; - ChunkKey key = null; - DECODE: for (;;) { - switch (d.next()) { - case 0: - break DECODE; - case 1: - id = d.stringObjectId(); - continue; - case 2: - key = ChunkKey.fromBytes(d); - continue; - default: - d.skip(); - } - } - return key != null ? 
new IdWithChunk(id, key) : id; - } - - static TinyProtobuf.Encoder encode(AnyObjectId id) { - if (id instanceof IdWithChunk) { - int max = 4 + OBJECT_ID_STRING_LENGTH + ChunkKey.KEYLEN; - TinyProtobuf.Encoder e = TinyProtobuf.encode(max); - e.string(1, id); - e.string(2, ((IdWithChunk) id).chunkKey); - return e; - } else { - int max = 2 + OBJECT_ID_STRING_LENGTH; - TinyProtobuf.Encoder e = TinyProtobuf.encode(max); - e.string(1, id); - return e; - } - } - - private final ChunkKey chunkKey; - - IdWithChunk(AnyObjectId id, ChunkKey key) { - super(id); - this.chunkKey = key; - } - - ChunkKey getChunkKey() { - return chunkKey; - } - - @Override - public String toString() { - return name() + "->" + chunkKey; - } - } -} diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefDataUtil.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefDataUtil.java new file mode 100644 index 0000000000..7c5d9e0b8c --- /dev/null +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefDataUtil.java @@ -0,0 +1,132 @@ +/* + * Copyright (C) 2011, Google Inc. + * and other copyright owners as documented in the project's IP log. + * + * This program and the accompanying materials are made available + * under the terms of the Eclipse Distribution License v1.0 which + * accompanies this distribution, is reproduced below, and is + * available at http://www.eclipse.org/org/documents/edl-v10.php + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or + * without modification, are permitted provided that the following + * conditions are met: + * + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * + * - Neither the name of the Eclipse Foundation, Inc. nor the + * names of its contributors may be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.eclipse.jgit.storage.dht; + +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData; +import org.eclipse.jgit.lib.AnyObjectId; +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.Ref; + +/** Tools to work with {@link RefData}. */ +public class RefDataUtil { + /** Magic constant meaning does not exist. 
*/ + public static final RefData NONE = RefData.newBuilder().build(); + + static RefData symbolic(String target) { + RefData.Builder b = RefData.newBuilder(); + b.setSymref(target); + return b.build(); + } + + static RefData id(AnyObjectId id) { + RefData.Builder b = RefData.newBuilder(); + b.setTarget(toRefData(id)); + return b.build(); + } + + static RefData fromRef(Ref ref) { + if (ref.isSymbolic()) + return symbolic(ref.getTarget().getName()); + + if (ref.getObjectId() == null) + return NONE; + + RefData.Builder b = RefData.newBuilder(); + b.setTarget(toRefData(ref.getObjectId())); + if (ref.isPeeled()) { + b.setIsPeeled(true); + if (ref.getPeeledObjectId() != null) + b.setPeeled(toRefData(ref.getPeeledObjectId())); + } + return b.build(); + } + + static RefData peeled(ObjectId targetId, ObjectId peeledId) { + RefData.Builder b = RefData.newBuilder(); + b.setTarget(toRefData(targetId)); + b.setIsPeeled(true); + if (peeledId != null) + b.setPeeled(toRefData(peeledId)); + return b.build(); + } + + private static RefData.Id toRefData(AnyObjectId id) { + RefData.Id.Builder r = RefData.Id.newBuilder(); + r.setObjectName(id.name()); + if (id instanceof IdWithChunk) + r.setChunkKey(((IdWithChunk) id).getChunkKey().asString()); + return r.build(); + } + + static class IdWithChunk extends ObjectId { + static ObjectId create(RefData.Id src) { + if (src.hasChunkKey()) { + return new IdWithChunk( + ObjectId.fromString(src.getObjectName()), + ChunkKey.fromString(src.getChunkKey())); + } + return ObjectId.fromString(src.getObjectName()); + } + + private final ChunkKey chunkKey; + + IdWithChunk(AnyObjectId id, ChunkKey key) { + super(id); + this.chunkKey = key; + } + + ChunkKey getChunkKey() { + return chunkKey; + } + + @Override + public String toString() { + return name() + "->" + chunkKey; + } + } + + private RefDataUtil() { + // Utility class, do not create instances. + } +} diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/TinyProtobuf.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/TinyProtobuf.java deleted file mode 100644 index dcf3dfb172..0000000000 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/TinyProtobuf.java +++ /dev/null @@ -1,755 +0,0 @@ -/* - * Copyright (C) 2011, Google Inc. - * and other copyright owners as documented in the project's IP log. - * - * This program and the accompanying materials are made available - * under the terms of the Eclipse Distribution License v1.0 which - * accompanies this distribution, is reproduced below, and is - * available at http://www.eclipse.org/org/documents/edl-v10.php - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials provided - * with the distribution. - * - * - Neither the name of the Eclipse Foundation, Inc. nor the - * names of its contributors may be used to endorse or promote - * products derived from this software without specific prior - * written permission. 
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND - * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, - * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR - * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.eclipse.jgit.storage.dht; - -import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH; -import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH; - -import java.nio.ByteBuffer; -import java.text.MessageFormat; - -import org.eclipse.jgit.lib.AnyObjectId; -import org.eclipse.jgit.lib.Constants; -import org.eclipse.jgit.lib.ObjectId; -import org.eclipse.jgit.util.RawParseUtils; - -/** - * A tiny implementation of a subset of the Google Protocol Buffers format. - * <p> - * For more information on the network format, see the canonical documentation - * at <a href="http://code.google.com/p/protobuf/">Google Protocol Buffers</a>. - */ -public class TinyProtobuf { - private static final int WIRE_VARINT = 0; - - private static final int WIRE_FIXED_64 = 1; - - private static final int WIRE_LEN_DELIM = 2; - - private static final int WIRE_FIXED_32 = 5; - - /** - * Create a new encoder. - * - * @param estimatedSize - * estimated size of the message. If the size is accurate, - * copying of the result can be avoided during - * {@link Encoder#asByteArray()}. If the size is too small, the - * buffer will grow dynamically. - * @return a new encoder. - */ - public static Encoder encode(int estimatedSize) { - return new Encoder(new byte[estimatedSize]); - } - - /** - * Create an encoder that estimates size. - * - * @return a new encoder. - */ - public static Encoder size() { - return new Encoder(null); - } - - /** - * Decode a buffer. - * - * @param buf - * the buffer to read. - * @return a new decoder. - */ - public static Decoder decode(byte[] buf) { - return decode(buf, 0, buf.length); - } - - /** - * Decode a buffer. - * - * @param buf - * the buffer to read. - * @param off - * offset to begin reading from {@code buf}. - * @param len - * total number of bytes to read from {@code buf}. - * @return a new decoder. - */ - public static Decoder decode(byte[] buf, int off, int len) { - return new Decoder(buf, off, len); - } - - /** An enumerated value that encodes/decodes as int32. */ - public static interface Enum { - /** @return the wire value. */ - public int value(); - } - - /** Decode fields from a binary protocol buffer. */ - public static class Decoder { - private final byte[] buf; - - private final int end; - - private int ptr; - - private int field; - - private int type; - - private int length; - - private Decoder(byte[] buf, int off, int len) { - this.buf = buf; - this.ptr = off; - this.end = off + len; - } - - /** @return get the field tag number, 0 on end of buffer. 
*/ - public int next() { - if (ptr == end) - return 0; - - int fieldAndType = varint32(); - field = fieldAndType >>> 3; - type = fieldAndType & 7; - return field; - } - - /** Skip the current field's value. */ - public void skip() { - switch (type) { - case WIRE_VARINT: - varint64(); - break; - case WIRE_FIXED_64: - ptr += 8; - break; - case WIRE_LEN_DELIM: - ptr += varint32(); - break; - case WIRE_FIXED_32: - ptr += 4; - break; - default: - throw new IllegalStateException(MessageFormat.format(DhtText - .get().protobufUnsupportedFieldType, Integer - .valueOf(type))); - } - } - - /** @return decode the current field as an int32. */ - public int int32() { - checkFieldType(WIRE_VARINT); - return varint32(); - } - - /** @return decode the current field as an int64. */ - public long int64() { - checkFieldType(WIRE_VARINT); - return varint64(); - } - - /** - * @param <T> - * the type of enumeration. - * @param all - * all of the supported values. - * @return decode the current field as an enumerated value. - */ - public <T extends Enum> T intEnum(T[] all) { - checkFieldType(WIRE_VARINT); - int value = varint32(); - for (T t : all) { - if (t.value() == value) - return t; - } - throw new IllegalStateException(MessageFormat.format( - DhtText.get().protobufWrongFieldType, Integer - .valueOf(field), Integer.valueOf(type), all[0] - .getClass().getSimpleName())); - } - - /** @return decode the current field as a bool. */ - public boolean bool() { - checkFieldType(WIRE_VARINT); - int val = varint32(); - switch (val) { - case 0: - return false; - case 1: - return true; - default: - throw new IllegalStateException(MessageFormat.format(DhtText - .get().protobufNotBooleanValue, Integer.valueOf(field), - Integer.valueOf(val))); - } - } - - /** @return decode a fixed 64 bit value. */ - public long fixed64() { - checkFieldType(WIRE_FIXED_64); - long val = buf[ptr + 0] & 0xff; - val |= ((long) (buf[ptr + 1] & 0xff)) << (1 * 8); - val |= ((long) (buf[ptr + 2] & 0xff)) << (2 * 8); - val |= ((long) (buf[ptr + 3] & 0xff)) << (3 * 8); - val |= ((long) (buf[ptr + 4] & 0xff)) << (4 * 8); - val |= ((long) (buf[ptr + 5] & 0xff)) << (5 * 8); - val |= ((long) (buf[ptr + 6] & 0xff)) << (6 * 8); - val |= ((long) (buf[ptr + 7] & 0xff)) << (7 * 8); - ptr += 8; - return val; - } - - /** @return decode the current field as a string. */ - public String string() { - checkFieldType(WIRE_LEN_DELIM); - int len = varint32(); - String s = RawParseUtils.decode(buf, ptr, ptr + len); - ptr += len; - return s; - } - - /** @return decode the current hex string to an ObjectId. */ - public ObjectId stringObjectId() { - checkFieldType(WIRE_LEN_DELIM); - int len = varint32(); - if (len != OBJECT_ID_STRING_LENGTH) - throw new IllegalStateException(MessageFormat.format(DhtText - .get().protobufWrongFieldLength, - Integer.valueOf(field), Integer - .valueOf(OBJECT_ID_STRING_LENGTH), Integer - .valueOf(len))); - - ObjectId id = ObjectId.fromString(buf, ptr); - ptr += OBJECT_ID_STRING_LENGTH; - return id; - } - - /** @return decode a string from 8 hex digits. */ - public int stringHex32() { - checkFieldType(WIRE_LEN_DELIM); - int len = varint32(); - if (len != 8) - throw new IllegalStateException(MessageFormat.format(DhtText - .get().protobufWrongFieldLength, - Integer.valueOf(field), Integer.valueOf(8), Integer - .valueOf(len))); - int val = KeyUtils.parse32(buf, ptr); - ptr += 8; - return val; - } - - /** @return decode the current field as an array of bytes. 
*/ - public byte[] bytes() { - checkFieldType(WIRE_LEN_DELIM); - byte[] r = new byte[varint32()]; - System.arraycopy(buf, ptr, r, 0, r.length); - ptr += r.length; - return r; - } - - /** @return backing array of the current field. */ - public byte[] bytesArray() { - return buf; - } - - /** @return length of field, call before {@link #bytesOffset}. */ - public int bytesLength() { - checkFieldType(WIRE_LEN_DELIM); - length = varint32(); - return length; - } - - /** @return starting offset of the field, after {@link #bytesLength()}. */ - public int bytesOffset() { - int start = ptr; - ptr += length; - return start; - } - - /** @return decode the current raw bytes to an ObjectId. */ - public ObjectId bytesObjectId() { - checkFieldType(WIRE_LEN_DELIM); - int len = varint32(); - if (len != OBJECT_ID_LENGTH) - throw new IllegalStateException(MessageFormat.format(DhtText - .get().protobufWrongFieldLength, - Integer.valueOf(field), Integer - .valueOf(OBJECT_ID_LENGTH), Integer - .valueOf(len))); - - ObjectId id = ObjectId.fromRaw(buf, ptr); - ptr += OBJECT_ID_LENGTH; - return id; - } - - /** @return decode the current field as a nested message. */ - public Decoder message() { - checkFieldType(WIRE_LEN_DELIM); - int len = varint32(); - Decoder msg = decode(buf, ptr, len); - ptr += len; - return msg; - } - - private int varint32() { - long v = varint64(); - if (Integer.MAX_VALUE < v) - throw new IllegalStateException(MessageFormat.format(DhtText - .get().protobufWrongFieldType, Integer.valueOf(field), - "int64", "int32")); - return (int) v; - } - - private long varint64() { - int c = buf[ptr++]; - long r = c & 0x7f; - int shift = 7; - while ((c & 0x80) != 0) { - c = buf[ptr++]; - r |= ((long) (c & 0x7f)) << shift; - shift += 7; - } - return r; - } - - private void checkFieldType(int expected) { - if (type != expected) - throw new IllegalStateException(MessageFormat.format(DhtText - .get().protobufWrongFieldType, Integer.valueOf(field), - Integer.valueOf(type), Integer.valueOf(expected))); - } - } - - /** Encode values into a binary protocol buffer. */ - public static class Encoder { - private byte[] buf; - - private int ptr; - - private Encoder(byte[] buf) { - this.buf = buf; - } - - /** - * Encode a variable length positive integer. - * - * @param field - * field tag number. - * @param value - * the value to store. Must be >= 0. - */ - public void int32(int field, int value) { - int64(field, value); - } - - /** - * Encode a variable length positive integer. - * - * @param field - * field tag number. - * @param value - * the value to store; omitted if 0. - */ - public void int32IfNotZero(int field, int value) { - int64IfNotZero(field, value); - } - - /** - * Encode a variable length positive integer. - * - * @param field - * field tag number. - * @param value - * the value to store; omitted if negative. - */ - public void int32IfNotNegative(int field, int value) { - int64IfNotNegative(field, value); - } - - /** - * Encode a variable length positive integer. - * - * @param field - * field tag number. - * @param value - * the value to store. Must be >= 0. - */ - public void int64(int field, long value) { - if (value < 0) - throw new IllegalArgumentException( - DhtText.get().protobufNegativeValuesNotSupported); - - field(field, WIRE_VARINT); - varint(value); - } - - /** - * Encode a variable length positive integer. - * - * @param field - * field tag number. - * @param value - * the value to store; omitted if 0. 
- */ - public void int64IfNotZero(int field, long value) { - if (0 != value) - int64(field, value); - } - - /** - * Encode a variable length positive integer. - * - * @param field - * field tag number. - * @param value - * the value to store; omitted if negative. - */ - public void int64IfNotNegative(int field, long value) { - if (0 <= value) - int64(field, value); - } - - /** - * Encode an enumerated value. - * - * @param <T> - * type of the enumerated values. - * @param field - * field tag number. - * @param value - * value to store; if null the field is omitted. - */ - public <T extends Enum> void intEnum(int field, T value) { - if (value != null) { - field(field, WIRE_VARINT); - varint(value.value()); - } - } - - /** - * Encode a boolean value. - * - * @param field - * field tag number. - * @param value - * the value to store. - */ - public void bool(int field, boolean value) { - field(field, WIRE_VARINT); - varint(value ? 1 : 0); - } - - /** - * Encode a boolean value, only if true. - * - * @param field - * field tag number. - * @param value - * the value to store. - */ - public void boolIfTrue(int field, boolean value) { - if (value) - bool(field, value); - } - - /** - * Encode a fixed 64 value. - * - * @param field - * field tag number. - * @param value - * the value to store. - */ - public void fixed64(int field, long value) { - field(field, WIRE_FIXED_64); - if (buf != null) { - ensureSpace(8); - - buf[ptr + 0] = (byte) value; - value >>>= 8; - - buf[ptr + 1] = (byte) value; - value >>>= 8; - - buf[ptr + 2] = (byte) value; - value >>>= 8; - - buf[ptr + 3] = (byte) value; - value >>>= 8; - - buf[ptr + 4] = (byte) value; - value >>>= 8; - - buf[ptr + 5] = (byte) value; - value >>>= 8; - - buf[ptr + 6] = (byte) value; - value >>>= 8; - - buf[ptr + 7] = (byte) value; - } - ptr += 8; - } - - /** - * Encode a length delimited bytes field. - * - * @param field - * field tag number. - * @param value - * the value to store; if null the field is omitted. - */ - public void bytes(int field, byte[] value) { - if (value != null) - bytes(field, value, 0, value.length); - } - - /** - * Encode a length delimited bytes field. - * - * @param field - * field tag number. - * @param value - * the value to store; if null the field is omitted. - */ - public void bytes(int field, ByteBuffer value) { - if (value != null) { - if (!value.hasArray()) - throw new IllegalArgumentException(DhtText.get().protobufNoArray); - byte[] valBuf = value.array(); - int valPtr = value.arrayOffset() + value.position(); - int valLen = value.limit() - value.position(); - bytes(field, valBuf, valPtr, valLen); - } - } - - /** - * Encode a length delimited bytes field. - * - * @param field - * field tag number. - * @param value - * the value to store; if null the field is omitted. - * @param off - * position to copy from. - * @param len - * number of bytes to copy. - */ - public void bytes(int field, byte[] value, int off, int len) { - if (value != null) { - field(field, WIRE_LEN_DELIM); - varint(len); - copy(value, off, len); - } - } - - /** - * Encode an ObjectId as a bytes (in raw binary format). - * - * @param field - * field tag number. - * @param value - * the value to store, as a raw binary; if null the field is - * omitted. 
- */ - public void bytes(int field, AnyObjectId value) { - if (value != null) { - field(field, WIRE_LEN_DELIM); - varint(OBJECT_ID_LENGTH); - if (buf != null) { - ensureSpace(OBJECT_ID_LENGTH); - value.copyRawTo(buf, ptr); - } - ptr += OBJECT_ID_LENGTH; - } - } - - /** - * Encode an ObjectId as a string (in hex format). - * - * @param field - * field tag number. - * @param value - * the value to store, as a hex string; if null the field is - * omitted. - */ - public void string(int field, AnyObjectId value) { - if (value != null) { - field(field, WIRE_LEN_DELIM); - varint(OBJECT_ID_STRING_LENGTH); - if (buf != null) { - ensureSpace(OBJECT_ID_STRING_LENGTH); - value.copyTo(buf, ptr); - } - ptr += OBJECT_ID_STRING_LENGTH; - } - } - - /** - * Encode a plain Java string. - * - * @param field - * field tag number. - * @param value - * the value to store; if null the field is omitted. - */ - public void string(int field, String value) { - if (value != null) - bytes(field, Constants.encode(value)); - } - - /** - * Encode a row key as a string. - * - * @param field - * field tag number. - * @param key - * the row key to store as a string; if null the field is - * omitted. - */ - public void string(int field, RowKey key) { - if (key != null) - bytes(field, key.asBytes()); - } - - /** - * Encode an integer as an 8 byte hex string. - * - * @param field - * field tag number. - * @param value - * value to encode. - */ - public void stringHex32(int field, int value) { - field(field, WIRE_LEN_DELIM); - varint(8); - if (buf != null) { - ensureSpace(8); - KeyUtils.format32(buf, ptr, value); - } - ptr += 8; - } - - /** - * Encode a nested message. - * - * @param field - * field tag number. - * @param msg - * message to store; if null or empty the field is omitted. - */ - public void message(int field, Encoder msg) { - if (msg != null && msg.ptr > 0) - bytes(field, msg.buf, 0, msg.ptr); - } - - private void field(int field, int type) { - varint((field << 3) | type); - } - - private void varint(long value) { - if (buf != null) { - if (buf.length - ptr < 10) - ensureSpace(varintSize(value)); - - do { - byte b = (byte) (value & 0x7f); - value >>>= 7; - if (value != 0) - b |= 0x80; - buf[ptr++] = b; - } while (value != 0); - } else { - ptr += varintSize(value); - } - } - - private static int varintSize(long value) { - value >>>= 7; - int need = 1; - for (; value != 0; value >>>= 7) - need++; - return need; - } - - private void copy(byte[] src, int off, int cnt) { - if (buf != null) { - ensureSpace(cnt); - System.arraycopy(src, off, buf, ptr, cnt); - } - ptr += cnt; - } - - private void ensureSpace(int need) { - if (buf.length - ptr < need) { - byte[] n = new byte[Math.max(ptr + need, buf.length * 2)]; - System.arraycopy(buf, 0, n, 0, ptr); - buf = n; - } - } - - /** @return size of the protocol buffer message, in bytes. */ - public int size() { - return ptr; - } - - /** @return the current buffer, as a byte array. */ - public byte[] asByteArray() { - if (ptr == buf.length) - return buf; - byte[] r = new byte[ptr]; - System.arraycopy(buf, 0, r, 0, ptr); - return r; - } - - /** @return the current buffer. */ - public ByteBuffer asByteBuffer() { - return ByteBuffer.wrap(buf, 0, ptr); - } - } - - private TinyProtobuf() { - // Don't make instances. 
- } -} diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ChunkTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ChunkTable.java index d5c5cc9ff7..db0fded3f2 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ChunkTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ChunkTable.java @@ -44,11 +44,12 @@ package org.eclipse.jgit.storage.dht.spi; import java.util.Collection; +import java.util.Map; import java.util.Set; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; import org.eclipse.jgit.storage.dht.AsyncCallback; import org.eclipse.jgit.storage.dht.ChunkKey; -import org.eclipse.jgit.storage.dht.ChunkMeta; import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.PackChunk; import org.eclipse.jgit.storage.dht.StreamingCallback; @@ -113,7 +114,7 @@ public interface ChunkTable { * results early. */ public void getMeta(Context options, Set<ChunkKey> keys, - AsyncCallback<Collection<ChunkMeta>> callback); + AsyncCallback<Map<ChunkKey, ChunkMeta>> callback); /** * Put some (or all) of a single chunk. diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RefTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RefTable.java index 48171265c1..b46ca0b5a6 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RefTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RefTable.java @@ -46,8 +46,9 @@ package org.eclipse.jgit.storage.dht.spi; import java.util.Map; import java.util.concurrent.TimeoutException; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData; import org.eclipse.jgit.storage.dht.DhtException; -import org.eclipse.jgit.storage.dht.RefData; +import org.eclipse.jgit.storage.dht.RefDataUtil; import org.eclipse.jgit.storage.dht.RefKey; import org.eclipse.jgit.storage.dht.RepositoryKey; @@ -100,8 +101,8 @@ public interface RefTable { * @param oldData * the old data for the reference. The put only occurs if the * value is still equal to {@code oldData}. Use - * {@link RefData#NONE} if the reference should not exist and is - * being created. + * {@link RefDataUtil#NONE} if the reference should not exist and + * is being created. * @param newData * new value to store. 
* @return true if the put was successful; false if the current value does diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryTable.java index 5921ca95c1..8f2dab83ed 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryTable.java @@ -46,7 +46,7 @@ package org.eclipse.jgit.storage.dht.spi; import java.util.Collection; import java.util.concurrent.TimeoutException; -import org.eclipse.jgit.storage.dht.CachedPackInfo; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo; import org.eclipse.jgit.storage.dht.CachedPackKey; import org.eclipse.jgit.storage.dht.ChunkInfo; import org.eclipse.jgit.storage.dht.ChunkKey; diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheChunkTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheChunkTable.java index 22989cb93f..b7f94fd6c8 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheChunkTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheChunkTable.java @@ -44,29 +44,36 @@ package org.eclipse.jgit.storage.dht.spi.cache; import static java.util.Collections.singleton; +import static java.util.Collections.singletonMap; +import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; import org.eclipse.jgit.storage.dht.AsyncCallback; import org.eclipse.jgit.storage.dht.ChunkKey; -import org.eclipse.jgit.storage.dht.ChunkMeta; import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.PackChunk; import org.eclipse.jgit.storage.dht.StreamingCallback; import org.eclipse.jgit.storage.dht.Sync; -import org.eclipse.jgit.storage.dht.TinyProtobuf; import org.eclipse.jgit.storage.dht.spi.ChunkTable; import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change; +import com.google.protobuf.CodedInputStream; +import com.google.protobuf.CodedOutputStream; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.WireFormat; + /** Cache wrapper around ChunkTable. */ public class CacheChunkTable implements ChunkTable { private final ChunkTable db; @@ -105,7 +112,7 @@ public class CacheChunkTable implements ChunkTable { } public void getMeta(Context options, Set<ChunkKey> keys, - AsyncCallback<Collection<ChunkMeta>> callback) { + AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) { List<CacheKey> toFind = new ArrayList<CacheKey>(keys.size()); for (ChunkKey k : keys) toFind.add(nsMeta.key(k)); @@ -118,8 +125,10 @@ public class CacheChunkTable implements ChunkTable { db.put(chunk, buf.getWriteBuffer()); // Only store fragmented meta. This is all callers should ask for. 
- if (chunk.hasMeta() && chunk.getMeta().getFragmentCount() != 0) - buf.put(nsMeta.key(chunk.getChunkKey()), chunk.getMeta().asBytes()); + if (chunk.hasMeta() && chunk.getMeta().getFragmentCount() != 0) { + buf.put(nsMeta.key(chunk.getChunkKey()), + chunk.getMeta().toByteArray()); + } if (chunk.hasChunkData()) buf.put(nsChunk.key(chunk.getChunkKey()), encode(chunk)); @@ -135,57 +144,99 @@ } private static byte[] encode(PackChunk.Members members) { - final byte[] meta; - if (members.hasMeta()) - meta = members.getMeta().asBytes(); - else - meta = null; - - ByteBuffer chunkData = members.getChunkDataAsByteBuffer(); - ByteBuffer chunkIndex = members.getChunkIndexAsByteBuffer(); - - TinyProtobuf.Encoder sizer = TinyProtobuf.size(); - TinyProtobuf.Encoder e = sizer; - do { - e.bytes(1, chunkData); - e.bytes(2, chunkIndex); - e.bytes(3, meta); - if (e == sizer) - e = TinyProtobuf.encode(e.size()); - else - return e.asByteArray(); - } while (true); + // It's too slow to encode ByteBuffer through the standard code. + // Since the message is only 3 fields, do it by hand. + ByteBuffer data = members.getChunkDataAsByteBuffer(); + ByteBuffer index = members.getChunkIndexAsByteBuffer(); + ChunkMeta meta = members.getMeta(); + + int sz = 0; + if (data != null) + sz += computeByteBufferSize(1, data); + if (index != null) + sz += computeByteBufferSize(2, index); + if (meta != null) + sz += CodedOutputStream.computeMessageSize(3, meta); + + byte[] r = new byte[sz]; + CodedOutputStream out = CodedOutputStream.newInstance(r); + try { + if (data != null) + writeByteBuffer(out, 1, data); + if (index != null) + writeByteBuffer(out, 2, index); + if (meta != null) + out.writeMessage(3, meta); + } catch (IOException err) { + throw new RuntimeException("Cannot buffer chunk", err); + } + return r; + } + + private static int computeByteBufferSize(int fieldNumber, ByteBuffer data) { + int n = data.remaining(); + return CodedOutputStream.computeTagSize(fieldNumber) + + CodedOutputStream.computeRawVarint32Size(n) + + n; + } + + private static void writeByteBuffer(CodedOutputStream out, int fieldNumber, + ByteBuffer data) throws IOException { + byte[] d = data.array(); + int p = data.arrayOffset() + data.position(); + int n = data.remaining(); + out.writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); + out.writeRawVarint32(n); + out.writeRawBytes(d, p, n); } private static PackChunk.Members decode(ChunkKey key, byte[] raw) { PackChunk.Members members = new PackChunk.Members(); members.setChunkKey(key); - TinyProtobuf.Decoder d = TinyProtobuf.decode(raw); - for (;;) { - switch (d.next()) { - case 0: - return members; - case 1: { - int cnt = d.bytesLength(); - int ptr = d.bytesOffset(); - byte[] buf = d.bytesArray(); - members.setChunkData(buf, ptr, cnt); - continue; - } - case 2: { - int cnt = d.bytesLength(); - int ptr = d.bytesOffset(); - byte[] buf = d.bytesArray(); - members.setChunkIndex(buf, ptr, cnt); - continue; - } - case 3: - members.setMeta(ChunkMeta.fromBytes(key, d.message())); - continue; - default: - d.skip(); + // It's too slow to convert using the standard code, as copies + // are made. Instead find offsets in the stream and use that.
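+ // Each field's length is read and its payload offset recorded via + // getTotalBytesRead(); the payload itself is skipped, never copied, + // so Members references the original raw array directly.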
+ CodedInputStream in = CodedInputStream.newInstance(raw); + try { + int tag = in.readTag(); + for (;;) { + switch (WireFormat.getTagFieldNumber(tag)) { + case 0: + return members; + case 1: { + int cnt = in.readRawVarint32(); + int ptr = in.getTotalBytesRead(); + members.setChunkData(raw, ptr, cnt); + in.skipRawBytes(cnt); + tag = in.readTag(); + if (WireFormat.getTagFieldNumber(tag) != 2) + continue; + } + //$FALL-THROUGH$ + case 2: { + int cnt = in.readRawVarint32(); + int ptr = in.getTotalBytesRead(); + members.setChunkIndex(raw, ptr, cnt); + in.skipRawBytes(cnt); + tag = in.readTag(); + if (WireFormat.getTagFieldNumber(tag) != 3) + continue; + } + //$FALL-THROUGH$ + case 3: { + int cnt = in.readRawVarint32(); + int oldLimit = in.pushLimit(cnt); + members.setMeta(ChunkMeta.parseFrom(in)); + in.popLimit(oldLimit); + tag = in.readTag(); + continue; + } + default: + in.skipField(tag); + } } + } catch (IOException err) { + throw new RuntimeException("Cannot decode chunk", err); } } @@ -329,41 +380,49 @@ public class CacheChunkTable implements ChunkTable { private final Set<ChunkKey> remaining; - private final AsyncCallback<Collection<ChunkMeta>> normalCallback; + private final AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback; - private final StreamingCallback<Collection<ChunkMeta>> streamingCallback; + private final StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback; - private final List<ChunkMeta> all; + private final Map<ChunkKey, ChunkMeta> all; MetaFromCache(Context options, Set<ChunkKey> keys, - AsyncCallback<Collection<ChunkMeta>> callback) { + AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) { this.options = options; this.remaining = new HashSet<ChunkKey>(keys); this.normalCallback = callback; if (callback instanceof StreamingCallback<?>) { - streamingCallback = (StreamingCallback<Collection<ChunkMeta>>) callback; + streamingCallback = (StreamingCallback<Map<ChunkKey, ChunkMeta>>) callback; all = null; } else { streamingCallback = null; - all = new ArrayList<ChunkMeta>(keys.size()); + all = new HashMap<ChunkKey, ChunkMeta>(); } } public void onPartialResult(Map<CacheKey, byte[]> result) { for (Map.Entry<CacheKey, byte[]> ent : result.entrySet()) { ChunkKey key = ChunkKey.fromBytes(ent.getKey().getBytes()); - ChunkMeta meta = ChunkMeta.fromBytes(key, ent.getValue()); + ChunkMeta meta; + try { + meta = ChunkMeta.parseFrom(ent.getValue()); + } catch (InvalidProtocolBufferException e) { + // Invalid meta message, remove the cell from cache. 
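+ // The damaged cell is dropped from the cache below; its key is not + // reported to the callback, so this read behaves as a cache miss.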
+ client.modify(singleton(Change.remove(ent.getKey())), + Sync.<Void> none()); + continue; + } if (streamingCallback != null) { - streamingCallback.onPartialResult(singleton(meta)); + streamingCallback.onPartialResult(singletonMap(key, meta)); synchronized (lock) { remaining.remove(key); } } else { synchronized (lock) { - all.add(meta); + all.put(key, meta); remaining.remove(key); } } @@ -391,31 +450,31 @@ public class CacheChunkTable implements ChunkTable { } private class MetaFromDatabase implements - StreamingCallback<Collection<ChunkMeta>> { + StreamingCallback<Map<ChunkKey, ChunkMeta>> { private final Object lock = new Object(); - private final List<ChunkMeta> all; + private final Map<ChunkKey, ChunkMeta> all; - private final AsyncCallback<Collection<ChunkMeta>> normalCallback; + private final AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback; - private final StreamingCallback<Collection<ChunkMeta>> streamingCallback; + private final StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback; - MetaFromDatabase(List<ChunkMeta> all, - AsyncCallback<Collection<ChunkMeta>> normalCallback, - StreamingCallback<Collection<ChunkMeta>> streamingCallback) { + MetaFromDatabase(Map<ChunkKey, ChunkMeta> all, + AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback, + StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback) { this.all = all; this.normalCallback = normalCallback; this.streamingCallback = streamingCallback; } - public void onPartialResult(Collection<ChunkMeta> result) { - final List<ChunkMeta> toPutIntoCache = copy(result); + public void onPartialResult(Map<ChunkKey, ChunkMeta> result) { + final Map<ChunkKey, ChunkMeta> toPutIntoCache = copy(result); if (streamingCallback != null) streamingCallback.onPartialResult(result); else { synchronized (lock) { - all.addAll(result); + all.putAll(result); } } @@ -425,20 +484,22 @@ public class CacheChunkTable implements ChunkTable { // executor.submit(new Runnable() { public void run() { - for (ChunkMeta meta : toPutIntoCache) { - ChunkKey key = meta.getChunkKey(); - Change op = Change.put(nsMeta.key(key), meta.asBytes()); + for (Map.Entry<ChunkKey, ChunkMeta> ent + : toPutIntoCache.entrySet()) { + ChunkKey key = ent.getKey(); + Change op = Change.put(nsMeta.key(key), + ent.getValue().toByteArray()); client.modify(singleton(op), none); } } }); } - private <T> List<T> copy(Collection<T> result) { - return new ArrayList<T>(result); + private <K, V> Map<K, V> copy(Map<K, V> result) { + return new HashMap<K, V>(result); } - public void onSuccess(Collection<ChunkMeta> result) { + public void onSuccess(Map<ChunkKey, ChunkMeta> result) { if (result != null && !result.isEmpty()) onPartialResult(result); diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheObjectIndexTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheObjectIndexTable.java index 0438dc09e7..0cd3549e04 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheObjectIndexTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheObjectIndexTable.java @@ -44,7 +44,9 @@ package org.eclipse.jgit.storage.dht.spi.cache; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -52,6 +54,7 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; +import 
org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex; import org.eclipse.jgit.storage.dht.AsyncCallback; import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.DhtException; @@ -59,12 +62,13 @@ import org.eclipse.jgit.storage.dht.ObjectIndexKey; import org.eclipse.jgit.storage.dht.ObjectInfo; import org.eclipse.jgit.storage.dht.StreamingCallback; import org.eclipse.jgit.storage.dht.Sync; -import org.eclipse.jgit.storage.dht.TinyProtobuf; import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable; import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change; +import com.google.protobuf.InvalidProtocolBufferException; + /** Cache wrapper around ObjectIndexTable. */ public class CacheObjectIndexTable implements ObjectIndexTable { private final ObjectIndexTable db; @@ -125,58 +129,6 @@ public class CacheObjectIndexTable implements ObjectIndexTable { buf.remove(ns.key(objId)); } - private static byte[] encode(Collection<ObjectInfo> list) { - TinyProtobuf.Encoder e = TinyProtobuf.encode(128); - for (ObjectInfo info : list) { - TinyProtobuf.Encoder m = TinyProtobuf.encode(128); - m.bytes(1, info.getChunkKey().asBytes()); - m.bytes(2, info.asBytes()); - m.fixed64(3, info.getTime()); - e.message(1, m); - } - return e.asByteArray(); - } - - private static ObjectInfo decodeItem(TinyProtobuf.Decoder m) { - ChunkKey key = null; - TinyProtobuf.Decoder data = null; - long time = -1; - - for (;;) { - switch (m.next()) { - case 0: - return ObjectInfo.fromBytes(key, data, time); - case 1: - key = ChunkKey.fromBytes(m); - continue; - case 2: - data = m.message(); - continue; - case 3: - time = m.fixed64(); - continue; - default: - m.skip(); - } - } - } - - private static Collection<ObjectInfo> decode(byte[] raw) { - List<ObjectInfo> res = new ArrayList<ObjectInfo>(1); - TinyProtobuf.Decoder d = TinyProtobuf.decode(raw); - for (;;) { - switch (d.next()) { - case 0: - return res; - case 1: - res.add(decodeItem(d.message())); - continue; - default: - d.skip(); - } - } - } - private class LoaderFromCache implements StreamingCallback<Map<CacheKey, byte[]>> { private final Object lock = new Object(); @@ -217,7 +169,15 @@ public class CacheObjectIndexTable implements ObjectIndexTable { for (Map.Entry<CacheKey, byte[]> e : result.entrySet()) { ObjectIndexKey objKey; - Collection<ObjectInfo> list = decode(e.getValue()); + Collection<ObjectInfo> list; + try { + list = decode(e.getValue()); + } catch (InvalidProtocolBufferException badCell) { + client.modify( + Collections.singleton(Change.remove(e.getKey())), + Sync.<Void> none()); + continue; + } objKey = ObjectIndexKey.fromBytes(e.getKey().getBytes()); if (tmp != null) @@ -238,6 +198,21 @@ public class CacheObjectIndexTable implements ObjectIndexTable { } } + private Collection<ObjectInfo> decode(byte[] value) + throws InvalidProtocolBufferException { + CachedObjectIndex cacheEntry = CachedObjectIndex.parseFrom(value); + int sz = cacheEntry.getItemCount(); + ObjectInfo[] r = new ObjectInfo[sz]; + for (int i = 0; i < sz; i++) { + CachedObjectIndex.Item item = cacheEntry.getItem(i); + r[i] = new ObjectInfo( + ChunkKey.fromString(item.getChunkKey()), + item.getTime(), + item.getObjectInfo()); + } + return Arrays.asList(r); + } + public void onSuccess(Map<CacheKey, byte[]> result) { if (result != null && !result.isEmpty()) onPartialResult(result); @@ -305,6 +280,19 @@ public class CacheObjectIndexTable implements 
ObjectIndexTable { client.modify(ops, Sync.<Void> none()); } + + private byte[] encode(List<ObjectInfo> items) { + CachedObjectIndex.Builder b; + b = CachedObjectIndex.newBuilder(); + for (ObjectInfo info : items) { + CachedObjectIndex.Item.Builder i = b.addItemBuilder(); + i.setChunkKey(info.getChunkKey().asString()); + i.setObjectInfo(info.getData()); + if (0 < info.getTime()) + i.setTime(info.getTime()); + } + return b.build().toByteArray(); + } }); } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRefTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRefTable.java index 5edb49eddf..2b6c8dac31 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRefTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRefTable.java @@ -46,8 +46,8 @@ package org.eclipse.jgit.storage.dht.spi.cache; import java.util.Map; import java.util.concurrent.TimeoutException; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData; import org.eclipse.jgit.storage.dht.DhtException; -import org.eclipse.jgit.storage.dht.RefData; import org.eclipse.jgit.storage.dht.RefKey; import org.eclipse.jgit.storage.dht.RepositoryKey; import org.eclipse.jgit.storage.dht.spi.Context; diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryTable.java index b71c242625..a378e0a8b3 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryTable.java @@ -46,24 +46,24 @@ package org.eclipse.jgit.storage.dht.spi.cache; import static java.util.Collections.emptyMap; import static java.util.Collections.singleton; -import java.util.ArrayList; import java.util.Collection; -import java.util.List; import java.util.Map; import java.util.concurrent.TimeoutException; -import org.eclipse.jgit.storage.dht.CachedPackInfo; +import org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo; import org.eclipse.jgit.storage.dht.CachedPackKey; import org.eclipse.jgit.storage.dht.ChunkInfo; import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.RepositoryKey; import org.eclipse.jgit.storage.dht.Sync; -import org.eclipse.jgit.storage.dht.TinyProtobuf; import org.eclipse.jgit.storage.dht.spi.RepositoryTable; import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change; +import com.google.protobuf.InvalidProtocolBufferException; + /** Cache wrapper around RepositoryTable. 
*/ public class CacheRepositoryTable implements RepositoryTable { private final RepositoryTable db; @@ -126,26 +126,20 @@ public class CacheRepositoryTable implements RepositoryTable { byte[] data = result.get(memKey); if (data != null) { - List<CachedPackInfo> r = new ArrayList<CachedPackInfo>(); - TinyProtobuf.Decoder d = TinyProtobuf.decode(data); - for (;;) { - switch (d.next()) { - case 0: - return r; - case 1: - r.add(CachedPackInfo.fromBytes(d.message())); - continue; - default: - d.skip(); - } + try { + return CachedPackInfoList.parseFrom(data).getPackList(); + } catch (InvalidProtocolBufferException e) { + // Invalidate the cache entry and fall through. + client.modify(singleton(Change.remove(memKey)), none); } } Collection<CachedPackInfo> r = db.getCachedPacks(repo); - TinyProtobuf.Encoder e = TinyProtobuf.encode(1024); - for (CachedPackInfo info : r) - e.bytes(1, info.asBytes()); - client.modify(singleton(Change.put(memKey, e.asByteArray())), none); + CachedPackInfoList.Builder list = CachedPackInfoList.newBuilder(); + list.addAllPack(r); + client.modify( + singleton(Change.put(memKey, list.build().toByteArray())), + none); return r; } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemChunkTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemChunkTable.java index 8a04dbb6d5..277b2b83a8 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemChunkTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemChunkTable.java @@ -43,21 +43,27 @@ package org.eclipse.jgit.storage.dht.spi.memory; +import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Set; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta; import org.eclipse.jgit.storage.dht.AsyncCallback; import org.eclipse.jgit.storage.dht.ChunkKey; -import org.eclipse.jgit.storage.dht.ChunkMeta; import org.eclipse.jgit.storage.dht.DhtException; +import org.eclipse.jgit.storage.dht.DhtText; import org.eclipse.jgit.storage.dht.PackChunk; import org.eclipse.jgit.storage.dht.spi.ChunkTable; import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher; +import com.google.protobuf.InvalidProtocolBufferException; + final class MemChunkTable implements ChunkTable { private final MemTable table = new MemTable(); @@ -89,8 +95,15 @@ final class MemChunkTable implements ChunkTable { m.setChunkIndex(cell.getValue()); cell = table.get(row, colMeta.name()); - if (cell != null) - m.setMeta(ChunkMeta.fromBytes(chunk, cell.getValue())); + if (cell != null) { + try { + m.setMeta(ChunkMeta.parseFrom(cell.getValue())); + } catch (InvalidProtocolBufferException err) { + callback.onFailure(new DhtException(MessageFormat.format( + DhtText.get().invalidChunkMeta, chunk), err)); + return; + } + } out.add(m); } @@ -99,15 +112,21 @@ final class MemChunkTable implements ChunkTable { } public void getMeta(Context options, Set<ChunkKey> keys, - AsyncCallback<Collection<ChunkMeta>> callback) { - int cnt = keys.size(); - List<ChunkMeta> out = new ArrayList<ChunkMeta>(cnt); + AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) { + Map<ChunkKey, ChunkMeta> out = new HashMap<ChunkKey, ChunkMeta>(); for (ChunkKey chunk : keys) { byte[] row = chunk.asBytes(); MemTable.Cell cell = table.get(row, 
colMeta.name()); - if (cell != null) - out.add(ChunkMeta.fromBytes(chunk, cell.getValue())); + if (cell != null) { + try { + out.put(chunk, ChunkMeta.parseFrom(cell.getValue())); + } catch (InvalidProtocolBufferException err) { + callback.onFailure(new DhtException(MessageFormat.format( + DhtText.get().invalidChunkMeta, chunk), err)); + return; + } + } } callback.onSuccess(out); @@ -124,7 +143,7 @@ final class MemChunkTable implements ChunkTable { table.put(row, colIndex.name(), chunk.getChunkIndex()); if (chunk.hasMeta()) - table.put(row, colMeta.name(), chunk.getMeta().asBytes()); + table.put(row, colMeta.name(), chunk.getMeta().toByteArray()); } public void remove(ChunkKey key, WriteBuffer buffer) throws DhtException { diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemObjectIndexTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemObjectIndexTable.java index e6f4f7acac..e3bb7fdd11 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemObjectIndexTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemObjectIndexTable.java @@ -43,15 +43,18 @@ package org.eclipse.jgit.storage.dht.spi.memory; +import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore; import org.eclipse.jgit.storage.dht.AsyncCallback; import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.DhtException; +import org.eclipse.jgit.storage.dht.DhtText; import org.eclipse.jgit.storage.dht.ObjectIndexKey; import org.eclipse.jgit.storage.dht.ObjectInfo; import org.eclipse.jgit.storage.dht.spi.Context; @@ -59,6 +62,8 @@ import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable; import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher; +import com.google.protobuf.InvalidProtocolBufferException; + final class MemObjectIndexTable implements ObjectIndexTable { private final MemTable table = new MemTable(); @@ -70,17 +75,25 @@ final class MemObjectIndexTable implements ObjectIndexTable { for (ObjectIndexKey objId : objects) { for (MemTable.Cell cell : table.scanFamily(objId.asBytes(), colInfo)) { - Collection<ObjectInfo> info = out.get(objId); - if (info == null) { - info = new ArrayList<ObjectInfo>(4); - out.put(objId, info); + Collection<ObjectInfo> chunks = out.get(objId); + ChunkKey chunkKey; + if (chunks == null) { + chunks = new ArrayList<ObjectInfo>(4); + out.put(objId, chunks); } - ChunkKey chunk = ChunkKey.fromBytes( - colInfo.suffix(cell.getName())); - byte[] value = cell.getValue(); - long time = cell.getTimestamp(); - info.add(ObjectInfo.fromBytes(chunk, value, time)); + chunkKey = ChunkKey.fromBytes(colInfo.suffix(cell.getName())); + try { + chunks.add(new ObjectInfo( + chunkKey, + cell.getTimestamp(), + GitStore.ObjectInfo.parseFrom(cell.getValue()))); + } catch (InvalidProtocolBufferException badCell) { + callback.onFailure(new DhtException(MessageFormat.format( + DhtText.get().invalidObjectInfo, objId, chunkKey), + badCell)); + return; + } } } @@ -91,7 +104,7 @@ final class MemObjectIndexTable implements ObjectIndexTable { throws DhtException { ChunkKey chunk = info.getChunkKey(); table.put(objId.asBytes(), colInfo.append(chunk.asBytes()), - info.asBytes()); + info.getData().toByteArray()); } public void remove(ObjectIndexKey objId, 
ChunkKey chunk, WriteBuffer buffer) diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRefTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRefTable.java index 6c41f20c4a..595e3fdd7c 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRefTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRefTable.java @@ -43,18 +43,23 @@ package org.eclipse.jgit.storage.dht.spi.memory; +import java.text.MessageFormat; import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeoutException; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData; import org.eclipse.jgit.storage.dht.DhtException; -import org.eclipse.jgit.storage.dht.RefData; +import org.eclipse.jgit.storage.dht.DhtText; +import org.eclipse.jgit.storage.dht.RefDataUtil; import org.eclipse.jgit.storage.dht.RefKey; import org.eclipse.jgit.storage.dht.RepositoryKey; import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.RefTable; import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher; +import com.google.protobuf.InvalidProtocolBufferException; + final class MemRefTable implements RefTable { private final MemTable table = new MemTable(); @@ -65,8 +70,12 @@ final class MemRefTable implements RefTable { Map<RefKey, RefData> out = new HashMap<RefKey, RefData>(); for (MemTable.Cell cell : table.scanFamily(repository.asBytes(), colRef)) { RefKey ref = RefKey.fromBytes(colRef.suffix(cell.getName())); - RefData val = RefData.fromBytes(cell.getValue()); - out.put(ref, val); + try { + out.put(ref, RefData.parseFrom(cell.getValue())); + } catch (InvalidProtocolBufferException badCell) { + throw new DhtException(MessageFormat.format( + DhtText.get().invalidRefData, ref), badCell); + } } return out; } @@ -77,8 +86,8 @@ final class MemRefTable implements RefTable { return table.compareAndSet( // repo.asBytes(), // colRef.append(refKey.asBytes()), // - oldData != RefData.NONE ? oldData.asBytes() : null, // - newData.asBytes()); + oldData != RefDataUtil.NONE ? oldData.toByteArray() : null, // + newData.toByteArray()); } public boolean compareAndRemove(RefKey refKey, RefData oldData) @@ -87,7 +96,7 @@ final class MemRefTable implements RefTable { return table.compareAndSet( // repo.asBytes(), // colRef.append(refKey.asBytes()), // - oldData != RefData.NONE ? oldData.asBytes() : null, // + oldData != RefDataUtil.NONE ? 
oldData.toByteArray() : null, // null); } } diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryTable.java index 01e90de3ba..d393934a23 100644 --- a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryTable.java +++ b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryTable.java @@ -43,22 +43,26 @@ package org.eclipse.jgit.storage.dht.spi.memory; +import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; -import org.eclipse.jgit.storage.dht.CachedPackInfo; +import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo; import org.eclipse.jgit.storage.dht.CachedPackKey; import org.eclipse.jgit.storage.dht.ChunkInfo; import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.DhtException; +import org.eclipse.jgit.storage.dht.DhtText; import org.eclipse.jgit.storage.dht.RepositoryKey; import org.eclipse.jgit.storage.dht.spi.RepositoryTable; import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher; +import com.google.protobuf.InvalidProtocolBufferException; + final class MemRepositoryTable implements RepositoryTable { private final AtomicInteger nextId = new AtomicInteger(); @@ -76,7 +80,7 @@ final class MemRepositoryTable implements RepositoryTable { throws DhtException { table.put(repo.asBytes(), colChunkInfo.append(info.getChunkKey().asBytes()), - info.asBytes()); + info.getData().toByteArray()); } public void remove(RepositoryKey repo, ChunkKey chunk, WriteBuffer buffer) @@ -87,16 +91,24 @@ final class MemRepositoryTable implements RepositoryTable { public Collection<CachedPackInfo> getCachedPacks(RepositoryKey repo) throws DhtException, TimeoutException { List<CachedPackInfo> out = new ArrayList<CachedPackInfo>(4); - for (MemTable.Cell cell : table.scanFamily(repo.asBytes(), colCachedPack)) - out.add(CachedPackInfo.fromBytes(cell.getValue())); + for (MemTable.Cell cell : table.scanFamily(repo.asBytes(), colCachedPack)) { + try { + out.add(CachedPackInfo.parseFrom(cell.getValue())); + } catch (InvalidProtocolBufferException e) { + throw new DhtException(MessageFormat.format( + DhtText.get().invalidCachedPackInfo, repo, + CachedPackKey.fromBytes(cell.getName())), e); + } + } return out; } public void put(RepositoryKey repo, CachedPackInfo info, WriteBuffer buffer) throws DhtException { + CachedPackKey key = CachedPackKey.fromInfo(info); table.put(repo.asBytes(), - colCachedPack.append(info.getRowKey().asBytes()), - info.asBytes()); + colCachedPack.append(key.asBytes()), + info.toByteArray()); } public void remove(RepositoryKey repo, CachedPackKey key, WriteBuffer buffer) |
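
For reference, here is a minimal, self-contained sketch of the zero-copy framing used by CacheChunkTable.encode()/decode() above, assuming the protobuf-java 2.x API this change imports (computeRawVarint32Size, writeRawVarint32, readRawVarint32, getTotalBytesRead). The class name, the Slice holder, and the field numbers are illustrative only, not part of the commit, and the sketch assumes every field is length-delimited:

import java.io.IOException;
import java.util.Arrays;

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.WireFormat;

public class ZeroCopyFramingSketch {
    /** Offset/length view into an undecoded buffer (hypothetical helper). */
    static final class Slice {
        final int offset;
        final int length;
        Slice(int offset, int length) {
            this.offset = offset;
            this.length = length;
        }
    }

    /** Frame two byte arrays as length-delimited protobuf fields 1 and 2. */
    static byte[] encode(byte[] data, byte[] index) throws IOException {
        int sz = fieldSize(1, data) + fieldSize(2, index);
        byte[] r = new byte[sz]; // exact size: no resizing, no extra copy
        CodedOutputStream out = CodedOutputStream.newInstance(r);
        writeField(out, 1, data);
        writeField(out, 2, index);
        out.flush();
        return r;
    }

    static int fieldSize(int fieldNumber, byte[] value) {
        return CodedOutputStream.computeTagSize(fieldNumber)
                + CodedOutputStream.computeRawVarint32Size(value.length)
                + value.length;
    }

    static void writeField(CodedOutputStream out, int fieldNumber,
            byte[] value) throws IOException {
        out.writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED);
        out.writeRawVarint32(value.length);
        out.writeRawBytes(value, 0, value.length);
    }

    /** Decode without copying: each field becomes a Slice into raw. */
    static Slice[] decode(byte[] raw) throws IOException {
        Slice[] fields = new Slice[3]; // indexed by field number 1..2
        CodedInputStream in = CodedInputStream.newInstance(raw);
        for (int tag = in.readTag(); tag != 0; tag = in.readTag()) {
            int field = WireFormat.getTagFieldNumber(tag);
            int cnt = in.readRawVarint32();
            int ptr = in.getTotalBytesRead(); // payload starts here
            if (1 <= field && field <= 2)
                fields[field] = new Slice(ptr, cnt);
            in.skipRawBytes(cnt); // advance without materializing a copy
        }
        return fields;
    }

    public static void main(String[] args) throws IOException {
        byte[] raw = encode(new byte[] { 1, 2, 3 }, new byte[] { 9, 8 });
        Slice data = decode(raw)[1];
        System.out.println(Arrays.toString(Arrays.copyOfRange(
                raw, data.offset, data.offset + data.length)));
    }
}

Recording getTotalBytesRead() before skipRawBytes() is the same trick that lets decode(ChunkKey, byte[]) in the patch hand PackChunk.Members offsets into the cached cell rather than freshly copied arrays.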