source.dussan.org Git - jgit.git/commitdiff
Delete storage.dht package 83/7283/2
author Shawn O. Pearce <spearce@spearce.org>
Sat, 18 Aug 2012 22:27:12 +0000 (15:27 -0700)
committer Matthias Sohn <matthias.sohn@sap.com>
Wed, 5 Sep 2012 15:19:51 +0000 (17:19 +0200)
This experiment did not prove very useful. I had originally planned
to use this on top of Google Bigtable, Apache HBase, or Apache
Cassandra. Unfortunately the schema is very complex and does not
perform well. The storage.dfs package has much better performance
and has been in production at Google for many months now, proving
it is a viable storage backend for Git.

As there are no users of the storage.dht schema, either at Google or
any other company, nor any valid open source implementations of the
storage system, drop the entire package and API from the JGit project.
There is no point in trying to maintain code that is simply not used.
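For readers tracking the replacement, here is a minimal smoke-test sketch
of the storage.dfs backend named above. It is not part of this change;
the class and package names (InMemoryRepository, DfsRepositoryDescription,
under org.eclipse.jgit.internal.storage.dfs) are taken from later JGit
releases and should be treated as assumptions, not as code in this commit.

    // Hedged sketch: round-trip one object through the DFS object database.
    import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
    import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;
    import org.eclipse.jgit.lib.Constants;
    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.lib.ObjectInserter;
    import org.eclipse.jgit.lib.ObjectLoader;

    public class DfsSmokeTest {
        public static void main(String[] args) throws Exception {
            // InMemoryRepository is the in-memory reference implementation
            // of DfsRepository in later JGit releases (assumed here).
            InMemoryRepository repo = new InMemoryRepository(
                    new DfsRepositoryDescription("dfs-smoke-test"));
            ObjectId id;
            try (ObjectInserter ins = repo.newObjectInserter()) {
                // Insert a blob and flush it into a DFS pack.
                id = ins.insert(Constants.OBJ_BLOB, "hello dfs".getBytes("UTF-8"));
                ins.flush();
            }
            // Read the blob back to confirm the object database round-trips data.
            ObjectLoader ldr = repo.open(id);
            System.out.println(id.name() + " -> " + ldr.getSize() + " bytes");
            repo.close();
        }
    }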

Change-Id: Ia8d32f27426d2bcc12e7dc9cc4524c59f4fe4df9
Signed-off-by: Matthias Sohn <matthias.sohn@sap.com>
141 files changed:
org.eclipse.jgit.generated.storage.dht.proto/.classpath [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/.gitignore [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/.project [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.core.resources.prefs [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.core.runtime.prefs [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.jdt.core.prefs [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.jdt.ui.prefs [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.mylyn.tasks.ui.prefs [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.mylyn.team.ui.prefs [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.pde.api.tools.prefs [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/META-INF/MANIFEST.MF [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/about.html [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/build.properties [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/generate.sh [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/plugin.properties [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/pom.xml [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/resources/org/eclipse/jgit/storage/dht/git_cache.proto [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/resources/org/eclipse/jgit/storage/dht/git_store.proto [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitCache.java [deleted file]
org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitStore.java [deleted file]
org.eclipse.jgit.storage.dht.test/.classpath [deleted file]
org.eclipse.jgit.storage.dht.test/.gitignore [deleted file]
org.eclipse.jgit.storage.dht.test/.project [deleted file]
org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.core.resources.prefs [deleted file]
org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.core.runtime.prefs [deleted file]
org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.jdt.core.prefs [deleted file]
org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.jdt.ui.prefs [deleted file]
org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.mylyn.tasks.ui.prefs [deleted file]
org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.mylyn.team.ui.prefs [deleted file]
org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.pde.api.tools.prefs [deleted file]
org.eclipse.jgit.storage.dht.test/META-INF/MANIFEST.MF [deleted file]
org.eclipse.jgit.storage.dht.test/build.properties [deleted file]
org.eclipse.jgit.storage.dht.test/org.eclipse.jgit.storage.dht--All-Tests.launch [deleted file]
org.eclipse.jgit.storage.dht.test/plugin.properties [deleted file]
org.eclipse.jgit.storage.dht.test/pom.xml [deleted file]
org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/ChunkIndexTest.java [deleted file]
org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/ChunkKeyTest.java [deleted file]
org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/DhtPackParserTest.java [deleted file]
org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/DhtRepositoryBuilderTest.java [deleted file]
org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/LargeNonDeltaObjectTest.java [deleted file]
org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/ObjectIndexKeyTest.java [deleted file]
org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/RepositoryKeyTest.java [deleted file]
org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/TimeoutTest.java [deleted file]
org.eclipse.jgit.storage.dht/.classpath [deleted file]
org.eclipse.jgit.storage.dht/.fbprefs [deleted file]
org.eclipse.jgit.storage.dht/.gitignore [deleted file]
org.eclipse.jgit.storage.dht/.project [deleted file]
org.eclipse.jgit.storage.dht/.settings/org.eclipse.core.resources.prefs [deleted file]
org.eclipse.jgit.storage.dht/.settings/org.eclipse.core.runtime.prefs [deleted file]
org.eclipse.jgit.storage.dht/.settings/org.eclipse.jdt.core.prefs [deleted file]
org.eclipse.jgit.storage.dht/.settings/org.eclipse.jdt.ui.prefs [deleted file]
org.eclipse.jgit.storage.dht/.settings/org.eclipse.mylyn.tasks.ui.prefs [deleted file]
org.eclipse.jgit.storage.dht/.settings/org.eclipse.mylyn.team.ui.prefs [deleted file]
org.eclipse.jgit.storage.dht/.settings/org.eclipse.pde.api.tools.prefs [deleted file]
org.eclipse.jgit.storage.dht/META-INF/MANIFEST.MF [deleted file]
org.eclipse.jgit.storage.dht/README [deleted file]
org.eclipse.jgit.storage.dht/about.html [deleted file]
org.eclipse.jgit.storage.dht/build.properties [deleted file]
org.eclipse.jgit.storage.dht/plugin.properties [deleted file]
org.eclipse.jgit.storage.dht/pom.xml [deleted file]
org.eclipse.jgit.storage.dht/resources/org/eclipse/jgit/storage/dht/DhtText.properties [deleted file]
org.eclipse.jgit.storage.dht/resources/org/eclipse/jgit/storage/dht/dht-schema.html [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/AsyncCallback.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/BatchObjectLookup.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackKey.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkFormatter.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkIndex.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkInfo.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkKey.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMetaUtil.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DeltaBaseCache.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtCachedPack.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtConfig.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtException.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserter.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserterOptions.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtMissingChunkException.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjDatabase.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectRepresentation.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectToPack.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtPackParser.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReader.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReaderOptions.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefDatabase.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefRename.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefUpdate.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRepository.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRepositoryBuilder.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtText.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtTimeoutException.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/KeyUtils.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/LargeNonDeltaObject.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectIndexKey.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectInfo.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectWriter.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/OpenQueue.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/PackChunk.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Prefetcher.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/QueueObjectLookup.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentChunks.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentInfoCache.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefDataUtil.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefKey.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RepositoryKey.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RepositoryName.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RepresentationSelector.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RowKey.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/SizeQueue.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/StreamingCallback.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Sync.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Timeout.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ChunkTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/Context.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/Database.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ObjectIndexTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RefTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryIndexTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/WriteBuffer.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheBuffer.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheChunkTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheDatabase.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheKey.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheObjectIndexTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheOptions.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRefTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryIndexTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheService.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/Namespace.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemChunkTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemObjectIndexTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRefTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryIndexTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemTable.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemoryDatabase.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/util/AbstractWriteBuffer.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/util/ColumnMatcher.java [deleted file]
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/util/ExecutorTools.java [deleted file]
pom.xml

diff --git a/org.eclipse.jgit.generated.storage.dht.proto/.classpath b/org.eclipse.jgit.generated.storage.dht.proto/.classpath
deleted file mode 100644 (file)
index 304e861..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-       <classpathentry kind="src" path="src"/>
-       <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
-       <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
-       <classpathentry kind="output" path="bin"/>
-</classpath>
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/.gitignore b/org.eclipse.jgit.generated.storage.dht.proto/.gitignore
deleted file mode 100644 (file)
index 934e0e0..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/bin
-/target
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/.project b/org.eclipse.jgit.generated.storage.dht.proto/.project
deleted file mode 100644 (file)
index 1c7815e..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-       <name>org.eclipse.jgit.generated.storage.dht.proto</name>
-       <comment></comment>
-       <projects>
-       </projects>
-       <buildSpec>
-               <buildCommand>
-                       <name>org.eclipse.jdt.core.javabuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.ManifestBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.SchemaBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.api.tools.apiAnalysisBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-       </buildSpec>
-       <natures>
-               <nature>org.eclipse.jdt.core.javanature</nature>
-               <nature>org.eclipse.pde.PluginNature</nature>
-               <nature>org.eclipse.pde.api.tools.apiAnalysisNature</nature>
-       </natures>
-</projectDescription>
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.core.resources.prefs b/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.core.resources.prefs
deleted file mode 100644 (file)
index 66ac15c..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#Mon Aug 11 16:46:12 PDT 2008
-eclipse.preferences.version=1
-encoding/<project>=UTF-8
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.core.runtime.prefs b/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.core.runtime.prefs
deleted file mode 100644 (file)
index 006e07e..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#Mon Mar 24 18:55:50 EDT 2008
-eclipse.preferences.version=1
-line.separator=\n
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.jdt.core.prefs b/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644 (file)
index 5bf6764..0000000
+++ /dev/null
@@ -1,349 +0,0 @@
-#Thu May 05 16:40:25 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
-org.eclipse.jdt.core.compiler.codegen.unusedLocal=optimize out
-org.eclipse.jdt.core.compiler.compliance=1.5
-org.eclipse.jdt.core.compiler.debug.lineNumber=do not generate
-org.eclipse.jdt.core.compiler.debug.localVariable=do not generate
-org.eclipse.jdt.core.compiler.debug.sourceFile=do not generate
-org.eclipse.jdt.core.compiler.doc.comment.support=disabled
-org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=ignore
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
-org.eclipse.jdt.core.compiler.problem.comparingIdentical=ignore
-org.eclipse.jdt.core.compiler.problem.deadCode=ignore
-org.eclipse.jdt.core.compiler.problem.deprecation=ignore
-org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
-org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
-org.eclipse.jdt.core.compiler.problem.discouragedReference=ignore
-org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore
-org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
-org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore
-org.eclipse.jdt.core.compiler.problem.finalParameterBound=ignore
-org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=ignore
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=ignore
-org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=ignore
-org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=ignore
-org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=ignore
-org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore
-org.eclipse.jdt.core.compiler.problem.invalidJavadoc=error
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTags=enabled
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsDeprecatedRef=enabled
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsNotVisibleRef=enabled
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsVisibility=private
-org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore
-org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=ignore
-org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore
-org.eclipse.jdt.core.compiler.problem.missingJavadocComments=error
-org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsVisibility=protected
-org.eclipse.jdt.core.compiler.problem.missingJavadocTagDescription=return_tag
-org.eclipse.jdt.core.compiler.problem.missingJavadocTags=error
-org.eclipse.jdt.core.compiler.problem.missingJavadocTagsOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.missingJavadocTagsVisibility=private
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMethodImplementation=enabled
-org.eclipse.jdt.core.compiler.problem.missingSerialVersion=ignore
-org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore
-org.eclipse.jdt.core.compiler.problem.noEffectAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=ignore
-org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
-org.eclipse.jdt.core.compiler.problem.nullReference=ignore
-org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=ignore
-org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore
-org.eclipse.jdt.core.compiler.problem.rawTypeReference=ignore
-org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore
-org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
-org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=ignore
-org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled
-org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
-org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore
-org.eclipse.jdt.core.compiler.problem.typeParameterHiding=ignore
-org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=ignore
-org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore
-org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=ignore
-org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore
-org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore
-org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.unusedImport=ignore
-org.eclipse.jdt.core.compiler.problem.unusedLabel=ignore
-org.eclipse.jdt.core.compiler.problem.unusedLocal=ignore
-org.eclipse.jdt.core.compiler.problem.unusedObjectAllocation=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
-org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=ignore
-org.eclipse.jdt.core.compiler.problem.unusedWarningToken=ignore
-org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=ignore
-org.eclipse.jdt.core.compiler.source=1.5
-org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_assignment=0
-org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
-org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
-org.eclipse.jdt.core.formatter.alignment_for_enum_constants=0
-org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
-org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
-org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_after_package=1
-org.eclipse.jdt.core.formatter.blank_lines_before_field=1
-org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
-org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
-org.eclipse.jdt.core.formatter.blank_lines_before_method=1
-org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
-org.eclipse.jdt.core.formatter.blank_lines_before_package=0
-org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
-org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
-org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines=false
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
-org.eclipse.jdt.core.formatter.comment.format_block_comments=true
-org.eclipse.jdt.core.formatter.comment.format_comments=true
-org.eclipse.jdt.core.formatter.comment.format_header=false
-org.eclipse.jdt.core.formatter.comment.format_html=true
-org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
-org.eclipse.jdt.core.formatter.comment.format_line_comments=true
-org.eclipse.jdt.core.formatter.comment.format_source_code=true
-org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
-org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
-org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
-org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
-org.eclipse.jdt.core.formatter.comment.line_length=80
-org.eclipse.jdt.core.formatter.compact_else_if=true
-org.eclipse.jdt.core.formatter.continuation_indentation=2
-org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
-org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
-org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_empty_lines=false
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false
-org.eclipse.jdt.core.formatter.indentation.size=4
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
-org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.lineSplit=80
-org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
-org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
-org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
-org.eclipse.jdt.core.formatter.tabulation.char=tab
-org.eclipse.jdt.core.formatter.tabulation.size=4
-org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
-org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.jdt.ui.prefs b/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.jdt.ui.prefs
deleted file mode 100644 (file)
index 7b2cdca..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
-#Thu Aug 26 12:30:58 CDT 2010
-eclipse.preferences.version=1
-editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
-formatter_profile=_JGit Format
-formatter_settings_version=11
-org.eclipse.jdt.ui.ignorelowercasenames=true
-org.eclipse.jdt.ui.importorder=java;javax;org;com;
-org.eclipse.jdt.ui.ondemandthreshold=99
-org.eclipse.jdt.ui.staticondemandthreshold=99
-org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?><templates/>
-sp_cleanup.add_default_serial_version_id=true
-sp_cleanup.add_generated_serial_version_id=false
-sp_cleanup.add_missing_annotations=false
-sp_cleanup.add_missing_deprecated_annotations=true
-sp_cleanup.add_missing_methods=false
-sp_cleanup.add_missing_nls_tags=false
-sp_cleanup.add_missing_override_annotations=true
-sp_cleanup.add_missing_override_annotations_interface_methods=false
-sp_cleanup.add_serial_version_id=false
-sp_cleanup.always_use_blocks=true
-sp_cleanup.always_use_parentheses_in_expressions=false
-sp_cleanup.always_use_this_for_non_static_field_access=false
-sp_cleanup.always_use_this_for_non_static_method_access=false
-sp_cleanup.convert_to_enhanced_for_loop=false
-sp_cleanup.correct_indentation=false
-sp_cleanup.format_source_code=true
-sp_cleanup.format_source_code_changes_only=true
-sp_cleanup.make_local_variable_final=false
-sp_cleanup.make_parameters_final=false
-sp_cleanup.make_private_fields_final=true
-sp_cleanup.make_type_abstract_if_missing_method=false
-sp_cleanup.make_variable_declarations_final=false
-sp_cleanup.never_use_blocks=false
-sp_cleanup.never_use_parentheses_in_expressions=true
-sp_cleanup.on_save_use_additional_actions=true
-sp_cleanup.organize_imports=false
-sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
-sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
-sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
-sp_cleanup.qualify_static_member_accesses_with_declaring_class=false
-sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
-sp_cleanup.remove_private_constructors=true
-sp_cleanup.remove_trailing_whitespaces=true
-sp_cleanup.remove_trailing_whitespaces_all=true
-sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
-sp_cleanup.remove_unnecessary_casts=false
-sp_cleanup.remove_unnecessary_nls_tags=false
-sp_cleanup.remove_unused_imports=false
-sp_cleanup.remove_unused_local_variables=false
-sp_cleanup.remove_unused_private_fields=true
-sp_cleanup.remove_unused_private_members=false
-sp_cleanup.remove_unused_private_methods=true
-sp_cleanup.remove_unused_private_types=true
-sp_cleanup.sort_members=false
-sp_cleanup.sort_members_all=false
-sp_cleanup.use_blocks=false
-sp_cleanup.use_blocks_only_for_return_and_throw=false
-sp_cleanup.use_parentheses_in_expressions=false
-sp_cleanup.use_this_for_non_static_field_access=false
-sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=true
-sp_cleanup.use_this_for_non_static_method_access=false
-sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=true
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.mylyn.tasks.ui.prefs b/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.mylyn.tasks.ui.prefs
deleted file mode 100644 (file)
index 823c0f5..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-#Tue Jul 19 20:11:28 CEST 2011
-eclipse.preferences.version=1
-project.repository.kind=bugzilla
-project.repository.url=https\://bugs.eclipse.org/bugs
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.mylyn.team.ui.prefs b/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.mylyn.team.ui.prefs
deleted file mode 100644 (file)
index 0cba949..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#Tue Jul 19 20:11:28 CEST 2011
-commit.comment.template=${task.description} \n\nBug\: ${task.key}
-eclipse.preferences.version=1
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.pde.api.tools.prefs b/org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.pde.api.tools.prefs
deleted file mode 100644 (file)
index cd148d9..0000000
+++ /dev/null
@@ -1,94 +0,0 @@
-#Tue Oct 18 00:52:01 CEST 2011
-ANNOTATION_ELEMENT_TYPE_ADDED_METHOD_WITHOUT_DEFAULT_VALUE=Error
-ANNOTATION_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-ANNOTATION_ELEMENT_TYPE_REMOVED_FIELD=Error
-ANNOTATION_ELEMENT_TYPE_REMOVED_METHOD=Error
-ANNOTATION_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_API_TYPE=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_REEXPORTED_API_TYPE=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_REEXPORTED_TYPE=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_TYPE=Error
-CLASS_ELEMENT_TYPE_ADDED_METHOD=Error
-CLASS_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
-CLASS_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-CLASS_ELEMENT_TYPE_CHANGED_CONTRACTED_SUPERINTERFACES_SET=Error
-CLASS_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-CLASS_ELEMENT_TYPE_CHANGED_NON_ABSTRACT_TO_ABSTRACT=Error
-CLASS_ELEMENT_TYPE_CHANGED_NON_FINAL_TO_FINAL=Error
-CLASS_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-CLASS_ELEMENT_TYPE_REMOVED_CONSTRUCTOR=Error
-CLASS_ELEMENT_TYPE_REMOVED_FIELD=Error
-CLASS_ELEMENT_TYPE_REMOVED_METHOD=Error
-CLASS_ELEMENT_TYPE_REMOVED_SUPERCLASS=Error
-CLASS_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-CLASS_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-CONSTRUCTOR_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-CONSTRUCTOR_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-CONSTRUCTOR_ELEMENT_TYPE_CHANGED_VARARGS_TO_ARRAY=Error
-CONSTRUCTOR_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-ENUM_ELEMENT_TYPE_CHANGED_CONTRACTED_SUPERINTERFACES_SET=Error
-ENUM_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-ENUM_ELEMENT_TYPE_REMOVED_ENUM_CONSTANT=Error
-ENUM_ELEMENT_TYPE_REMOVED_FIELD=Error
-ENUM_ELEMENT_TYPE_REMOVED_METHOD=Error
-ENUM_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-FIELD_ELEMENT_TYPE_ADDED_VALUE=Error
-FIELD_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-FIELD_ELEMENT_TYPE_CHANGED_FINAL_TO_NON_FINAL_STATIC_CONSTANT=Error
-FIELD_ELEMENT_TYPE_CHANGED_NON_FINAL_TO_FINAL=Error
-FIELD_ELEMENT_TYPE_CHANGED_NON_STATIC_TO_STATIC=Error
-FIELD_ELEMENT_TYPE_CHANGED_STATIC_TO_NON_STATIC=Error
-FIELD_ELEMENT_TYPE_CHANGED_TYPE=Error
-FIELD_ELEMENT_TYPE_CHANGED_VALUE=Error
-FIELD_ELEMENT_TYPE_REMOVED_TYPE_ARGUMENT=Error
-FIELD_ELEMENT_TYPE_REMOVED_VALUE=Error
-ILLEGAL_EXTEND=Warning
-ILLEGAL_IMPLEMENT=Warning
-ILLEGAL_INSTANTIATE=Warning
-ILLEGAL_OVERRIDE=Warning
-ILLEGAL_REFERENCE=Warning
-INTERFACE_ELEMENT_TYPE_ADDED_FIELD=Error
-INTERFACE_ELEMENT_TYPE_ADDED_METHOD=Error
-INTERFACE_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
-INTERFACE_ELEMENT_TYPE_ADDED_SUPER_INTERFACE_WITH_METHODS=Error
-INTERFACE_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-INTERFACE_ELEMENT_TYPE_CHANGED_CONTRACTED_SUPERINTERFACES_SET=Error
-INTERFACE_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_FIELD=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_METHOD=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-INVALID_JAVADOC_TAG=Ignore
-INVALID_REFERENCE_IN_SYSTEM_LIBRARIES=Error
-LEAK_EXTEND=Warning
-LEAK_FIELD_DECL=Warning
-LEAK_IMPLEMENT=Warning
-LEAK_METHOD_PARAM=Warning
-LEAK_METHOD_RETURN_TYPE=Warning
-METHOD_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
-METHOD_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-METHOD_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-METHOD_ELEMENT_TYPE_CHANGED_NON_ABSTRACT_TO_ABSTRACT=Error
-METHOD_ELEMENT_TYPE_CHANGED_NON_FINAL_TO_FINAL=Error
-METHOD_ELEMENT_TYPE_CHANGED_NON_STATIC_TO_STATIC=Error
-METHOD_ELEMENT_TYPE_CHANGED_STATIC_TO_NON_STATIC=Error
-METHOD_ELEMENT_TYPE_CHANGED_VARARGS_TO_ARRAY=Error
-METHOD_ELEMENT_TYPE_REMOVED_ANNOTATION_DEFAULT_VALUE=Error
-METHOD_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-TYPE_PARAMETER_ELEMENT_TYPE_ADDED_CLASS_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_ADDED_INTERFACE_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_CHANGED_CLASS_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_CHANGED_INTERFACE_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_REMOVED_CLASS_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_REMOVED_INTERFACE_BOUND=Error
-UNUSED_PROBLEM_FILTERS=Warning
-automatically_removed_unused_problem_filters=false
-eclipse.preferences.version=1
-incompatible_api_component_version=Error
-incompatible_api_component_version_include_major_without_breaking_change=Disabled
-incompatible_api_component_version_include_minor_without_api_change=Disabled
-invalid_since_tag_version=Error
-malformed_since_tag=Error
-missing_since_tag=Error
-report_api_breakage_when_major_version_incremented=Disabled
-report_resolution_errors_api_component=Warning
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/META-INF/MANIFEST.MF b/org.eclipse.jgit.generated.storage.dht.proto/META-INF/MANIFEST.MF
deleted file mode 100644 (file)
index 15cfeea..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-Manifest-Version: 1.0
-Bundle-ManifestVersion: 2
-Bundle-Name: %plugin_name
-Bundle-SymbolicName: org.eclipse.jgit.generated.storage.dht.proto
-Bundle-Version: 2.1.0.qualifier
-Bundle-Localization: plugin
-Bundle-Vendor: %provider_name
-Bundle-ActivationPolicy: lazy
-Bundle-RequiredExecutionEnvironment: J2SE-1.5
-Export-Package: org.eclipse.jgit.generated.storage.dht.proto;version="2.1.0"
-Import-Package: com.google.protobuf;version="[2.4.0,2.5.0)"
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/about.html b/org.eclipse.jgit.generated.storage.dht.proto/about.html
deleted file mode 100644 (file)
index 01a2671..0000000
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1" ?>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
-<html xmlns="http://www.w3.org/1999/xhtml">
-
-<head>
-<meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1" />
-<title>Eclipse Distribution License - Version 1.0</title>
-<style type="text/css">
-  body {
-    size: 8.5in 11.0in;
-    margin: 0.25in 0.5in 0.25in 0.5in;
-    tab-interval: 0.5in;
-    }
-  p {          
-    margin-left: auto;
-    margin-top:  0.5em;
-    margin-bottom: 0.5em;
-    }
-  p.list {
-       margin-left: 0.5in;
-    margin-top:  0.05em;
-    margin-bottom: 0.05em;
-    }
-  </style>
-
-</head>
-
-<body lang="EN-US">
-
-<p><b>Eclipse Distribution License - v 1.0</b></p>
-
-<p>Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors. </p>
-
-<p>All rights reserved.</p>
-<p>Redistribution and use in source and binary forms, with or without modification, 
-       are permitted provided that the following conditions are met:
-<ul><li>Redistributions of source code must retain the above copyright notice, 
-       this list of conditions and the following disclaimer. </li>
-<li>Redistributions in binary form must reproduce the above copyright notice, 
-       this list of conditions and the following disclaimer in the documentation 
-       and/or other materials provided with the distribution. </li>
-<li>Neither the name of the Eclipse Foundation, Inc. nor the names of its 
-       contributors may be used to endorse or promote products derived from 
-       this software without specific prior written permission. </li></ul>
-</p>
-<p>THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
-IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, 
-INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 
-NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
-POSSIBILITY OF SUCH DAMAGE.</p>
-
-</body>
-
-</html>
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/build.properties b/org.eclipse.jgit.generated.storage.dht.proto/build.properties
deleted file mode 100644 (file)
index b67aba1..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-source.. = src/
-output.. = bin/
-bin.includes = META-INF/,\
-               .,\
-               plugin.properties,\
-               about.html
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/generate.sh b/org.eclipse.jgit.generated.storage.dht.proto/generate.sh
deleted file mode 100755 (executable)
index f809895..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-#
-# Update generated Java code from protocol buffer descriptions.
-
-set -e
-
-for proto in resources/org/eclipse/jgit/storage/dht/*.proto
-do
-  echo >&2 Generating from $proto
-  protoc -Iresources --java_out=src $proto
-done
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/plugin.properties b/org.eclipse.jgit.generated.storage.dht.proto/plugin.properties
deleted file mode 100644 (file)
index 442bc4e..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-plugin_name=JGit DHT Storage Protocol Buffer Messages
-provider_name=Eclipse.org
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/pom.xml b/org.eclipse.jgit.generated.storage.dht.proto/pom.xml
deleted file mode 100644 (file)
index fc11e8a..0000000
+++ /dev/null
@@ -1,121 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Copyright (C) 2011, Google Inc.
-   and other copyright owners as documented in the project's IP log.
-
-   This program and the accompanying materials are made available
-   under the terms of the Eclipse Distribution License v1.0 which
-   accompanies this distribution, is reproduced below, and is
-   available at http://www.eclipse.org/org/documents/edl-v10.php
-
-   All rights reserved.
-
-   Redistribution and use in source and binary forms, with or
-   without modification, are permitted provided that the following
-   conditions are met:
-
-   - Redistributions of source code must retain the above copyright
-     notice, this list of conditions and the following disclaimer.
-
-   - Redistributions in binary form must reproduce the above
-     copyright notice, this list of conditions and the following
-     disclaimer in the documentation and/or other materials provided
-     with the distribution.
-
-   - Neither the name of the Eclipse Foundation, Inc. nor the
-     names of its contributors may be used to endorse or promote
-     products derived from this software without specific prior
-     written permission.
-
-   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
-   CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
-   INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
-   OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-   ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
-   NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-   LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-   CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-   STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-   ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.eclipse.jgit</groupId>
-    <artifactId>org.eclipse.jgit-parent</artifactId>
-    <version>2.1.0-SNAPSHOT</version>
-  </parent>
-
-  <artifactId>org.eclipse.jgit.generated.storage.dht.proto</artifactId>
-  <name>JGit - DHT Storage Protocol Buffer Messages</name>
-
-  <description>
-    Compiled protocol buffer messages for DHT storage
-  </description>
-
-  <properties>
-    <translate-qualifier/>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <sourceDirectory>src/</sourceDirectory>
-
-    <resources>
-      <resource>
-        <directory>.</directory>
-        <includes>
-          <include>plugin.properties</include>
-          <include>about.html</include>
-        </includes>
-      </resource>
-      <resource>
-        <directory>resources/</directory>
-      </resource>
-    </resources>
-
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-        <inherited>true</inherited>
-        <executions>
-          <execution>
-            <id>attach-sources</id>
-            <phase>process-classes</phase>
-            <goals>
-              <goal>jar</goal>
-            </goals>
-            <configuration>
-              <archive>
-                <manifestFile>${source-bundle-manifest}</manifestFile>
-              </archive>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-
-      <plugin>
-        <artifactId>maven-jar-plugin</artifactId>
-        <configuration>
-          <archive>
-            <manifestFile>${bundle-manifest}</manifestFile>
-          </archive>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/resources/org/eclipse/jgit/storage/dht/git_cache.proto b/org.eclipse.jgit.generated.storage.dht.proto/resources/org/eclipse/jgit/storage/dht/git_cache.proto
deleted file mode 100644 (file)
index 40a2efd..0000000
+++ /dev/null
@@ -1,86 +0,0 @@
-// Copyright (C) 2011, Google Inc.
-// and other copyright owners as documented in the project's IP log.
-//
-// This program and the accompanying materials are made available
-// under the terms of the Eclipse Distribution License v1.0 which
-// accompanies this distribution, is reproduced below, and is
-// available at http://www.eclipse.org/org/documents/edl-v10.php
-//
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or
-// without modification, are permitted provided that the following
-// conditions are met:
-//
-// - Redistributions of source code must retain the above copyright
-//   notice, this list of conditions and the following disclaimer.
-//
-// - Redistributions in binary form must reproduce the above
-//   copyright notice, this list of conditions and the following
-//   disclaimer in the documentation and/or other materials provided
-//   with the distribution.
-//
-// - Neither the name of the Eclipse Foundation, Inc. nor the
-//   names of its contributors may be used to endorse or promote
-//   products derived from this software without specific prior
-//   written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
-// CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
-// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
-// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
-// NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-//
-// WARNING:  If you edit this file, run generate.sh
-//
-// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
-syntax = "proto2";
-
-package org.eclipse.jgit.storage.dht;
-option java_generate_equals_and_hash = true;
-option java_package = "org.eclipse.jgit.generated.storage.dht.proto";
-
-import "org/eclipse/jgit/storage/dht/git_store.proto";
-
-
-  // Caches ObjectIndexTable in a single message.
-  //
-message CachedObjectIndex {
-  message Item {
-    required string chunk_key = 1;
-    required ObjectInfo object_info = 2;
-    optional fixed64 time = 3;
-  }
-  repeated Item item = 1;
-}
-
-
-  // Caches CachedPackInfo in a single message.
-  //
-message CachedPackInfoList {
-  repeated CachedPackInfo pack = 1;
-}
-
-
-  // Caches ChunkTable in a single message.
-  //
-  // WARNING: Formatters for this message are also hand-coded
-  // inside of the CacheChunkTable class.  If you make changes
-  // to this message, ensure that class is also updated.
-  //
-message CachedChunk {
-  required bytes data = 1;
-  optional bytes index = 2;
-  optional ChunkMeta meta = 3;
-}
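
Note: the git_cache.proto messages deleted above were consumed through the protoc-generated Java classes in GitCache.java (deleted further below). As a minimal sketch of how a CachedObjectIndex entry could have been built and re-parsed with protobuf-java: the builder, parseFrom and getItemCount calls mirror the generated code shown later in this diff, while the addItem, setOffset, setPackedSize, setInflatedSize and setObjectType accessor names are assumed from protoc's usual naming for repeated and scalar fields rather than taken verbatim from the deleted GitStore.java.

    import org.eclipse.jgit.generated.storage.dht.proto.GitCache;
    import org.eclipse.jgit.generated.storage.dht.proto.GitStore;

    public class CachedObjectIndexSketch {
      public static void main(String[] args) throws Exception {
        // Describe where one object lives inside a chunk. ObjectInfo comes from
        // git_store.proto; offset, packed_size and inflated_size are required
        // fields, so they must be set before build() succeeds.
        GitStore.ObjectInfo info = GitStore.ObjectInfo.newBuilder()
            .setObjectType(GitStore.ObjectInfo.ObjectType.BLOB)
            .setOffset(512)
            .setPackedSize(4096L)
            .setInflatedSize(16384L)
            .build();

        // One row of the cached ObjectIndexTable: chunk key plus object info.
        GitCache.CachedObjectIndex.Item item = GitCache.CachedObjectIndex.Item.newBuilder()
            .setChunkKey("chunk-key")
            .setObjectInfo(info)
            .setTime(System.currentTimeMillis())
            .build();

        // The whole table is cached as a single message with a repeated item field.
        GitCache.CachedObjectIndex index = GitCache.CachedObjectIndex.newBuilder()
            .addItem(item)
            .build();

        // Round-trip through the wire format, as a cache layer would.
        byte[] raw = index.toByteArray();
        GitCache.CachedObjectIndex copy = GitCache.CachedObjectIndex.parseFrom(raw);
        System.out.println(copy.getItemCount());
      }
    }
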
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/resources/org/eclipse/jgit/storage/dht/git_store.proto b/org.eclipse.jgit.generated.storage.dht.proto/resources/org/eclipse/jgit/storage/dht/git_store.proto
deleted file mode 100644 (file)
index ee9e7bd..0000000
+++ /dev/null
@@ -1,278 +0,0 @@
-// Copyright (C) 2011, Google Inc.
-// and other copyright owners as documented in the project's IP log.
-//
-// This program and the accompanying materials are made available
-// under the terms of the Eclipse Distribution License v1.0 which
-// accompanies this distribution, is reproduced below, and is
-// available at http://www.eclipse.org/org/documents/edl-v10.php
-//
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or
-// without modification, are permitted provided that the following
-// conditions are met:
-//
-// - Redistributions of source code must retain the above copyright
-//   notice, this list of conditions and the following disclaimer.
-//
-// - Redistributions in binary form must reproduce the above
-//   copyright notice, this list of conditions and the following
-//   disclaimer in the documentation and/or other materials provided
-//   with the distribution.
-//
-// - Neither the name of the Eclipse Foundation, Inc. nor the
-//   names of its contributors may be used to endorse or promote
-//   products derived from this software without specific prior
-//   written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
-// CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
-// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
-// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
-// NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-//
-// WARNING:  If you edit this file, run generate.sh
-//
-// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
-syntax = "proto2";
-
-package org.eclipse.jgit.storage.dht;
-option java_generate_equals_and_hash = true;
-option java_package = "org.eclipse.jgit.generated.storage.dht.proto";
-
-
-    // Entry in RefTable describing the target of the reference.
-    // Either symref *OR* target must be populated, but never both.
-    //
-message RefData {
-    // Incrementing counter updated each time the RefData changes.
-    // Should always start at 1.
-    //
-  required uint32 sequence = 5 [default = 0];
-
-    // An ObjectId with an optional hint about where it can be found.
-    //
-  message Id {
-    required string object_name = 1;
-    optional string chunk_key = 2;
-  }
-
-    // Name of another reference this reference inherits its target
-    // from.  The target is inherited on-the-fly at runtime by reading
-    // the other reference.  Typically only "HEAD" uses symref.
-    //
-  optional string symref = 1;
-
-    // ObjectId this reference currently points at.
-    //
-  optional Id target = 2;
-
-    // True if the correct value for peeled is stored.
-    //
-  optional bool is_peeled = 3;
-
-    // If is_peeled is true, this field is accurate.  This field
-    // exists only if target points to annotated tag object, then
-    // this field stores the "object" field for that tag.
-    //
-  optional Id peeled = 4;
-}
-
-
-    // Entry in ObjectIndexTable, describes how an object appears in a chunk.
-    //
-message ObjectInfo {
-    // Type of Git object.
-    //
-  enum ObjectType {
-    COMMIT = 1;
-    TREE = 2;
-    BLOB = 3;
-    TAG = 4;
-  }
-  optional ObjectType object_type = 1;
-
-    // Position of the object's header within its chunk.
-    //
-  required int32 offset = 2;
-
-    // Total number of compressed data bytes, not including the pack
-    // header. For fragmented objects this is the sum of all chunks.
-    //
-  required int64 packed_size = 3;
-
-    // Total number of bytes of the uncompressed object. For a
-    // delta this is the size after applying the delta onto its base.
-    //
-  required int64 inflated_size = 4;
-
-    // ObjectId of the delta base, if this object is stored as a delta.
-    // The base is stored in raw binary.
-    //
-  optional bytes delta_base = 5;
-
-    // True if the object requires more than one chunk to be stored.
-    //
-  optional bool is_fragmented = 6;
-}
-
-
-    // Describes at a high-level the information about a chunk.
-    // A repository can use this summary to determine how much
-    // data is stored, or when garbage collection should occur.
-    //
-message ChunkInfo {
-    // Source of the chunk (what code path created it).
-    //
-  enum Source {
-    RECEIVE = 1;    // Came in over the network from external source.
-    INSERT = 2;     // Created in this repository (e.g. a merge).
-    REPACK = 3;     // Generated during a repack of this repository.
-  }
-  optional Source source = 1;
-
-    // Type of Git object stored in this chunk.
-    //
-  enum ObjectType {
-    MIXED = 0;
-    COMMIT = 1;
-    TREE = 2;
-    BLOB = 3;
-    TAG = 4;
-  }
-  optional ObjectType object_type = 2;
-
-    // True if this chunk is a member of a fragmented object.
-    //
-  optional bool is_fragment = 3;
-
-    // If present, key of the CachedPackInfo object
-    // that this chunk is a member of.
-    //
-  optional string cached_pack_key = 4;
-
-    // Summary description of the objects stored here.
-    //
-  message ObjectCounts {
-      // Number of objects stored in this chunk.
-      //
-    optional int32 total = 1;
-
-      // Number of objects stored in whole (non-delta) form.
-      //
-    optional int32 whole = 2;
-
-      // Number of objects stored in OFS_DELTA format.
-      // The delta base appears in the same chunk, or
-      // may appear in an earlier chunk through the
-      // ChunkMeta.base_chunk link.
-      //
-    optional int32 ofs_delta = 3;
-
-      // Number of objects stored in REF_DELTA format.
-      // The delta base is at an unknown location.
-      //
-    optional int32 ref_delta = 4;
-  }
-  optional ObjectCounts object_counts = 5;
-
-    // Size in bytes of the chunk's compressed data column.
-    //
-  optional int32 chunk_size = 6;
-
-    // Size in bytes of the chunk's index.
-    //
-  optional int32 index_size = 7;
-
-    // Size in bytes of the meta information.
-    //
-  optional int32 meta_size  = 8;
-}
-
-
-    // Describes meta information about a chunk, stored inline with it.
-    //
-message ChunkMeta {
-    // Enumerates the other chunks this chunk depends upon by OFS_DELTA.
-    // Entries are sorted by relative_start ascending, enabling search.  Thus
-    // the earliest chunk is at the end of the list.
-    //
-  message BaseChunk {
-      // Bytes between start of the base chunk and start of this chunk.
-      // Although the value is positive, its a negative offset.
-      //
-    required int64 relative_start = 1;
-    required string chunk_key = 2;
-  }
-  repeated BaseChunk base_chunk = 1;
-
-    // If this chunk is part of a fragment, key of every chunk that
-    // makes up the fragment, including this chunk.
-    //
-  repeated string fragment = 2;
-
-    // Chunks that should be prefetched if reading the current chunk.
-    //
-  message PrefetchHint {
-    repeated string edge = 1;
-    repeated string sequential = 2;
-  }
-  optional PrefetchHint commit_prefetch = 51;
-  optional PrefetchHint tree_prefetch = 52;
-}
-
-
-    // Describes a CachedPack, for efficient bulk clones.
-    //
-message CachedPackInfo {
-    // Unique name of the cached pack.  This is the SHA-1 hash of
-    // all of the objects that make up the cached pack, sorted and
-    // in binary form.  (Same rules as Git on the filesystem.)
-    //
-  required string name = 1;
-
-    // SHA-1 of all chunk keys, which are themselves SHA-1s of the
-    // raw chunk data. If any bit differs in compression (due to
-    // repacking) the version will differ.
-    //
-  required string version = 2;
-
-    // Total number of objects in the cached pack. This must be known
-    // in order to set the final resulting pack header correctly before it
-    // is sent to clients.
-    //
-  required int64 objects_total = 3;
-
-    // Number of objects stored as deltas, rather than deflated whole.
-    //
-  optional int64 objects_delta = 4;
-
-    // Total size of the chunks, in bytes, not including the chunk footer.
-    //
-  optional int64 bytes_total = 5;
-
-    // Objects this pack starts from.
-    //
-  message TipObjectList {
-    repeated string object_name = 1;
-  }
-  required TipObjectList tip_list = 6;
-
-    // Chunks, in order of occurrence in the stream.
-    //
-  message ChunkList {
-    repeated string chunk_key = 1;
-  }
-  required ChunkList chunk_list = 7;
-}
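
Note: the RefData rule documented above (either symref or target is populated, never both) maps directly onto the generated GitStore API. The sketch below is illustrative only; GitStore.java is not reproduced in this change, so the RefData and Id accessor names are assumed to follow the same protoc conventions visible in the GitCache.java diff below.

    import org.eclipse.jgit.generated.storage.dht.proto.GitStore;

    public class RefDataSketch {
      public static void main(String[] args) {
        // A symbolic reference: HEAD inherits its target from refs/heads/master.
        GitStore.RefData head = GitStore.RefData.newBuilder()
            .setSequence(1)                    // counter starts at 1 on creation
            .setSymref("refs/heads/master")
            .build();

        // A direct reference: points at a commit, optionally hinting at the
        // chunk that stores it so readers can skip the ObjectIndexTable lookup.
        GitStore.RefData master = GitStore.RefData.newBuilder()
            .setSequence(1)
            .setTarget(GitStore.RefData.Id.newBuilder()
                .setObjectName("0123456789012345678901234567890123456789")
                .setChunkKey("chunk-key"))
            .build();

        // Exactly one of the two forms is populated, never both.
        System.out.println(head.hasSymref() + " " + master.hasTarget());
      }
    }
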
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitCache.java b/org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitCache.java
deleted file mode 100644 (file)
index d9bba52..0000000
+++ /dev/null
@@ -1,2546 +0,0 @@
-// Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: org/eclipse/jgit/storage/dht/git_cache.proto
-
-package org.eclipse.jgit.generated.storage.dht.proto;
-
-public final class GitCache {
-  private GitCache() {}
-  public static void registerAllExtensions(
-      com.google.protobuf.ExtensionRegistry registry) {
-  }
-  public interface CachedObjectIndexOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // repeated .org.eclipse.jgit.storage.dht.CachedObjectIndex.Item item = 1;
-    java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item>
-        getItemList();
-    org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item getItem(int index);
-    int getItemCount();
-    java.util.List<? extends org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.ItemOrBuilder>
-        getItemOrBuilderList();
-    org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.ItemOrBuilder getItemOrBuilder(
-        int index);
-  }
-  public static final class CachedObjectIndex extends
-      com.google.protobuf.GeneratedMessage
-      implements CachedObjectIndexOrBuilder {
-    // Use CachedObjectIndex.newBuilder() to construct.
-    private CachedObjectIndex(Builder builder) {
-      super(builder);
-    }
-    private CachedObjectIndex(boolean noInit) {}
-
-    private static final CachedObjectIndex defaultInstance;
-    public static CachedObjectIndex getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public CachedObjectIndex getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_fieldAccessorTable;
-    }
-
-    public interface ItemOrBuilder
-        extends com.google.protobuf.MessageOrBuilder {
-
-      // required string chunk_key = 1;
-      boolean hasChunkKey();
-      String getChunkKey();
-
-      // required .org.eclipse.jgit.storage.dht.ObjectInfo object_info = 2;
-      boolean hasObjectInfo();
-      org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo getObjectInfo();
-      org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfoOrBuilder getObjectInfoOrBuilder();
-
-      // optional fixed64 time = 3;
-      boolean hasTime();
-      long getTime();
-    }
-    public static final class Item extends
-        com.google.protobuf.GeneratedMessage
-        implements ItemOrBuilder {
-      // Use Item.newBuilder() to construct.
-      private Item(Builder builder) {
-        super(builder);
-      }
-      private Item(boolean noInit) {}
-
-      private static final Item defaultInstance;
-      public static Item getDefaultInstance() {
-        return defaultInstance;
-      }
-
-      public Item getDefaultInstanceForType() {
-        return defaultInstance;
-      }
-
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_Item_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_Item_fieldAccessorTable;
-      }
-
-      private int bitField0_;
-      // required string chunk_key = 1;
-      public static final int CHUNK_KEY_FIELD_NUMBER = 1;
-      private Object chunkKey_;
-      public boolean hasChunkKey() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      public String getChunkKey() {
-        Object ref = chunkKey_;
-        if (ref instanceof String) {
-          return (String) ref;
-        } else {
-          com.google.protobuf.ByteString bs =
-              (com.google.protobuf.ByteString) ref;
-          String s = bs.toStringUtf8();
-          if (com.google.protobuf.Internal.isValidUtf8(bs)) {
-            chunkKey_ = s;
-          }
-          return s;
-        }
-      }
-      private com.google.protobuf.ByteString getChunkKeyBytes() {
-        Object ref = chunkKey_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b =
-              com.google.protobuf.ByteString.copyFromUtf8((String) ref);
-          chunkKey_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
-      }
-
-      // required .org.eclipse.jgit.storage.dht.ObjectInfo object_info = 2;
-      public static final int OBJECT_INFO_FIELD_NUMBER = 2;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo objectInfo_;
-      public boolean hasObjectInfo() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo getObjectInfo() {
-        return objectInfo_;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfoOrBuilder getObjectInfoOrBuilder() {
-        return objectInfo_;
-      }
-
-      // optional fixed64 time = 3;
-      public static final int TIME_FIELD_NUMBER = 3;
-      private long time_;
-      public boolean hasTime() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      public long getTime() {
-        return time_;
-      }
-
-      private void initFields() {
-        chunkKey_ = "";
-        objectInfo_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.getDefaultInstance();
-        time_ = 0L;
-      }
-      private byte memoizedIsInitialized = -1;
-      public final boolean isInitialized() {
-        byte isInitialized = memoizedIsInitialized;
-        if (isInitialized != -1) return isInitialized == 1;
-
-        if (!hasChunkKey()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-        if (!hasObjectInfo()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-        if (!getObjectInfo().isInitialized()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-        memoizedIsInitialized = 1;
-        return true;
-      }
-
-      public void writeTo(com.google.protobuf.CodedOutputStream output)
-                          throws java.io.IOException {
-        getSerializedSize();
-        if (((bitField0_ & 0x00000001) == 0x00000001)) {
-          output.writeBytes(1, getChunkKeyBytes());
-        }
-        if (((bitField0_ & 0x00000002) == 0x00000002)) {
-          output.writeMessage(2, objectInfo_);
-        }
-        if (((bitField0_ & 0x00000004) == 0x00000004)) {
-          output.writeFixed64(3, time_);
-        }
-        getUnknownFields().writeTo(output);
-      }
-
-      private int memoizedSerializedSize = -1;
-      public int getSerializedSize() {
-        int size = memoizedSerializedSize;
-        if (size != -1) return size;
-
-        size = 0;
-        if (((bitField0_ & 0x00000001) == 0x00000001)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeBytesSize(1, getChunkKeyBytes());
-        }
-        if (((bitField0_ & 0x00000002) == 0x00000002)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeMessageSize(2, objectInfo_);
-        }
-        if (((bitField0_ & 0x00000004) == 0x00000004)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeFixed64Size(3, time_);
-        }
-        size += getUnknownFields().getSerializedSize();
-        memoizedSerializedSize = size;
-        return size;
-      }
-
-      @java.lang.Override
-      protected Object writeReplace() throws java.io.ObjectStreamException {
-        return super.writeReplace();
-      }
-
-      @java.lang.Override
-      public boolean equals(final Object obj) {
-        if (obj == this) {
-         return true;
-        }
-        if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item)) {
-          return super.equals(obj);
-        }
-        org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item other = (org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item) obj;
-
-        boolean result = true;
-        result = result && (hasChunkKey() == other.hasChunkKey());
-        if (hasChunkKey()) {
-          result = result && getChunkKey()
-              .equals(other.getChunkKey());
-        }
-        result = result && (hasObjectInfo() == other.hasObjectInfo());
-        if (hasObjectInfo()) {
-          result = result && getObjectInfo()
-              .equals(other.getObjectInfo());
-        }
-        result = result && (hasTime() == other.hasTime());
-        if (hasTime()) {
-          result = result && (getTime()
-              == other.getTime());
-        }
-        result = result &&
-            getUnknownFields().equals(other.getUnknownFields());
-        return result;
-      }
-
-      @java.lang.Override
-      public int hashCode() {
-        int hash = 41;
-        hash = (19 * hash) + getDescriptorForType().hashCode();
-        if (hasChunkKey()) {
-          hash = (37 * hash) + CHUNK_KEY_FIELD_NUMBER;
-          hash = (53 * hash) + getChunkKey().hashCode();
-        }
-        if (hasObjectInfo()) {
-          hash = (37 * hash) + OBJECT_INFO_FIELD_NUMBER;
-          hash = (53 * hash) + getObjectInfo().hashCode();
-        }
-        if (hasTime()) {
-          hash = (37 * hash) + TIME_FIELD_NUMBER;
-          hash = (53 * hash) + hashLong(getTime());
-        }
-        hash = (29 * hash) + getUnknownFields().hashCode();
-        return hash;
-      }
-
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item parseFrom(
-          com.google.protobuf.ByteString data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item parseFrom(
-          com.google.protobuf.ByteString data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item parseFrom(byte[] data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item parseFrom(
-          byte[] data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item parseFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item parseFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item parseDelimitedFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item parseDelimitedFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item parseFrom(
-          com.google.protobuf.CodedInputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item parseFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-
-      public static Builder newBuilder() { return Builder.create(); }
-      public Builder newBuilderForType() { return newBuilder(); }
-      public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item prototype) {
-        return newBuilder().mergeFrom(prototype);
-      }
-      public Builder toBuilder() { return newBuilder(this); }
-
-      @java.lang.Override
-      protected Builder newBuilderForType(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        Builder builder = new Builder(parent);
-        return builder;
-      }
-      public static final class Builder extends
-          com.google.protobuf.GeneratedMessage.Builder<Builder>
-         implements org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.ItemOrBuilder {
-        public static final com.google.protobuf.Descriptors.Descriptor
-            getDescriptor() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_Item_descriptor;
-        }
-
-        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-            internalGetFieldAccessorTable() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_Item_fieldAccessorTable;
-        }
-
-        // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.newBuilder()
-        private Builder() {
-          maybeForceBuilderInitialization();
-        }
-
-        private Builder(BuilderParent parent) {
-          super(parent);
-          maybeForceBuilderInitialization();
-        }
-        private void maybeForceBuilderInitialization() {
-          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-            getObjectInfoFieldBuilder();
-          }
-        }
-        private static Builder create() {
-          return new Builder();
-        }
-
-        public Builder clear() {
-          super.clear();
-          chunkKey_ = "";
-          bitField0_ = (bitField0_ & ~0x00000001);
-          if (objectInfoBuilder_ == null) {
-            objectInfo_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.getDefaultInstance();
-          } else {
-            objectInfoBuilder_.clear();
-          }
-          bitField0_ = (bitField0_ & ~0x00000002);
-          time_ = 0L;
-          bitField0_ = (bitField0_ & ~0x00000004);
-          return this;
-        }
-
-        public Builder clone() {
-          return create().mergeFrom(buildPartial());
-        }
-
-        public com.google.protobuf.Descriptors.Descriptor
-            getDescriptorForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.getDescriptor();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item getDefaultInstanceForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.getDefaultInstance();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item build() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(result);
-          }
-          return result;
-        }
-
-        private org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item buildParsed()
-            throws com.google.protobuf.InvalidProtocolBufferException {
-          org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(
-              result).asInvalidProtocolBufferException();
-          }
-          return result;
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item buildPartial() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item result = new org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item(this);
-          int from_bitField0_ = bitField0_;
-          int to_bitField0_ = 0;
-          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-            to_bitField0_ |= 0x00000001;
-          }
-          result.chunkKey_ = chunkKey_;
-          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-            to_bitField0_ |= 0x00000002;
-          }
-          if (objectInfoBuilder_ == null) {
-            result.objectInfo_ = objectInfo_;
-          } else {
-            result.objectInfo_ = objectInfoBuilder_.build();
-          }
-          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-            to_bitField0_ |= 0x00000004;
-          }
-          result.time_ = time_;
-          result.bitField0_ = to_bitField0_;
-          onBuilt();
-          return result;
-        }
-
-        public Builder mergeFrom(com.google.protobuf.Message other) {
-          if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item) {
-            return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item)other);
-          } else {
-            super.mergeFrom(other);
-            return this;
-          }
-        }
-
-        public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item other) {
-          if (other == org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.getDefaultInstance()) return this;
-          if (other.hasChunkKey()) {
-            setChunkKey(other.getChunkKey());
-          }
-          if (other.hasObjectInfo()) {
-            mergeObjectInfo(other.getObjectInfo());
-          }
-          if (other.hasTime()) {
-            setTime(other.getTime());
-          }
-          this.mergeUnknownFields(other.getUnknownFields());
-          return this;
-        }
-
-        public final boolean isInitialized() {
-          if (!hasChunkKey()) {
-
-            return false;
-          }
-          if (!hasObjectInfo()) {
-
-            return false;
-          }
-          if (!getObjectInfo().isInitialized()) {
-
-            return false;
-          }
-          return true;
-        }
-
-        public Builder mergeFrom(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws java.io.IOException {
-          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-            com.google.protobuf.UnknownFieldSet.newBuilder(
-              this.getUnknownFields());
-          while (true) {
-            int tag = input.readTag();
-            switch (tag) {
-              case 0:
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              default: {
-                if (!parseUnknownField(input, unknownFields,
-                                       extensionRegistry, tag)) {
-                  this.setUnknownFields(unknownFields.build());
-                  onChanged();
-                  return this;
-                }
-                break;
-              }
-              case 10: {
-                bitField0_ |= 0x00000001;
-                chunkKey_ = input.readBytes();
-                break;
-              }
-              case 18: {
-                org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.newBuilder();
-                if (hasObjectInfo()) {
-                  subBuilder.mergeFrom(getObjectInfo());
-                }
-                input.readMessage(subBuilder, extensionRegistry);
-                setObjectInfo(subBuilder.buildPartial());
-                break;
-              }
-              case 25: {
-                bitField0_ |= 0x00000004;
-                time_ = input.readFixed64();
-                break;
-              }
-            }
-          }
-        }
-
-        private int bitField0_;
-
-        // required string chunk_key = 1;
-        private Object chunkKey_ = "";
-        public boolean hasChunkKey() {
-          return ((bitField0_ & 0x00000001) == 0x00000001);
-        }
-        public String getChunkKey() {
-          Object ref = chunkKey_;
-          if (!(ref instanceof String)) {
-            String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
-            chunkKey_ = s;
-            return s;
-          } else {
-            return (String) ref;
-          }
-        }
-        public Builder setChunkKey(String value) {
-          if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-          chunkKey_ = value;
-          onChanged();
-          return this;
-        }
-        public Builder clearChunkKey() {
-          bitField0_ = (bitField0_ & ~0x00000001);
-          chunkKey_ = getDefaultInstance().getChunkKey();
-          onChanged();
-          return this;
-        }
-        void setChunkKey(com.google.protobuf.ByteString value) {
-          bitField0_ |= 0x00000001;
-          chunkKey_ = value;
-          onChanged();
-        }
-
-        // required .org.eclipse.jgit.storage.dht.ObjectInfo object_info = 2;
-        private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo objectInfo_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.getDefaultInstance();
-        private com.google.protobuf.SingleFieldBuilder<
-            org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfoOrBuilder> objectInfoBuilder_;
-        public boolean hasObjectInfo() {
-          return ((bitField0_ & 0x00000002) == 0x00000002);
-        }
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo getObjectInfo() {
-          if (objectInfoBuilder_ == null) {
-            return objectInfo_;
-          } else {
-            return objectInfoBuilder_.getMessage();
-          }
-        }
-        public Builder setObjectInfo(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo value) {
-          if (objectInfoBuilder_ == null) {
-            if (value == null) {
-              throw new NullPointerException();
-            }
-            objectInfo_ = value;
-            onChanged();
-          } else {
-            objectInfoBuilder_.setMessage(value);
-          }
-          bitField0_ |= 0x00000002;
-          return this;
-        }
-        public Builder setObjectInfo(
-            org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.Builder builderForValue) {
-          if (objectInfoBuilder_ == null) {
-            objectInfo_ = builderForValue.build();
-            onChanged();
-          } else {
-            objectInfoBuilder_.setMessage(builderForValue.build());
-          }
-          bitField0_ |= 0x00000002;
-          return this;
-        }
-        public Builder mergeObjectInfo(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo value) {
-          if (objectInfoBuilder_ == null) {
-            if (((bitField0_ & 0x00000002) == 0x00000002) &&
-                objectInfo_ != org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.getDefaultInstance()) {
-              objectInfo_ =
-                org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.newBuilder(objectInfo_).mergeFrom(value).buildPartial();
-            } else {
-              objectInfo_ = value;
-            }
-            onChanged();
-          } else {
-            objectInfoBuilder_.mergeFrom(value);
-          }
-          bitField0_ |= 0x00000002;
-          return this;
-        }
-        public Builder clearObjectInfo() {
-          if (objectInfoBuilder_ == null) {
-            objectInfo_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.getDefaultInstance();
-            onChanged();
-          } else {
-            objectInfoBuilder_.clear();
-          }
-          bitField0_ = (bitField0_ & ~0x00000002);
-          return this;
-        }
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.Builder getObjectInfoBuilder() {
-          bitField0_ |= 0x00000002;
-          onChanged();
-          return getObjectInfoFieldBuilder().getBuilder();
-        }
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfoOrBuilder getObjectInfoOrBuilder() {
-          if (objectInfoBuilder_ != null) {
-            return objectInfoBuilder_.getMessageOrBuilder();
-          } else {
-            return objectInfo_;
-          }
-        }
-        private com.google.protobuf.SingleFieldBuilder<
-            org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfoOrBuilder>
-            getObjectInfoFieldBuilder() {
-          if (objectInfoBuilder_ == null) {
-            objectInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-                org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfoOrBuilder>(
-                    objectInfo_,
-                    getParentForChildren(),
-                    isClean());
-            objectInfo_ = null;
-          }
-          return objectInfoBuilder_;
-        }
-
-        // optional fixed64 time = 3;
-        private long time_ ;
-        public boolean hasTime() {
-          return ((bitField0_ & 0x00000004) == 0x00000004);
-        }
-        public long getTime() {
-          return time_;
-        }
-        public Builder setTime(long value) {
-          bitField0_ |= 0x00000004;
-          time_ = value;
-          onChanged();
-          return this;
-        }
-        public Builder clearTime() {
-          bitField0_ = (bitField0_ & ~0x00000004);
-          time_ = 0L;
-          onChanged();
-          return this;
-        }
-
-        // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.CachedObjectIndex.Item)
-      }
-
-      static {
-        defaultInstance = new Item(true);
-        defaultInstance.initFields();
-      }
-
-      // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.CachedObjectIndex.Item)
-    }
-
-    // repeated .org.eclipse.jgit.storage.dht.CachedObjectIndex.Item item = 1;
-    public static final int ITEM_FIELD_NUMBER = 1;
-    private java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item> item_;
-    public java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item> getItemList() {
-      return item_;
-    }
-    public java.util.List<? extends org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.ItemOrBuilder>
-        getItemOrBuilderList() {
-      return item_;
-    }
-    public int getItemCount() {
-      return item_.size();
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item getItem(int index) {
-      return item_.get(index);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.ItemOrBuilder getItemOrBuilder(
-        int index) {
-      return item_.get(index);
-    }
-
-    private void initFields() {
-      item_ = java.util.Collections.emptyList();
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      for (int i = 0; i < getItemCount(); i++) {
-        if (!getItem(i).isInitialized()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      for (int i = 0; i < item_.size(); i++) {
-        output.writeMessage(1, item_.get(i));
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      for (int i = 0; i < item_.size(); i++) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(1, item_.get(i));
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    @java.lang.Override
-    protected Object writeReplace() throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex)) {
-        return super.equals(obj);
-      }
-      org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex other = (org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex) obj;
-
-      boolean result = true;
-      result = result && getItemList()
-          .equals(other.getItemList());
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (getItemCount() > 0) {
-        hash = (37 * hash) + ITEM_FIELD_NUMBER;
-        hash = (53 * hash) + getItemList().hashCode();
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      return hash;
-    }
-
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndexOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_fieldAccessorTable;
-      }
-
-      // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          getItemFieldBuilder();
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        if (itemBuilder_ == null) {
-          item_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000001);
-        } else {
-          itemBuilder_.clear();
-        }
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.getDescriptor();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex getDefaultInstanceForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.getDefaultInstance();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex build() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      private org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex buildPartial() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex result = new org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex(this);
-        int from_bitField0_ = bitField0_;
-        if (itemBuilder_ == null) {
-          if (((bitField0_ & 0x00000001) == 0x00000001)) {
-            item_ = java.util.Collections.unmodifiableList(item_);
-            bitField0_ = (bitField0_ & ~0x00000001);
-          }
-          result.item_ = item_;
-        } else {
-          result.item_ = itemBuilder_.build();
-        }
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex) {
-          return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex other) {
-        if (other == org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.getDefaultInstance()) return this;
-        if (itemBuilder_ == null) {
-          if (!other.item_.isEmpty()) {
-            if (item_.isEmpty()) {
-              item_ = other.item_;
-              bitField0_ = (bitField0_ & ~0x00000001);
-            } else {
-              ensureItemIsMutable();
-              item_.addAll(other.item_);
-            }
-            onChanged();
-          }
-        } else {
-          if (!other.item_.isEmpty()) {
-            if (itemBuilder_.isEmpty()) {
-              itemBuilder_.dispose();
-              itemBuilder_ = null;
-              item_ = other.item_;
-              bitField0_ = (bitField0_ & ~0x00000001);
-              itemBuilder_ =
-                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
-                   getItemFieldBuilder() : null;
-            } else {
-              itemBuilder_.addAllMessages(other.item_);
-            }
-          }
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        for (int i = 0; i < getItemCount(); i++) {
-          if (!getItem(i).isInitialized()) {
-
-            return false;
-          }
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.newBuilder();
-              input.readMessage(subBuilder, extensionRegistry);
-              addItem(subBuilder.buildPartial());
-              break;
-            }
-          }
-        }
-      }
-
-      private int bitField0_;
-
-      // repeated .org.eclipse.jgit.storage.dht.CachedObjectIndex.Item item = 1;
-      private java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item> item_ =
-        java.util.Collections.emptyList();
-      private void ensureItemIsMutable() {
-        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
-          item_ = new java.util.ArrayList<org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item>(item_);
-          bitField0_ |= 0x00000001;
-         }
-      }
-
-      private com.google.protobuf.RepeatedFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.ItemOrBuilder> itemBuilder_;
-
-      public java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item> getItemList() {
-        if (itemBuilder_ == null) {
-          return java.util.Collections.unmodifiableList(item_);
-        } else {
-          return itemBuilder_.getMessageList();
-        }
-      }
-      public int getItemCount() {
-        if (itemBuilder_ == null) {
-          return item_.size();
-        } else {
-          return itemBuilder_.getCount();
-        }
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item getItem(int index) {
-        if (itemBuilder_ == null) {
-          return item_.get(index);
-        } else {
-          return itemBuilder_.getMessage(index);
-        }
-      }
-      public Builder setItem(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item value) {
-        if (itemBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensureItemIsMutable();
-          item_.set(index, value);
-          onChanged();
-        } else {
-          itemBuilder_.setMessage(index, value);
-        }
-        return this;
-      }
-      public Builder setItem(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder builderForValue) {
-        if (itemBuilder_ == null) {
-          ensureItemIsMutable();
-          item_.set(index, builderForValue.build());
-          onChanged();
-        } else {
-          itemBuilder_.setMessage(index, builderForValue.build());
-        }
-        return this;
-      }
-      public Builder addItem(org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item value) {
-        if (itemBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensureItemIsMutable();
-          item_.add(value);
-          onChanged();
-        } else {
-          itemBuilder_.addMessage(value);
-        }
-        return this;
-      }
-      public Builder addItem(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item value) {
-        if (itemBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensureItemIsMutable();
-          item_.add(index, value);
-          onChanged();
-        } else {
-          itemBuilder_.addMessage(index, value);
-        }
-        return this;
-      }
-      public Builder addItem(
-          org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder builderForValue) {
-        if (itemBuilder_ == null) {
-          ensureItemIsMutable();
-          item_.add(builderForValue.build());
-          onChanged();
-        } else {
-          itemBuilder_.addMessage(builderForValue.build());
-        }
-        return this;
-      }
-      public Builder addItem(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder builderForValue) {
-        if (itemBuilder_ == null) {
-          ensureItemIsMutable();
-          item_.add(index, builderForValue.build());
-          onChanged();
-        } else {
-          itemBuilder_.addMessage(index, builderForValue.build());
-        }
-        return this;
-      }
-      public Builder addAllItem(
-          java.lang.Iterable<? extends org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item> values) {
-        if (itemBuilder_ == null) {
-          ensureItemIsMutable();
-          super.addAll(values, item_);
-          onChanged();
-        } else {
-          itemBuilder_.addAllMessages(values);
-        }
-        return this;
-      }
-      public Builder clearItem() {
-        if (itemBuilder_ == null) {
-          item_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000001);
-          onChanged();
-        } else {
-          itemBuilder_.clear();
-        }
-        return this;
-      }
-      public Builder removeItem(int index) {
-        if (itemBuilder_ == null) {
-          ensureItemIsMutable();
-          item_.remove(index);
-          onChanged();
-        } else {
-          itemBuilder_.remove(index);
-        }
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder getItemBuilder(
-          int index) {
-        return getItemFieldBuilder().getBuilder(index);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.ItemOrBuilder getItemOrBuilder(
-          int index) {
-        if (itemBuilder_ == null) {
-          return item_.get(index);  } else {
-          return itemBuilder_.getMessageOrBuilder(index);
-        }
-      }
-      public java.util.List<? extends org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.ItemOrBuilder>
-           getItemOrBuilderList() {
-        if (itemBuilder_ != null) {
-          return itemBuilder_.getMessageOrBuilderList();
-        } else {
-          return java.util.Collections.unmodifiableList(item_);
-        }
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder addItemBuilder() {
-        return getItemFieldBuilder().addBuilder(
-            org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.getDefaultInstance());
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder addItemBuilder(
-          int index) {
-        return getItemFieldBuilder().addBuilder(
-            index, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.getDefaultInstance());
-      }
-      public java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder>
-           getItemBuilderList() {
-        return getItemFieldBuilder().getBuilderList();
-      }
-      private com.google.protobuf.RepeatedFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.ItemOrBuilder>
-          getItemFieldBuilder() {
-        if (itemBuilder_ == null) {
-          itemBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.ItemOrBuilder>(
-                  item_,
-                  ((bitField0_ & 0x00000001) == 0x00000001),
-                  getParentForChildren(),
-                  isClean());
-          item_ = null;
-        }
-        return itemBuilder_;
-      }
-
-      // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.CachedObjectIndex)
-    }
-
-    static {
-      defaultInstance = new CachedObjectIndex(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.CachedObjectIndex)
-  }
-
-  public interface CachedPackInfoListOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // repeated .org.eclipse.jgit.storage.dht.CachedPackInfo pack = 1;
-    java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo>
-        getPackList();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo getPack(int index);
-    int getPackCount();
-    java.util.List<? extends org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfoOrBuilder>
-        getPackOrBuilderList();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfoOrBuilder getPackOrBuilder(
-        int index);
-  }
-  public static final class CachedPackInfoList extends
-      com.google.protobuf.GeneratedMessage
-      implements CachedPackInfoListOrBuilder {
-    // Use CachedPackInfoList.newBuilder() to construct.
-    private CachedPackInfoList(Builder builder) {
-      super(builder);
-    }
-    private CachedPackInfoList(boolean noInit) {}
-
-    private static final CachedPackInfoList defaultInstance;
-    public static CachedPackInfoList getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public CachedPackInfoList getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfoList_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfoList_fieldAccessorTable;
-    }
-
-    // repeated .org.eclipse.jgit.storage.dht.CachedPackInfo pack = 1;
-    public static final int PACK_FIELD_NUMBER = 1;
-    private java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo> pack_;
-    public java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo> getPackList() {
-      return pack_;
-    }
-    public java.util.List<? extends org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfoOrBuilder>
-        getPackOrBuilderList() {
-      return pack_;
-    }
-    public int getPackCount() {
-      return pack_.size();
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo getPack(int index) {
-      return pack_.get(index);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfoOrBuilder getPackOrBuilder(
-        int index) {
-      return pack_.get(index);
-    }
-
-    private void initFields() {
-      pack_ = java.util.Collections.emptyList();
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      for (int i = 0; i < getPackCount(); i++) {
-        if (!getPack(i).isInitialized()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      for (int i = 0; i < pack_.size(); i++) {
-        output.writeMessage(1, pack_.get(i));
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      for (int i = 0; i < pack_.size(); i++) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(1, pack_.get(i));
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    @java.lang.Override
-    protected Object writeReplace() throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList)) {
-        return super.equals(obj);
-      }
-      org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList other = (org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList) obj;
-
-      boolean result = true;
-      result = result && getPackList()
-          .equals(other.getPackList());
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (getPackCount() > 0) {
-        hash = (37 * hash) + PACK_FIELD_NUMBER;
-        hash = (53 * hash) + getPackList().hashCode();
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      return hash;
-    }
-
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoListOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfoList_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfoList_fieldAccessorTable;
-      }
-
-      // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          getPackFieldBuilder();
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        if (packBuilder_ == null) {
-          pack_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000001);
-        } else {
-          packBuilder_.clear();
-        }
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList.getDescriptor();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList getDefaultInstanceForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList.getDefaultInstance();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList build() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      private org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList buildPartial() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList result = new org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList(this);
-        int from_bitField0_ = bitField0_;
-        if (packBuilder_ == null) {
-          if (((bitField0_ & 0x00000001) == 0x00000001)) {
-            pack_ = java.util.Collections.unmodifiableList(pack_);
-            bitField0_ = (bitField0_ & ~0x00000001);
-          }
-          result.pack_ = pack_;
-        } else {
-          result.pack_ = packBuilder_.build();
-        }
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList) {
-          return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList other) {
-        if (other == org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList.getDefaultInstance()) return this;
-        if (packBuilder_ == null) {
-          if (!other.pack_.isEmpty()) {
-            if (pack_.isEmpty()) {
-              pack_ = other.pack_;
-              bitField0_ = (bitField0_ & ~0x00000001);
-            } else {
-              ensurePackIsMutable();
-              pack_.addAll(other.pack_);
-            }
-            onChanged();
-          }
-        } else {
-          if (!other.pack_.isEmpty()) {
-            if (packBuilder_.isEmpty()) {
-              packBuilder_.dispose();
-              packBuilder_ = null;
-              pack_ = other.pack_;
-              bitField0_ = (bitField0_ & ~0x00000001);
-              packBuilder_ =
-                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
-                   getPackFieldBuilder() : null;
-            } else {
-              packBuilder_.addAllMessages(other.pack_);
-            }
-          }
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        for (int i = 0; i < getPackCount(); i++) {
-          if (!getPack(i).isInitialized()) {
-
-            return false;
-          }
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.newBuilder();
-              input.readMessage(subBuilder, extensionRegistry);
-              addPack(subBuilder.buildPartial());
-              break;
-            }
-          }
-        }
-      }
-
-      private int bitField0_;
-
-      // repeated .org.eclipse.jgit.storage.dht.CachedPackInfo pack = 1;
-      private java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo> pack_ =
-        java.util.Collections.emptyList();
-      private void ensurePackIsMutable() {
-        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
-          pack_ = new java.util.ArrayList<org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo>(pack_);
-          bitField0_ |= 0x00000001;
-         }
-      }
-
-      private com.google.protobuf.RepeatedFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfoOrBuilder> packBuilder_;
-
-      public java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo> getPackList() {
-        if (packBuilder_ == null) {
-          return java.util.Collections.unmodifiableList(pack_);
-        } else {
-          return packBuilder_.getMessageList();
-        }
-      }
-      public int getPackCount() {
-        if (packBuilder_ == null) {
-          return pack_.size();
-        } else {
-          return packBuilder_.getCount();
-        }
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo getPack(int index) {
-        if (packBuilder_ == null) {
-          return pack_.get(index);
-        } else {
-          return packBuilder_.getMessage(index);
-        }
-      }
-      public Builder setPack(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo value) {
-        if (packBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensurePackIsMutable();
-          pack_.set(index, value);
-          onChanged();
-        } else {
-          packBuilder_.setMessage(index, value);
-        }
-        return this;
-      }
-      public Builder setPack(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder builderForValue) {
-        if (packBuilder_ == null) {
-          ensurePackIsMutable();
-          pack_.set(index, builderForValue.build());
-          onChanged();
-        } else {
-          packBuilder_.setMessage(index, builderForValue.build());
-        }
-        return this;
-      }
-      public Builder addPack(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo value) {
-        if (packBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensurePackIsMutable();
-          pack_.add(value);
-          onChanged();
-        } else {
-          packBuilder_.addMessage(value);
-        }
-        return this;
-      }
-      public Builder addPack(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo value) {
-        if (packBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensurePackIsMutable();
-          pack_.add(index, value);
-          onChanged();
-        } else {
-          packBuilder_.addMessage(index, value);
-        }
-        return this;
-      }
-      public Builder addPack(
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder builderForValue) {
-        if (packBuilder_ == null) {
-          ensurePackIsMutable();
-          pack_.add(builderForValue.build());
-          onChanged();
-        } else {
-          packBuilder_.addMessage(builderForValue.build());
-        }
-        return this;
-      }
-      public Builder addPack(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder builderForValue) {
-        if (packBuilder_ == null) {
-          ensurePackIsMutable();
-          pack_.add(index, builderForValue.build());
-          onChanged();
-        } else {
-          packBuilder_.addMessage(index, builderForValue.build());
-        }
-        return this;
-      }
-      public Builder addAllPack(
-          java.lang.Iterable<? extends org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo> values) {
-        if (packBuilder_ == null) {
-          ensurePackIsMutable();
-          super.addAll(values, pack_);
-          onChanged();
-        } else {
-          packBuilder_.addAllMessages(values);
-        }
-        return this;
-      }
-      public Builder clearPack() {
-        if (packBuilder_ == null) {
-          pack_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000001);
-          onChanged();
-        } else {
-          packBuilder_.clear();
-        }
-        return this;
-      }
-      public Builder removePack(int index) {
-        if (packBuilder_ == null) {
-          ensurePackIsMutable();
-          pack_.remove(index);
-          onChanged();
-        } else {
-          packBuilder_.remove(index);
-        }
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder getPackBuilder(
-          int index) {
-        return getPackFieldBuilder().getBuilder(index);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfoOrBuilder getPackOrBuilder(
-          int index) {
-        if (packBuilder_ == null) {
-          return pack_.get(index);  } else {
-          return packBuilder_.getMessageOrBuilder(index);
-        }
-      }
-      public java.util.List<? extends org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfoOrBuilder>
-           getPackOrBuilderList() {
-        if (packBuilder_ != null) {
-          return packBuilder_.getMessageOrBuilderList();
-        } else {
-          return java.util.Collections.unmodifiableList(pack_);
-        }
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder addPackBuilder() {
-        return getPackFieldBuilder().addBuilder(
-            org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.getDefaultInstance());
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder addPackBuilder(
-          int index) {
-        return getPackFieldBuilder().addBuilder(
-            index, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.getDefaultInstance());
-      }
-      public java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder>
-           getPackBuilderList() {
-        return getPackFieldBuilder().getBuilderList();
-      }
-      private com.google.protobuf.RepeatedFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfoOrBuilder>
-          getPackFieldBuilder() {
-        if (packBuilder_ == null) {
-          packBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfoOrBuilder>(
-                  pack_,
-                  ((bitField0_ & 0x00000001) == 0x00000001),
-                  getParentForChildren(),
-                  isClean());
-          pack_ = null;
-        }
-        return packBuilder_;
-      }
-
-      // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.CachedPackInfoList)
-    }
-
-    static {
-      defaultInstance = new CachedPackInfoList(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.CachedPackInfoList)
-  }
-
-  public interface CachedChunkOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // required bytes data = 1;
-    boolean hasData();
-    com.google.protobuf.ByteString getData();
-
-    // optional bytes index = 2;
-    boolean hasIndex();
-    com.google.protobuf.ByteString getIndex();
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkMeta meta = 3;
-    boolean hasMeta();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta getMeta();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMetaOrBuilder getMetaOrBuilder();
-  }
-  public static final class CachedChunk extends
-      com.google.protobuf.GeneratedMessage
-      implements CachedChunkOrBuilder {
-    // Use CachedChunk.newBuilder() to construct.
-    private CachedChunk(Builder builder) {
-      super(builder);
-    }
-    private CachedChunk(boolean noInit) {}
-
-    private static final CachedChunk defaultInstance;
-    public static CachedChunk getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public CachedChunk getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedChunk_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedChunk_fieldAccessorTable;
-    }
-
-    private int bitField0_;
-    // required bytes data = 1;
-    public static final int DATA_FIELD_NUMBER = 1;
-    private com.google.protobuf.ByteString data_;
-    public boolean hasData() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    public com.google.protobuf.ByteString getData() {
-      return data_;
-    }
-
-    // optional bytes index = 2;
-    public static final int INDEX_FIELD_NUMBER = 2;
-    private com.google.protobuf.ByteString index_;
-    public boolean hasIndex() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    public com.google.protobuf.ByteString getIndex() {
-      return index_;
-    }
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkMeta meta = 3;
-    public static final int META_FIELD_NUMBER = 3;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta meta_;
-    public boolean hasMeta() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta getMeta() {
-      return meta_;
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMetaOrBuilder getMetaOrBuilder() {
-      return meta_;
-    }
-
-    private void initFields() {
-      data_ = com.google.protobuf.ByteString.EMPTY;
-      index_ = com.google.protobuf.ByteString.EMPTY;
-      meta_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.getDefaultInstance();
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      if (!hasData()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (hasMeta()) {
-        if (!getMeta().isInitialized()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeBytes(1, data_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeBytes(2, index_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeMessage(3, meta_);
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(1, data_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(2, index_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(3, meta_);
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    @java.lang.Override
-    protected Object writeReplace() throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk)) {
-        return super.equals(obj);
-      }
-      org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk other = (org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk) obj;
-
-      boolean result = true;
-      result = result && (hasData() == other.hasData());
-      if (hasData()) {
-        result = result && getData()
-            .equals(other.getData());
-      }
-      result = result && (hasIndex() == other.hasIndex());
-      if (hasIndex()) {
-        result = result && getIndex()
-            .equals(other.getIndex());
-      }
-      result = result && (hasMeta() == other.hasMeta());
-      if (hasMeta()) {
-        result = result && getMeta()
-            .equals(other.getMeta());
-      }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasData()) {
-        hash = (37 * hash) + DATA_FIELD_NUMBER;
-        hash = (53 * hash) + getData().hashCode();
-      }
-      if (hasIndex()) {
-        hash = (37 * hash) + INDEX_FIELD_NUMBER;
-        hash = (53 * hash) + getIndex().hashCode();
-      }
-      if (hasMeta()) {
-        hash = (37 * hash) + META_FIELD_NUMBER;
-        hash = (53 * hash) + getMeta().hashCode();
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      return hash;
-    }
-
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunkOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedChunk_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.internal_static_org_eclipse_jgit_storage_dht_CachedChunk_fieldAccessorTable;
-      }
-
-      // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          getMetaFieldBuilder();
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        data_ = com.google.protobuf.ByteString.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000001);
-        index_ = com.google.protobuf.ByteString.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000002);
-        if (metaBuilder_ == null) {
-          meta_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.getDefaultInstance();
-        } else {
-          metaBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000004);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk.getDescriptor();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk getDefaultInstanceForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk.getDefaultInstance();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk build() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      private org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk buildPartial() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk result = new org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.data_ = data_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        result.index_ = index_;
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000004;
-        }
-        if (metaBuilder_ == null) {
-          result.meta_ = meta_;
-        } else {
-          result.meta_ = metaBuilder_.build();
-        }
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk) {
-          return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk other) {
-        if (other == org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk.getDefaultInstance()) return this;
-        if (other.hasData()) {
-          setData(other.getData());
-        }
-        if (other.hasIndex()) {
-          setIndex(other.getIndex());
-        }
-        if (other.hasMeta()) {
-          mergeMeta(other.getMeta());
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        if (!hasData()) {
-
-          return false;
-        }
-        if (hasMeta()) {
-          if (!getMeta().isInitialized()) {
-
-            return false;
-          }
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              data_ = input.readBytes();
-              break;
-            }
-            case 18: {
-              bitField0_ |= 0x00000002;
-              index_ = input.readBytes();
-              break;
-            }
-            case 26: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.newBuilder();
-              if (hasMeta()) {
-                subBuilder.mergeFrom(getMeta());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setMeta(subBuilder.buildPartial());
-              break;
-            }
-          }
-        }
-      }
-
-      private int bitField0_;
-
-      // required bytes data = 1;
-      private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
-      public boolean hasData() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      public com.google.protobuf.ByteString getData() {
-        return data_;
-      }
-      public Builder setData(com.google.protobuf.ByteString value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-        data_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearData() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        data_ = getDefaultInstance().getData();
-        onChanged();
-        return this;
-      }
-
-      // optional bytes index = 2;
-      private com.google.protobuf.ByteString index_ = com.google.protobuf.ByteString.EMPTY;
-      public boolean hasIndex() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      public com.google.protobuf.ByteString getIndex() {
-        return index_;
-      }
-      public Builder setIndex(com.google.protobuf.ByteString value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000002;
-        index_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearIndex() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        index_ = getDefaultInstance().getIndex();
-        onChanged();
-        return this;
-      }
-
-      // optional .org.eclipse.jgit.storage.dht.ChunkMeta meta = 3;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta meta_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMetaOrBuilder> metaBuilder_;
-      public boolean hasMeta() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta getMeta() {
-        if (metaBuilder_ == null) {
-          return meta_;
-        } else {
-          return metaBuilder_.getMessage();
-        }
-      }
-      public Builder setMeta(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta value) {
-        if (metaBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          meta_ = value;
-          onChanged();
-        } else {
-          metaBuilder_.setMessage(value);
-        }
-        bitField0_ |= 0x00000004;
-        return this;
-      }
-      public Builder setMeta(
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.Builder builderForValue) {
-        if (metaBuilder_ == null) {
-          meta_ = builderForValue.build();
-          onChanged();
-        } else {
-          metaBuilder_.setMessage(builderForValue.build());
-        }
-        bitField0_ |= 0x00000004;
-        return this;
-      }
-      public Builder mergeMeta(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta value) {
-        if (metaBuilder_ == null) {
-          if (((bitField0_ & 0x00000004) == 0x00000004) &&
-              meta_ != org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.getDefaultInstance()) {
-            meta_ =
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.newBuilder(meta_).mergeFrom(value).buildPartial();
-          } else {
-            meta_ = value;
-          }
-          onChanged();
-        } else {
-          metaBuilder_.mergeFrom(value);
-        }
-        bitField0_ |= 0x00000004;
-        return this;
-      }
-      public Builder clearMeta() {
-        if (metaBuilder_ == null) {
-          meta_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.getDefaultInstance();
-          onChanged();
-        } else {
-          metaBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000004);
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.Builder getMetaBuilder() {
-        bitField0_ |= 0x00000004;
-        onChanged();
-        return getMetaFieldBuilder().getBuilder();
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMetaOrBuilder getMetaOrBuilder() {
-        if (metaBuilder_ != null) {
-          return metaBuilder_.getMessageOrBuilder();
-        } else {
-          return meta_;
-        }
-      }
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMetaOrBuilder>
-          getMetaFieldBuilder() {
-        if (metaBuilder_ == null) {
-          metaBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMetaOrBuilder>(
-                  meta_,
-                  getParentForChildren(),
-                  isClean());
-          meta_ = null;
-        }
-        return metaBuilder_;
-      }
-
-      // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.CachedChunk)
-    }
-
-    static {
-      defaultInstance = new CachedChunk(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.CachedChunk)
-  }
-
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_Item_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_Item_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_CachedPackInfoList_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_CachedPackInfoList_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_CachedChunk_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_CachedChunk_fieldAccessorTable;
-
-  public static com.google.protobuf.Descriptors.FileDescriptor
-      getDescriptor() {
-    return descriptor;
-  }
-  private static com.google.protobuf.Descriptors.FileDescriptor
-      descriptor;
-  static {
-    java.lang.String[] descriptorData = {
-      "\n,org/eclipse/jgit/storage/dht/git_cache" +
-      ".proto\022\034org.eclipse.jgit.storage.dht\032,or" +
-      "g/eclipse/jgit/storage/dht/git_store.pro" +
-      "to\"\277\001\n\021CachedObjectIndex\022B\n\004item\030\001 \003(\01324" +
-      ".org.eclipse.jgit.storage.dht.CachedObje" +
-      "ctIndex.Item\032f\n\004Item\022\021\n\tchunk_key\030\001 \002(\t\022" +
-      "=\n\013object_info\030\002 \002(\0132(.org.eclipse.jgit." +
-      "storage.dht.ObjectInfo\022\014\n\004time\030\003 \001(\006\"P\n\022" +
-      "CachedPackInfoList\022:\n\004pack\030\001 \003(\0132,.org.e" +
-      "clipse.jgit.storage.dht.CachedPackInfo\"a",
-      "\n\013CachedChunk\022\014\n\004data\030\001 \002(\014\022\r\n\005index\030\002 \001" +
-      "(\014\0225\n\004meta\030\003 \001(\0132\'.org.eclipse.jgit.stor" +
-      "age.dht.ChunkMetaB1\n,org.eclipse.jgit.ge" +
-      "nerated.storage.dht.proto\240\001\001"
-    };
-    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
-      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
-        public com.google.protobuf.ExtensionRegistry assignDescriptors(
-            com.google.protobuf.Descriptors.FileDescriptor root) {
-          descriptor = root;
-          internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_descriptor =
-            getDescriptor().getMessageTypes().get(0);
-          internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_descriptor,
-              new java.lang.String[] { "Item", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_Item_descriptor =
-            internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_descriptor.getNestedTypes().get(0);
-          internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_Item_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_CachedObjectIndex_Item_descriptor,
-              new java.lang.String[] { "ChunkKey", "ObjectInfo", "Time", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex.Item.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_CachedPackInfoList_descriptor =
-            getDescriptor().getMessageTypes().get(1);
-          internal_static_org_eclipse_jgit_storage_dht_CachedPackInfoList_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_CachedPackInfoList_descriptor,
-              new java.lang.String[] { "Pack", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_CachedChunk_descriptor =
-            getDescriptor().getMessageTypes().get(2);
-          internal_static_org_eclipse_jgit_storage_dht_CachedChunk_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_CachedChunk_descriptor,
-              new java.lang.String[] { "Data", "Index", "Meta", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedChunk.Builder.class);
-          return null;
-        }
-      };
-    com.google.protobuf.Descriptors.FileDescriptor
-      .internalBuildGeneratedFileFrom(descriptorData,
-        new com.google.protobuf.Descriptors.FileDescriptor[] {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.getDescriptor(),
-        }, assigner);
-  }
-
-  // @@protoc_insertion_point(outer_class_scope)
-}
diff --git a/org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitStore.java b/org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitStore.java
deleted file mode 100644 (file)
index ecb1a4c..0000000
--- a/org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitStore.java
+++ /dev/null
@@ -1,8037 +0,0 @@
-// Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: org/eclipse/jgit/storage/dht/git_store.proto
-
-package org.eclipse.jgit.generated.storage.dht.proto;
-
-public final class GitStore {
-  private GitStore() {}
-  public static void registerAllExtensions(
-      com.google.protobuf.ExtensionRegistry registry) {
-  }
-  public interface RefDataOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // required uint32 sequence = 5 [default = 0];
-    boolean hasSequence();
-    int getSequence();
-
-    // optional string symref = 1;
-    boolean hasSymref();
-    String getSymref();
-
-    // optional .org.eclipse.jgit.storage.dht.RefData.Id target = 2;
-    boolean hasTarget();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id getTarget();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder getTargetOrBuilder();
-
-    // optional bool is_peeled = 3;
-    boolean hasIsPeeled();
-    boolean getIsPeeled();
-
-    // optional .org.eclipse.jgit.storage.dht.RefData.Id peeled = 4;
-    boolean hasPeeled();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id getPeeled();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder getPeeledOrBuilder();
-  }
-  public static final class RefData extends
-      com.google.protobuf.GeneratedMessage
-      implements RefDataOrBuilder {
-    // Use RefData.newBuilder() to construct.
-    private RefData(Builder builder) {
-      super(builder);
-    }
-    private RefData(boolean noInit) {}
-
-    private static final RefData defaultInstance;
-    public static RefData getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public RefData getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_RefData_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_RefData_fieldAccessorTable;
-    }
-
-    public interface IdOrBuilder
-        extends com.google.protobuf.MessageOrBuilder {
-
-      // required string object_name = 1;
-      boolean hasObjectName();
-      String getObjectName();
-
-      // optional string chunk_key = 2;
-      boolean hasChunkKey();
-      String getChunkKey();
-    }
-    public static final class Id extends
-        com.google.protobuf.GeneratedMessage
-        implements IdOrBuilder {
-      // Use Id.newBuilder() to construct.
-      private Id(Builder builder) {
-        super(builder);
-      }
-      private Id(boolean noInit) {}
-
-      private static final Id defaultInstance;
-      public static Id getDefaultInstance() {
-        return defaultInstance;
-      }
-
-      public Id getDefaultInstanceForType() {
-        return defaultInstance;
-      }
-
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_RefData_Id_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_RefData_Id_fieldAccessorTable;
-      }
-
-      private int bitField0_;
-      // required string object_name = 1;
-      public static final int OBJECT_NAME_FIELD_NUMBER = 1;
-      private Object objectName_;
-      public boolean hasObjectName() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      public String getObjectName() {
-        Object ref = objectName_;
-        if (ref instanceof String) {
-          return (String) ref;
-        } else {
-          com.google.protobuf.ByteString bs =
-              (com.google.protobuf.ByteString) ref;
-          String s = bs.toStringUtf8();
-          if (com.google.protobuf.Internal.isValidUtf8(bs)) {
-            objectName_ = s;
-          }
-          return s;
-        }
-      }
-      private com.google.protobuf.ByteString getObjectNameBytes() {
-        Object ref = objectName_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b =
-              com.google.protobuf.ByteString.copyFromUtf8((String) ref);
-          objectName_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
-      }
-
-      // optional string chunk_key = 2;
-      public static final int CHUNK_KEY_FIELD_NUMBER = 2;
-      private Object chunkKey_;
-      public boolean hasChunkKey() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      public String getChunkKey() {
-        Object ref = chunkKey_;
-        if (ref instanceof String) {
-          return (String) ref;
-        } else {
-          com.google.protobuf.ByteString bs =
-              (com.google.protobuf.ByteString) ref;
-          String s = bs.toStringUtf8();
-          if (com.google.protobuf.Internal.isValidUtf8(bs)) {
-            chunkKey_ = s;
-          }
-          return s;
-        }
-      }
-      private com.google.protobuf.ByteString getChunkKeyBytes() {
-        Object ref = chunkKey_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b =
-              com.google.protobuf.ByteString.copyFromUtf8((String) ref);
-          chunkKey_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
-      }
-
-      private void initFields() {
-        objectName_ = "";
-        chunkKey_ = "";
-      }
-      private byte memoizedIsInitialized = -1;
-      public final boolean isInitialized() {
-        byte isInitialized = memoizedIsInitialized;
-        if (isInitialized != -1) return isInitialized == 1;
-
-        if (!hasObjectName()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-        memoizedIsInitialized = 1;
-        return true;
-      }
-
-      public void writeTo(com.google.protobuf.CodedOutputStream output)
-                          throws java.io.IOException {
-        getSerializedSize();
-        if (((bitField0_ & 0x00000001) == 0x00000001)) {
-          output.writeBytes(1, getObjectNameBytes());
-        }
-        if (((bitField0_ & 0x00000002) == 0x00000002)) {
-          output.writeBytes(2, getChunkKeyBytes());
-        }
-        getUnknownFields().writeTo(output);
-      }
-
-      private int memoizedSerializedSize = -1;
-      public int getSerializedSize() {
-        int size = memoizedSerializedSize;
-        if (size != -1) return size;
-
-        size = 0;
-        if (((bitField0_ & 0x00000001) == 0x00000001)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeBytesSize(1, getObjectNameBytes());
-        }
-        if (((bitField0_ & 0x00000002) == 0x00000002)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeBytesSize(2, getChunkKeyBytes());
-        }
-        size += getUnknownFields().getSerializedSize();
-        memoizedSerializedSize = size;
-        return size;
-      }
-
-      @java.lang.Override
-      protected Object writeReplace() throws java.io.ObjectStreamException {
-        return super.writeReplace();
-      }
-
-      @java.lang.Override
-      public boolean equals(final Object obj) {
-        if (obj == this) {
-         return true;
-        }
-        if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id)) {
-          return super.equals(obj);
-        }
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id) obj;
-
-        boolean result = true;
-        result = result && (hasObjectName() == other.hasObjectName());
-        if (hasObjectName()) {
-          result = result && getObjectName()
-              .equals(other.getObjectName());
-        }
-        result = result && (hasChunkKey() == other.hasChunkKey());
-        if (hasChunkKey()) {
-          result = result && getChunkKey()
-              .equals(other.getChunkKey());
-        }
-        result = result &&
-            getUnknownFields().equals(other.getUnknownFields());
-        return result;
-      }
-
-      @java.lang.Override
-      public int hashCode() {
-        int hash = 41;
-        hash = (19 * hash) + getDescriptorForType().hashCode();
-        if (hasObjectName()) {
-          hash = (37 * hash) + OBJECT_NAME_FIELD_NUMBER;
-          hash = (53 * hash) + getObjectName().hashCode();
-        }
-        if (hasChunkKey()) {
-          hash = (37 * hash) + CHUNK_KEY_FIELD_NUMBER;
-          hash = (53 * hash) + getChunkKey().hashCode();
-        }
-        hash = (29 * hash) + getUnknownFields().hashCode();
-        return hash;
-      }
-
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id parseFrom(
-          com.google.protobuf.ByteString data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id parseFrom(
-          com.google.protobuf.ByteString data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id parseFrom(byte[] data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id parseFrom(
-          byte[] data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id parseFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id parseFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id parseDelimitedFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id parseDelimitedFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id parseFrom(
-          com.google.protobuf.CodedInputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id parseFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-
-      public static Builder newBuilder() { return Builder.create(); }
-      public Builder newBuilderForType() { return newBuilder(); }
-      public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id prototype) {
-        return newBuilder().mergeFrom(prototype);
-      }
-      public Builder toBuilder() { return newBuilder(this); }
-
-      @java.lang.Override
-      protected Builder newBuilderForType(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        Builder builder = new Builder(parent);
-        return builder;
-      }
-      public static final class Builder extends
-          com.google.protobuf.GeneratedMessage.Builder<Builder>
-         implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder {
-        public static final com.google.protobuf.Descriptors.Descriptor
-            getDescriptor() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_RefData_Id_descriptor;
-        }
-
-        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-            internalGetFieldAccessorTable() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_RefData_Id_fieldAccessorTable;
-        }
-
-        // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.newBuilder()
-        private Builder() {
-          maybeForceBuilderInitialization();
-        }
-
-        private Builder(BuilderParent parent) {
-          super(parent);
-          maybeForceBuilderInitialization();
-        }
-        private void maybeForceBuilderInitialization() {
-          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          }
-        }
-        private static Builder create() {
-          return new Builder();
-        }
-
-        public Builder clear() {
-          super.clear();
-          objectName_ = "";
-          bitField0_ = (bitField0_ & ~0x00000001);
-          chunkKey_ = "";
-          bitField0_ = (bitField0_ & ~0x00000002);
-          return this;
-        }
-
-        public Builder clone() {
-          return create().mergeFrom(buildPartial());
-        }
-
-        public com.google.protobuf.Descriptors.Descriptor
-            getDescriptorForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDescriptor();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id getDefaultInstanceForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id build() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(result);
-          }
-          return result;
-        }
-
-        private org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id buildParsed()
-            throws com.google.protobuf.InvalidProtocolBufferException {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(
-              result).asInvalidProtocolBufferException();
-          }
-          return result;
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id buildPartial() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id(this);
-          int from_bitField0_ = bitField0_;
-          int to_bitField0_ = 0;
-          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-            to_bitField0_ |= 0x00000001;
-          }
-          result.objectName_ = objectName_;
-          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-            to_bitField0_ |= 0x00000002;
-          }
-          result.chunkKey_ = chunkKey_;
-          result.bitField0_ = to_bitField0_;
-          onBuilt();
-          return result;
-        }
-
-        public Builder mergeFrom(com.google.protobuf.Message other) {
-          if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id) {
-            return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id)other);
-          } else {
-            super.mergeFrom(other);
-            return this;
-          }
-        }
-
-        public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id other) {
-          if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance()) return this;
-          if (other.hasObjectName()) {
-            setObjectName(other.getObjectName());
-          }
-          if (other.hasChunkKey()) {
-            setChunkKey(other.getChunkKey());
-          }
-          this.mergeUnknownFields(other.getUnknownFields());
-          return this;
-        }
-
-        public final boolean isInitialized() {
-          if (!hasObjectName()) {
-
-            return false;
-          }
-          return true;
-        }
-
-        public Builder mergeFrom(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws java.io.IOException {
-          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-            com.google.protobuf.UnknownFieldSet.newBuilder(
-              this.getUnknownFields());
-          while (true) {
-            int tag = input.readTag();
-            switch (tag) {
-              case 0:
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              default: {
-                if (!parseUnknownField(input, unknownFields,
-                                       extensionRegistry, tag)) {
-                  this.setUnknownFields(unknownFields.build());
-                  onChanged();
-                  return this;
-                }
-                break;
-              }
-              case 10: {
-                bitField0_ |= 0x00000001;
-                objectName_ = input.readBytes();
-                break;
-              }
-              case 18: {
-                bitField0_ |= 0x00000002;
-                chunkKey_ = input.readBytes();
-                break;
-              }
-            }
-          }
-        }
-
-        private int bitField0_;
-
-        // required string object_name = 1;
-        private Object objectName_ = "";
-        public boolean hasObjectName() {
-          return ((bitField0_ & 0x00000001) == 0x00000001);
-        }
-        public String getObjectName() {
-          Object ref = objectName_;
-          if (!(ref instanceof String)) {
-            String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
-            objectName_ = s;
-            return s;
-          } else {
-            return (String) ref;
-          }
-        }
-        public Builder setObjectName(String value) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          bitField0_ |= 0x00000001;
-          objectName_ = value;
-          onChanged();
-          return this;
-        }
-        public Builder clearObjectName() {
-          bitField0_ = (bitField0_ & ~0x00000001);
-          objectName_ = getDefaultInstance().getObjectName();
-          onChanged();
-          return this;
-        }
-        void setObjectName(com.google.protobuf.ByteString value) {
-          bitField0_ |= 0x00000001;
-          objectName_ = value;
-          onChanged();
-        }
-
-        // optional string chunk_key = 2;
-        private Object chunkKey_ = "";
-        public boolean hasChunkKey() {
-          return ((bitField0_ & 0x00000002) == 0x00000002);
-        }
-        public String getChunkKey() {
-          Object ref = chunkKey_;
-          if (!(ref instanceof String)) {
-            String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
-            chunkKey_ = s;
-            return s;
-          } else {
-            return (String) ref;
-          }
-        }
-        public Builder setChunkKey(String value) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          bitField0_ |= 0x00000002;
-          chunkKey_ = value;
-          onChanged();
-          return this;
-        }
-        public Builder clearChunkKey() {
-          bitField0_ = (bitField0_ & ~0x00000002);
-          chunkKey_ = getDefaultInstance().getChunkKey();
-          onChanged();
-          return this;
-        }
-        void setChunkKey(com.google.protobuf.ByteString value) {
-          bitField0_ |= 0x00000002;
-          chunkKey_ = value;
-          onChanged();
-        }
-
-        // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.RefData.Id)
-      }
-
-      static {
-        defaultInstance = new Id(true);
-        defaultInstance.initFields();
-      }
-
-      // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.RefData.Id)
-    }
-
-    private int bitField0_;
-    // required uint32 sequence = 5 [default = 0];
-    public static final int SEQUENCE_FIELD_NUMBER = 5;
-    private int sequence_;
-    public boolean hasSequence() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    public int getSequence() {
-      return sequence_;
-    }
-
-    // optional string symref = 1;
-    public static final int SYMREF_FIELD_NUMBER = 1;
-    private Object symref_;
-    public boolean hasSymref() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    public String getSymref() {
-      Object ref = symref_;
-      if (ref instanceof String) {
-        return (String) ref;
-      } else {
-        com.google.protobuf.ByteString bs =
-            (com.google.protobuf.ByteString) ref;
-        String s = bs.toStringUtf8();
-        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
-          symref_ = s;
-        }
-        return s;
-      }
-    }
-    private com.google.protobuf.ByteString getSymrefBytes() {
-      Object ref = symref_;
-      if (ref instanceof String) {
-        com.google.protobuf.ByteString b =
-            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
-        symref_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // optional .org.eclipse.jgit.storage.dht.RefData.Id target = 2;
-    public static final int TARGET_FIELD_NUMBER = 2;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id target_;
-    public boolean hasTarget() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id getTarget() {
-      return target_;
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder getTargetOrBuilder() {
-      return target_;
-    }
-
-    // optional bool is_peeled = 3;
-    public static final int IS_PEELED_FIELD_NUMBER = 3;
-    private boolean isPeeled_;
-    public boolean hasIsPeeled() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
-    }
-    public boolean getIsPeeled() {
-      return isPeeled_;
-    }
-
-    // optional .org.eclipse.jgit.storage.dht.RefData.Id peeled = 4;
-    public static final int PEELED_FIELD_NUMBER = 4;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id peeled_;
-    public boolean hasPeeled() {
-      return ((bitField0_ & 0x00000010) == 0x00000010);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id getPeeled() {
-      return peeled_;
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder getPeeledOrBuilder() {
-      return peeled_;
-    }
-
-    private void initFields() {
-      sequence_ = 0;
-      symref_ = "";
-      target_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance();
-      isPeeled_ = false;
-      peeled_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance();
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      if (!hasSequence()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (hasTarget()) {
-        if (!getTarget().isInitialized()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-      }
-      if (hasPeeled()) {
-        if (!getPeeled().isInitialized()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeBytes(1, getSymrefBytes());
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeMessage(2, target_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeBool(3, isPeeled_);
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeMessage(4, peeled_);
-      }
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeUInt32(5, sequence_);
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(1, getSymrefBytes());
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(2, target_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBoolSize(3, isPeeled_);
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(4, peeled_);
-      }
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeUInt32Size(5, sequence_);
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    @java.lang.Override
-    protected Object writeReplace() throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData)) {
-        return super.equals(obj);
-      }
-      org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData) obj;
-
-      boolean result = true;
-      result = result && (hasSequence() == other.hasSequence());
-      if (hasSequence()) {
-        result = result && (getSequence()
-            == other.getSequence());
-      }
-      result = result && (hasSymref() == other.hasSymref());
-      if (hasSymref()) {
-        result = result && getSymref()
-            .equals(other.getSymref());
-      }
-      result = result && (hasTarget() == other.hasTarget());
-      if (hasTarget()) {
-        result = result && getTarget()
-            .equals(other.getTarget());
-      }
-      result = result && (hasIsPeeled() == other.hasIsPeeled());
-      if (hasIsPeeled()) {
-        result = result && (getIsPeeled()
-            == other.getIsPeeled());
-      }
-      result = result && (hasPeeled() == other.hasPeeled());
-      if (hasPeeled()) {
-        result = result && getPeeled()
-            .equals(other.getPeeled());
-      }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasSequence()) {
-        hash = (37 * hash) + SEQUENCE_FIELD_NUMBER;
-        hash = (53 * hash) + getSequence();
-      }
-      if (hasSymref()) {
-        hash = (37 * hash) + SYMREF_FIELD_NUMBER;
-        hash = (53 * hash) + getSymref().hashCode();
-      }
-      if (hasTarget()) {
-        hash = (37 * hash) + TARGET_FIELD_NUMBER;
-        hash = (53 * hash) + getTarget().hashCode();
-      }
-      if (hasIsPeeled()) {
-        hash = (37 * hash) + IS_PEELED_FIELD_NUMBER;
-        hash = (53 * hash) + hashBoolean(getIsPeeled());
-      }
-      if (hasPeeled()) {
-        hash = (37 * hash) + PEELED_FIELD_NUMBER;
-        hash = (53 * hash) + getPeeled().hashCode();
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      return hash;
-    }
-
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefDataOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_RefData_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_RefData_fieldAccessorTable;
-      }
-
-      // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          getTargetFieldBuilder();
-          getPeeledFieldBuilder();
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        sequence_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000001);
-        symref_ = "";
-        bitField0_ = (bitField0_ & ~0x00000002);
-        if (targetBuilder_ == null) {
-          target_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance();
-        } else {
-          targetBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000004);
-        isPeeled_ = false;
-        bitField0_ = (bitField0_ & ~0x00000008);
-        if (peeledBuilder_ == null) {
-          peeled_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance();
-        } else {
-          peeledBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000010);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.getDescriptor();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData getDefaultInstanceForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.getDefaultInstance();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData build() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData buildPartial() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.sequence_ = sequence_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        result.symref_ = symref_;
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000004;
-        }
-        if (targetBuilder_ == null) {
-          result.target_ = target_;
-        } else {
-          result.target_ = targetBuilder_.build();
-        }
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000008;
-        }
-        result.isPeeled_ = isPeeled_;
-        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
-          to_bitField0_ |= 0x00000010;
-        }
-        if (peeledBuilder_ == null) {
-          result.peeled_ = peeled_;
-        } else {
-          result.peeled_ = peeledBuilder_.build();
-        }
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData) {
-          return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData other) {
-        if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.getDefaultInstance()) return this;
-        if (other.hasSequence()) {
-          setSequence(other.getSequence());
-        }
-        if (other.hasSymref()) {
-          setSymref(other.getSymref());
-        }
-        if (other.hasTarget()) {
-          mergeTarget(other.getTarget());
-        }
-        if (other.hasIsPeeled()) {
-          setIsPeeled(other.getIsPeeled());
-        }
-        if (other.hasPeeled()) {
-          mergePeeled(other.getPeeled());
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        if (!hasSequence()) {
-
-          return false;
-        }
-        if (hasTarget()) {
-          if (!getTarget().isInitialized()) {
-
-            return false;
-          }
-        }
-        if (hasPeeled()) {
-          if (!getPeeled().isInitialized()) {
-
-            return false;
-          }
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000002;
-              symref_ = input.readBytes();
-              break;
-            }
-            case 18: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.newBuilder();
-              if (hasTarget()) {
-                subBuilder.mergeFrom(getTarget());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setTarget(subBuilder.buildPartial());
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000008;
-              isPeeled_ = input.readBool();
-              break;
-            }
-            case 34: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.newBuilder();
-              if (hasPeeled()) {
-                subBuilder.mergeFrom(getPeeled());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setPeeled(subBuilder.buildPartial());
-              break;
-            }
-            case 40: {
-              bitField0_ |= 0x00000001;
-              sequence_ = input.readUInt32();
-              break;
-            }
-          }
-        }
-      }
-
-      private int bitField0_;
-
-      // required uint32 sequence = 5 [default = 0];
-      private int sequence_ ;
-      public boolean hasSequence() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      public int getSequence() {
-        return sequence_;
-      }
-      public Builder setSequence(int value) {
-        bitField0_ |= 0x00000001;
-        sequence_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearSequence() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        sequence_ = 0;
-        onChanged();
-        return this;
-      }
-
-      // optional string symref = 1;
-      private Object symref_ = "";
-      public boolean hasSymref() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      public String getSymref() {
-        Object ref = symref_;
-        if (!(ref instanceof String)) {
-          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
-          symref_ = s;
-          return s;
-        } else {
-          return (String) ref;
-        }
-      }
-      public Builder setSymref(String value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000002;
-        symref_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearSymref() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        symref_ = getDefaultInstance().getSymref();
-        onChanged();
-        return this;
-      }
-      void setSymref(com.google.protobuf.ByteString value) {
-        bitField0_ |= 0x00000002;
-        symref_ = value;
-        onChanged();
-      }
-
-      // optional .org.eclipse.jgit.storage.dht.RefData.Id target = 2;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id target_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder> targetBuilder_;
-      public boolean hasTarget() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id getTarget() {
-        if (targetBuilder_ == null) {
-          return target_;
-        } else {
-          return targetBuilder_.getMessage();
-        }
-      }
-      public Builder setTarget(org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id value) {
-        if (targetBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          target_ = value;
-          onChanged();
-        } else {
-          targetBuilder_.setMessage(value);
-        }
-        bitField0_ |= 0x00000004;
-        return this;
-      }
-      public Builder setTarget(
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder builderForValue) {
-        if (targetBuilder_ == null) {
-          target_ = builderForValue.build();
-          onChanged();
-        } else {
-          targetBuilder_.setMessage(builderForValue.build());
-        }
-        bitField0_ |= 0x00000004;
-        return this;
-      }
-      public Builder mergeTarget(org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id value) {
-        if (targetBuilder_ == null) {
-          if (((bitField0_ & 0x00000004) == 0x00000004) &&
-              target_ != org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance()) {
-            target_ =
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.newBuilder(target_).mergeFrom(value).buildPartial();
-          } else {
-            target_ = value;
-          }
-          onChanged();
-        } else {
-          targetBuilder_.mergeFrom(value);
-        }
-        bitField0_ |= 0x00000004;
-        return this;
-      }
-      public Builder clearTarget() {
-        if (targetBuilder_ == null) {
-          target_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance();
-          onChanged();
-        } else {
-          targetBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000004);
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder getTargetBuilder() {
-        bitField0_ |= 0x00000004;
-        onChanged();
-        return getTargetFieldBuilder().getBuilder();
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder getTargetOrBuilder() {
-        if (targetBuilder_ != null) {
-          return targetBuilder_.getMessageOrBuilder();
-        } else {
-          return target_;
-        }
-      }
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder>
-          getTargetFieldBuilder() {
-        if (targetBuilder_ == null) {
-          targetBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder>(
-                  target_,
-                  getParentForChildren(),
-                  isClean());
-          target_ = null;
-        }
-        return targetBuilder_;
-      }
-
-      // optional bool is_peeled = 3;
-      private boolean isPeeled_ ;
-      public boolean hasIsPeeled() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
-      }
-      public boolean getIsPeeled() {
-        return isPeeled_;
-      }
-      public Builder setIsPeeled(boolean value) {
-        bitField0_ |= 0x00000008;
-        isPeeled_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearIsPeeled() {
-        bitField0_ = (bitField0_ & ~0x00000008);
-        isPeeled_ = false;
-        onChanged();
-        return this;
-      }
-
-      // optional .org.eclipse.jgit.storage.dht.RefData.Id peeled = 4;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id peeled_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder> peeledBuilder_;
-      public boolean hasPeeled() {
-        return ((bitField0_ & 0x00000010) == 0x00000010);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id getPeeled() {
-        if (peeledBuilder_ == null) {
-          return peeled_;
-        } else {
-          return peeledBuilder_.getMessage();
-        }
-      }
-      public Builder setPeeled(org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id value) {
-        if (peeledBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          peeled_ = value;
-          onChanged();
-        } else {
-          peeledBuilder_.setMessage(value);
-        }
-        bitField0_ |= 0x00000010;
-        return this;
-      }
-      public Builder setPeeled(
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder builderForValue) {
-        if (peeledBuilder_ == null) {
-          peeled_ = builderForValue.build();
-          onChanged();
-        } else {
-          peeledBuilder_.setMessage(builderForValue.build());
-        }
-        bitField0_ |= 0x00000010;
-        return this;
-      }
-      public Builder mergePeeled(org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id value) {
-        if (peeledBuilder_ == null) {
-          if (((bitField0_ & 0x00000010) == 0x00000010) &&
-              peeled_ != org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance()) {
-            peeled_ =
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.newBuilder(peeled_).mergeFrom(value).buildPartial();
-          } else {
-            peeled_ = value;
-          }
-          onChanged();
-        } else {
-          peeledBuilder_.mergeFrom(value);
-        }
-        bitField0_ |= 0x00000010;
-        return this;
-      }
-      public Builder clearPeeled() {
-        if (peeledBuilder_ == null) {
-          peeled_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.getDefaultInstance();
-          onChanged();
-        } else {
-          peeledBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000010);
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder getPeeledBuilder() {
-        bitField0_ |= 0x00000010;
-        onChanged();
-        return getPeeledFieldBuilder().getBuilder();
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder getPeeledOrBuilder() {
-        if (peeledBuilder_ != null) {
-          return peeledBuilder_.getMessageOrBuilder();
-        } else {
-          return peeled_;
-        }
-      }
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder>
-          getPeeledFieldBuilder() {
-        if (peeledBuilder_ == null) {
-          peeledBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.IdOrBuilder>(
-                  peeled_,
-                  getParentForChildren(),
-                  isClean());
-          peeled_ = null;
-        }
-        return peeledBuilder_;
-      }
-
-      // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.RefData)
-    }
-
-    static {
-      defaultInstance = new RefData(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.RefData)
-  }
-
-  public interface ObjectInfoOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // optional .org.eclipse.jgit.storage.dht.ObjectInfo.ObjectType object_type = 1;
-    boolean hasObjectType();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType getObjectType();
-
-    // required int32 offset = 2;
-    boolean hasOffset();
-    int getOffset();
-
-    // required int64 packed_size = 3;
-    boolean hasPackedSize();
-    long getPackedSize();
-
-    // required int64 inflated_size = 4;
-    boolean hasInflatedSize();
-    long getInflatedSize();
-
-    // optional bytes delta_base = 5;
-    boolean hasDeltaBase();
-    com.google.protobuf.ByteString getDeltaBase();
-
-    // optional bool is_fragmented = 6;
-    boolean hasIsFragmented();
-    boolean getIsFragmented();
-  }
-  public static final class ObjectInfo extends
-      com.google.protobuf.GeneratedMessage
-      implements ObjectInfoOrBuilder {
-    // Use ObjectInfo.newBuilder() to construct.
-    private ObjectInfo(Builder builder) {
-      super(builder);
-    }
-    private ObjectInfo(boolean noInit) {}
-
-    private static final ObjectInfo defaultInstance;
-    public static ObjectInfo getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public ObjectInfo getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ObjectInfo_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ObjectInfo_fieldAccessorTable;
-    }
-
-    public enum ObjectType
-        implements com.google.protobuf.ProtocolMessageEnum {
-      COMMIT(0, 1),
-      TREE(1, 2),
-      BLOB(2, 3),
-      TAG(3, 4),
-      ;
-
-      public static final int COMMIT_VALUE = 1;
-      public static final int TREE_VALUE = 2;
-      public static final int BLOB_VALUE = 3;
-      public static final int TAG_VALUE = 4;
-
-
-      public final int getNumber() { return value; }
-
-      public static ObjectType valueOf(int value) {
-        switch (value) {
-          case 1: return COMMIT;
-          case 2: return TREE;
-          case 3: return BLOB;
-          case 4: return TAG;
-          default: return null;
-        }
-      }
-
-      public static com.google.protobuf.Internal.EnumLiteMap<ObjectType>
-          internalGetValueMap() {
-        return internalValueMap;
-      }
-      private static com.google.protobuf.Internal.EnumLiteMap<ObjectType>
-          internalValueMap =
-            new com.google.protobuf.Internal.EnumLiteMap<ObjectType>() {
-              public ObjectType findValueByNumber(int number) {
-                return ObjectType.valueOf(number);
-              }
-            };
-
-      public final com.google.protobuf.Descriptors.EnumValueDescriptor
-          getValueDescriptor() {
-        return getDescriptor().getValues().get(index);
-      }
-      public final com.google.protobuf.Descriptors.EnumDescriptor
-          getDescriptorForType() {
-        return getDescriptor();
-      }
-      public static final com.google.protobuf.Descriptors.EnumDescriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.getDescriptor().getEnumTypes().get(0);
-      }
-
-      private static final ObjectType[] VALUES = {
-        COMMIT, TREE, BLOB, TAG,
-      };
-
-      public static ObjectType valueOf(
-          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
-        if (desc.getType() != getDescriptor()) {
-          throw new java.lang.IllegalArgumentException(
-            "EnumValueDescriptor is not for this type.");
-        }
-        return VALUES[desc.getIndex()];
-      }
-
-      private final int index;
-      private final int value;
-
-      private ObjectType(int index, int value) {
-        this.index = index;
-        this.value = value;
-      }
-
-      // @@protoc_insertion_point(enum_scope:org.eclipse.jgit.storage.dht.ObjectInfo.ObjectType)
-    }
-
-    private int bitField0_;
-    // optional .org.eclipse.jgit.storage.dht.ObjectInfo.ObjectType object_type = 1;
-    public static final int OBJECT_TYPE_FIELD_NUMBER = 1;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType objectType_;
-    public boolean hasObjectType() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType getObjectType() {
-      return objectType_;
-    }
-
-    // required int32 offset = 2;
-    public static final int OFFSET_FIELD_NUMBER = 2;
-    private int offset_;
-    public boolean hasOffset() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    public int getOffset() {
-      return offset_;
-    }
-
-    // required int64 packed_size = 3;
-    public static final int PACKED_SIZE_FIELD_NUMBER = 3;
-    private long packedSize_;
-    public boolean hasPackedSize() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
-    }
-    public long getPackedSize() {
-      return packedSize_;
-    }
-
-    // required int64 inflated_size = 4;
-    public static final int INFLATED_SIZE_FIELD_NUMBER = 4;
-    private long inflatedSize_;
-    public boolean hasInflatedSize() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
-    }
-    public long getInflatedSize() {
-      return inflatedSize_;
-    }
-
-    // optional bytes delta_base = 5;
-    public static final int DELTA_BASE_FIELD_NUMBER = 5;
-    private com.google.protobuf.ByteString deltaBase_;
-    public boolean hasDeltaBase() {
-      return ((bitField0_ & 0x00000010) == 0x00000010);
-    }
-    public com.google.protobuf.ByteString getDeltaBase() {
-      return deltaBase_;
-    }
-
-    // optional bool is_fragmented = 6;
-    public static final int IS_FRAGMENTED_FIELD_NUMBER = 6;
-    private boolean isFragmented_;
-    public boolean hasIsFragmented() {
-      return ((bitField0_ & 0x00000020) == 0x00000020);
-    }
-    public boolean getIsFragmented() {
-      return isFragmented_;
-    }
-
-    private void initFields() {
-      objectType_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType.COMMIT;
-      offset_ = 0;
-      packedSize_ = 0L;
-      inflatedSize_ = 0L;
-      deltaBase_ = com.google.protobuf.ByteString.EMPTY;
-      isFragmented_ = false;
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      if (!hasOffset()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasPackedSize()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasInflatedSize()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeEnum(1, objectType_.getNumber());
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeInt32(2, offset_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeInt64(3, packedSize_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeInt64(4, inflatedSize_);
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeBytes(5, deltaBase_);
-      }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        output.writeBool(6, isFragmented_);
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeEnumSize(1, objectType_.getNumber());
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(2, offset_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(3, packedSize_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(4, inflatedSize_);
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(5, deltaBase_);
-      }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBoolSize(6, isFragmented_);
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    @java.lang.Override
-    protected Object writeReplace() throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo)) {
-        return super.equals(obj);
-      }
-      org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo) obj;
-
-      boolean result = true;
-      result = result && (hasObjectType() == other.hasObjectType());
-      if (hasObjectType()) {
-        result = result &&
-            (getObjectType() == other.getObjectType());
-      }
-      result = result && (hasOffset() == other.hasOffset());
-      if (hasOffset()) {
-        result = result && (getOffset()
-            == other.getOffset());
-      }
-      result = result && (hasPackedSize() == other.hasPackedSize());
-      if (hasPackedSize()) {
-        result = result && (getPackedSize()
-            == other.getPackedSize());
-      }
-      result = result && (hasInflatedSize() == other.hasInflatedSize());
-      if (hasInflatedSize()) {
-        result = result && (getInflatedSize()
-            == other.getInflatedSize());
-      }
-      result = result && (hasDeltaBase() == other.hasDeltaBase());
-      if (hasDeltaBase()) {
-        result = result && getDeltaBase()
-            .equals(other.getDeltaBase());
-      }
-      result = result && (hasIsFragmented() == other.hasIsFragmented());
-      if (hasIsFragmented()) {
-        result = result && (getIsFragmented()
-            == other.getIsFragmented());
-      }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasObjectType()) {
-        hash = (37 * hash) + OBJECT_TYPE_FIELD_NUMBER;
-        hash = (53 * hash) + hashEnum(getObjectType());
-      }
-      if (hasOffset()) {
-        hash = (37 * hash) + OFFSET_FIELD_NUMBER;
-        hash = (53 * hash) + getOffset();
-      }
-      if (hasPackedSize()) {
-        hash = (37 * hash) + PACKED_SIZE_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getPackedSize());
-      }
-      if (hasInflatedSize()) {
-        hash = (37 * hash) + INFLATED_SIZE_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getInflatedSize());
-      }
-      if (hasDeltaBase()) {
-        hash = (37 * hash) + DELTA_BASE_FIELD_NUMBER;
-        hash = (53 * hash) + getDeltaBase().hashCode();
-      }
-      if (hasIsFragmented()) {
-        hash = (37 * hash) + IS_FRAGMENTED_FIELD_NUMBER;
-        hash = (53 * hash) + hashBoolean(getIsFragmented());
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      return hash;
-    }
-
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfoOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ObjectInfo_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ObjectInfo_fieldAccessorTable;
-      }
-
-      // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        objectType_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType.COMMIT;
-        bitField0_ = (bitField0_ & ~0x00000001);
-        offset_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000002);
-        packedSize_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000004);
-        inflatedSize_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000008);
-        deltaBase_ = com.google.protobuf.ByteString.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000010);
-        isFragmented_ = false;
-        bitField0_ = (bitField0_ & ~0x00000020);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.getDescriptor();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo getDefaultInstanceForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.getDefaultInstance();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo build() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo buildPartial() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.objectType_ = objectType_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        result.offset_ = offset_;
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000004;
-        }
-        result.packedSize_ = packedSize_;
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000008;
-        }
-        result.inflatedSize_ = inflatedSize_;
-        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
-          to_bitField0_ |= 0x00000010;
-        }
-        result.deltaBase_ = deltaBase_;
-        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
-          to_bitField0_ |= 0x00000020;
-        }
-        result.isFragmented_ = isFragmented_;
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo) {
-          return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo other) {
-        if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.getDefaultInstance()) return this;
-        if (other.hasObjectType()) {
-          setObjectType(other.getObjectType());
-        }
-        if (other.hasOffset()) {
-          setOffset(other.getOffset());
-        }
-        if (other.hasPackedSize()) {
-          setPackedSize(other.getPackedSize());
-        }
-        if (other.hasInflatedSize()) {
-          setInflatedSize(other.getInflatedSize());
-        }
-        if (other.hasDeltaBase()) {
-          setDeltaBase(other.getDeltaBase());
-        }
-        if (other.hasIsFragmented()) {
-          setIsFragmented(other.getIsFragmented());
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        if (!hasOffset()) {
-
-          return false;
-        }
-        if (!hasPackedSize()) {
-
-          return false;
-        }
-        if (!hasInflatedSize()) {
-
-          return false;
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 8: {
-              int rawValue = input.readEnum();
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType value = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType.valueOf(rawValue);
-              if (value == null) {
-                unknownFields.mergeVarintField(1, rawValue);
-              } else {
-                bitField0_ |= 0x00000001;
-                objectType_ = value;
-              }
-              break;
-            }
-            case 16: {
-              bitField0_ |= 0x00000002;
-              offset_ = input.readInt32();
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000004;
-              packedSize_ = input.readInt64();
-              break;
-            }
-            case 32: {
-              bitField0_ |= 0x00000008;
-              inflatedSize_ = input.readInt64();
-              break;
-            }
-            case 42: {
-              bitField0_ |= 0x00000010;
-              deltaBase_ = input.readBytes();
-              break;
-            }
-            case 48: {
-              bitField0_ |= 0x00000020;
-              isFragmented_ = input.readBool();
-              break;
-            }
-          }
-        }
-      }
-
-      private int bitField0_;
-
-      // optional .org.eclipse.jgit.storage.dht.ObjectInfo.ObjectType object_type = 1;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType objectType_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType.COMMIT;
-      public boolean hasObjectType() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType getObjectType() {
-        return objectType_;
-      }
-      public Builder setObjectType(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000001;
-        objectType_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearObjectType() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        objectType_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType.COMMIT;
-        onChanged();
-        return this;
-      }
-
-      // required int32 offset = 2;
-      private int offset_ ;
-      public boolean hasOffset() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      public int getOffset() {
-        return offset_;
-      }
-      public Builder setOffset(int value) {
-        bitField0_ |= 0x00000002;
-        offset_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearOffset() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        offset_ = 0;
-        onChanged();
-        return this;
-      }
-
-      // required int64 packed_size = 3;
-      private long packedSize_ ;
-      public boolean hasPackedSize() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      public long getPackedSize() {
-        return packedSize_;
-      }
-      public Builder setPackedSize(long value) {
-        bitField0_ |= 0x00000004;
-        packedSize_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearPackedSize() {
-        bitField0_ = (bitField0_ & ~0x00000004);
-        packedSize_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      // required int64 inflated_size = 4;
-      private long inflatedSize_ ;
-      public boolean hasInflatedSize() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
-      }
-      public long getInflatedSize() {
-        return inflatedSize_;
-      }
-      public Builder setInflatedSize(long value) {
-        bitField0_ |= 0x00000008;
-        inflatedSize_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearInflatedSize() {
-        bitField0_ = (bitField0_ & ~0x00000008);
-        inflatedSize_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      // optional bytes delta_base = 5;
-      private com.google.protobuf.ByteString deltaBase_ = com.google.protobuf.ByteString.EMPTY;
-      public boolean hasDeltaBase() {
-        return ((bitField0_ & 0x00000010) == 0x00000010);
-      }
-      public com.google.protobuf.ByteString getDeltaBase() {
-        return deltaBase_;
-      }
-      public Builder setDeltaBase(com.google.protobuf.ByteString value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000010;
-        deltaBase_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearDeltaBase() {
-        bitField0_ = (bitField0_ & ~0x00000010);
-        deltaBase_ = getDefaultInstance().getDeltaBase();
-        onChanged();
-        return this;
-      }
-
-      // optional bool is_fragmented = 6;
-      private boolean isFragmented_ ;
-      public boolean hasIsFragmented() {
-        return ((bitField0_ & 0x00000020) == 0x00000020);
-      }
-      public boolean getIsFragmented() {
-        return isFragmented_;
-      }
-      public Builder setIsFragmented(boolean value) {
-        bitField0_ |= 0x00000020;
-        isFragmented_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearIsFragmented() {
-        bitField0_ = (bitField0_ & ~0x00000020);
-        isFragmented_ = false;
-        onChanged();
-        return this;
-      }
-
-      // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.ObjectInfo)
-    }
-
-    static {
-      defaultInstance = new ObjectInfo(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.ObjectInfo)
-  }
-
-  public interface ChunkInfoOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkInfo.Source source = 1;
-    boolean hasSource();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source getSource();
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkInfo.ObjectType object_type = 2;
-    boolean hasObjectType();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType getObjectType();
-
-    // optional bool is_fragment = 3;
-    boolean hasIsFragment();
-    boolean getIsFragment();
-
-    // optional string cached_pack_key = 4;
-    boolean hasCachedPackKey();
-    String getCachedPackKey();
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkInfo.ObjectCounts object_counts = 5;
-    boolean hasObjectCounts();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts getObjectCounts();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCountsOrBuilder getObjectCountsOrBuilder();
-
-    // optional int32 chunk_size = 6;
-    boolean hasChunkSize();
-    int getChunkSize();
-
-    // optional int32 index_size = 7;
-    boolean hasIndexSize();
-    int getIndexSize();
-
-    // optional int32 meta_size = 8;
-    boolean hasMetaSize();
-    int getMetaSize();
-  }
-  public static final class ChunkInfo extends
-      com.google.protobuf.GeneratedMessage
-      implements ChunkInfoOrBuilder {
-    // Use ChunkInfo.newBuilder() to construct.
-    private ChunkInfo(Builder builder) {
-      super(builder);
-    }
-    private ChunkInfo(boolean noInit) {}
-
-    private static final ChunkInfo defaultInstance;
-    public static ChunkInfo getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public ChunkInfo getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_fieldAccessorTable;
-    }
-
-    public enum Source
-        implements com.google.protobuf.ProtocolMessageEnum {
-      RECEIVE(0, 1),
-      INSERT(1, 2),
-      REPACK(2, 3),
-      ;
-
-      public static final int RECEIVE_VALUE = 1;
-      public static final int INSERT_VALUE = 2;
-      public static final int REPACK_VALUE = 3;
-
-
-      public final int getNumber() { return value; }
-
-      public static Source valueOf(int value) {
-        switch (value) {
-          case 1: return RECEIVE;
-          case 2: return INSERT;
-          case 3: return REPACK;
-          default: return null;
-        }
-      }
-
-      public static com.google.protobuf.Internal.EnumLiteMap<Source>
-          internalGetValueMap() {
-        return internalValueMap;
-      }
-      private static com.google.protobuf.Internal.EnumLiteMap<Source>
-          internalValueMap =
-            new com.google.protobuf.Internal.EnumLiteMap<Source>() {
-              public Source findValueByNumber(int number) {
-                return Source.valueOf(number);
-              }
-            };
-
-      public final com.google.protobuf.Descriptors.EnumValueDescriptor
-          getValueDescriptor() {
-        return getDescriptor().getValues().get(index);
-      }
-      public final com.google.protobuf.Descriptors.EnumDescriptor
-          getDescriptorForType() {
-        return getDescriptor();
-      }
-      public static final com.google.protobuf.Descriptors.EnumDescriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.getDescriptor().getEnumTypes().get(0);
-      }
-
-      private static final Source[] VALUES = {
-        RECEIVE, INSERT, REPACK,
-      };
-
-      public static Source valueOf(
-          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
-        if (desc.getType() != getDescriptor()) {
-          throw new java.lang.IllegalArgumentException(
-            "EnumValueDescriptor is not for this type.");
-        }
-        return VALUES[desc.getIndex()];
-      }
-
-      private final int index;
-      private final int value;
-
-      private Source(int index, int value) {
-        this.index = index;
-        this.value = value;
-      }
-
-      // @@protoc_insertion_point(enum_scope:org.eclipse.jgit.storage.dht.ChunkInfo.Source)
-    }
-
-    public enum ObjectType
-        implements com.google.protobuf.ProtocolMessageEnum {
-      MIXED(0, 0),
-      COMMIT(1, 1),
-      TREE(2, 2),
-      BLOB(3, 3),
-      TAG(4, 4),
-      ;
-
-      public static final int MIXED_VALUE = 0;
-      public static final int COMMIT_VALUE = 1;
-      public static final int TREE_VALUE = 2;
-      public static final int BLOB_VALUE = 3;
-      public static final int TAG_VALUE = 4;
-
-
-      public final int getNumber() { return value; }
-
-      public static ObjectType valueOf(int value) {
-        switch (value) {
-          case 0: return MIXED;
-          case 1: return COMMIT;
-          case 2: return TREE;
-          case 3: return BLOB;
-          case 4: return TAG;
-          default: return null;
-        }
-      }
-
-      public static com.google.protobuf.Internal.EnumLiteMap<ObjectType>
-          internalGetValueMap() {
-        return internalValueMap;
-      }
-      private static com.google.protobuf.Internal.EnumLiteMap<ObjectType>
-          internalValueMap =
-            new com.google.protobuf.Internal.EnumLiteMap<ObjectType>() {
-              public ObjectType findValueByNumber(int number) {
-                return ObjectType.valueOf(number);
-              }
-            };
-
-      public final com.google.protobuf.Descriptors.EnumValueDescriptor
-          getValueDescriptor() {
-        return getDescriptor().getValues().get(index);
-      }
-      public final com.google.protobuf.Descriptors.EnumDescriptor
-          getDescriptorForType() {
-        return getDescriptor();
-      }
-      public static final com.google.protobuf.Descriptors.EnumDescriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.getDescriptor().getEnumTypes().get(1);
-      }
-
-      private static final ObjectType[] VALUES = {
-        MIXED, COMMIT, TREE, BLOB, TAG,
-      };
-
-      public static ObjectType valueOf(
-          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
-        if (desc.getType() != getDescriptor()) {
-          throw new java.lang.IllegalArgumentException(
-            "EnumValueDescriptor is not for this type.");
-        }
-        return VALUES[desc.getIndex()];
-      }
-
-      private final int index;
-      private final int value;
-
-      private ObjectType(int index, int value) {
-        this.index = index;
-        this.value = value;
-      }
-
-      // @@protoc_insertion_point(enum_scope:org.eclipse.jgit.storage.dht.ChunkInfo.ObjectType)
-    }
-
-    public interface ObjectCountsOrBuilder
-        extends com.google.protobuf.MessageOrBuilder {
-
-      // optional int32 total = 1;
-      boolean hasTotal();
-      int getTotal();
-
-      // optional int32 whole = 2;
-      boolean hasWhole();
-      int getWhole();
-
-      // optional int32 ofs_delta = 3;
-      boolean hasOfsDelta();
-      int getOfsDelta();
-
-      // optional int32 ref_delta = 4;
-      boolean hasRefDelta();
-      int getRefDelta();
-    }
-    public static final class ObjectCounts extends
-        com.google.protobuf.GeneratedMessage
-        implements ObjectCountsOrBuilder {
-      // Use ObjectCounts.newBuilder() to construct.
-      private ObjectCounts(Builder builder) {
-        super(builder);
-      }
-      private ObjectCounts(boolean noInit) {}
-
-      private static final ObjectCounts defaultInstance;
-      public static ObjectCounts getDefaultInstance() {
-        return defaultInstance;
-      }
-
-      public ObjectCounts getDefaultInstanceForType() {
-        return defaultInstance;
-      }
-
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_ObjectCounts_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_ObjectCounts_fieldAccessorTable;
-      }
-
-      private int bitField0_;
-      // optional int32 total = 1;
-      public static final int TOTAL_FIELD_NUMBER = 1;
-      private int total_;
-      public boolean hasTotal() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      public int getTotal() {
-        return total_;
-      }
-
-      // optional int32 whole = 2;
-      public static final int WHOLE_FIELD_NUMBER = 2;
-      private int whole_;
-      public boolean hasWhole() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      public int getWhole() {
-        return whole_;
-      }
-
-      // optional int32 ofs_delta = 3;
-      public static final int OFS_DELTA_FIELD_NUMBER = 3;
-      private int ofsDelta_;
-      public boolean hasOfsDelta() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      public int getOfsDelta() {
-        return ofsDelta_;
-      }
-
-      // optional int32 ref_delta = 4;
-      public static final int REF_DELTA_FIELD_NUMBER = 4;
-      private int refDelta_;
-      public boolean hasRefDelta() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
-      }
-      public int getRefDelta() {
-        return refDelta_;
-      }
-
-      private void initFields() {
-        total_ = 0;
-        whole_ = 0;
-        ofsDelta_ = 0;
-        refDelta_ = 0;
-      }
-      private byte memoizedIsInitialized = -1;
-      public final boolean isInitialized() {
-        byte isInitialized = memoizedIsInitialized;
-        if (isInitialized != -1) return isInitialized == 1;
-
-        memoizedIsInitialized = 1;
-        return true;
-      }
-
-      public void writeTo(com.google.protobuf.CodedOutputStream output)
-                          throws java.io.IOException {
-        getSerializedSize();
-        if (((bitField0_ & 0x00000001) == 0x00000001)) {
-          output.writeInt32(1, total_);
-        }
-        if (((bitField0_ & 0x00000002) == 0x00000002)) {
-          output.writeInt32(2, whole_);
-        }
-        if (((bitField0_ & 0x00000004) == 0x00000004)) {
-          output.writeInt32(3, ofsDelta_);
-        }
-        if (((bitField0_ & 0x00000008) == 0x00000008)) {
-          output.writeInt32(4, refDelta_);
-        }
-        getUnknownFields().writeTo(output);
-      }
-
-      private int memoizedSerializedSize = -1;
-      public int getSerializedSize() {
-        int size = memoizedSerializedSize;
-        if (size != -1) return size;
-
-        size = 0;
-        if (((bitField0_ & 0x00000001) == 0x00000001)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeInt32Size(1, total_);
-        }
-        if (((bitField0_ & 0x00000002) == 0x00000002)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeInt32Size(2, whole_);
-        }
-        if (((bitField0_ & 0x00000004) == 0x00000004)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeInt32Size(3, ofsDelta_);
-        }
-        if (((bitField0_ & 0x00000008) == 0x00000008)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeInt32Size(4, refDelta_);
-        }
-        size += getUnknownFields().getSerializedSize();
-        memoizedSerializedSize = size;
-        return size;
-      }
-
-      @java.lang.Override
-      protected Object writeReplace() throws java.io.ObjectStreamException {
-        return super.writeReplace();
-      }
-
-      @java.lang.Override
-      public boolean equals(final Object obj) {
-        if (obj == this) {
-         return true;
-        }
-        if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts)) {
-          return super.equals(obj);
-        }
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts) obj;
-
-        boolean result = true;
-        result = result && (hasTotal() == other.hasTotal());
-        if (hasTotal()) {
-          result = result && (getTotal()
-              == other.getTotal());
-        }
-        result = result && (hasWhole() == other.hasWhole());
-        if (hasWhole()) {
-          result = result && (getWhole()
-              == other.getWhole());
-        }
-        result = result && (hasOfsDelta() == other.hasOfsDelta());
-        if (hasOfsDelta()) {
-          result = result && (getOfsDelta()
-              == other.getOfsDelta());
-        }
-        result = result && (hasRefDelta() == other.hasRefDelta());
-        if (hasRefDelta()) {
-          result = result && (getRefDelta()
-              == other.getRefDelta());
-        }
-        result = result &&
-            getUnknownFields().equals(other.getUnknownFields());
-        return result;
-      }
-
-      @java.lang.Override
-      public int hashCode() {
-        int hash = 41;
-        hash = (19 * hash) + getDescriptorForType().hashCode();
-        if (hasTotal()) {
-          hash = (37 * hash) + TOTAL_FIELD_NUMBER;
-          hash = (53 * hash) + getTotal();
-        }
-        if (hasWhole()) {
-          hash = (37 * hash) + WHOLE_FIELD_NUMBER;
-          hash = (53 * hash) + getWhole();
-        }
-        if (hasOfsDelta()) {
-          hash = (37 * hash) + OFS_DELTA_FIELD_NUMBER;
-          hash = (53 * hash) + getOfsDelta();
-        }
-        if (hasRefDelta()) {
-          hash = (37 * hash) + REF_DELTA_FIELD_NUMBER;
-          hash = (53 * hash) + getRefDelta();
-        }
-        hash = (29 * hash) + getUnknownFields().hashCode();
-        return hash;
-      }
-
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts parseFrom(
-          com.google.protobuf.ByteString data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts parseFrom(
-          com.google.protobuf.ByteString data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts parseFrom(byte[] data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts parseFrom(
-          byte[] data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts parseFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts parseFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts parseDelimitedFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts parseDelimitedFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts parseFrom(
-          com.google.protobuf.CodedInputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts parseFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-
-      public static Builder newBuilder() { return Builder.create(); }
-      public Builder newBuilderForType() { return newBuilder(); }
-      public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts prototype) {
-        return newBuilder().mergeFrom(prototype);
-      }
-      public Builder toBuilder() { return newBuilder(this); }
-
-      @java.lang.Override
-      protected Builder newBuilderForType(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        Builder builder = new Builder(parent);
-        return builder;
-      }
-      public static final class Builder extends
-          com.google.protobuf.GeneratedMessage.Builder<Builder>
-         implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCountsOrBuilder {
-        public static final com.google.protobuf.Descriptors.Descriptor
-            getDescriptor() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_ObjectCounts_descriptor;
-        }
-
-        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-            internalGetFieldAccessorTable() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_ObjectCounts_fieldAccessorTable;
-        }
-
-        // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.newBuilder()
-        private Builder() {
-          maybeForceBuilderInitialization();
-        }
-
-        private Builder(BuilderParent parent) {
-          super(parent);
-          maybeForceBuilderInitialization();
-        }
-        private void maybeForceBuilderInitialization() {
-          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          }
-        }
-        private static Builder create() {
-          return new Builder();
-        }
-
-        public Builder clear() {
-          super.clear();
-          total_ = 0;
-          bitField0_ = (bitField0_ & ~0x00000001);
-          whole_ = 0;
-          bitField0_ = (bitField0_ & ~0x00000002);
-          ofsDelta_ = 0;
-          bitField0_ = (bitField0_ & ~0x00000004);
-          refDelta_ = 0;
-          bitField0_ = (bitField0_ & ~0x00000008);
-          return this;
-        }
-
-        public Builder clone() {
-          return create().mergeFrom(buildPartial());
-        }
-
-        public com.google.protobuf.Descriptors.Descriptor
-            getDescriptorForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.getDescriptor();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts getDefaultInstanceForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.getDefaultInstance();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts build() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(result);
-          }
-          return result;
-        }
-
-        private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts buildParsed()
-            throws com.google.protobuf.InvalidProtocolBufferException {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(
-              result).asInvalidProtocolBufferException();
-          }
-          return result;
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts buildPartial() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts(this);
-          int from_bitField0_ = bitField0_;
-          int to_bitField0_ = 0;
-          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-            to_bitField0_ |= 0x00000001;
-          }
-          result.total_ = total_;
-          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-            to_bitField0_ |= 0x00000002;
-          }
-          result.whole_ = whole_;
-          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-            to_bitField0_ |= 0x00000004;
-          }
-          result.ofsDelta_ = ofsDelta_;
-          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-            to_bitField0_ |= 0x00000008;
-          }
-          result.refDelta_ = refDelta_;
-          result.bitField0_ = to_bitField0_;
-          onBuilt();
-          return result;
-        }
-
-        public Builder mergeFrom(com.google.protobuf.Message other) {
-          if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts) {
-            return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts)other);
-          } else {
-            super.mergeFrom(other);
-            return this;
-          }
-        }
-
-        public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts other) {
-          if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.getDefaultInstance()) return this;
-          if (other.hasTotal()) {
-            setTotal(other.getTotal());
-          }
-          if (other.hasWhole()) {
-            setWhole(other.getWhole());
-          }
-          if (other.hasOfsDelta()) {
-            setOfsDelta(other.getOfsDelta());
-          }
-          if (other.hasRefDelta()) {
-            setRefDelta(other.getRefDelta());
-          }
-          this.mergeUnknownFields(other.getUnknownFields());
-          return this;
-        }
-
-        public final boolean isInitialized() {
-          return true;
-        }
-
-        public Builder mergeFrom(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws java.io.IOException {
-          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-            com.google.protobuf.UnknownFieldSet.newBuilder(
-              this.getUnknownFields());
-          while (true) {
-            int tag = input.readTag();
-            switch (tag) {
-              case 0:
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              default: {
-                if (!parseUnknownField(input, unknownFields,
-                                       extensionRegistry, tag)) {
-                  this.setUnknownFields(unknownFields.build());
-                  onChanged();
-                  return this;
-                }
-                break;
-              }
-              case 8: {
-                bitField0_ |= 0x00000001;
-                total_ = input.readInt32();
-                break;
-              }
-              case 16: {
-                bitField0_ |= 0x00000002;
-                whole_ = input.readInt32();
-                break;
-              }
-              case 24: {
-                bitField0_ |= 0x00000004;
-                ofsDelta_ = input.readInt32();
-                break;
-              }
-              case 32: {
-                bitField0_ |= 0x00000008;
-                refDelta_ = input.readInt32();
-                break;
-              }
-            }
-          }
-        }
-
-        private int bitField0_;
-
-        // optional int32 total = 1;
-        private int total_ ;
-        public boolean hasTotal() {
-          return ((bitField0_ & 0x00000001) == 0x00000001);
-        }
-        public int getTotal() {
-          return total_;
-        }
-        public Builder setTotal(int value) {
-          bitField0_ |= 0x00000001;
-          total_ = value;
-          onChanged();
-          return this;
-        }
-        public Builder clearTotal() {
-          bitField0_ = (bitField0_ & ~0x00000001);
-          total_ = 0;
-          onChanged();
-          return this;
-        }
-
-        // optional int32 whole = 2;
-        private int whole_ ;
-        public boolean hasWhole() {
-          return ((bitField0_ & 0x00000002) == 0x00000002);
-        }
-        public int getWhole() {
-          return whole_;
-        }
-        public Builder setWhole(int value) {
-          bitField0_ |= 0x00000002;
-          whole_ = value;
-          onChanged();
-          return this;
-        }
-        public Builder clearWhole() {
-          bitField0_ = (bitField0_ & ~0x00000002);
-          whole_ = 0;
-          onChanged();
-          return this;
-        }
-
-        // optional int32 ofs_delta = 3;
-        private int ofsDelta_ ;
-        public boolean hasOfsDelta() {
-          return ((bitField0_ & 0x00000004) == 0x00000004);
-        }
-        public int getOfsDelta() {
-          return ofsDelta_;
-        }
-        public Builder setOfsDelta(int value) {
-          bitField0_ |= 0x00000004;
-          ofsDelta_ = value;
-          onChanged();
-          return this;
-        }
-        public Builder clearOfsDelta() {
-          bitField0_ = (bitField0_ & ~0x00000004);
-          ofsDelta_ = 0;
-          onChanged();
-          return this;
-        }
-
-        // optional int32 ref_delta = 4;
-        private int refDelta_ ;
-        public boolean hasRefDelta() {
-          return ((bitField0_ & 0x00000008) == 0x00000008);
-        }
-        public int getRefDelta() {
-          return refDelta_;
-        }
-        public Builder setRefDelta(int value) {
-          bitField0_ |= 0x00000008;
-          refDelta_ = value;
-          onChanged();
-          return this;
-        }
-        public Builder clearRefDelta() {
-          bitField0_ = (bitField0_ & ~0x00000008);
-          refDelta_ = 0;
-          onChanged();
-          return this;
-        }
-
-        // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.ChunkInfo.ObjectCounts)
-      }
-
-      static {
-        defaultInstance = new ObjectCounts(true);
-        defaultInstance.initFields();
-      }
-
-      // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.ChunkInfo.ObjectCounts)
-    }
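For reference, a minimal sketch of the end-of-stream contract visible in the deleted parseDelimitedFrom(...) methods above: they return null once the input is exhausted, so length-delimited ObjectCounts records can be read back in a loop. writeDelimitedTo(...) comes from the protobuf runtime rather than from this diff, and the class name ObjectCountsStream is purely hypothetical.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts;

public class ObjectCountsStream {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    for (int i = 1; i <= 3; i++) {
      // writeDelimitedTo(...) is inherited from the protobuf runtime, not shown in this diff.
      ObjectCounts.newBuilder().setTotal(i).setWhole(i).build().writeDelimitedTo(buf);
    }
    ByteArrayInputStream in = new ByteArrayInputStream(buf.toByteArray());
    ObjectCounts counts;
    // The generated parseDelimitedFrom(...) returns null at end of stream (see above).
    while ((counts = ObjectCounts.parseDelimitedFrom(in)) != null) {
      System.out.println(counts.getTotal());
    }
  }
}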
-
-    private int bitField0_;
-    // optional .org.eclipse.jgit.storage.dht.ChunkInfo.Source source = 1;
-    public static final int SOURCE_FIELD_NUMBER = 1;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source source_;
-    public boolean hasSource() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source getSource() {
-      return source_;
-    }
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkInfo.ObjectType object_type = 2;
-    public static final int OBJECT_TYPE_FIELD_NUMBER = 2;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType objectType_;
-    public boolean hasObjectType() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType getObjectType() {
-      return objectType_;
-    }
-
-    // optional bool is_fragment = 3;
-    public static final int IS_FRAGMENT_FIELD_NUMBER = 3;
-    private boolean isFragment_;
-    public boolean hasIsFragment() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
-    }
-    public boolean getIsFragment() {
-      return isFragment_;
-    }
-
-    // optional string cached_pack_key = 4;
-    public static final int CACHED_PACK_KEY_FIELD_NUMBER = 4;
-    private Object cachedPackKey_;
-    public boolean hasCachedPackKey() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
-    }
-    public String getCachedPackKey() {
-      Object ref = cachedPackKey_;
-      if (ref instanceof String) {
-        return (String) ref;
-      } else {
-        com.google.protobuf.ByteString bs =
-            (com.google.protobuf.ByteString) ref;
-        String s = bs.toStringUtf8();
-        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
-          cachedPackKey_ = s;
-        }
-        return s;
-      }
-    }
-    private com.google.protobuf.ByteString getCachedPackKeyBytes() {
-      Object ref = cachedPackKey_;
-      if (ref instanceof String) {
-        com.google.protobuf.ByteString b =
-            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
-        cachedPackKey_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkInfo.ObjectCounts object_counts = 5;
-    public static final int OBJECT_COUNTS_FIELD_NUMBER = 5;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts objectCounts_;
-    public boolean hasObjectCounts() {
-      return ((bitField0_ & 0x00000010) == 0x00000010);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts getObjectCounts() {
-      return objectCounts_;
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCountsOrBuilder getObjectCountsOrBuilder() {
-      return objectCounts_;
-    }
-
-    // optional int32 chunk_size = 6;
-    public static final int CHUNK_SIZE_FIELD_NUMBER = 6;
-    private int chunkSize_;
-    public boolean hasChunkSize() {
-      return ((bitField0_ & 0x00000020) == 0x00000020);
-    }
-    public int getChunkSize() {
-      return chunkSize_;
-    }
-
-    // optional int32 index_size = 7;
-    public static final int INDEX_SIZE_FIELD_NUMBER = 7;
-    private int indexSize_;
-    public boolean hasIndexSize() {
-      return ((bitField0_ & 0x00000040) == 0x00000040);
-    }
-    public int getIndexSize() {
-      return indexSize_;
-    }
-
-    // optional int32 meta_size = 8;
-    public static final int META_SIZE_FIELD_NUMBER = 8;
-    private int metaSize_;
-    public boolean hasMetaSize() {
-      return ((bitField0_ & 0x00000080) == 0x00000080);
-    }
-    public int getMetaSize() {
-      return metaSize_;
-    }
-
-    private void initFields() {
-      source_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source.RECEIVE;
-      objectType_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType.MIXED;
-      isFragment_ = false;
-      cachedPackKey_ = "";
-      objectCounts_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.getDefaultInstance();
-      chunkSize_ = 0;
-      indexSize_ = 0;
-      metaSize_ = 0;
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeEnum(1, source_.getNumber());
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeEnum(2, objectType_.getNumber());
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeBool(3, isFragment_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeBytes(4, getCachedPackKeyBytes());
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeMessage(5, objectCounts_);
-      }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        output.writeInt32(6, chunkSize_);
-      }
-      if (((bitField0_ & 0x00000040) == 0x00000040)) {
-        output.writeInt32(7, indexSize_);
-      }
-      if (((bitField0_ & 0x00000080) == 0x00000080)) {
-        output.writeInt32(8, metaSize_);
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeEnumSize(1, source_.getNumber());
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeEnumSize(2, objectType_.getNumber());
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBoolSize(3, isFragment_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(4, getCachedPackKeyBytes());
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(5, objectCounts_);
-      }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(6, chunkSize_);
-      }
-      if (((bitField0_ & 0x00000040) == 0x00000040)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(7, indexSize_);
-      }
-      if (((bitField0_ & 0x00000080) == 0x00000080)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(8, metaSize_);
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    @java.lang.Override
-    protected Object writeReplace() throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo)) {
-        return super.equals(obj);
-      }
-      org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo) obj;
-
-      boolean result = true;
-      result = result && (hasSource() == other.hasSource());
-      if (hasSource()) {
-        result = result &&
-            (getSource() == other.getSource());
-      }
-      result = result && (hasObjectType() == other.hasObjectType());
-      if (hasObjectType()) {
-        result = result &&
-            (getObjectType() == other.getObjectType());
-      }
-      result = result && (hasIsFragment() == other.hasIsFragment());
-      if (hasIsFragment()) {
-        result = result && (getIsFragment()
-            == other.getIsFragment());
-      }
-      result = result && (hasCachedPackKey() == other.hasCachedPackKey());
-      if (hasCachedPackKey()) {
-        result = result && getCachedPackKey()
-            .equals(other.getCachedPackKey());
-      }
-      result = result && (hasObjectCounts() == other.hasObjectCounts());
-      if (hasObjectCounts()) {
-        result = result && getObjectCounts()
-            .equals(other.getObjectCounts());
-      }
-      result = result && (hasChunkSize() == other.hasChunkSize());
-      if (hasChunkSize()) {
-        result = result && (getChunkSize()
-            == other.getChunkSize());
-      }
-      result = result && (hasIndexSize() == other.hasIndexSize());
-      if (hasIndexSize()) {
-        result = result && (getIndexSize()
-            == other.getIndexSize());
-      }
-      result = result && (hasMetaSize() == other.hasMetaSize());
-      if (hasMetaSize()) {
-        result = result && (getMetaSize()
-            == other.getMetaSize());
-      }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasSource()) {
-        hash = (37 * hash) + SOURCE_FIELD_NUMBER;
-        hash = (53 * hash) + hashEnum(getSource());
-      }
-      if (hasObjectType()) {
-        hash = (37 * hash) + OBJECT_TYPE_FIELD_NUMBER;
-        hash = (53 * hash) + hashEnum(getObjectType());
-      }
-      if (hasIsFragment()) {
-        hash = (37 * hash) + IS_FRAGMENT_FIELD_NUMBER;
-        hash = (53 * hash) + hashBoolean(getIsFragment());
-      }
-      if (hasCachedPackKey()) {
-        hash = (37 * hash) + CACHED_PACK_KEY_FIELD_NUMBER;
-        hash = (53 * hash) + getCachedPackKey().hashCode();
-      }
-      if (hasObjectCounts()) {
-        hash = (37 * hash) + OBJECT_COUNTS_FIELD_NUMBER;
-        hash = (53 * hash) + getObjectCounts().hashCode();
-      }
-      if (hasChunkSize()) {
-        hash = (37 * hash) + CHUNK_SIZE_FIELD_NUMBER;
-        hash = (53 * hash) + getChunkSize();
-      }
-      if (hasIndexSize()) {
-        hash = (37 * hash) + INDEX_SIZE_FIELD_NUMBER;
-        hash = (53 * hash) + getIndexSize();
-      }
-      if (hasMetaSize()) {
-        hash = (37 * hash) + META_SIZE_FIELD_NUMBER;
-        hash = (53 * hash) + getMetaSize();
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      return hash;
-    }
-
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfoOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_fieldAccessorTable;
-      }
-
-      // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          getObjectCountsFieldBuilder();
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        source_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source.RECEIVE;
-        bitField0_ = (bitField0_ & ~0x00000001);
-        objectType_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType.MIXED;
-        bitField0_ = (bitField0_ & ~0x00000002);
-        isFragment_ = false;
-        bitField0_ = (bitField0_ & ~0x00000004);
-        cachedPackKey_ = "";
-        bitField0_ = (bitField0_ & ~0x00000008);
-        if (objectCountsBuilder_ == null) {
-          objectCounts_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.getDefaultInstance();
-        } else {
-          objectCountsBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000010);
-        chunkSize_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000020);
-        indexSize_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000040);
-        metaSize_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000080);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.getDescriptor();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo getDefaultInstanceForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.getDefaultInstance();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo build() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo buildPartial() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.source_ = source_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        result.objectType_ = objectType_;
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000004;
-        }
-        result.isFragment_ = isFragment_;
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000008;
-        }
-        result.cachedPackKey_ = cachedPackKey_;
-        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
-          to_bitField0_ |= 0x00000010;
-        }
-        if (objectCountsBuilder_ == null) {
-          result.objectCounts_ = objectCounts_;
-        } else {
-          result.objectCounts_ = objectCountsBuilder_.build();
-        }
-        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
-          to_bitField0_ |= 0x00000020;
-        }
-        result.chunkSize_ = chunkSize_;
-        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
-          to_bitField0_ |= 0x00000040;
-        }
-        result.indexSize_ = indexSize_;
-        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
-          to_bitField0_ |= 0x00000080;
-        }
-        result.metaSize_ = metaSize_;
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo) {
-          return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo other) {
-        if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.getDefaultInstance()) return this;
-        if (other.hasSource()) {
-          setSource(other.getSource());
-        }
-        if (other.hasObjectType()) {
-          setObjectType(other.getObjectType());
-        }
-        if (other.hasIsFragment()) {
-          setIsFragment(other.getIsFragment());
-        }
-        if (other.hasCachedPackKey()) {
-          setCachedPackKey(other.getCachedPackKey());
-        }
-        if (other.hasObjectCounts()) {
-          mergeObjectCounts(other.getObjectCounts());
-        }
-        if (other.hasChunkSize()) {
-          setChunkSize(other.getChunkSize());
-        }
-        if (other.hasIndexSize()) {
-          setIndexSize(other.getIndexSize());
-        }
-        if (other.hasMetaSize()) {
-          setMetaSize(other.getMetaSize());
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 8: {
-              int rawValue = input.readEnum();
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source value = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source.valueOf(rawValue);
-              if (value == null) {
-                unknownFields.mergeVarintField(1, rawValue);
-              } else {
-                bitField0_ |= 0x00000001;
-                source_ = value;
-              }
-              break;
-            }
-            case 16: {
-              int rawValue = input.readEnum();
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType value = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType.valueOf(rawValue);
-              if (value == null) {
-                unknownFields.mergeVarintField(2, rawValue);
-              } else {
-                bitField0_ |= 0x00000002;
-                objectType_ = value;
-              }
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000004;
-              isFragment_ = input.readBool();
-              break;
-            }
-            case 34: {
-              bitField0_ |= 0x00000008;
-              cachedPackKey_ = input.readBytes();
-              break;
-            }
-            case 42: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.newBuilder();
-              if (hasObjectCounts()) {
-                subBuilder.mergeFrom(getObjectCounts());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setObjectCounts(subBuilder.buildPartial());
-              break;
-            }
-            case 48: {
-              bitField0_ |= 0x00000020;
-              chunkSize_ = input.readInt32();
-              break;
-            }
-            case 56: {
-              bitField0_ |= 0x00000040;
-              indexSize_ = input.readInt32();
-              break;
-            }
-            case 64: {
-              bitField0_ |= 0x00000080;
-              metaSize_ = input.readInt32();
-              break;
-            }
-          }
-        }
-      }
-
-      private int bitField0_;
-
-      // optional .org.eclipse.jgit.storage.dht.ChunkInfo.Source source = 1;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source source_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source.RECEIVE;
-      public boolean hasSource() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source getSource() {
-        return source_;
-      }
-      public Builder setSource(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000001;
-        source_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearSource() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        source_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Source.RECEIVE;
-        onChanged();
-        return this;
-      }
-
-      // optional .org.eclipse.jgit.storage.dht.ChunkInfo.ObjectType object_type = 2;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType objectType_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType.MIXED;
-      public boolean hasObjectType() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType getObjectType() {
-        return objectType_;
-      }
-      public Builder setObjectType(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000002;
-        objectType_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearObjectType() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        objectType_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectType.MIXED;
-        onChanged();
-        return this;
-      }
-
-      // optional bool is_fragment = 3;
-      private boolean isFragment_ ;
-      public boolean hasIsFragment() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      public boolean getIsFragment() {
-        return isFragment_;
-      }
-      public Builder setIsFragment(boolean value) {
-        bitField0_ |= 0x00000004;
-        isFragment_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearIsFragment() {
-        bitField0_ = (bitField0_ & ~0x00000004);
-        isFragment_ = false;
-        onChanged();
-        return this;
-      }
-
-      // optional string cached_pack_key = 4;
-      private Object cachedPackKey_ = "";
-      public boolean hasCachedPackKey() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
-      }
-      public String getCachedPackKey() {
-        Object ref = cachedPackKey_;
-        if (!(ref instanceof String)) {
-          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
-          cachedPackKey_ = s;
-          return s;
-        } else {
-          return (String) ref;
-        }
-      }
-      public Builder setCachedPackKey(String value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000008;
-        cachedPackKey_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearCachedPackKey() {
-        bitField0_ = (bitField0_ & ~0x00000008);
-        cachedPackKey_ = getDefaultInstance().getCachedPackKey();
-        onChanged();
-        return this;
-      }
-      void setCachedPackKey(com.google.protobuf.ByteString value) {
-        bitField0_ |= 0x00000008;
-        cachedPackKey_ = value;
-        onChanged();
-      }
-
-      // optional .org.eclipse.jgit.storage.dht.ChunkInfo.ObjectCounts object_counts = 5;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts objectCounts_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCountsOrBuilder> objectCountsBuilder_;
-      public boolean hasObjectCounts() {
-        return ((bitField0_ & 0x00000010) == 0x00000010);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts getObjectCounts() {
-        if (objectCountsBuilder_ == null) {
-          return objectCounts_;
-        } else {
-          return objectCountsBuilder_.getMessage();
-        }
-      }
-      public Builder setObjectCounts(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts value) {
-        if (objectCountsBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          objectCounts_ = value;
-          onChanged();
-        } else {
-          objectCountsBuilder_.setMessage(value);
-        }
-        bitField0_ |= 0x00000010;
-        return this;
-      }
-      public Builder setObjectCounts(
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.Builder builderForValue) {
-        if (objectCountsBuilder_ == null) {
-          objectCounts_ = builderForValue.build();
-          onChanged();
-        } else {
-          objectCountsBuilder_.setMessage(builderForValue.build());
-        }
-        bitField0_ |= 0x00000010;
-        return this;
-      }
-      public Builder mergeObjectCounts(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts value) {
-        if (objectCountsBuilder_ == null) {
-          if (((bitField0_ & 0x00000010) == 0x00000010) &&
-              objectCounts_ != org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.getDefaultInstance()) {
-            objectCounts_ =
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.newBuilder(objectCounts_).mergeFrom(value).buildPartial();
-          } else {
-            objectCounts_ = value;
-          }
-          onChanged();
-        } else {
-          objectCountsBuilder_.mergeFrom(value);
-        }
-        bitField0_ |= 0x00000010;
-        return this;
-      }
-      public Builder clearObjectCounts() {
-        if (objectCountsBuilder_ == null) {
-          objectCounts_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.getDefaultInstance();
-          onChanged();
-        } else {
-          objectCountsBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000010);
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.Builder getObjectCountsBuilder() {
-        bitField0_ |= 0x00000010;
-        onChanged();
-        return getObjectCountsFieldBuilder().getBuilder();
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCountsOrBuilder getObjectCountsOrBuilder() {
-        if (objectCountsBuilder_ != null) {
-          return objectCountsBuilder_.getMessageOrBuilder();
-        } else {
-          return objectCounts_;
-        }
-      }
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCountsOrBuilder>
-          getObjectCountsFieldBuilder() {
-        if (objectCountsBuilder_ == null) {
-          objectCountsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCountsOrBuilder>(
-                  objectCounts_,
-                  getParentForChildren(),
-                  isClean());
-          objectCounts_ = null;
-        }
-        return objectCountsBuilder_;
-      }
-
-      // optional int32 chunk_size = 6;
-      private int chunkSize_ ;
-      public boolean hasChunkSize() {
-        return ((bitField0_ & 0x00000020) == 0x00000020);
-      }
-      public int getChunkSize() {
-        return chunkSize_;
-      }
-      public Builder setChunkSize(int value) {
-        bitField0_ |= 0x00000020;
-        chunkSize_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearChunkSize() {
-        bitField0_ = (bitField0_ & ~0x00000020);
-        chunkSize_ = 0;
-        onChanged();
-        return this;
-      }
-
-      // optional int32 index_size = 7;
-      private int indexSize_ ;
-      public boolean hasIndexSize() {
-        return ((bitField0_ & 0x00000040) == 0x00000040);
-      }
-      public int getIndexSize() {
-        return indexSize_;
-      }
-      public Builder setIndexSize(int value) {
-        bitField0_ |= 0x00000040;
-        indexSize_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearIndexSize() {
-        bitField0_ = (bitField0_ & ~0x00000040);
-        indexSize_ = 0;
-        onChanged();
-        return this;
-      }
-
-      // optional int32 meta_size = 8;
-      private int metaSize_ ;
-      public boolean hasMetaSize() {
-        return ((bitField0_ & 0x00000080) == 0x00000080);
-      }
-      public int getMetaSize() {
-        return metaSize_;
-      }
-      public Builder setMetaSize(int value) {
-        bitField0_ |= 0x00000080;
-        metaSize_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearMetaSize() {
-        bitField0_ = (bitField0_ & ~0x00000080);
-        metaSize_ = 0;
-        onChanged();
-        return this;
-      }
-
-      // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.ChunkInfo)
-    }
-
-    static {
-      defaultInstance = new ChunkInfo(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.ChunkInfo)
-  }
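For reference, a minimal sketch of how the deleted ChunkInfo API above was typically exercised: build the nested ObjectCounts, attach it to a ChunkInfo, serialize, and parse it back. toByteArray() is inherited from the protobuf runtime rather than shown in this diff, and the class name ChunkInfoRoundTrip is purely hypothetical.

import org.eclipse.jgit.generated.storage.dht.proto.GitStore;

public class ChunkInfoRoundTrip {
  public static void main(String[] args) throws Exception {
    // Per-type object tallies for one chunk (nested ObjectCounts message).
    GitStore.ChunkInfo.ObjectCounts counts = GitStore.ChunkInfo.ObjectCounts.newBuilder()
        .setTotal(42)
        .setWhole(30)
        .setOfsDelta(10)
        .setRefDelta(2)
        .build();
    // The chunk description itself, using the deleted generated builders.
    GitStore.ChunkInfo info = GitStore.ChunkInfo.newBuilder()
        .setSource(GitStore.ChunkInfo.Source.RECEIVE)
        .setObjectType(GitStore.ChunkInfo.ObjectType.MIXED)
        .setObjectCounts(counts)
        .setChunkSize(1 << 20)
        .build();
    // toByteArray() is inherited from the protobuf runtime; parseFrom(byte[]) is generated above.
    byte[] raw = info.toByteArray();
    GitStore.ChunkInfo copy = GitStore.ChunkInfo.parseFrom(raw);
    System.out.println(copy.getObjectCounts().getTotal()); // prints 42
  }
}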
-
-  public interface ChunkMetaOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // repeated .org.eclipse.jgit.storage.dht.ChunkMeta.BaseChunk base_chunk = 1;
-    java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk>
-        getBaseChunkList();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk getBaseChunk(int index);
-    int getBaseChunkCount();
-    java.util.List<? extends org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunkOrBuilder>
-        getBaseChunkOrBuilderList();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunkOrBuilder getBaseChunkOrBuilder(
-        int index);
-
-    // repeated string fragment = 2;
-    java.util.List<String> getFragmentList();
-    int getFragmentCount();
-    String getFragment(int index);
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkMeta.PrefetchHint commit_prefetch = 51;
-    boolean hasCommitPrefetch();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint getCommitPrefetch();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder getCommitPrefetchOrBuilder();
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkMeta.PrefetchHint tree_prefetch = 52;
-    boolean hasTreePrefetch();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint getTreePrefetch();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder getTreePrefetchOrBuilder();
-  }
-  public static final class ChunkMeta extends
-      com.google.protobuf.GeneratedMessage
-      implements ChunkMetaOrBuilder {
-    // Use ChunkMeta.newBuilder() to construct.
-    private ChunkMeta(Builder builder) {
-      super(builder);
-    }
-    private ChunkMeta(boolean noInit) {}
-
-    private static final ChunkMeta defaultInstance;
-    public static ChunkMeta getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public ChunkMeta getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_fieldAccessorTable;
-    }
-
-    public interface BaseChunkOrBuilder
-        extends com.google.protobuf.MessageOrBuilder {
-
-      // required int64 relative_start = 1;
-      boolean hasRelativeStart();
-      long getRelativeStart();
-
-      // required string chunk_key = 2;
-      boolean hasChunkKey();
-      String getChunkKey();
-    }
-    public static final class BaseChunk extends
-        com.google.protobuf.GeneratedMessage
-        implements BaseChunkOrBuilder {
-      // Use BaseChunk.newBuilder() to construct.
-      private BaseChunk(Builder builder) {
-        super(builder);
-      }
-      private BaseChunk(boolean noInit) {}
-
-      private static final BaseChunk defaultInstance;
-      public static BaseChunk getDefaultInstance() {
-        return defaultInstance;
-      }
-
-      public BaseChunk getDefaultInstanceForType() {
-        return defaultInstance;
-      }
-
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_BaseChunk_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_BaseChunk_fieldAccessorTable;
-      }
-
-      private int bitField0_;
-      // required int64 relative_start = 1;
-      public static final int RELATIVE_START_FIELD_NUMBER = 1;
-      private long relativeStart_;
-      public boolean hasRelativeStart() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      public long getRelativeStart() {
-        return relativeStart_;
-      }
-
-      // required string chunk_key = 2;
-      public static final int CHUNK_KEY_FIELD_NUMBER = 2;
-      private Object chunkKey_;
-      public boolean hasChunkKey() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      public String getChunkKey() {
-        Object ref = chunkKey_;
-        if (ref instanceof String) {
-          return (String) ref;
-        } else {
-          com.google.protobuf.ByteString bs =
-              (com.google.protobuf.ByteString) ref;
-          String s = bs.toStringUtf8();
-          if (com.google.protobuf.Internal.isValidUtf8(bs)) {
-            chunkKey_ = s;
-          }
-          return s;
-        }
-      }
-      private com.google.protobuf.ByteString getChunkKeyBytes() {
-        Object ref = chunkKey_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b =
-              com.google.protobuf.ByteString.copyFromUtf8((String) ref);
-          chunkKey_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
-      }
-
-      private void initFields() {
-        relativeStart_ = 0L;
-        chunkKey_ = "";
-      }
-      private byte memoizedIsInitialized = -1;
-      public final boolean isInitialized() {
-        byte isInitialized = memoizedIsInitialized;
-        if (isInitialized != -1) return isInitialized == 1;
-
-        if (!hasRelativeStart()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-        if (!hasChunkKey()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-        memoizedIsInitialized = 1;
-        return true;
-      }
-
-      public void writeTo(com.google.protobuf.CodedOutputStream output)
-                          throws java.io.IOException {
-        getSerializedSize();
-        if (((bitField0_ & 0x00000001) == 0x00000001)) {
-          output.writeInt64(1, relativeStart_);
-        }
-        if (((bitField0_ & 0x00000002) == 0x00000002)) {
-          output.writeBytes(2, getChunkKeyBytes());
-        }
-        getUnknownFields().writeTo(output);
-      }
-
-      private int memoizedSerializedSize = -1;
-      public int getSerializedSize() {
-        int size = memoizedSerializedSize;
-        if (size != -1) return size;
-
-        size = 0;
-        if (((bitField0_ & 0x00000001) == 0x00000001)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeInt64Size(1, relativeStart_);
-        }
-        if (((bitField0_ & 0x00000002) == 0x00000002)) {
-          size += com.google.protobuf.CodedOutputStream
-            .computeBytesSize(2, getChunkKeyBytes());
-        }
-        size += getUnknownFields().getSerializedSize();
-        memoizedSerializedSize = size;
-        return size;
-      }
-
-      @java.lang.Override
-      protected Object writeReplace() throws java.io.ObjectStreamException {
-        return super.writeReplace();
-      }
-
-      @java.lang.Override
-      public boolean equals(final Object obj) {
-        if (obj == this) {
-         return true;
-        }
-        if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk)) {
-          return super.equals(obj);
-        }
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk) obj;
-
-        boolean result = true;
-        result = result && (hasRelativeStart() == other.hasRelativeStart());
-        if (hasRelativeStart()) {
-          result = result && (getRelativeStart()
-              == other.getRelativeStart());
-        }
-        result = result && (hasChunkKey() == other.hasChunkKey());
-        if (hasChunkKey()) {
-          result = result && getChunkKey()
-              .equals(other.getChunkKey());
-        }
-        result = result &&
-            getUnknownFields().equals(other.getUnknownFields());
-        return result;
-      }
-
-      @java.lang.Override
-      public int hashCode() {
-        int hash = 41;
-        hash = (19 * hash) + getDescriptorForType().hashCode();
-        if (hasRelativeStart()) {
-          hash = (37 * hash) + RELATIVE_START_FIELD_NUMBER;
-          hash = (53 * hash) + hashLong(getRelativeStart());
-        }
-        if (hasChunkKey()) {
-          hash = (37 * hash) + CHUNK_KEY_FIELD_NUMBER;
-          hash = (53 * hash) + getChunkKey().hashCode();
-        }
-        hash = (29 * hash) + getUnknownFields().hashCode();
-        return hash;
-      }
-
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk parseFrom(
-          com.google.protobuf.ByteString data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk parseFrom(
-          com.google.protobuf.ByteString data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk parseFrom(byte[] data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk parseFrom(
-          byte[] data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk parseFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk parseFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk parseDelimitedFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk parseDelimitedFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk parseFrom(
-          com.google.protobuf.CodedInputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk parseFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-
-      public static Builder newBuilder() { return Builder.create(); }
-      public Builder newBuilderForType() { return newBuilder(); }
-      public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk prototype) {
-        return newBuilder().mergeFrom(prototype);
-      }
-      public Builder toBuilder() { return newBuilder(this); }
-
-      @java.lang.Override
-      protected Builder newBuilderForType(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        Builder builder = new Builder(parent);
-        return builder;
-      }
-      public static final class Builder extends
-          com.google.protobuf.GeneratedMessage.Builder<Builder>
-         implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunkOrBuilder {
-        public static final com.google.protobuf.Descriptors.Descriptor
-            getDescriptor() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_BaseChunk_descriptor;
-        }
-
-        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-            internalGetFieldAccessorTable() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_BaseChunk_fieldAccessorTable;
-        }
-
-        // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.newBuilder()
-        private Builder() {
-          maybeForceBuilderInitialization();
-        }
-
-        private Builder(BuilderParent parent) {
-          super(parent);
-          maybeForceBuilderInitialization();
-        }
-        private void maybeForceBuilderInitialization() {
-          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          }
-        }
-        private static Builder create() {
-          return new Builder();
-        }
-
-        public Builder clear() {
-          super.clear();
-          relativeStart_ = 0L;
-          bitField0_ = (bitField0_ & ~0x00000001);
-          chunkKey_ = "";
-          bitField0_ = (bitField0_ & ~0x00000002);
-          return this;
-        }
-
-        public Builder clone() {
-          return create().mergeFrom(buildPartial());
-        }
-
-        public com.google.protobuf.Descriptors.Descriptor
-            getDescriptorForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.getDescriptor();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk getDefaultInstanceForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.getDefaultInstance();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk build() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(result);
-          }
-          return result;
-        }
-
-        private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk buildParsed()
-            throws com.google.protobuf.InvalidProtocolBufferException {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(
-              result).asInvalidProtocolBufferException();
-          }
-          return result;
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk buildPartial() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk(this);
-          int from_bitField0_ = bitField0_;
-          int to_bitField0_ = 0;
-          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-            to_bitField0_ |= 0x00000001;
-          }
-          result.relativeStart_ = relativeStart_;
-          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-            to_bitField0_ |= 0x00000002;
-          }
-          result.chunkKey_ = chunkKey_;
-          result.bitField0_ = to_bitField0_;
-          onBuilt();
-          return result;
-        }
-
-        public Builder mergeFrom(com.google.protobuf.Message other) {
-          if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk) {
-            return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk)other);
-          } else {
-            super.mergeFrom(other);
-            return this;
-          }
-        }
-
-        public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk other) {
-          if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.getDefaultInstance()) return this;
-          if (other.hasRelativeStart()) {
-            setRelativeStart(other.getRelativeStart());
-          }
-          if (other.hasChunkKey()) {
-            setChunkKey(other.getChunkKey());
-          }
-          this.mergeUnknownFields(other.getUnknownFields());
-          return this;
-        }
-
-        public final boolean isInitialized() {
-          if (!hasRelativeStart()) {
-
-            return false;
-          }
-          if (!hasChunkKey()) {
-
-            return false;
-          }
-          return true;
-        }
-
-        public Builder mergeFrom(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws java.io.IOException {
-          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-            com.google.protobuf.UnknownFieldSet.newBuilder(
-              this.getUnknownFields());
-          while (true) {
-            int tag = input.readTag();
-            switch (tag) {
-              case 0:
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              default: {
-                if (!parseUnknownField(input, unknownFields,
-                                       extensionRegistry, tag)) {
-                  this.setUnknownFields(unknownFields.build());
-                  onChanged();
-                  return this;
-                }
-                break;
-              }
-              case 8: {
-                bitField0_ |= 0x00000001;
-                relativeStart_ = input.readInt64();
-                break;
-              }
-              case 18: {
-                bitField0_ |= 0x00000002;
-                chunkKey_ = input.readBytes();
-                break;
-              }
-            }
-          }
-        }
-
-        private int bitField0_;
-
-        // required int64 relative_start = 1;
-        private long relativeStart_ ;
-        public boolean hasRelativeStart() {
-          return ((bitField0_ & 0x00000001) == 0x00000001);
-        }
-        public long getRelativeStart() {
-          return relativeStart_;
-        }
-        public Builder setRelativeStart(long value) {
-          bitField0_ |= 0x00000001;
-          relativeStart_ = value;
-          onChanged();
-          return this;
-        }
-        public Builder clearRelativeStart() {
-          bitField0_ = (bitField0_ & ~0x00000001);
-          relativeStart_ = 0L;
-          onChanged();
-          return this;
-        }
-
-        // required string chunk_key = 2;
-        private Object chunkKey_ = "";
-        public boolean hasChunkKey() {
-          return ((bitField0_ & 0x00000002) == 0x00000002);
-        }
-        public String getChunkKey() {
-          Object ref = chunkKey_;
-          if (!(ref instanceof String)) {
-            String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
-            chunkKey_ = s;
-            return s;
-          } else {
-            return (String) ref;
-          }
-        }
-        public Builder setChunkKey(String value) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          bitField0_ |= 0x00000002;
-          chunkKey_ = value;
-          onChanged();
-          return this;
-        }
-        public Builder clearChunkKey() {
-          bitField0_ = (bitField0_ & ~0x00000002);
-          chunkKey_ = getDefaultInstance().getChunkKey();
-          onChanged();
-          return this;
-        }
-        void setChunkKey(com.google.protobuf.ByteString value) {
-          bitField0_ |= 0x00000002;
-          chunkKey_ = value;
-          onChanged();
-        }
-
-        // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.ChunkMeta.BaseChunk)
-      }
-
-      static {
-        defaultInstance = new BaseChunk(true);
-        defaultInstance.initFields();
-      }
-
-      // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.ChunkMeta.BaseChunk)
-    }
-
-    public interface PrefetchHintOrBuilder
-        extends com.google.protobuf.MessageOrBuilder {
-
-      // repeated string edge = 1;
-      java.util.List<String> getEdgeList();
-      int getEdgeCount();
-      String getEdge(int index);
-
-      // repeated string sequential = 2;
-      java.util.List<String> getSequentialList();
-      int getSequentialCount();
-      String getSequential(int index);
-    }
-    public static final class PrefetchHint extends
-        com.google.protobuf.GeneratedMessage
-        implements PrefetchHintOrBuilder {
-      // Use PrefetchHint.newBuilder() to construct.
-      private PrefetchHint(Builder builder) {
-        super(builder);
-      }
-      private PrefetchHint(boolean noInit) {}
-
-      private static final PrefetchHint defaultInstance;
-      public static PrefetchHint getDefaultInstance() {
-        return defaultInstance;
-      }
-
-      public PrefetchHint getDefaultInstanceForType() {
-        return defaultInstance;
-      }
-
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_PrefetchHint_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_PrefetchHint_fieldAccessorTable;
-      }
-
-      // repeated string edge = 1;
-      public static final int EDGE_FIELD_NUMBER = 1;
-      private com.google.protobuf.LazyStringList edge_;
-      public java.util.List<String>
-          getEdgeList() {
-        return edge_;
-      }
-      public int getEdgeCount() {
-        return edge_.size();
-      }
-      public String getEdge(int index) {
-        return edge_.get(index);
-      }
-
-      // repeated string sequential = 2;
-      public static final int SEQUENTIAL_FIELD_NUMBER = 2;
-      private com.google.protobuf.LazyStringList sequential_;
-      public java.util.List<String>
-          getSequentialList() {
-        return sequential_;
-      }
-      public int getSequentialCount() {
-        return sequential_.size();
-      }
-      public String getSequential(int index) {
-        return sequential_.get(index);
-      }
-
-      private void initFields() {
-        edge_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-        sequential_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-      }
-      private byte memoizedIsInitialized = -1;
-      public final boolean isInitialized() {
-        byte isInitialized = memoizedIsInitialized;
-        if (isInitialized != -1) return isInitialized == 1;
-
-        memoizedIsInitialized = 1;
-        return true;
-      }
-
-      public void writeTo(com.google.protobuf.CodedOutputStream output)
-                          throws java.io.IOException {
-        getSerializedSize();
-        for (int i = 0; i < edge_.size(); i++) {
-          output.writeBytes(1, edge_.getByteString(i));
-        }
-        for (int i = 0; i < sequential_.size(); i++) {
-          output.writeBytes(2, sequential_.getByteString(i));
-        }
-        getUnknownFields().writeTo(output);
-      }
-
-      private int memoizedSerializedSize = -1;
-      public int getSerializedSize() {
-        int size = memoizedSerializedSize;
-        if (size != -1) return size;
-
-        size = 0;
-        {
-          int dataSize = 0;
-          for (int i = 0; i < edge_.size(); i++) {
-            dataSize += com.google.protobuf.CodedOutputStream
-              .computeBytesSizeNoTag(edge_.getByteString(i));
-          }
-          size += dataSize;
-          size += 1 * getEdgeList().size();
-        }
-        {
-          int dataSize = 0;
-          for (int i = 0; i < sequential_.size(); i++) {
-            dataSize += com.google.protobuf.CodedOutputStream
-              .computeBytesSizeNoTag(sequential_.getByteString(i));
-          }
-          size += dataSize;
-          size += 1 * getSequentialList().size();
-        }
-        size += getUnknownFields().getSerializedSize();
-        memoizedSerializedSize = size;
-        return size;
-      }
-
-      @java.lang.Override
-      protected Object writeReplace() throws java.io.ObjectStreamException {
-        return super.writeReplace();
-      }
-
-      @java.lang.Override
-      public boolean equals(final Object obj) {
-        if (obj == this) {
-         return true;
-        }
-        if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint)) {
-          return super.equals(obj);
-        }
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint) obj;
-
-        boolean result = true;
-        result = result && getEdgeList()
-            .equals(other.getEdgeList());
-        result = result && getSequentialList()
-            .equals(other.getSequentialList());
-        result = result &&
-            getUnknownFields().equals(other.getUnknownFields());
-        return result;
-      }
-
-      @java.lang.Override
-      public int hashCode() {
-        int hash = 41;
-        hash = (19 * hash) + getDescriptorForType().hashCode();
-        if (getEdgeCount() > 0) {
-          hash = (37 * hash) + EDGE_FIELD_NUMBER;
-          hash = (53 * hash) + getEdgeList().hashCode();
-        }
-        if (getSequentialCount() > 0) {
-          hash = (37 * hash) + SEQUENTIAL_FIELD_NUMBER;
-          hash = (53 * hash) + getSequentialList().hashCode();
-        }
-        hash = (29 * hash) + getUnknownFields().hashCode();
-        return hash;
-      }
-
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint parseFrom(
-          com.google.protobuf.ByteString data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint parseFrom(
-          com.google.protobuf.ByteString data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint parseFrom(byte[] data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint parseFrom(
-          byte[] data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint parseFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint parseFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint parseDelimitedFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint parseDelimitedFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint parseFrom(
-          com.google.protobuf.CodedInputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint parseFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-
-      public static Builder newBuilder() { return Builder.create(); }
-      public Builder newBuilderForType() { return newBuilder(); }
-      public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint prototype) {
-        return newBuilder().mergeFrom(prototype);
-      }
-      public Builder toBuilder() { return newBuilder(this); }
-
-      @java.lang.Override
-      protected Builder newBuilderForType(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        Builder builder = new Builder(parent);
-        return builder;
-      }
-      public static final class Builder extends
-          com.google.protobuf.GeneratedMessage.Builder<Builder>
-         implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder {
-        public static final com.google.protobuf.Descriptors.Descriptor
-            getDescriptor() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_PrefetchHint_descriptor;
-        }
-
-        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-            internalGetFieldAccessorTable() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_PrefetchHint_fieldAccessorTable;
-        }
-
-        // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.newBuilder()
-        private Builder() {
-          maybeForceBuilderInitialization();
-        }
-
-        private Builder(BuilderParent parent) {
-          super(parent);
-          maybeForceBuilderInitialization();
-        }
-        private void maybeForceBuilderInitialization() {
-          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          }
-        }
-        private static Builder create() {
-          return new Builder();
-        }
-
-        public Builder clear() {
-          super.clear();
-          edge_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-          bitField0_ = (bitField0_ & ~0x00000001);
-          sequential_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-          bitField0_ = (bitField0_ & ~0x00000002);
-          return this;
-        }
-
-        public Builder clone() {
-          return create().mergeFrom(buildPartial());
-        }
-
-        public com.google.protobuf.Descriptors.Descriptor
-            getDescriptorForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDescriptor();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint getDefaultInstanceForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint build() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(result);
-          }
-          return result;
-        }
-
-        private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint buildParsed()
-            throws com.google.protobuf.InvalidProtocolBufferException {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(
-              result).asInvalidProtocolBufferException();
-          }
-          return result;
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint buildPartial() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint(this);
-          int from_bitField0_ = bitField0_;
-          if (((bitField0_ & 0x00000001) == 0x00000001)) {
-            edge_ = new com.google.protobuf.UnmodifiableLazyStringList(
-                edge_);
-            bitField0_ = (bitField0_ & ~0x00000001);
-          }
-          result.edge_ = edge_;
-          if (((bitField0_ & 0x00000002) == 0x00000002)) {
-            sequential_ = new com.google.protobuf.UnmodifiableLazyStringList(
-                sequential_);
-            bitField0_ = (bitField0_ & ~0x00000002);
-          }
-          result.sequential_ = sequential_;
-          onBuilt();
-          return result;
-        }
-
-        public Builder mergeFrom(com.google.protobuf.Message other) {
-          if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint) {
-            return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint)other);
-          } else {
-            super.mergeFrom(other);
-            return this;
-          }
-        }
-
-        public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint other) {
-          if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance()) return this;
-          if (!other.edge_.isEmpty()) {
-            if (edge_.isEmpty()) {
-              edge_ = other.edge_;
-              bitField0_ = (bitField0_ & ~0x00000001);
-            } else {
-              ensureEdgeIsMutable();
-              edge_.addAll(other.edge_);
-            }
-            onChanged();
-          }
-          if (!other.sequential_.isEmpty()) {
-            if (sequential_.isEmpty()) {
-              sequential_ = other.sequential_;
-              bitField0_ = (bitField0_ & ~0x00000002);
-            } else {
-              ensureSequentialIsMutable();
-              sequential_.addAll(other.sequential_);
-            }
-            onChanged();
-          }
-          this.mergeUnknownFields(other.getUnknownFields());
-          return this;
-        }
-
-        public final boolean isInitialized() {
-          return true;
-        }
-
-        public Builder mergeFrom(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws java.io.IOException {
-          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-            com.google.protobuf.UnknownFieldSet.newBuilder(
-              this.getUnknownFields());
-          while (true) {
-            int tag = input.readTag();
-            switch (tag) {
-              case 0:
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              default: {
-                if (!parseUnknownField(input, unknownFields,
-                                       extensionRegistry, tag)) {
-                  this.setUnknownFields(unknownFields.build());
-                  onChanged();
-                  return this;
-                }
-                break;
-              }
-              case 10: {
-                ensureEdgeIsMutable();
-                edge_.add(input.readBytes());
-                break;
-              }
-              case 18: {
-                ensureSequentialIsMutable();
-                sequential_.add(input.readBytes());
-                break;
-              }
-            }
-          }
-        }
-
-        private int bitField0_;
-
-        // repeated string edge = 1;
-        private com.google.protobuf.LazyStringList edge_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-        private void ensureEdgeIsMutable() {
-          if (!((bitField0_ & 0x00000001) == 0x00000001)) {
-            edge_ = new com.google.protobuf.LazyStringArrayList(edge_);
-            bitField0_ |= 0x00000001;
-           }
-        }
-        public java.util.List<String>
-            getEdgeList() {
-          return java.util.Collections.unmodifiableList(edge_);
-        }
-        public int getEdgeCount() {
-          return edge_.size();
-        }
-        public String getEdge(int index) {
-          return edge_.get(index);
-        }
-        public Builder setEdge(
-            int index, String value) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensureEdgeIsMutable();
-          edge_.set(index, value);
-          onChanged();
-          return this;
-        }
-        public Builder addEdge(String value) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensureEdgeIsMutable();
-          edge_.add(value);
-          onChanged();
-          return this;
-        }
-        public Builder addAllEdge(
-            java.lang.Iterable<String> values) {
-          ensureEdgeIsMutable();
-          super.addAll(values, edge_);
-          onChanged();
-          return this;
-        }
-        public Builder clearEdge() {
-          edge_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-          bitField0_ = (bitField0_ & ~0x00000001);
-          onChanged();
-          return this;
-        }
-        void addEdge(com.google.protobuf.ByteString value) {
-          ensureEdgeIsMutable();
-          edge_.add(value);
-          onChanged();
-        }
-
-        // repeated string sequential = 2;
-        private com.google.protobuf.LazyStringList sequential_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-        private void ensureSequentialIsMutable() {
-          if (!((bitField0_ & 0x00000002) == 0x00000002)) {
-            sequential_ = new com.google.protobuf.LazyStringArrayList(sequential_);
-            bitField0_ |= 0x00000002;
-           }
-        }
-        public java.util.List<String>
-            getSequentialList() {
-          return java.util.Collections.unmodifiableList(sequential_);
-        }
-        public int getSequentialCount() {
-          return sequential_.size();
-        }
-        public String getSequential(int index) {
-          return sequential_.get(index);
-        }
-        public Builder setSequential(
-            int index, String value) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensureSequentialIsMutable();
-          sequential_.set(index, value);
-          onChanged();
-          return this;
-        }
-        public Builder addSequential(String value) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensureSequentialIsMutable();
-          sequential_.add(value);
-          onChanged();
-          return this;
-        }
-        public Builder addAllSequential(
-            java.lang.Iterable<String> values) {
-          ensureSequentialIsMutable();
-          super.addAll(values, sequential_);
-          onChanged();
-          return this;
-        }
-        public Builder clearSequential() {
-          sequential_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-          bitField0_ = (bitField0_ & ~0x00000002);
-          onChanged();
-          return this;
-        }
-        void addSequential(com.google.protobuf.ByteString value) {
-          ensureSequentialIsMutable();
-          sequential_.add(value);
-          onChanged();
-        }
-
-        // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.ChunkMeta.PrefetchHint)
-      }
-
-      static {
-        defaultInstance = new PrefetchHint(true);
-        defaultInstance.initFields();
-      }
-
-      // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.ChunkMeta.PrefetchHint)
-    }
-
-    private int bitField0_;
-    // repeated .org.eclipse.jgit.storage.dht.ChunkMeta.BaseChunk base_chunk = 1;
-    public static final int BASE_CHUNK_FIELD_NUMBER = 1;
-    private java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk> baseChunk_;
-    public java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk> getBaseChunkList() {
-      return baseChunk_;
-    }
-    public java.util.List<? extends org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunkOrBuilder>
-        getBaseChunkOrBuilderList() {
-      return baseChunk_;
-    }
-    public int getBaseChunkCount() {
-      return baseChunk_.size();
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk getBaseChunk(int index) {
-      return baseChunk_.get(index);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunkOrBuilder getBaseChunkOrBuilder(
-        int index) {
-      return baseChunk_.get(index);
-    }
-
-    // repeated string fragment = 2;
-    public static final int FRAGMENT_FIELD_NUMBER = 2;
-    private com.google.protobuf.LazyStringList fragment_;
-    public java.util.List<String>
-        getFragmentList() {
-      return fragment_;
-    }
-    public int getFragmentCount() {
-      return fragment_.size();
-    }
-    public String getFragment(int index) {
-      return fragment_.get(index);
-    }
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkMeta.PrefetchHint commit_prefetch = 51;
-    public static final int COMMIT_PREFETCH_FIELD_NUMBER = 51;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint commitPrefetch_;
-    public boolean hasCommitPrefetch() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint getCommitPrefetch() {
-      return commitPrefetch_;
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder getCommitPrefetchOrBuilder() {
-      return commitPrefetch_;
-    }
-
-    // optional .org.eclipse.jgit.storage.dht.ChunkMeta.PrefetchHint tree_prefetch = 52;
-    public static final int TREE_PREFETCH_FIELD_NUMBER = 52;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint treePrefetch_;
-    public boolean hasTreePrefetch() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint getTreePrefetch() {
-      return treePrefetch_;
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder getTreePrefetchOrBuilder() {
-      return treePrefetch_;
-    }
-
-    private void initFields() {
-      baseChunk_ = java.util.Collections.emptyList();
-      fragment_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-      commitPrefetch_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance();
-      treePrefetch_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance();
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      for (int i = 0; i < getBaseChunkCount(); i++) {
-        if (!getBaseChunk(i).isInitialized()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      for (int i = 0; i < baseChunk_.size(); i++) {
-        output.writeMessage(1, baseChunk_.get(i));
-      }
-      for (int i = 0; i < fragment_.size(); i++) {
-        output.writeBytes(2, fragment_.getByteString(i));
-      }
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeMessage(51, commitPrefetch_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeMessage(52, treePrefetch_);
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      for (int i = 0; i < baseChunk_.size(); i++) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(1, baseChunk_.get(i));
-      }
-      {
-        int dataSize = 0;
-        for (int i = 0; i < fragment_.size(); i++) {
-          dataSize += com.google.protobuf.CodedOutputStream
-            .computeBytesSizeNoTag(fragment_.getByteString(i));
-        }
-        size += dataSize;
-        size += 1 * getFragmentList().size();
-      }
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(51, commitPrefetch_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(52, treePrefetch_);
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    @java.lang.Override
-    protected Object writeReplace() throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta)) {
-        return super.equals(obj);
-      }
-      org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta) obj;
-
-      boolean result = true;
-      result = result && getBaseChunkList()
-          .equals(other.getBaseChunkList());
-      result = result && getFragmentList()
-          .equals(other.getFragmentList());
-      result = result && (hasCommitPrefetch() == other.hasCommitPrefetch());
-      if (hasCommitPrefetch()) {
-        result = result && getCommitPrefetch()
-            .equals(other.getCommitPrefetch());
-      }
-      result = result && (hasTreePrefetch() == other.hasTreePrefetch());
-      if (hasTreePrefetch()) {
-        result = result && getTreePrefetch()
-            .equals(other.getTreePrefetch());
-      }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (getBaseChunkCount() > 0) {
-        hash = (37 * hash) + BASE_CHUNK_FIELD_NUMBER;
-        hash = (53 * hash) + getBaseChunkList().hashCode();
-      }
-      if (getFragmentCount() > 0) {
-        hash = (37 * hash) + FRAGMENT_FIELD_NUMBER;
-        hash = (53 * hash) + getFragmentList().hashCode();
-      }
-      if (hasCommitPrefetch()) {
-        hash = (37 * hash) + COMMIT_PREFETCH_FIELD_NUMBER;
-        hash = (53 * hash) + getCommitPrefetch().hashCode();
-      }
-      if (hasTreePrefetch()) {
-        hash = (37 * hash) + TREE_PREFETCH_FIELD_NUMBER;
-        hash = (53 * hash) + getTreePrefetch().hashCode();
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      return hash;
-    }
-
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMetaOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_fieldAccessorTable;
-      }
-
-      // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          getBaseChunkFieldBuilder();
-          getCommitPrefetchFieldBuilder();
-          getTreePrefetchFieldBuilder();
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        if (baseChunkBuilder_ == null) {
-          baseChunk_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000001);
-        } else {
-          baseChunkBuilder_.clear();
-        }
-        fragment_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000002);
-        if (commitPrefetchBuilder_ == null) {
-          commitPrefetch_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance();
-        } else {
-          commitPrefetchBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000004);
-        if (treePrefetchBuilder_ == null) {
-          treePrefetch_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance();
-        } else {
-          treePrefetchBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000008);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.getDescriptor();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta getDefaultInstanceForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.getDefaultInstance();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta build() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta buildPartial() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (baseChunkBuilder_ == null) {
-          if (((bitField0_ & 0x00000001) == 0x00000001)) {
-            baseChunk_ = java.util.Collections.unmodifiableList(baseChunk_);
-            bitField0_ = (bitField0_ & ~0x00000001);
-          }
-          result.baseChunk_ = baseChunk_;
-        } else {
-          result.baseChunk_ = baseChunkBuilder_.build();
-        }
-        if (((bitField0_ & 0x00000002) == 0x00000002)) {
-          fragment_ = new com.google.protobuf.UnmodifiableLazyStringList(
-              fragment_);
-          bitField0_ = (bitField0_ & ~0x00000002);
-        }
-        result.fragment_ = fragment_;
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        if (commitPrefetchBuilder_ == null) {
-          result.commitPrefetch_ = commitPrefetch_;
-        } else {
-          result.commitPrefetch_ = commitPrefetchBuilder_.build();
-        }
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        if (treePrefetchBuilder_ == null) {
-          result.treePrefetch_ = treePrefetch_;
-        } else {
-          result.treePrefetch_ = treePrefetchBuilder_.build();
-        }
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta) {
-          return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta other) {
-        if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.getDefaultInstance()) return this;
-        if (baseChunkBuilder_ == null) {
-          if (!other.baseChunk_.isEmpty()) {
-            if (baseChunk_.isEmpty()) {
-              baseChunk_ = other.baseChunk_;
-              bitField0_ = (bitField0_ & ~0x00000001);
-            } else {
-              ensureBaseChunkIsMutable();
-              baseChunk_.addAll(other.baseChunk_);
-            }
-            onChanged();
-          }
-        } else {
-          if (!other.baseChunk_.isEmpty()) {
-            if (baseChunkBuilder_.isEmpty()) {
-              baseChunkBuilder_.dispose();
-              baseChunkBuilder_ = null;
-              baseChunk_ = other.baseChunk_;
-              bitField0_ = (bitField0_ & ~0x00000001);
-              baseChunkBuilder_ =
-                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
-                   getBaseChunkFieldBuilder() : null;
-            } else {
-              baseChunkBuilder_.addAllMessages(other.baseChunk_);
-            }
-          }
-        }
-        if (!other.fragment_.isEmpty()) {
-          if (fragment_.isEmpty()) {
-            fragment_ = other.fragment_;
-            bitField0_ = (bitField0_ & ~0x00000002);
-          } else {
-            ensureFragmentIsMutable();
-            fragment_.addAll(other.fragment_);
-          }
-          onChanged();
-        }
-        if (other.hasCommitPrefetch()) {
-          mergeCommitPrefetch(other.getCommitPrefetch());
-        }
-        if (other.hasTreePrefetch()) {
-          mergeTreePrefetch(other.getTreePrefetch());
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        for (int i = 0; i < getBaseChunkCount(); i++) {
-          if (!getBaseChunk(i).isInitialized()) {
-
-            return false;
-          }
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.newBuilder();
-              input.readMessage(subBuilder, extensionRegistry);
-              addBaseChunk(subBuilder.buildPartial());
-              break;
-            }
-            case 18: {
-              ensureFragmentIsMutable();
-              fragment_.add(input.readBytes());
-              break;
-            }
-            case 410: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.newBuilder();
-              if (hasCommitPrefetch()) {
-                subBuilder.mergeFrom(getCommitPrefetch());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setCommitPrefetch(subBuilder.buildPartial());
-              break;
-            }
-            case 418: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.newBuilder();
-              if (hasTreePrefetch()) {
-                subBuilder.mergeFrom(getTreePrefetch());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setTreePrefetch(subBuilder.buildPartial());
-              break;
-            }
-          }
-        }
-      }
-
-      private int bitField0_;
-
-      // repeated .org.eclipse.jgit.storage.dht.ChunkMeta.BaseChunk base_chunk = 1;
-      private java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk> baseChunk_ =
-        java.util.Collections.emptyList();
-      private void ensureBaseChunkIsMutable() {
-        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
-          baseChunk_ = new java.util.ArrayList<org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk>(baseChunk_);
-          bitField0_ |= 0x00000001;
-         }
-      }
-
-      private com.google.protobuf.RepeatedFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunkOrBuilder> baseChunkBuilder_;
-
-      public java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk> getBaseChunkList() {
-        if (baseChunkBuilder_ == null) {
-          return java.util.Collections.unmodifiableList(baseChunk_);
-        } else {
-          return baseChunkBuilder_.getMessageList();
-        }
-      }
-      public int getBaseChunkCount() {
-        if (baseChunkBuilder_ == null) {
-          return baseChunk_.size();
-        } else {
-          return baseChunkBuilder_.getCount();
-        }
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk getBaseChunk(int index) {
-        if (baseChunkBuilder_ == null) {
-          return baseChunk_.get(index);
-        } else {
-          return baseChunkBuilder_.getMessage(index);
-        }
-      }
-      public Builder setBaseChunk(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk value) {
-        if (baseChunkBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensureBaseChunkIsMutable();
-          baseChunk_.set(index, value);
-          onChanged();
-        } else {
-          baseChunkBuilder_.setMessage(index, value);
-        }
-        return this;
-      }
-      public Builder setBaseChunk(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder builderForValue) {
-        if (baseChunkBuilder_ == null) {
-          ensureBaseChunkIsMutable();
-          baseChunk_.set(index, builderForValue.build());
-          onChanged();
-        } else {
-          baseChunkBuilder_.setMessage(index, builderForValue.build());
-        }
-        return this;
-      }
-      public Builder addBaseChunk(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk value) {
-        if (baseChunkBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensureBaseChunkIsMutable();
-          baseChunk_.add(value);
-          onChanged();
-        } else {
-          baseChunkBuilder_.addMessage(value);
-        }
-        return this;
-      }
-      public Builder addBaseChunk(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk value) {
-        if (baseChunkBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          ensureBaseChunkIsMutable();
-          baseChunk_.add(index, value);
-          onChanged();
-        } else {
-          baseChunkBuilder_.addMessage(index, value);
-        }
-        return this;
-      }
-      public Builder addBaseChunk(
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder builderForValue) {
-        if (baseChunkBuilder_ == null) {
-          ensureBaseChunkIsMutable();
-          baseChunk_.add(builderForValue.build());
-          onChanged();
-        } else {
-          baseChunkBuilder_.addMessage(builderForValue.build());
-        }
-        return this;
-      }
-      public Builder addBaseChunk(
-          int index, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder builderForValue) {
-        if (baseChunkBuilder_ == null) {
-          ensureBaseChunkIsMutable();
-          baseChunk_.add(index, builderForValue.build());
-          onChanged();
-        } else {
-          baseChunkBuilder_.addMessage(index, builderForValue.build());
-        }
-        return this;
-      }
-      public Builder addAllBaseChunk(
-          java.lang.Iterable<? extends org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk> values) {
-        if (baseChunkBuilder_ == null) {
-          ensureBaseChunkIsMutable();
-          super.addAll(values, baseChunk_);
-          onChanged();
-        } else {
-          baseChunkBuilder_.addAllMessages(values);
-        }
-        return this;
-      }
-      public Builder clearBaseChunk() {
-        if (baseChunkBuilder_ == null) {
-          baseChunk_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000001);
-          onChanged();
-        } else {
-          baseChunkBuilder_.clear();
-        }
-        return this;
-      }
-      public Builder removeBaseChunk(int index) {
-        if (baseChunkBuilder_ == null) {
-          ensureBaseChunkIsMutable();
-          baseChunk_.remove(index);
-          onChanged();
-        } else {
-          baseChunkBuilder_.remove(index);
-        }
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder getBaseChunkBuilder(
-          int index) {
-        return getBaseChunkFieldBuilder().getBuilder(index);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunkOrBuilder getBaseChunkOrBuilder(
-          int index) {
-        if (baseChunkBuilder_ == null) {
-          return baseChunk_.get(index);  } else {
-          return baseChunkBuilder_.getMessageOrBuilder(index);
-        }
-      }
-      public java.util.List<? extends org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunkOrBuilder>
-           getBaseChunkOrBuilderList() {
-        if (baseChunkBuilder_ != null) {
-          return baseChunkBuilder_.getMessageOrBuilderList();
-        } else {
-          return java.util.Collections.unmodifiableList(baseChunk_);
-        }
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder addBaseChunkBuilder() {
-        return getBaseChunkFieldBuilder().addBuilder(
-            org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.getDefaultInstance());
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder addBaseChunkBuilder(
-          int index) {
-        return getBaseChunkFieldBuilder().addBuilder(
-            index, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.getDefaultInstance());
-      }
-      public java.util.List<org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder>
-           getBaseChunkBuilderList() {
-        return getBaseChunkFieldBuilder().getBuilderList();
-      }
-      private com.google.protobuf.RepeatedFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunkOrBuilder>
-          getBaseChunkFieldBuilder() {
-        if (baseChunkBuilder_ == null) {
-          baseChunkBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunkOrBuilder>(
-                  baseChunk_,
-                  ((bitField0_ & 0x00000001) == 0x00000001),
-                  getParentForChildren(),
-                  isClean());
-          baseChunk_ = null;
-        }
-        return baseChunkBuilder_;
-      }
-
-      // repeated string fragment = 2;
-      private com.google.protobuf.LazyStringList fragment_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-      private void ensureFragmentIsMutable() {
-        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
-          fragment_ = new com.google.protobuf.LazyStringArrayList(fragment_);
-          bitField0_ |= 0x00000002;
-         }
-      }
-      public java.util.List<String>
-          getFragmentList() {
-        return java.util.Collections.unmodifiableList(fragment_);
-      }
-      public int getFragmentCount() {
-        return fragment_.size();
-      }
-      public String getFragment(int index) {
-        return fragment_.get(index);
-      }
-      public Builder setFragment(
-          int index, String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  ensureFragmentIsMutable();
-        fragment_.set(index, value);
-        onChanged();
-        return this;
-      }
-      public Builder addFragment(String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  ensureFragmentIsMutable();
-        fragment_.add(value);
-        onChanged();
-        return this;
-      }
-      public Builder addAllFragment(
-          java.lang.Iterable<String> values) {
-        ensureFragmentIsMutable();
-        super.addAll(values, fragment_);
-        onChanged();
-        return this;
-      }
-      public Builder clearFragment() {
-        fragment_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000002);
-        onChanged();
-        return this;
-      }
-      void addFragment(com.google.protobuf.ByteString value) {
-        ensureFragmentIsMutable();
-        fragment_.add(value);
-        onChanged();
-      }
-
-      // optional .org.eclipse.jgit.storage.dht.ChunkMeta.PrefetchHint commit_prefetch = 51;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint commitPrefetch_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder> commitPrefetchBuilder_;
-      public boolean hasCommitPrefetch() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint getCommitPrefetch() {
-        if (commitPrefetchBuilder_ == null) {
-          return commitPrefetch_;
-        } else {
-          return commitPrefetchBuilder_.getMessage();
-        }
-      }
-      public Builder setCommitPrefetch(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint value) {
-        if (commitPrefetchBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          commitPrefetch_ = value;
-          onChanged();
-        } else {
-          commitPrefetchBuilder_.setMessage(value);
-        }
-        bitField0_ |= 0x00000004;
-        return this;
-      }
-      public Builder setCommitPrefetch(
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder builderForValue) {
-        if (commitPrefetchBuilder_ == null) {
-          commitPrefetch_ = builderForValue.build();
-          onChanged();
-        } else {
-          commitPrefetchBuilder_.setMessage(builderForValue.build());
-        }
-        bitField0_ |= 0x00000004;
-        return this;
-      }
-      public Builder mergeCommitPrefetch(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint value) {
-        if (commitPrefetchBuilder_ == null) {
-          if (((bitField0_ & 0x00000004) == 0x00000004) &&
-              commitPrefetch_ != org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance()) {
-            commitPrefetch_ =
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.newBuilder(commitPrefetch_).mergeFrom(value).buildPartial();
-          } else {
-            commitPrefetch_ = value;
-          }
-          onChanged();
-        } else {
-          commitPrefetchBuilder_.mergeFrom(value);
-        }
-        bitField0_ |= 0x00000004;
-        return this;
-      }
-      public Builder clearCommitPrefetch() {
-        if (commitPrefetchBuilder_ == null) {
-          commitPrefetch_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance();
-          onChanged();
-        } else {
-          commitPrefetchBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000004);
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder getCommitPrefetchBuilder() {
-        bitField0_ |= 0x00000004;
-        onChanged();
-        return getCommitPrefetchFieldBuilder().getBuilder();
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder getCommitPrefetchOrBuilder() {
-        if (commitPrefetchBuilder_ != null) {
-          return commitPrefetchBuilder_.getMessageOrBuilder();
-        } else {
-          return commitPrefetch_;
-        }
-      }
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder>
-          getCommitPrefetchFieldBuilder() {
-        if (commitPrefetchBuilder_ == null) {
-          commitPrefetchBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder>(
-                  commitPrefetch_,
-                  getParentForChildren(),
-                  isClean());
-          commitPrefetch_ = null;
-        }
-        return commitPrefetchBuilder_;
-      }
-
-      // optional .org.eclipse.jgit.storage.dht.ChunkMeta.PrefetchHint tree_prefetch = 52;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint treePrefetch_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder> treePrefetchBuilder_;
-      public boolean hasTreePrefetch() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint getTreePrefetch() {
-        if (treePrefetchBuilder_ == null) {
-          return treePrefetch_;
-        } else {
-          return treePrefetchBuilder_.getMessage();
-        }
-      }
-      public Builder setTreePrefetch(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint value) {
-        if (treePrefetchBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          treePrefetch_ = value;
-          onChanged();
-        } else {
-          treePrefetchBuilder_.setMessage(value);
-        }
-        bitField0_ |= 0x00000008;
-        return this;
-      }
-      public Builder setTreePrefetch(
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder builderForValue) {
-        if (treePrefetchBuilder_ == null) {
-          treePrefetch_ = builderForValue.build();
-          onChanged();
-        } else {
-          treePrefetchBuilder_.setMessage(builderForValue.build());
-        }
-        bitField0_ |= 0x00000008;
-        return this;
-      }
-      public Builder mergeTreePrefetch(org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint value) {
-        if (treePrefetchBuilder_ == null) {
-          if (((bitField0_ & 0x00000008) == 0x00000008) &&
-              treePrefetch_ != org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance()) {
-            treePrefetch_ =
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.newBuilder(treePrefetch_).mergeFrom(value).buildPartial();
-          } else {
-            treePrefetch_ = value;
-          }
-          onChanged();
-        } else {
-          treePrefetchBuilder_.mergeFrom(value);
-        }
-        bitField0_ |= 0x00000008;
-        return this;
-      }
-      public Builder clearTreePrefetch() {
-        if (treePrefetchBuilder_ == null) {
-          treePrefetch_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.getDefaultInstance();
-          onChanged();
-        } else {
-          treePrefetchBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000008);
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder getTreePrefetchBuilder() {
-        bitField0_ |= 0x00000008;
-        onChanged();
-        return getTreePrefetchFieldBuilder().getBuilder();
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder getTreePrefetchOrBuilder() {
-        if (treePrefetchBuilder_ != null) {
-          return treePrefetchBuilder_.getMessageOrBuilder();
-        } else {
-          return treePrefetch_;
-        }
-      }
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder>
-          getTreePrefetchFieldBuilder() {
-        if (treePrefetchBuilder_ == null) {
-          treePrefetchBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHintOrBuilder>(
-                  treePrefetch_,
-                  getParentForChildren(),
-                  isClean());
-          treePrefetch_ = null;
-        }
-        return treePrefetchBuilder_;
-      }
-
-      // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.ChunkMeta)
-    }
-
-    static {
-      defaultInstance = new ChunkMeta(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.ChunkMeta)
-  }
-
-  public interface CachedPackInfoOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // required string name = 1;
-    boolean hasName();
-    String getName();
-
-    // required string version = 2;
-    boolean hasVersion();
-    String getVersion();
-
-    // required int64 objects_total = 3;
-    boolean hasObjectsTotal();
-    long getObjectsTotal();
-
-    // optional int64 objects_delta = 4;
-    boolean hasObjectsDelta();
-    long getObjectsDelta();
-
-    // optional int64 bytes_total = 5;
-    boolean hasBytesTotal();
-    long getBytesTotal();
-
-    // required .org.eclipse.jgit.storage.dht.CachedPackInfo.TipObjectList tip_list = 6;
-    boolean hasTipList();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList getTipList();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectListOrBuilder getTipListOrBuilder();
-
-    // required .org.eclipse.jgit.storage.dht.CachedPackInfo.ChunkList chunk_list = 7;
-    boolean hasChunkList();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList getChunkList();
-    org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkListOrBuilder getChunkListOrBuilder();
-  }
-  public static final class CachedPackInfo extends
-      com.google.protobuf.GeneratedMessage
-      implements CachedPackInfoOrBuilder {
-    // Use CachedPackInfo.newBuilder() to construct.
-    private CachedPackInfo(Builder builder) {
-      super(builder);
-    }
-    private CachedPackInfo(boolean noInit) {}
-
-    private static final CachedPackInfo defaultInstance;
-    public static CachedPackInfo getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public CachedPackInfo getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_fieldAccessorTable;
-    }
-
-    public interface TipObjectListOrBuilder
-        extends com.google.protobuf.MessageOrBuilder {
-
-      // repeated string object_name = 1;
-      java.util.List<String> getObjectNameList();
-      int getObjectNameCount();
-      String getObjectName(int index);
-    }
-    public static final class TipObjectList extends
-        com.google.protobuf.GeneratedMessage
-        implements TipObjectListOrBuilder {
-      // Use TipObjectList.newBuilder() to construct.
-      private TipObjectList(Builder builder) {
-        super(builder);
-      }
-      private TipObjectList(boolean noInit) {}
-
-      private static final TipObjectList defaultInstance;
-      public static TipObjectList getDefaultInstance() {
-        return defaultInstance;
-      }
-
-      public TipObjectList getDefaultInstanceForType() {
-        return defaultInstance;
-      }
-
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_TipObjectList_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_TipObjectList_fieldAccessorTable;
-      }
-
-      // repeated string object_name = 1;
-      public static final int OBJECT_NAME_FIELD_NUMBER = 1;
-      private com.google.protobuf.LazyStringList objectName_;
-      public java.util.List<String>
-          getObjectNameList() {
-        return objectName_;
-      }
-      public int getObjectNameCount() {
-        return objectName_.size();
-      }
-      public String getObjectName(int index) {
-        return objectName_.get(index);
-      }
-
-      private void initFields() {
-        objectName_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-      }
-      private byte memoizedIsInitialized = -1;
-      public final boolean isInitialized() {
-        byte isInitialized = memoizedIsInitialized;
-        if (isInitialized != -1) return isInitialized == 1;
-
-        memoizedIsInitialized = 1;
-        return true;
-      }
-
-      public void writeTo(com.google.protobuf.CodedOutputStream output)
-                          throws java.io.IOException {
-        getSerializedSize();
-        for (int i = 0; i < objectName_.size(); i++) {
-          output.writeBytes(1, objectName_.getByteString(i));
-        }
-        getUnknownFields().writeTo(output);
-      }
-
-      private int memoizedSerializedSize = -1;
-      public int getSerializedSize() {
-        int size = memoizedSerializedSize;
-        if (size != -1) return size;
-
-        size = 0;
-        {
-          int dataSize = 0;
-          for (int i = 0; i < objectName_.size(); i++) {
-            dataSize += com.google.protobuf.CodedOutputStream
-              .computeBytesSizeNoTag(objectName_.getByteString(i));
-          }
-          size += dataSize;
-          size += 1 * getObjectNameList().size();
-        }
-        size += getUnknownFields().getSerializedSize();
-        memoizedSerializedSize = size;
-        return size;
-      }
-
-      @java.lang.Override
-      protected Object writeReplace() throws java.io.ObjectStreamException {
-        return super.writeReplace();
-      }
-
-      @java.lang.Override
-      public boolean equals(final Object obj) {
-        if (obj == this) {
-         return true;
-        }
-        if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList)) {
-          return super.equals(obj);
-        }
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList) obj;
-
-        boolean result = true;
-        result = result && getObjectNameList()
-            .equals(other.getObjectNameList());
-        result = result &&
-            getUnknownFields().equals(other.getUnknownFields());
-        return result;
-      }
-
-      @java.lang.Override
-      public int hashCode() {
-        int hash = 41;
-        hash = (19 * hash) + getDescriptorForType().hashCode();
-        if (getObjectNameCount() > 0) {
-          hash = (37 * hash) + OBJECT_NAME_FIELD_NUMBER;
-          hash = (53 * hash) + getObjectNameList().hashCode();
-        }
-        hash = (29 * hash) + getUnknownFields().hashCode();
-        return hash;
-      }
-
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList parseFrom(
-          com.google.protobuf.ByteString data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList parseFrom(
-          com.google.protobuf.ByteString data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList parseFrom(byte[] data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList parseFrom(
-          byte[] data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList parseFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList parseFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList parseDelimitedFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList parseDelimitedFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList parseFrom(
-          com.google.protobuf.CodedInputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList parseFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-
-      public static Builder newBuilder() { return Builder.create(); }
-      public Builder newBuilderForType() { return newBuilder(); }
-      public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList prototype) {
-        return newBuilder().mergeFrom(prototype);
-      }
-      public Builder toBuilder() { return newBuilder(this); }
-
-      @java.lang.Override
-      protected Builder newBuilderForType(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        Builder builder = new Builder(parent);
-        return builder;
-      }
-      public static final class Builder extends
-          com.google.protobuf.GeneratedMessage.Builder<Builder>
-         implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectListOrBuilder {
-        public static final com.google.protobuf.Descriptors.Descriptor
-            getDescriptor() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_TipObjectList_descriptor;
-        }
-
-        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-            internalGetFieldAccessorTable() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_TipObjectList_fieldAccessorTable;
-        }
-
-        // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.newBuilder()
-        private Builder() {
-          maybeForceBuilderInitialization();
-        }
-
-        private Builder(BuilderParent parent) {
-          super(parent);
-          maybeForceBuilderInitialization();
-        }
-        private void maybeForceBuilderInitialization() {
-          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          }
-        }
-        private static Builder create() {
-          return new Builder();
-        }
-
-        public Builder clear() {
-          super.clear();
-          objectName_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-          bitField0_ = (bitField0_ & ~0x00000001);
-          return this;
-        }
-
-        public Builder clone() {
-          return create().mergeFrom(buildPartial());
-        }
-
-        public com.google.protobuf.Descriptors.Descriptor
-            getDescriptorForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.getDescriptor();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList getDefaultInstanceForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.getDefaultInstance();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList build() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(result);
-          }
-          return result;
-        }
-
-        private org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList buildParsed()
-            throws com.google.protobuf.InvalidProtocolBufferException {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(
-              result).asInvalidProtocolBufferException();
-          }
-          return result;
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList buildPartial() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList(this);
-          int from_bitField0_ = bitField0_;
-          if (((bitField0_ & 0x00000001) == 0x00000001)) {
-            objectName_ = new com.google.protobuf.UnmodifiableLazyStringList(
-                objectName_);
-            bitField0_ = (bitField0_ & ~0x00000001);
-          }
-          result.objectName_ = objectName_;
-          onBuilt();
-          return result;
-        }
-
-        public Builder mergeFrom(com.google.protobuf.Message other) {
-          if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList) {
-            return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList)other);
-          } else {
-            super.mergeFrom(other);
-            return this;
-          }
-        }
-
-        public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList other) {
-          if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.getDefaultInstance()) return this;
-          if (!other.objectName_.isEmpty()) {
-            if (objectName_.isEmpty()) {
-              objectName_ = other.objectName_;
-              bitField0_ = (bitField0_ & ~0x00000001);
-            } else {
-              ensureObjectNameIsMutable();
-              objectName_.addAll(other.objectName_);
-            }
-            onChanged();
-          }
-          this.mergeUnknownFields(other.getUnknownFields());
-          return this;
-        }
-
-        public final boolean isInitialized() {
-          return true;
-        }
-
-        public Builder mergeFrom(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws java.io.IOException {
-          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-            com.google.protobuf.UnknownFieldSet.newBuilder(
-              this.getUnknownFields());
-          while (true) {
-            int tag = input.readTag();
-            switch (tag) {
-              case 0:
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              default: {
-                if (!parseUnknownField(input, unknownFields,
-                                       extensionRegistry, tag)) {
-                  this.setUnknownFields(unknownFields.build());
-                  onChanged();
-                  return this;
-                }
-                break;
-              }
-              case 10: {
-                ensureObjectNameIsMutable();
-                objectName_.add(input.readBytes());
-                break;
-              }
-            }
-          }
-        }
-
-        private int bitField0_;
-
-        // repeated string object_name = 1;
-        private com.google.protobuf.LazyStringList objectName_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-        private void ensureObjectNameIsMutable() {
-          if (!((bitField0_ & 0x00000001) == 0x00000001)) {
-            objectName_ = new com.google.protobuf.LazyStringArrayList(objectName_);
-            bitField0_ |= 0x00000001;
-           }
-        }
-        public java.util.List<String>
-            getObjectNameList() {
-          return java.util.Collections.unmodifiableList(objectName_);
-        }
-        public int getObjectNameCount() {
-          return objectName_.size();
-        }
-        public String getObjectName(int index) {
-          return objectName_.get(index);
-        }
-        public Builder setObjectName(
-            int index, String value) {
-          if (value == null) {
-    throw new NullPointerException();
-  }
-  ensureObjectNameIsMutable();
-          objectName_.set(index, value);
-          onChanged();
-          return this;
-        }
-        public Builder addObjectName(String value) {
-          if (value == null) {
-    throw new NullPointerException();
-  }
-  ensureObjectNameIsMutable();
-          objectName_.add(value);
-          onChanged();
-          return this;
-        }
-        public Builder addAllObjectName(
-            java.lang.Iterable<String> values) {
-          ensureObjectNameIsMutable();
-          super.addAll(values, objectName_);
-          onChanged();
-          return this;
-        }
-        public Builder clearObjectName() {
-          objectName_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-          bitField0_ = (bitField0_ & ~0x00000001);
-          onChanged();
-          return this;
-        }
-        void addObjectName(com.google.protobuf.ByteString value) {
-          ensureObjectNameIsMutable();
-          objectName_.add(value);
-          onChanged();
-        }
-
-        // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.CachedPackInfo.TipObjectList)
-      }
-
-      static {
-        defaultInstance = new TipObjectList(true);
-        defaultInstance.initFields();
-      }
-
-      // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.CachedPackInfo.TipObjectList)
-    }
-
-    public interface ChunkListOrBuilder
-        extends com.google.protobuf.MessageOrBuilder {
-
-      // repeated string chunk_key = 1;
-      java.util.List<String> getChunkKeyList();
-      int getChunkKeyCount();
-      String getChunkKey(int index);
-    }
-    public static final class ChunkList extends
-        com.google.protobuf.GeneratedMessage
-        implements ChunkListOrBuilder {
-      // Use ChunkList.newBuilder() to construct.
-      private ChunkList(Builder builder) {
-        super(builder);
-      }
-      private ChunkList(boolean noInit) {}
-
-      private static final ChunkList defaultInstance;
-      public static ChunkList getDefaultInstance() {
-        return defaultInstance;
-      }
-
-      public ChunkList getDefaultInstanceForType() {
-        return defaultInstance;
-      }
-
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_ChunkList_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_ChunkList_fieldAccessorTable;
-      }
-
-      // repeated string chunk_key = 1;
-      public static final int CHUNK_KEY_FIELD_NUMBER = 1;
-      private com.google.protobuf.LazyStringList chunkKey_;
-      public java.util.List<String>
-          getChunkKeyList() {
-        return chunkKey_;
-      }
-      public int getChunkKeyCount() {
-        return chunkKey_.size();
-      }
-      public String getChunkKey(int index) {
-        return chunkKey_.get(index);
-      }
-
-      private void initFields() {
-        chunkKey_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-      }
-      private byte memoizedIsInitialized = -1;
-      public final boolean isInitialized() {
-        byte isInitialized = memoizedIsInitialized;
-        if (isInitialized != -1) return isInitialized == 1;
-
-        memoizedIsInitialized = 1;
-        return true;
-      }
-
-      public void writeTo(com.google.protobuf.CodedOutputStream output)
-                          throws java.io.IOException {
-        getSerializedSize();
-        for (int i = 0; i < chunkKey_.size(); i++) {
-          output.writeBytes(1, chunkKey_.getByteString(i));
-        }
-        getUnknownFields().writeTo(output);
-      }
-
-      private int memoizedSerializedSize = -1;
-      public int getSerializedSize() {
-        int size = memoizedSerializedSize;
-        if (size != -1) return size;
-
-        size = 0;
-        {
-          int dataSize = 0;
-          for (int i = 0; i < chunkKey_.size(); i++) {
-            dataSize += com.google.protobuf.CodedOutputStream
-              .computeBytesSizeNoTag(chunkKey_.getByteString(i));
-          }
-          size += dataSize;
-          size += 1 * getChunkKeyList().size();
-        }
-        size += getUnknownFields().getSerializedSize();
-        memoizedSerializedSize = size;
-        return size;
-      }
-
-      @java.lang.Override
-      protected Object writeReplace() throws java.io.ObjectStreamException {
-        return super.writeReplace();
-      }
-
-      @java.lang.Override
-      public boolean equals(final Object obj) {
-        if (obj == this) {
-         return true;
-        }
-        if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList)) {
-          return super.equals(obj);
-        }
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList) obj;
-
-        boolean result = true;
-        result = result && getChunkKeyList()
-            .equals(other.getChunkKeyList());
-        result = result &&
-            getUnknownFields().equals(other.getUnknownFields());
-        return result;
-      }
-
-      @java.lang.Override
-      public int hashCode() {
-        int hash = 41;
-        hash = (19 * hash) + getDescriptorForType().hashCode();
-        if (getChunkKeyCount() > 0) {
-          hash = (37 * hash) + CHUNK_KEY_FIELD_NUMBER;
-          hash = (53 * hash) + getChunkKeyList().hashCode();
-        }
-        hash = (29 * hash) + getUnknownFields().hashCode();
-        return hash;
-      }
-
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList parseFrom(
-          com.google.protobuf.ByteString data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList parseFrom(
-          com.google.protobuf.ByteString data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList parseFrom(byte[] data)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList parseFrom(
-          byte[] data,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return newBuilder().mergeFrom(data, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList parseFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList parseFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList parseDelimitedFrom(java.io.InputStream input)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList parseDelimitedFrom(
-          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        Builder builder = newBuilder();
-        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-          return builder.buildParsed();
-        } else {
-          return null;
-        }
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList parseFrom(
-          com.google.protobuf.CodedInputStream input)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input).buildParsed();
-      }
-      public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList parseFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        return newBuilder().mergeFrom(input, extensionRegistry)
-                 .buildParsed();
-      }
-
-      public static Builder newBuilder() { return Builder.create(); }
-      public Builder newBuilderForType() { return newBuilder(); }
-      public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList prototype) {
-        return newBuilder().mergeFrom(prototype);
-      }
-      public Builder toBuilder() { return newBuilder(this); }
-
-      @java.lang.Override
-      protected Builder newBuilderForType(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        Builder builder = new Builder(parent);
-        return builder;
-      }
-      public static final class Builder extends
-          com.google.protobuf.GeneratedMessage.Builder<Builder>
-         implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkListOrBuilder {
-        public static final com.google.protobuf.Descriptors.Descriptor
-            getDescriptor() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_ChunkList_descriptor;
-        }
-
-        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-            internalGetFieldAccessorTable() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_ChunkList_fieldAccessorTable;
-        }
-
-        // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.newBuilder()
-        private Builder() {
-          maybeForceBuilderInitialization();
-        }
-
-        private Builder(BuilderParent parent) {
-          super(parent);
-          maybeForceBuilderInitialization();
-        }
-        private void maybeForceBuilderInitialization() {
-          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          }
-        }
-        private static Builder create() {
-          return new Builder();
-        }
-
-        public Builder clear() {
-          super.clear();
-          chunkKey_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-          bitField0_ = (bitField0_ & ~0x00000001);
-          return this;
-        }
-
-        public Builder clone() {
-          return create().mergeFrom(buildPartial());
-        }
-
-        public com.google.protobuf.Descriptors.Descriptor
-            getDescriptorForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.getDescriptor();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList getDefaultInstanceForType() {
-          return org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.getDefaultInstance();
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList build() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(result);
-          }
-          return result;
-        }
-
-        private org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList buildParsed()
-            throws com.google.protobuf.InvalidProtocolBufferException {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList result = buildPartial();
-          if (!result.isInitialized()) {
-            throw newUninitializedMessageException(
-              result).asInvalidProtocolBufferException();
-          }
-          return result;
-        }
-
-        public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList buildPartial() {
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList(this);
-          int from_bitField0_ = bitField0_;
-          if (((bitField0_ & 0x00000001) == 0x00000001)) {
-            chunkKey_ = new com.google.protobuf.UnmodifiableLazyStringList(
-                chunkKey_);
-            bitField0_ = (bitField0_ & ~0x00000001);
-          }
-          result.chunkKey_ = chunkKey_;
-          onBuilt();
-          return result;
-        }
-
-        public Builder mergeFrom(com.google.protobuf.Message other) {
-          if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList) {
-            return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList)other);
-          } else {
-            super.mergeFrom(other);
-            return this;
-          }
-        }
-
-        public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList other) {
-          if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.getDefaultInstance()) return this;
-          if (!other.chunkKey_.isEmpty()) {
-            if (chunkKey_.isEmpty()) {
-              chunkKey_ = other.chunkKey_;
-              bitField0_ = (bitField0_ & ~0x00000001);
-            } else {
-              ensureChunkKeyIsMutable();
-              chunkKey_.addAll(other.chunkKey_);
-            }
-            onChanged();
-          }
-          this.mergeUnknownFields(other.getUnknownFields());
-          return this;
-        }
-
-        public final boolean isInitialized() {
-          return true;
-        }
-
-        public Builder mergeFrom(
-            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-            throws java.io.IOException {
-          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-            com.google.protobuf.UnknownFieldSet.newBuilder(
-              this.getUnknownFields());
-          while (true) {
-            int tag = input.readTag();
-            switch (tag) {
-              case 0:
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              default: {
-                if (!parseUnknownField(input, unknownFields,
-                                       extensionRegistry, tag)) {
-                  this.setUnknownFields(unknownFields.build());
-                  onChanged();
-                  return this;
-                }
-                break;
-              }
-              case 10: {
-                ensureChunkKeyIsMutable();
-                chunkKey_.add(input.readBytes());
-                break;
-              }
-            }
-          }
-        }
-
-        private int bitField0_;
-
-        // repeated string chunk_key = 1;
-        private com.google.protobuf.LazyStringList chunkKey_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-        private void ensureChunkKeyIsMutable() {
-          if (!((bitField0_ & 0x00000001) == 0x00000001)) {
-            chunkKey_ = new com.google.protobuf.LazyStringArrayList(chunkKey_);
-            bitField0_ |= 0x00000001;
-           }
-        }
-        public java.util.List<String>
-            getChunkKeyList() {
-          return java.util.Collections.unmodifiableList(chunkKey_);
-        }
-        public int getChunkKeyCount() {
-          return chunkKey_.size();
-        }
-        public String getChunkKey(int index) {
-          return chunkKey_.get(index);
-        }
-        public Builder setChunkKey(
-            int index, String value) {
-          if (value == null) {
-    throw new NullPointerException();
-  }
-  ensureChunkKeyIsMutable();
-          chunkKey_.set(index, value);
-          onChanged();
-          return this;
-        }
-        public Builder addChunkKey(String value) {
-          if (value == null) {
-    throw new NullPointerException();
-  }
-  ensureChunkKeyIsMutable();
-          chunkKey_.add(value);
-          onChanged();
-          return this;
-        }
-        public Builder addAllChunkKey(
-            java.lang.Iterable<String> values) {
-          ensureChunkKeyIsMutable();
-          super.addAll(values, chunkKey_);
-          onChanged();
-          return this;
-        }
-        public Builder clearChunkKey() {
-          chunkKey_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-          bitField0_ = (bitField0_ & ~0x00000001);
-          onChanged();
-          return this;
-        }
-        void addChunkKey(com.google.protobuf.ByteString value) {
-          ensureChunkKeyIsMutable();
-          chunkKey_.add(value);
-          onChanged();
-        }
-
-        // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.CachedPackInfo.ChunkList)
-      }
-
-      static {
-        defaultInstance = new ChunkList(true);
-        defaultInstance.initFields();
-      }
-
-      // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.CachedPackInfo.ChunkList)
-    }
-
-    private int bitField0_;
-    // required string name = 1;
-    public static final int NAME_FIELD_NUMBER = 1;
-    private Object name_;
-    public boolean hasName() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    public String getName() {
-      Object ref = name_;
-      if (ref instanceof String) {
-        return (String) ref;
-      } else {
-        com.google.protobuf.ByteString bs =
-            (com.google.protobuf.ByteString) ref;
-        String s = bs.toStringUtf8();
-        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
-          name_ = s;
-        }
-        return s;
-      }
-    }
-    private com.google.protobuf.ByteString getNameBytes() {
-      Object ref = name_;
-      if (ref instanceof String) {
-        com.google.protobuf.ByteString b =
-            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
-        name_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // required string version = 2;
-    public static final int VERSION_FIELD_NUMBER = 2;
-    private Object version_;
-    public boolean hasVersion() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    public String getVersion() {
-      Object ref = version_;
-      if (ref instanceof String) {
-        return (String) ref;
-      } else {
-        com.google.protobuf.ByteString bs =
-            (com.google.protobuf.ByteString) ref;
-        String s = bs.toStringUtf8();
-        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
-          version_ = s;
-        }
-        return s;
-      }
-    }
-    private com.google.protobuf.ByteString getVersionBytes() {
-      Object ref = version_;
-      if (ref instanceof String) {
-        com.google.protobuf.ByteString b =
-            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
-        version_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // required int64 objects_total = 3;
-    public static final int OBJECTS_TOTAL_FIELD_NUMBER = 3;
-    private long objectsTotal_;
-    public boolean hasObjectsTotal() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
-    }
-    public long getObjectsTotal() {
-      return objectsTotal_;
-    }
-
-    // optional int64 objects_delta = 4;
-    public static final int OBJECTS_DELTA_FIELD_NUMBER = 4;
-    private long objectsDelta_;
-    public boolean hasObjectsDelta() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
-    }
-    public long getObjectsDelta() {
-      return objectsDelta_;
-    }
-
-    // optional int64 bytes_total = 5;
-    public static final int BYTES_TOTAL_FIELD_NUMBER = 5;
-    private long bytesTotal_;
-    public boolean hasBytesTotal() {
-      return ((bitField0_ & 0x00000010) == 0x00000010);
-    }
-    public long getBytesTotal() {
-      return bytesTotal_;
-    }
-
-    // required .org.eclipse.jgit.storage.dht.CachedPackInfo.TipObjectList tip_list = 6;
-    public static final int TIP_LIST_FIELD_NUMBER = 6;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList tipList_;
-    public boolean hasTipList() {
-      return ((bitField0_ & 0x00000020) == 0x00000020);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList getTipList() {
-      return tipList_;
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectListOrBuilder getTipListOrBuilder() {
-      return tipList_;
-    }
-
-    // required .org.eclipse.jgit.storage.dht.CachedPackInfo.ChunkList chunk_list = 7;
-    public static final int CHUNK_LIST_FIELD_NUMBER = 7;
-    private org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList chunkList_;
-    public boolean hasChunkList() {
-      return ((bitField0_ & 0x00000040) == 0x00000040);
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList getChunkList() {
-      return chunkList_;
-    }
-    public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkListOrBuilder getChunkListOrBuilder() {
-      return chunkList_;
-    }
-
-    private void initFields() {
-      name_ = "";
-      version_ = "";
-      objectsTotal_ = 0L;
-      objectsDelta_ = 0L;
-      bytesTotal_ = 0L;
-      tipList_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.getDefaultInstance();
-      chunkList_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.getDefaultInstance();
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      if (!hasName()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasVersion()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasObjectsTotal()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasTipList()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasChunkList()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeBytes(1, getNameBytes());
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeBytes(2, getVersionBytes());
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeInt64(3, objectsTotal_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeInt64(4, objectsDelta_);
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeInt64(5, bytesTotal_);
-      }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        output.writeMessage(6, tipList_);
-      }
-      if (((bitField0_ & 0x00000040) == 0x00000040)) {
-        output.writeMessage(7, chunkList_);
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(1, getNameBytes());
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(2, getVersionBytes());
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(3, objectsTotal_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(4, objectsDelta_);
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(5, bytesTotal_);
-      }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(6, tipList_);
-      }
-      if (((bitField0_ & 0x00000040) == 0x00000040)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(7, chunkList_);
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    @java.lang.Override
-    protected Object writeReplace() throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo)) {
-        return super.equals(obj);
-      }
-      org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo other = (org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo) obj;
-
-      boolean result = true;
-      result = result && (hasName() == other.hasName());
-      if (hasName()) {
-        result = result && getName()
-            .equals(other.getName());
-      }
-      result = result && (hasVersion() == other.hasVersion());
-      if (hasVersion()) {
-        result = result && getVersion()
-            .equals(other.getVersion());
-      }
-      result = result && (hasObjectsTotal() == other.hasObjectsTotal());
-      if (hasObjectsTotal()) {
-        result = result && (getObjectsTotal()
-            == other.getObjectsTotal());
-      }
-      result = result && (hasObjectsDelta() == other.hasObjectsDelta());
-      if (hasObjectsDelta()) {
-        result = result && (getObjectsDelta()
-            == other.getObjectsDelta());
-      }
-      result = result && (hasBytesTotal() == other.hasBytesTotal());
-      if (hasBytesTotal()) {
-        result = result && (getBytesTotal()
-            == other.getBytesTotal());
-      }
-      result = result && (hasTipList() == other.hasTipList());
-      if (hasTipList()) {
-        result = result && getTipList()
-            .equals(other.getTipList());
-      }
-      result = result && (hasChunkList() == other.hasChunkList());
-      if (hasChunkList()) {
-        result = result && getChunkList()
-            .equals(other.getChunkList());
-      }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasName()) {
-        hash = (37 * hash) + NAME_FIELD_NUMBER;
-        hash = (53 * hash) + getName().hashCode();
-      }
-      if (hasVersion()) {
-        hash = (37 * hash) + VERSION_FIELD_NUMBER;
-        hash = (53 * hash) + getVersion().hashCode();
-      }
-      if (hasObjectsTotal()) {
-        hash = (37 * hash) + OBJECTS_TOTAL_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getObjectsTotal());
-      }
-      if (hasObjectsDelta()) {
-        hash = (37 * hash) + OBJECTS_DELTA_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getObjectsDelta());
-      }
-      if (hasBytesTotal()) {
-        hash = (37 * hash) + BYTES_TOTAL_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getBytesTotal());
-      }
-      if (hasTipList()) {
-        hash = (37 * hash) + TIP_LIST_FIELD_NUMBER;
-        hash = (53 * hash) + getTipList().hashCode();
-      }
-      if (hasChunkList()) {
-        hash = (37 * hash) + CHUNK_LIST_FIELD_NUMBER;
-        hash = (53 * hash) + getChunkList().hashCode();
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      return hash;
-    }
-
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
-    }
-    public static org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfoOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_fieldAccessorTable;
-      }
-
-      // Construct using org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          getTipListFieldBuilder();
-          getChunkListFieldBuilder();
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        name_ = "";
-        bitField0_ = (bitField0_ & ~0x00000001);
-        version_ = "";
-        bitField0_ = (bitField0_ & ~0x00000002);
-        objectsTotal_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000004);
-        objectsDelta_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000008);
-        bytesTotal_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000010);
-        if (tipListBuilder_ == null) {
-          tipList_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.getDefaultInstance();
-        } else {
-          tipListBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000020);
-        if (chunkListBuilder_ == null) {
-          chunkList_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.getDefaultInstance();
-        } else {
-          chunkListBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000040);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.getDescriptor();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo getDefaultInstanceForType() {
-        return org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.getDefaultInstance();
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo build() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo buildPartial() {
-        org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo result = new org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.name_ = name_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        result.version_ = version_;
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000004;
-        }
-        result.objectsTotal_ = objectsTotal_;
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000008;
-        }
-        result.objectsDelta_ = objectsDelta_;
-        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
-          to_bitField0_ |= 0x00000010;
-        }
-        result.bytesTotal_ = bytesTotal_;
-        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
-          to_bitField0_ |= 0x00000020;
-        }
-        if (tipListBuilder_ == null) {
-          result.tipList_ = tipList_;
-        } else {
-          result.tipList_ = tipListBuilder_.build();
-        }
-        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
-          to_bitField0_ |= 0x00000040;
-        }
-        if (chunkListBuilder_ == null) {
-          result.chunkList_ = chunkList_;
-        } else {
-          result.chunkList_ = chunkListBuilder_.build();
-        }
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo) {
-          return mergeFrom((org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo other) {
-        if (other == org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.getDefaultInstance()) return this;
-        if (other.hasName()) {
-          setName(other.getName());
-        }
-        if (other.hasVersion()) {
-          setVersion(other.getVersion());
-        }
-        if (other.hasObjectsTotal()) {
-          setObjectsTotal(other.getObjectsTotal());
-        }
-        if (other.hasObjectsDelta()) {
-          setObjectsDelta(other.getObjectsDelta());
-        }
-        if (other.hasBytesTotal()) {
-          setBytesTotal(other.getBytesTotal());
-        }
-        if (other.hasTipList()) {
-          mergeTipList(other.getTipList());
-        }
-        if (other.hasChunkList()) {
-          mergeChunkList(other.getChunkList());
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        if (!hasName()) {
-
-          return false;
-        }
-        if (!hasVersion()) {
-
-          return false;
-        }
-        if (!hasObjectsTotal()) {
-
-          return false;
-        }
-        if (!hasTipList()) {
-
-          return false;
-        }
-        if (!hasChunkList()) {
-
-          return false;
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              name_ = input.readBytes();
-              break;
-            }
-            case 18: {
-              bitField0_ |= 0x00000002;
-              version_ = input.readBytes();
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000004;
-              objectsTotal_ = input.readInt64();
-              break;
-            }
-            case 32: {
-              bitField0_ |= 0x00000008;
-              objectsDelta_ = input.readInt64();
-              break;
-            }
-            case 40: {
-              bitField0_ |= 0x00000010;
-              bytesTotal_ = input.readInt64();
-              break;
-            }
-            case 50: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.newBuilder();
-              if (hasTipList()) {
-                subBuilder.mergeFrom(getTipList());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setTipList(subBuilder.buildPartial());
-              break;
-            }
-            case 58: {
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.Builder subBuilder = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.newBuilder();
-              if (hasChunkList()) {
-                subBuilder.mergeFrom(getChunkList());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setChunkList(subBuilder.buildPartial());
-              break;
-            }
-          }
-        }
-      }
-
-      private int bitField0_;
-
-      // required string name = 1;
-      private Object name_ = "";
-      public boolean hasName() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      public String getName() {
-        Object ref = name_;
-        if (!(ref instanceof String)) {
-          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
-          name_ = s;
-          return s;
-        } else {
-          return (String) ref;
-        }
-      }
-      public Builder setName(String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-        name_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearName() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        name_ = getDefaultInstance().getName();
-        onChanged();
-        return this;
-      }
-      void setName(com.google.protobuf.ByteString value) {
-        bitField0_ |= 0x00000001;
-        name_ = value;
-        onChanged();
-      }
-
-      // required string version = 2;
-      private Object version_ = "";
-      public boolean hasVersion() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      public String getVersion() {
-        Object ref = version_;
-        if (!(ref instanceof String)) {
-          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
-          version_ = s;
-          return s;
-        } else {
-          return (String) ref;
-        }
-      }
-      public Builder setVersion(String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000002;
-        version_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearVersion() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        version_ = getDefaultInstance().getVersion();
-        onChanged();
-        return this;
-      }
-      void setVersion(com.google.protobuf.ByteString value) {
-        bitField0_ |= 0x00000002;
-        version_ = value;
-        onChanged();
-      }
-
-      // required int64 objects_total = 3;
-      private long objectsTotal_ ;
-      public boolean hasObjectsTotal() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      public long getObjectsTotal() {
-        return objectsTotal_;
-      }
-      public Builder setObjectsTotal(long value) {
-        bitField0_ |= 0x00000004;
-        objectsTotal_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearObjectsTotal() {
-        bitField0_ = (bitField0_ & ~0x00000004);
-        objectsTotal_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      // optional int64 objects_delta = 4;
-      private long objectsDelta_ ;
-      public boolean hasObjectsDelta() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
-      }
-      public long getObjectsDelta() {
-        return objectsDelta_;
-      }
-      public Builder setObjectsDelta(long value) {
-        bitField0_ |= 0x00000008;
-        objectsDelta_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearObjectsDelta() {
-        bitField0_ = (bitField0_ & ~0x00000008);
-        objectsDelta_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      // optional int64 bytes_total = 5;
-      private long bytesTotal_ ;
-      public boolean hasBytesTotal() {
-        return ((bitField0_ & 0x00000010) == 0x00000010);
-      }
-      public long getBytesTotal() {
-        return bytesTotal_;
-      }
-      public Builder setBytesTotal(long value) {
-        bitField0_ |= 0x00000010;
-        bytesTotal_ = value;
-        onChanged();
-        return this;
-      }
-      public Builder clearBytesTotal() {
-        bitField0_ = (bitField0_ & ~0x00000010);
-        bytesTotal_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      // required .org.eclipse.jgit.storage.dht.CachedPackInfo.TipObjectList tip_list = 6;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList tipList_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectListOrBuilder> tipListBuilder_;
-      public boolean hasTipList() {
-        return ((bitField0_ & 0x00000020) == 0x00000020);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList getTipList() {
-        if (tipListBuilder_ == null) {
-          return tipList_;
-        } else {
-          return tipListBuilder_.getMessage();
-        }
-      }
-      public Builder setTipList(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList value) {
-        if (tipListBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          tipList_ = value;
-          onChanged();
-        } else {
-          tipListBuilder_.setMessage(value);
-        }
-        bitField0_ |= 0x00000020;
-        return this;
-      }
-      public Builder setTipList(
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.Builder builderForValue) {
-        if (tipListBuilder_ == null) {
-          tipList_ = builderForValue.build();
-          onChanged();
-        } else {
-          tipListBuilder_.setMessage(builderForValue.build());
-        }
-        bitField0_ |= 0x00000020;
-        return this;
-      }
-      public Builder mergeTipList(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList value) {
-        if (tipListBuilder_ == null) {
-          if (((bitField0_ & 0x00000020) == 0x00000020) &&
-              tipList_ != org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.getDefaultInstance()) {
-            tipList_ =
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.newBuilder(tipList_).mergeFrom(value).buildPartial();
-          } else {
-            tipList_ = value;
-          }
-          onChanged();
-        } else {
-          tipListBuilder_.mergeFrom(value);
-        }
-        bitField0_ |= 0x00000020;
-        return this;
-      }
-      public Builder clearTipList() {
-        if (tipListBuilder_ == null) {
-          tipList_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.getDefaultInstance();
-          onChanged();
-        } else {
-          tipListBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000020);
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.Builder getTipListBuilder() {
-        bitField0_ |= 0x00000020;
-        onChanged();
-        return getTipListFieldBuilder().getBuilder();
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectListOrBuilder getTipListOrBuilder() {
-        if (tipListBuilder_ != null) {
-          return tipListBuilder_.getMessageOrBuilder();
-        } else {
-          return tipList_;
-        }
-      }
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectListOrBuilder>
-          getTipListFieldBuilder() {
-        if (tipListBuilder_ == null) {
-          tipListBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectListOrBuilder>(
-                  tipList_,
-                  getParentForChildren(),
-                  isClean());
-          tipList_ = null;
-        }
-        return tipListBuilder_;
-      }
-
-      // required .org.eclipse.jgit.storage.dht.CachedPackInfo.ChunkList chunk_list = 7;
-      private org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList chunkList_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkListOrBuilder> chunkListBuilder_;
-      public boolean hasChunkList() {
-        return ((bitField0_ & 0x00000040) == 0x00000040);
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList getChunkList() {
-        if (chunkListBuilder_ == null) {
-          return chunkList_;
-        } else {
-          return chunkListBuilder_.getMessage();
-        }
-      }
-      public Builder setChunkList(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList value) {
-        if (chunkListBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
-          }
-          chunkList_ = value;
-          onChanged();
-        } else {
-          chunkListBuilder_.setMessage(value);
-        }
-        bitField0_ |= 0x00000040;
-        return this;
-      }
-      public Builder setChunkList(
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.Builder builderForValue) {
-        if (chunkListBuilder_ == null) {
-          chunkList_ = builderForValue.build();
-          onChanged();
-        } else {
-          chunkListBuilder_.setMessage(builderForValue.build());
-        }
-        bitField0_ |= 0x00000040;
-        return this;
-      }
-      public Builder mergeChunkList(org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList value) {
-        if (chunkListBuilder_ == null) {
-          if (((bitField0_ & 0x00000040) == 0x00000040) &&
-              chunkList_ != org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.getDefaultInstance()) {
-            chunkList_ =
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.newBuilder(chunkList_).mergeFrom(value).buildPartial();
-          } else {
-            chunkList_ = value;
-          }
-          onChanged();
-        } else {
-          chunkListBuilder_.mergeFrom(value);
-        }
-        bitField0_ |= 0x00000040;
-        return this;
-      }
-      public Builder clearChunkList() {
-        if (chunkListBuilder_ == null) {
-          chunkList_ = org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.getDefaultInstance();
-          onChanged();
-        } else {
-          chunkListBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000040);
-        return this;
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.Builder getChunkListBuilder() {
-        bitField0_ |= 0x00000040;
-        onChanged();
-        return getChunkListFieldBuilder().getBuilder();
-      }
-      public org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkListOrBuilder getChunkListOrBuilder() {
-        if (chunkListBuilder_ != null) {
-          return chunkListBuilder_.getMessageOrBuilder();
-        } else {
-          return chunkList_;
-        }
-      }
-      private com.google.protobuf.SingleFieldBuilder<
-          org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkListOrBuilder>
-          getChunkListFieldBuilder() {
-        if (chunkListBuilder_ == null) {
-          chunkListBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.Builder, org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkListOrBuilder>(
-                  chunkList_,
-                  getParentForChildren(),
-                  isClean());
-          chunkList_ = null;
-        }
-        return chunkListBuilder_;
-      }
-
-      // @@protoc_insertion_point(builder_scope:org.eclipse.jgit.storage.dht.CachedPackInfo)
-    }
-
-    static {
-      defaultInstance = new CachedPackInfo(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:org.eclipse.jgit.storage.dht.CachedPackInfo)
-  }
-
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_RefData_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_RefData_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_RefData_Id_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_RefData_Id_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_ObjectInfo_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_ObjectInfo_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_ObjectCounts_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_ObjectCounts_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_BaseChunk_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_BaseChunk_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_PrefetchHint_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_PrefetchHint_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_TipObjectList_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_TipObjectList_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_ChunkList_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_ChunkList_fieldAccessorTable;
-
-  public static com.google.protobuf.Descriptors.FileDescriptor
-      getDescriptor() {
-    return descriptor;
-  }
-  private static com.google.protobuf.Descriptors.FileDescriptor
-      descriptor;
-  static {
-    java.lang.String[] descriptorData = {
-      "\n,org/eclipse/jgit/storage/dht/git_store" +
-      ".proto\022\034org.eclipse.jgit.storage.dht\"\343\001\n" +
-      "\007RefData\022\023\n\010sequence\030\005 \002(\r:\0010\022\016\n\006symref\030" +
-      "\001 \001(\t\0228\n\006target\030\002 \001(\0132(.org.eclipse.jgit" +
-      ".storage.dht.RefData.Id\022\021\n\tis_peeled\030\003 \001" +
-      "(\010\0228\n\006peeled\030\004 \001(\0132(.org.eclipse.jgit.st" +
-      "orage.dht.RefData.Id\032,\n\002Id\022\023\n\013object_nam" +
-      "e\030\001 \002(\t\022\021\n\tchunk_key\030\002 \001(\t\"\364\001\n\nObjectInf" +
-      "o\022H\n\013object_type\030\001 \001(\01623.org.eclipse.jgi" +
-      "t.storage.dht.ObjectInfo.ObjectType\022\016\n\006o",
-      "ffset\030\002 \002(\005\022\023\n\013packed_size\030\003 \002(\003\022\025\n\rinfl" +
-      "ated_size\030\004 \002(\003\022\022\n\ndelta_base\030\005 \001(\014\022\025\n\ri" +
-      "s_fragmented\030\006 \001(\010\"5\n\nObjectType\022\n\n\006COMM" +
-      "IT\020\001\022\010\n\004TREE\020\002\022\010\n\004BLOB\020\003\022\007\n\003TAG\020\004\"\217\004\n\tCh" +
-      "unkInfo\022>\n\006source\030\001 \001(\0162..org.eclipse.jg" +
-      "it.storage.dht.ChunkInfo.Source\022G\n\013objec" +
-      "t_type\030\002 \001(\01622.org.eclipse.jgit.storage." +
-      "dht.ChunkInfo.ObjectType\022\023\n\013is_fragment\030" +
-      "\003 \001(\010\022\027\n\017cached_pack_key\030\004 \001(\t\022K\n\robject" +
-      "_counts\030\005 \001(\01324.org.eclipse.jgit.storage",
-      ".dht.ChunkInfo.ObjectCounts\022\022\n\nchunk_siz" +
-      "e\030\006 \001(\005\022\022\n\nindex_size\030\007 \001(\005\022\021\n\tmeta_size" +
-      "\030\010 \001(\005\032R\n\014ObjectCounts\022\r\n\005total\030\001 \001(\005\022\r\n" +
-      "\005whole\030\002 \001(\005\022\021\n\tofs_delta\030\003 \001(\005\022\021\n\tref_d" +
-      "elta\030\004 \001(\005\"-\n\006Source\022\013\n\007RECEIVE\020\001\022\n\n\006INS" +
-      "ERT\020\002\022\n\n\006REPACK\020\003\"@\n\nObjectType\022\t\n\005MIXED" +
-      "\020\000\022\n\n\006COMMIT\020\001\022\010\n\004TREE\020\002\022\010\n\004BLOB\020\003\022\007\n\003TA" +
-      "G\020\004\"\352\002\n\tChunkMeta\022E\n\nbase_chunk\030\001 \003(\01321." +
-      "org.eclipse.jgit.storage.dht.ChunkMeta.B" +
-      "aseChunk\022\020\n\010fragment\030\002 \003(\t\022M\n\017commit_pre",
-      "fetch\0303 \001(\01324.org.eclipse.jgit.storage.d" +
-      "ht.ChunkMeta.PrefetchHint\022K\n\rtree_prefet" +
-      "ch\0304 \001(\01324.org.eclipse.jgit.storage.dht." +
-      "ChunkMeta.PrefetchHint\0326\n\tBaseChunk\022\026\n\016r" +
-      "elative_start\030\001 \002(\003\022\021\n\tchunk_key\030\002 \002(\t\0320" +
-      "\n\014PrefetchHint\022\014\n\004edge\030\001 \003(\t\022\022\n\nsequenti" +
-      "al\030\002 \003(\t\"\322\002\n\016CachedPackInfo\022\014\n\004name\030\001 \002(" +
-      "\t\022\017\n\007version\030\002 \002(\t\022\025\n\robjects_total\030\003 \002(" +
-      "\003\022\025\n\robjects_delta\030\004 \001(\003\022\023\n\013bytes_total\030" +
-      "\005 \001(\003\022L\n\010tip_list\030\006 \002(\0132:.org.eclipse.jg",
-      "it.storage.dht.CachedPackInfo.TipObjectL" +
-      "ist\022J\n\nchunk_list\030\007 \002(\01326.org.eclipse.jg" +
-      "it.storage.dht.CachedPackInfo.ChunkList\032" +
-      "$\n\rTipObjectList\022\023\n\013object_name\030\001 \003(\t\032\036\n" +
-      "\tChunkList\022\021\n\tchunk_key\030\001 \003(\tB1\n,org.ecl" +
-      "ipse.jgit.generated.storage.dht.proto\240\001\001"
-    };
-    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
-      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
-        public com.google.protobuf.ExtensionRegistry assignDescriptors(
-            com.google.protobuf.Descriptors.FileDescriptor root) {
-          descriptor = root;
-          internal_static_org_eclipse_jgit_storage_dht_RefData_descriptor =
-            getDescriptor().getMessageTypes().get(0);
-          internal_static_org_eclipse_jgit_storage_dht_RefData_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_RefData_descriptor,
-              new java.lang.String[] { "Sequence", "Symref", "Target", "IsPeeled", "Peeled", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_RefData_Id_descriptor =
-            internal_static_org_eclipse_jgit_storage_dht_RefData_descriptor.getNestedTypes().get(0);
-          internal_static_org_eclipse_jgit_storage_dht_RefData_Id_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_RefData_Id_descriptor,
-              new java.lang.String[] { "ObjectName", "ChunkKey", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData.Id.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_ObjectInfo_descriptor =
-            getDescriptor().getMessageTypes().get(1);
-          internal_static_org_eclipse_jgit_storage_dht_ObjectInfo_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_ObjectInfo_descriptor,
-              new java.lang.String[] { "ObjectType", "Offset", "PackedSize", "InflatedSize", "DeltaBase", "IsFragmented", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_descriptor =
-            getDescriptor().getMessageTypes().get(2);
-          internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_descriptor,
-              new java.lang.String[] { "Source", "ObjectType", "IsFragment", "CachedPackKey", "ObjectCounts", "ChunkSize", "IndexSize", "MetaSize", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_ObjectCounts_descriptor =
-            internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_descriptor.getNestedTypes().get(0);
-          internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_ObjectCounts_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_ChunkInfo_ObjectCounts_descriptor,
-              new java.lang.String[] { "Total", "Whole", "OfsDelta", "RefDelta", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkInfo.ObjectCounts.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_descriptor =
-            getDescriptor().getMessageTypes().get(3);
-          internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_descriptor,
-              new java.lang.String[] { "BaseChunk", "Fragment", "CommitPrefetch", "TreePrefetch", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_BaseChunk_descriptor =
-            internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_descriptor.getNestedTypes().get(0);
-          internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_BaseChunk_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_BaseChunk_descriptor,
-              new java.lang.String[] { "RelativeStart", "ChunkKey", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_PrefetchHint_descriptor =
-            internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_descriptor.getNestedTypes().get(1);
-          internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_PrefetchHint_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_ChunkMeta_PrefetchHint_descriptor,
-              new java.lang.String[] { "Edge", "Sequential", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.PrefetchHint.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_descriptor =
-            getDescriptor().getMessageTypes().get(4);
-          internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_descriptor,
-              new java.lang.String[] { "Name", "Version", "ObjectsTotal", "ObjectsDelta", "BytesTotal", "TipList", "ChunkList", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_TipObjectList_descriptor =
-            internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_descriptor.getNestedTypes().get(0);
-          internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_TipObjectList_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_TipObjectList_descriptor,
-              new java.lang.String[] { "ObjectName", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.TipObjectList.Builder.class);
-          internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_ChunkList_descriptor =
-            internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_descriptor.getNestedTypes().get(1);
-          internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_ChunkList_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_org_eclipse_jgit_storage_dht_CachedPackInfo_ChunkList_descriptor,
-              new java.lang.String[] { "ChunkKey", },
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.class,
-              org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList.Builder.class);
-          return null;
-        }
-      };
-    com.google.protobuf.Descriptors.FileDescriptor
-      .internalBuildGeneratedFileFrom(descriptorData,
-        new com.google.protobuf.Descriptors.FileDescriptor[] {
-        }, assigner);
-  }
-
-  // @@protoc_insertion_point(outer_class_scope)
-}
diff --git a/org.eclipse.jgit.storage.dht.test/.classpath b/org.eclipse.jgit.storage.dht.test/.classpath
deleted file mode 100644
index 859bd11..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-       <classpathentry kind="src" path="tst"/>
-       <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
-       <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
-       <classpathentry kind="output" path="bin"/>
-</classpath>
diff --git a/org.eclipse.jgit.storage.dht.test/.gitignore b/org.eclipse.jgit.storage.dht.test/.gitignore
deleted file mode 100644
index 934e0e0..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/bin
-/target
diff --git a/org.eclipse.jgit.storage.dht.test/.project b/org.eclipse.jgit.storage.dht.test/.project
deleted file mode 100644
index 223ac21..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-       <name>org.eclipse.jgit.storage.dht.test</name>
-       <comment></comment>
-       <projects>
-       </projects>
-       <buildSpec>
-               <buildCommand>
-                       <name>org.eclipse.jdt.core.javabuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.ManifestBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.SchemaBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.api.tools.apiAnalysisBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-       </buildSpec>
-       <natures>
-               <nature>org.eclipse.jdt.core.javanature</nature>
-               <nature>org.eclipse.pde.PluginNature</nature>
-               <nature>org.eclipse.pde.api.tools.apiAnalysisNature</nature>
-       </natures>
-</projectDescription>
diff --git a/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.core.resources.prefs b/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.core.resources.prefs
deleted file mode 100644
index 6a9621d..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-#Sat Dec 20 21:21:24 CET 2008
-eclipse.preferences.version=1
-encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_BothISO88591.patch=ISO-8859-1
-encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_Convert.patch=ISO-8859-1
-encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_DiffCc.patch=ISO-8859-1
-encoding/<project>=UTF-8
diff --git a/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.core.runtime.prefs b/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.core.runtime.prefs
deleted file mode 100644
index 9f733ee..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#Mon Mar 24 18:55:56 EDT 2008
-eclipse.preferences.version=1
-line.separator=\n
diff --git a/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.jdt.core.prefs b/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 8bfa5f1..0000000
+++ /dev/null
@@ -1,320 +0,0 @@
-#Tue Feb 05 00:01:29 CET 2008
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
-org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
-org.eclipse.jdt.core.compiler.compliance=1.5
-org.eclipse.jdt.core.compiler.debug.lineNumber=generate
-org.eclipse.jdt.core.compiler.debug.localVariable=generate
-org.eclipse.jdt.core.compiler.debug.sourceFile=generate
-org.eclipse.jdt.core.compiler.doc.comment.support=enabled
-org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=warning
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.autoboxing=warning
-org.eclipse.jdt.core.compiler.problem.deprecation=warning
-org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
-org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
-org.eclipse.jdt.core.compiler.problem.discouragedReference=warning
-org.eclipse.jdt.core.compiler.problem.emptyStatement=warning
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.fallthroughCase=warning
-org.eclipse.jdt.core.compiler.problem.fieldHiding=warning
-org.eclipse.jdt.core.compiler.problem.finalParameterBound=warning
-org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=error
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=error
-org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=error
-org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=warning
-org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=warning
-org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=error
-org.eclipse.jdt.core.compiler.problem.invalidJavadoc=error
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTags=enabled
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsDeprecatedRef=enabled
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsNotVisibleRef=enabled
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsVisibility=private
-org.eclipse.jdt.core.compiler.problem.localVariableHiding=warning
-org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=error
-org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingJavadocComments=ignore
-org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsVisibility=public
-org.eclipse.jdt.core.compiler.problem.missingJavadocTags=error
-org.eclipse.jdt.core.compiler.problem.missingJavadocTagsOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.missingJavadocTagsVisibility=private
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingSerialVersion=warning
-org.eclipse.jdt.core.compiler.problem.noEffectAssignment=error
-org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=error
-org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
-org.eclipse.jdt.core.compiler.problem.nullReference=warning
-org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=warning
-org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=error
-org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning
-org.eclipse.jdt.core.compiler.problem.rawTypeReference=ignore
-org.eclipse.jdt.core.compiler.problem.redundantNullCheck=warning
-org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
-org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=error
-org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
-org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning
-org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=warning
-org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=warning
-org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=error
-org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=error
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.unusedImport=error
-org.eclipse.jdt.core.compiler.problem.unusedLabel=error
-org.eclipse.jdt.core.compiler.problem.unusedLocal=error
-org.eclipse.jdt.core.compiler.problem.unusedParameter=warning
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
-org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=error
-org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=error
-org.eclipse.jdt.core.compiler.source=1.5
-org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_assignment=0
-org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
-org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
-org.eclipse.jdt.core.formatter.alignment_for_enum_constants=0
-org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
-org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
-org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_after_package=1
-org.eclipse.jdt.core.formatter.blank_lines_before_field=1
-org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
-org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
-org.eclipse.jdt.core.formatter.blank_lines_before_method=1
-org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
-org.eclipse.jdt.core.formatter.blank_lines_before_package=0
-org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
-org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines=false
-org.eclipse.jdt.core.formatter.comment.format_comments=true
-org.eclipse.jdt.core.formatter.comment.format_header=false
-org.eclipse.jdt.core.formatter.comment.format_html=true
-org.eclipse.jdt.core.formatter.comment.format_source_code=true
-org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
-org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
-org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
-org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
-org.eclipse.jdt.core.formatter.comment.line_length=80
-org.eclipse.jdt.core.formatter.compact_else_if=true
-org.eclipse.jdt.core.formatter.continuation_indentation=2
-org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
-org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
-org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_empty_lines=false
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false
-org.eclipse.jdt.core.formatter.indentation.size=4
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
-org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.lineSplit=80
-org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
-org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
-org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
-org.eclipse.jdt.core.formatter.tabulation.char=tab
-org.eclipse.jdt.core.formatter.tabulation.size=4
-org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
diff --git a/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.jdt.ui.prefs b/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.jdt.ui.prefs
deleted file mode 100644 (file)
index df87aaa..0000000
+++ /dev/null
@@ -1,63 +0,0 @@
-#Thu Aug 26 12:30:07 CDT 2010
-eclipse.preferences.version=1
-editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
-formatter_profile=_JGit Format
-formatter_settings_version=11
-internal.default.compliance=default
-org.eclipse.jdt.ui.ignorelowercasenames=true
-org.eclipse.jdt.ui.importorder=java;javax;org;com;
-org.eclipse.jdt.ui.ondemandthreshold=99
-org.eclipse.jdt.ui.staticondemandthreshold=99
-org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8"?><templates/>
-sp_cleanup.add_default_serial_version_id=true
-sp_cleanup.add_generated_serial_version_id=false
-sp_cleanup.add_missing_annotations=false
-sp_cleanup.add_missing_deprecated_annotations=true
-sp_cleanup.add_missing_methods=false
-sp_cleanup.add_missing_nls_tags=false
-sp_cleanup.add_missing_override_annotations=true
-sp_cleanup.add_missing_override_annotations_interface_methods=false
-sp_cleanup.add_serial_version_id=false
-sp_cleanup.always_use_blocks=true
-sp_cleanup.always_use_parentheses_in_expressions=false
-sp_cleanup.always_use_this_for_non_static_field_access=false
-sp_cleanup.always_use_this_for_non_static_method_access=false
-sp_cleanup.convert_to_enhanced_for_loop=false
-sp_cleanup.correct_indentation=false
-sp_cleanup.format_source_code=true
-sp_cleanup.format_source_code_changes_only=true
-sp_cleanup.make_local_variable_final=false
-sp_cleanup.make_parameters_final=false
-sp_cleanup.make_private_fields_final=true
-sp_cleanup.make_type_abstract_if_missing_method=false
-sp_cleanup.make_variable_declarations_final=false
-sp_cleanup.never_use_blocks=false
-sp_cleanup.never_use_parentheses_in_expressions=true
-sp_cleanup.on_save_use_additional_actions=true
-sp_cleanup.organize_imports=false
-sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
-sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
-sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
-sp_cleanup.qualify_static_member_accesses_with_declaring_class=false
-sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
-sp_cleanup.remove_private_constructors=true
-sp_cleanup.remove_trailing_whitespaces=true
-sp_cleanup.remove_trailing_whitespaces_all=true
-sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
-sp_cleanup.remove_unnecessary_casts=false
-sp_cleanup.remove_unnecessary_nls_tags=false
-sp_cleanup.remove_unused_imports=false
-sp_cleanup.remove_unused_local_variables=false
-sp_cleanup.remove_unused_private_fields=true
-sp_cleanup.remove_unused_private_members=false
-sp_cleanup.remove_unused_private_methods=true
-sp_cleanup.remove_unused_private_types=true
-sp_cleanup.sort_members=false
-sp_cleanup.sort_members_all=false
-sp_cleanup.use_blocks=false
-sp_cleanup.use_blocks_only_for_return_and_throw=false
-sp_cleanup.use_parentheses_in_expressions=false
-sp_cleanup.use_this_for_non_static_field_access=false
-sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=true
-sp_cleanup.use_this_for_non_static_method_access=false
-sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=true
diff --git a/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.mylyn.tasks.ui.prefs b/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.mylyn.tasks.ui.prefs
deleted file mode 100644 (file)
index 823c0f5..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-#Tue Jul 19 20:11:28 CEST 2011
-eclipse.preferences.version=1
-project.repository.kind=bugzilla
-project.repository.url=https\://bugs.eclipse.org/bugs
diff --git a/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.mylyn.team.ui.prefs b/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.mylyn.team.ui.prefs
deleted file mode 100644 (file)
index 0cba949..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#Tue Jul 19 20:11:28 CEST 2011
-commit.comment.template=${task.description} \n\nBug\: ${task.key}
-eclipse.preferences.version=1
diff --git a/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.pde.api.tools.prefs b/org.eclipse.jgit.storage.dht.test/.settings/org.eclipse.pde.api.tools.prefs
deleted file mode 100644 (file)
index cd148d9..0000000
+++ /dev/null
@@ -1,94 +0,0 @@
-#Tue Oct 18 00:52:01 CEST 2011
-ANNOTATION_ELEMENT_TYPE_ADDED_METHOD_WITHOUT_DEFAULT_VALUE=Error
-ANNOTATION_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-ANNOTATION_ELEMENT_TYPE_REMOVED_FIELD=Error
-ANNOTATION_ELEMENT_TYPE_REMOVED_METHOD=Error
-ANNOTATION_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_API_TYPE=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_REEXPORTED_API_TYPE=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_REEXPORTED_TYPE=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_TYPE=Error
-CLASS_ELEMENT_TYPE_ADDED_METHOD=Error
-CLASS_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
-CLASS_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-CLASS_ELEMENT_TYPE_CHANGED_CONTRACTED_SUPERINTERFACES_SET=Error
-CLASS_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-CLASS_ELEMENT_TYPE_CHANGED_NON_ABSTRACT_TO_ABSTRACT=Error
-CLASS_ELEMENT_TYPE_CHANGED_NON_FINAL_TO_FINAL=Error
-CLASS_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-CLASS_ELEMENT_TYPE_REMOVED_CONSTRUCTOR=Error
-CLASS_ELEMENT_TYPE_REMOVED_FIELD=Error
-CLASS_ELEMENT_TYPE_REMOVED_METHOD=Error
-CLASS_ELEMENT_TYPE_REMOVED_SUPERCLASS=Error
-CLASS_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-CLASS_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-CONSTRUCTOR_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-CONSTRUCTOR_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-CONSTRUCTOR_ELEMENT_TYPE_CHANGED_VARARGS_TO_ARRAY=Error
-CONSTRUCTOR_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-ENUM_ELEMENT_TYPE_CHANGED_CONTRACTED_SUPERINTERFACES_SET=Error
-ENUM_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-ENUM_ELEMENT_TYPE_REMOVED_ENUM_CONSTANT=Error
-ENUM_ELEMENT_TYPE_REMOVED_FIELD=Error
-ENUM_ELEMENT_TYPE_REMOVED_METHOD=Error
-ENUM_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-FIELD_ELEMENT_TYPE_ADDED_VALUE=Error
-FIELD_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-FIELD_ELEMENT_TYPE_CHANGED_FINAL_TO_NON_FINAL_STATIC_CONSTANT=Error
-FIELD_ELEMENT_TYPE_CHANGED_NON_FINAL_TO_FINAL=Error
-FIELD_ELEMENT_TYPE_CHANGED_NON_STATIC_TO_STATIC=Error
-FIELD_ELEMENT_TYPE_CHANGED_STATIC_TO_NON_STATIC=Error
-FIELD_ELEMENT_TYPE_CHANGED_TYPE=Error
-FIELD_ELEMENT_TYPE_CHANGED_VALUE=Error
-FIELD_ELEMENT_TYPE_REMOVED_TYPE_ARGUMENT=Error
-FIELD_ELEMENT_TYPE_REMOVED_VALUE=Error
-ILLEGAL_EXTEND=Warning
-ILLEGAL_IMPLEMENT=Warning
-ILLEGAL_INSTANTIATE=Warning
-ILLEGAL_OVERRIDE=Warning
-ILLEGAL_REFERENCE=Warning
-INTERFACE_ELEMENT_TYPE_ADDED_FIELD=Error
-INTERFACE_ELEMENT_TYPE_ADDED_METHOD=Error
-INTERFACE_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
-INTERFACE_ELEMENT_TYPE_ADDED_SUPER_INTERFACE_WITH_METHODS=Error
-INTERFACE_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-INTERFACE_ELEMENT_TYPE_CHANGED_CONTRACTED_SUPERINTERFACES_SET=Error
-INTERFACE_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_FIELD=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_METHOD=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-INVALID_JAVADOC_TAG=Ignore
-INVALID_REFERENCE_IN_SYSTEM_LIBRARIES=Error
-LEAK_EXTEND=Warning
-LEAK_FIELD_DECL=Warning
-LEAK_IMPLEMENT=Warning
-LEAK_METHOD_PARAM=Warning
-LEAK_METHOD_RETURN_TYPE=Warning
-METHOD_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
-METHOD_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-METHOD_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-METHOD_ELEMENT_TYPE_CHANGED_NON_ABSTRACT_TO_ABSTRACT=Error
-METHOD_ELEMENT_TYPE_CHANGED_NON_FINAL_TO_FINAL=Error
-METHOD_ELEMENT_TYPE_CHANGED_NON_STATIC_TO_STATIC=Error
-METHOD_ELEMENT_TYPE_CHANGED_STATIC_TO_NON_STATIC=Error
-METHOD_ELEMENT_TYPE_CHANGED_VARARGS_TO_ARRAY=Error
-METHOD_ELEMENT_TYPE_REMOVED_ANNOTATION_DEFAULT_VALUE=Error
-METHOD_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-TYPE_PARAMETER_ELEMENT_TYPE_ADDED_CLASS_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_ADDED_INTERFACE_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_CHANGED_CLASS_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_CHANGED_INTERFACE_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_REMOVED_CLASS_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_REMOVED_INTERFACE_BOUND=Error
-UNUSED_PROBLEM_FILTERS=Warning
-automatically_removed_unused_problem_filters=false
-eclipse.preferences.version=1
-incompatible_api_component_version=Error
-incompatible_api_component_version_include_major_without_breaking_change=Disabled
-incompatible_api_component_version_include_minor_without_api_change=Disabled
-invalid_since_tag_version=Error
-malformed_since_tag=Error
-missing_since_tag=Error
-report_api_breakage_when_major_version_incremented=Disabled
-report_resolution_errors_api_component=Warning
diff --git a/org.eclipse.jgit.storage.dht.test/META-INF/MANIFEST.MF b/org.eclipse.jgit.storage.dht.test/META-INF/MANIFEST.MF
deleted file mode 100644 (file)
index 8ac170f..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-Manifest-Version: 1.0
-Bundle-ManifestVersion: 2
-Bundle-Name: %plugin_name
-Bundle-SymbolicName: org.eclipse.jgit.storage.dht.test
-Bundle-Version: 2.1.0.qualifier
-Bundle-Localization: plugin
-Bundle-Vendor: %provider_name
-Bundle-ActivationPolicy: lazy
-Bundle-RequiredExecutionEnvironment: J2SE-1.5
-Import-Package: org.eclipse.jgit.lib;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.errors;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.storage.dht;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.storage.dht.spi.memory;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.storage.file;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.storage.pack;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.transport;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.util;version="[2.1.0,2.2.0)",
- org.junit;version="[4.0.0,5.0.0)",
- org.hamcrest.core;version="[1.1.0,2.0.0)"
diff --git a/org.eclipse.jgit.storage.dht.test/build.properties b/org.eclipse.jgit.storage.dht.test/build.properties
deleted file mode 100644 (file)
index 32c717a..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-source.. = tst/
-bin.includes = META-INF/,\
-               .,\
-               plugin.properties
diff --git a/org.eclipse.jgit.storage.dht.test/org.eclipse.jgit.storage.dht--All-Tests.launch b/org.eclipse.jgit.storage.dht.test/org.eclipse.jgit.storage.dht--All-Tests.launch
deleted file mode 100644 (file)
index 039b441..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<launchConfiguration type="org.eclipse.jdt.junit.launchconfig">
-<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
-<listEntry value="/org.eclipse.jgit.storage.dht.test"/>
-</listAttribute>
-<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
-<listEntry value="4"/>
-</listAttribute>
-<stringAttribute key="org.eclipse.jdt.junit.CONTAINER" value="=org.eclipse.jgit.storage.dht.test"/>
-<booleanAttribute key="org.eclipse.jdt.junit.KEEPRUNNING_ATTR" value="false"/>
-<stringAttribute key="org.eclipse.jdt.junit.TESTNAME" value=""/>
-<stringAttribute key="org.eclipse.jdt.junit.TEST_KIND" value="org.eclipse.jdt.junit.loader.junit4"/>
-<stringAttribute key="org.eclipse.jdt.launching.MAIN_TYPE" value=""/>
-<stringAttribute key="org.eclipse.jdt.launching.PROJECT_ATTR" value="org.eclipse.jgit.storage.dht.test"/>
-</launchConfiguration>
diff --git a/org.eclipse.jgit.storage.dht.test/plugin.properties b/org.eclipse.jgit.storage.dht.test/plugin.properties
deleted file mode 100644 (file)
index 95cb5d4..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-plugin_name=JGit DHT Storage Tests
-provider_name=Eclipse.org
diff --git a/org.eclipse.jgit.storage.dht.test/pom.xml b/org.eclipse.jgit.storage.dht.test/pom.xml
deleted file mode 100644 (file)
index 6c0655d..0000000
+++ /dev/null
@@ -1,101 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Copyright (C) 2011, Google Inc.
-   and other copyright owners as documented in the project's IP log.
-
-   This program and the accompanying materials are made available
-   under the terms of the Eclipse Distribution License v1.0 which
-   accompanies this distribution, is reproduced below, and is
-   available at http://www.eclipse.org/org/documents/edl-v10.php
-
-   All rights reserved.
-
-   Redistribution and use in source and binary forms, with or
-   without modification, are permitted provided that the following
-   conditions are met:
-
-   - Redistributions of source code must retain the above copyright
-     notice, this list of conditions and the following disclaimer.
-
-   - Redistributions in binary form must reproduce the above
-     copyright notice, this list of conditions and the following
-     disclaimer in the documentation and/or other materials provided
-     with the distribution.
-
-   - Neither the name of the Eclipse Foundation, Inc. nor the
-     names of its contributors may be used to endorse or promote
-     products derived from this software without specific prior
-     written permission.
-
-   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
-   CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
-   INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
-   OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-   ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
-   NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-   LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-   CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-   STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-   ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.eclipse.jgit</groupId>
-    <artifactId>org.eclipse.jgit-parent</artifactId>
-    <version>2.1.0-SNAPSHOT</version>
-  </parent>
-
-  <artifactId>org.eclipse.jgit.storage.dht.test</artifactId>
-  <name>JGit - DHT Storage Tests</name>
-
-  <description>
-    JUnit tests for Git repository storage on a distributed hashtable
-  </description>
-
-  <dependencies>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.eclipse.jgit</groupId>
-      <artifactId>org.eclipse.jgit</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.eclipse.jgit</groupId>
-      <artifactId>org.eclipse.jgit.junit</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.eclipse.jgit</groupId>
-      <artifactId>org.eclipse.jgit.storage.dht</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <testSourceDirectory>tst/</testSourceDirectory>
-
-    <plugins>
-      <plugin>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <argLine>-Xmx256m -Dfile.encoding=UTF-8</argLine>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/ChunkIndexTest.java b/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/ChunkIndexTest.java
deleted file mode 100644 (file)
index a5524ed..0000000
+++ /dev/null
@@ -1,317 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.eclipse.jgit.lib.MutableObjectId;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.transport.PackedObjectInfo;
-import org.eclipse.jgit.util.NB;
-import org.junit.Test;
-
-public class ChunkIndexTest {
-       @Test
-       public void testSingleObject_NotFound() throws DhtException {
-               List<PackedObjectInfo> objs = list(object(1, 1));
-               ChunkIndex idx = index(objs);
-               assertEquals(-1, idx.findOffset(ObjectId.zeroId()));
-       }
-
-       @Test
-       public void testSingleObject_Offset1() throws DhtException {
-               assertEquals(header(0, 1), header(list(object(1, 0))));
-
-               List<PackedObjectInfo> objs = list(object(0x1200, 255));
-               ChunkIndex idx = index(objs);
-
-               assertEquals(header(0, 1), header(objs));
-               assertEquals(1, idx.getObjectCount());
-               assertEquals(2 + 20 + 1, idx.getIndexSize());
-
-               assertEquals(objs.get(0), idx.getObjectId(0));
-               assertEquals(objs.get(0).getOffset(), idx.getOffset(0));
-               assertEquals(objs.get(0).getOffset(), idx.findOffset(objs.get(0)));
-       }
-
-       @Test
-       public void testSingleObject_Offset2() throws DhtException {
-               assertEquals(header(0, 2), header(list(object(1, 1 << 8))));
-
-               List<PackedObjectInfo> objs = list(object(0x1200, 0xab34));
-               ChunkIndex idx = index(objs);
-
-               assertEquals(header(0, 2), header(objs));
-               assertEquals(1, idx.getObjectCount());
-               assertEquals(2 + 20 + 2, idx.getIndexSize());
-
-               assertEquals(objs.get(0), idx.getObjectId(0));
-               assertEquals(objs.get(0).getOffset(), idx.getOffset(0));
-               assertEquals(objs.get(0).getOffset(), idx.findOffset(objs.get(0)));
-       }
-
-       @Test
-       public void testSingleObject_Offset3() throws DhtException {
-               assertEquals(header(0, 3), header(list(object(1, 1 << 16))));
-
-               List<PackedObjectInfo> objs = list(object(0x1200, 0xab1234));
-               ChunkIndex idx = index(objs);
-
-               assertEquals(header(0, 3), header(objs));
-               assertEquals(1, idx.getObjectCount());
-               assertEquals(2 + 20 + 3, idx.getIndexSize());
-
-               assertEquals(objs.get(0), idx.getObjectId(0));
-               assertEquals(objs.get(0).getOffset(), idx.getOffset(0));
-               assertEquals(objs.get(0).getOffset(), idx.findOffset(objs.get(0)));
-       }
-
-       @Test
-       public void testSingleObject_Offset4() throws DhtException {
-               assertEquals(header(0, 4), header(list(object(1, 1 << 24))));
-
-               List<PackedObjectInfo> objs = list(object(0x1200, 0x7bcdef42));
-               ChunkIndex idx = index(objs);
-
-               assertEquals(header(0, 4), header(objs));
-               assertEquals(1, idx.getObjectCount());
-               assertEquals(objs.get(0), idx.getObjectId(0));
-
-               assertEquals(2 + 20 + 4, idx.getIndexSize());
-               assertEquals(objs.get(0).getOffset(), idx.getOffset(0));
-               assertEquals(objs.get(0).getOffset(), idx.findOffset(objs.get(0)));
-       }
-
-       @Test
-       public void testObjects3() throws DhtException {
-               List<PackedObjectInfo> objs = objects(2, 3, 1);
-               ChunkIndex idx = index(objs);
-
-               assertEquals(header(0, 1), header(objs));
-               assertEquals(3, idx.getObjectCount());
-               assertEquals(2 + 3 * 20 + 3 * 1, idx.getIndexSize());
-               assertTrue(isSorted(objs));
-
-               for (int i = 0; i < objs.size(); i++) {
-                       assertEquals(objs.get(i), idx.getObjectId(i));
-                       assertEquals(objs.get(i).getOffset(), idx.getOffset(i));
-                       assertEquals(objs.get(i).getOffset(), idx.findOffset(objs.get(i)));
-               }
-       }
-
-       @Test
-       public void testObjects255_SameBucket() throws DhtException {
-               int[] ints = new int[255];
-               for (int i = 0; i < 255; i++)
-                       ints[i] = i;
-               List<PackedObjectInfo> objs = objects(ints);
-               ChunkIndex idx = index(objs);
-
-               assertEquals(header(1, 2), header(objs));
-               assertEquals(255, idx.getObjectCount());
-               assertEquals(2 + 256 + 255 * 20 + 255 * 2 //
-                               + 12 + 4 * 256, idx.getIndexSize());
-               assertTrue(isSorted(objs));
-
-               for (int i = 0; i < objs.size(); i++) {
-                       assertEquals(objs.get(i), idx.getObjectId(i));
-                       assertEquals(objs.get(i).getOffset(), idx.getOffset(i));
-                       assertEquals(objs.get(i).getOffset(), idx.findOffset(objs.get(i)));
-               }
-       }
-
-       @Test
-       public void testObjects512_ManyBuckets() throws DhtException {
-               int[] ints = new int[512];
-               for (int i = 0; i < 256; i++) {
-                       ints[i] = (i << 8) | 0;
-                       ints[i + 256] = (i << 8) | 1;
-               }
-               List<PackedObjectInfo> objs = objects(ints);
-               ChunkIndex idx = index(objs);
-
-               assertEquals(header(1, 2), header(objs));
-               assertEquals(512, idx.getObjectCount());
-               assertEquals(2 + 256 + 512 * 20 + 512 * 2 //
-                               + 12 + 4 * 256, idx.getIndexSize());
-               assertTrue(isSorted(objs));
-
-               for (int i = 0; i < objs.size(); i++) {
-                       assertEquals(objs.get(i), idx.getObjectId(i));
-                       assertEquals(objs.get(i).getOffset(), idx.getOffset(i));
-                       assertEquals(objs.get(i).getOffset(), idx.findOffset(objs.get(i)));
-               }
-       }
-
-       @Test
-       public void testFanout2() throws DhtException {
-               List<PackedObjectInfo> objs = new ArrayList<PackedObjectInfo>(65280);
-               MutableObjectId idBuf = new MutableObjectId();
-               for (int i = 0; i < 256; i++) {
-                       idBuf.setByte(2, i & 0xff);
-                       for (int j = 0; j < 255; j++) {
-                               idBuf.setByte(3, j & 0xff);
-                               PackedObjectInfo oe = new PackedObjectInfo(idBuf);
-                               oe.setOffset((i << 8) | j);
-                               objs.add(oe);
-                       }
-               }
-               ChunkIndex idx = index(objs);
-
-               assertEquals(header(2, 2), header(objs));
-               assertEquals(256 * 255, idx.getObjectCount());
-               assertTrue(isSorted(objs));
-
-               for (int i = 0; i < objs.size(); i++) {
-                       assertEquals(objs.get(i), idx.getObjectId(i));
-                       assertEquals(objs.get(i).getOffset(), idx.getOffset(i));
-                       assertEquals(objs.get(i).getOffset(), idx.findOffset(objs.get(i)));
-               }
-       }
-
-       @Test
-       public void testFanout3() throws DhtException {
-               List<PackedObjectInfo> objs = new ArrayList<PackedObjectInfo>(1 << 16);
-               MutableObjectId idBuf = new MutableObjectId();
-               for (int i = 0; i < 256; i++) {
-                       idBuf.setByte(2, i & 0xff);
-                       for (int j = 0; j < 256; j++) {
-                               idBuf.setByte(3, j & 0xff);
-                               PackedObjectInfo oe = new PackedObjectInfo(idBuf);
-                               oe.setOffset((i << 8) | j);
-                               objs.add(oe);
-                       }
-               }
-               ChunkIndex idx = index(objs);
-
-               assertEquals(header(3, 2), header(objs));
-               assertEquals(256 * 256, idx.getObjectCount());
-               assertTrue(isSorted(objs));
-
-               for (int i = 0; i < objs.size(); i++) {
-                       assertEquals(objs.get(i), idx.getObjectId(i));
-                       assertEquals(objs.get(i).getOffset(), idx.getOffset(i));
-                       assertEquals(objs.get(i).getOffset(), idx.findOffset(objs.get(i)));
-               }
-       }
-
-       @Test
-       public void testObjects65280_ManyBuckets() throws DhtException {
-               List<PackedObjectInfo> objs = new ArrayList<PackedObjectInfo>(65280);
-               MutableObjectId idBuf = new MutableObjectId();
-               for (int i = 0; i < 256; i++) {
-                       idBuf.setByte(0, i & 0xff);
-                       for (int j = 0; j < 255; j++) {
-                               idBuf.setByte(3, j & 0xff);
-                               PackedObjectInfo oe = new PackedObjectInfo(idBuf);
-                               oe.setOffset((i << 8) | j);
-                               objs.add(oe);
-                       }
-               }
-               ChunkIndex idx = index(objs);
-
-               assertEquals(header(1, 2), header(objs));
-               assertEquals(65280, idx.getObjectCount());
-               assertTrue(isSorted(objs));
-
-               for (int i = 0; i < objs.size(); i++) {
-                       assertEquals(objs.get(i), idx.getObjectId(i));
-                       assertEquals(objs.get(i).getOffset(), idx.getOffset(i));
-                       assertEquals(objs.get(i).getOffset(), idx.findOffset(objs.get(i)));
-               }
-       }
-
-       private boolean isSorted(List<PackedObjectInfo> objs) {
-               PackedObjectInfo last = objs.get(0);
-               for (int i = 1; i < objs.size(); i++) {
-                       PackedObjectInfo oe = objs.get(i);
-                       if (oe.compareTo(last) <= 0)
-                               return false;
-               }
-               return true;
-       }
-
-       private List<PackedObjectInfo> list(PackedObjectInfo... all) {
-               List<PackedObjectInfo> objs = new ArrayList<PackedObjectInfo>();
-               for (PackedObjectInfo o : all)
-                       objs.add(o);
-               return objs;
-       }
-
-       private int header(int fanoutTable, int offsetTable) {
-               return (0x01 << 8) | (fanoutTable << 3) | offsetTable;
-       }
-
-       private int header(List<PackedObjectInfo> objs) {
-               byte[] index = ChunkIndex.create(objs);
-               return NB.decodeUInt16(index, 0);
-       }
-
-       private ChunkIndex index(List<PackedObjectInfo> objs) throws DhtException {
-               ChunkKey key = null;
-               byte[] index = ChunkIndex.create(objs);
-               return ChunkIndex.fromBytes(key, index, 0, index.length);
-       }
-
-       private List<PackedObjectInfo> objects(int... values) {
-               List<PackedObjectInfo> objs = new ArrayList<PackedObjectInfo>();
-               for (int i = 0; i < values.length; i++)
-                       objs.add(object(values[i], i * 10));
-               return objs;
-       }
-
-       private PackedObjectInfo object(int id, int off) {
-               MutableObjectId idBuf = new MutableObjectId();
-               idBuf.setByte(0, (id >>> 8) & 0xff);
-               idBuf.setByte(1, id & 0xff);
-
-               PackedObjectInfo obj = new PackedObjectInfo(idBuf);
-               obj.setOffset(off);
-               return obj;
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/ChunkKeyTest.java b/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/ChunkKeyTest.java
deleted file mode 100644 (file)
index 9219663..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-
-import org.eclipse.jgit.lib.ObjectId;
-import org.junit.Test;
-
-public class ChunkKeyTest {
-       @Test
-       public void testKey() {
-               RepositoryKey repo1 = RepositoryKey.fromInt(0x41234567);
-               RepositoryKey repo2 = RepositoryKey.fromInt(2);
-               ObjectId id = ObjectId
-                               .fromString("3e64b928d51b3a28e89cfe2a3f0eeae35ef07839");
-
-               ChunkKey key1 = ChunkKey.create(repo1, id);
-               assertEquals(repo1.asInt(), key1.getRepositoryId());
-               assertEquals(id, key1.getChunkHash());
-               assertEquals("41234567.3e64b928d51b3a28e89cfe2a3f0eeae35ef07839",
-                               key1.asString());
-
-               ChunkKey key2 = ChunkKey.fromBytes(key1.asBytes());
-               assertEquals(repo1.asInt(), key2.getRepositoryId());
-               assertEquals(id, key2.getChunkHash());
-               assertEquals("41234567.3e64b928d51b3a28e89cfe2a3f0eeae35ef07839",
-                               key2.asString());
-
-               ChunkKey key3 = ChunkKey.fromString(key1.asString());
-               assertEquals(repo1.asInt(), key3.getRepositoryId());
-               assertEquals(id, key3.getChunkHash());
-               assertEquals("41234567.3e64b928d51b3a28e89cfe2a3f0eeae35ef07839",
-                               key3.asString());
-
-               assertEquals(key1, key2);
-               assertEquals(key2, key3);
-
-               ChunkKey key4 = ChunkKey.create(repo2, id);
-               assertFalse("not equal", key2.equals(key4));
-
-               ObjectId id2 = ObjectId
-                               .fromString("3e64b928d51b3a28e89cfe2a3f0eeae35ef07840");
-               ChunkKey key5 = ChunkKey.create(repo1, id2);
-               assertFalse("not equal", key2.equals(key5));
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/DhtPackParserTest.java b/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/DhtPackParserTest.java
deleted file mode 100644 (file)
index 6bb09a7..0000000
+++ /dev/null
@@ -1,343 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
-import static org.eclipse.jgit.lib.Constants.OBJ_OFS_DELTA;
-import static org.eclipse.jgit.lib.Constants.OBJ_REF_DELTA;
-import static org.eclipse.jgit.lib.Constants.PACK_SIGNATURE;
-import static org.eclipse.jgit.lib.Constants.newMessageDigest;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.MessageDigest;
-import java.util.Arrays;
-import java.util.zip.Deflater;
-
-import org.eclipse.jgit.lib.NullProgressMonitor;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectInserter;
-import org.eclipse.jgit.lib.ObjectReader;
-import org.eclipse.jgit.storage.dht.spi.memory.MemoryDatabase;
-import org.eclipse.jgit.storage.file.PackLock;
-import org.eclipse.jgit.storage.pack.DeltaEncoder;
-import org.eclipse.jgit.util.NB;
-import org.eclipse.jgit.util.TemporaryBuffer;
-import org.junit.Before;
-import org.junit.Test;
-
-public class DhtPackParserTest {
-       private MemoryDatabase db;
-
-       @Before
-       public void setUpDatabase() {
-               db = new MemoryDatabase();
-       }
-
-       @Test
-       public void testParse() throws IOException {
-               DhtRepository repo = db.open("test.git");
-               repo.create(true);
-
-               ObjectInserter.Formatter fmt = new ObjectInserter.Formatter();
-               byte[] data0 = new byte[512];
-               Arrays.fill(data0, (byte) 0xf3);
-               ObjectId id0 = fmt.idFor(OBJ_BLOB, data0);
-
-               TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(64 * 1024);
-               packHeader(pack, 4);
-               objectHeader(pack, OBJ_BLOB, data0.length);
-               deflate(pack, data0);
-
-               byte[] data1 = clone(0x01, data0);
-               byte[] delta1 = delta(data0, data1);
-               ObjectId id1 = fmt.idFor(OBJ_BLOB, data1);
-               objectHeader(pack, OBJ_REF_DELTA, delta1.length);
-               id0.copyRawTo(pack);
-               deflate(pack, delta1);
-
-               byte[] data2 = clone(0x02, data1);
-               byte[] delta2 = delta(data1, data2);
-               ObjectId id2 = fmt.idFor(OBJ_BLOB, data2);
-               objectHeader(pack, OBJ_REF_DELTA, delta2.length);
-               id1.copyRawTo(pack);
-               deflate(pack, delta2);
-
-               byte[] data3 = clone(0x03, data2);
-               byte[] delta3 = delta(data2, data3);
-               ObjectId id3 = fmt.idFor(OBJ_BLOB, data3);
-               objectHeader(pack, OBJ_REF_DELTA, delta3.length);
-               id2.copyRawTo(pack);
-               deflate(pack, delta3);
-               digest(pack);
-
-               ObjectInserter ins = repo.newObjectInserter();
-               try {
-                       InputStream is = new ByteArrayInputStream(pack.toByteArray());
-                       DhtPackParser p = (DhtPackParser) ins.newPackParser(is);
-                       PackLock lock = p.parse(NullProgressMonitor.INSTANCE);
-                       assertNull(lock);
-               } finally {
-                       ins.release();
-               }
-
-               ObjectReader ctx = repo.newObjectReader();
-               try {
-                       assertTrue(ctx.has(id0, OBJ_BLOB));
-                       assertTrue(ctx.has(id1, OBJ_BLOB));
-                       assertTrue(ctx.has(id2, OBJ_BLOB));
-                       assertTrue(ctx.has(id3, OBJ_BLOB));
-               } finally {
-                       ctx.release();
-               }
-       }
-
-       @Test
-       public void testLargeFragmentWithRefDelta() throws IOException {
-               DhtInserterOptions insOpt = new DhtInserterOptions().setChunkSize(256);
-               @SuppressWarnings("unchecked")
-               DhtRepository repo = (DhtRepository) new DhtRepositoryBuilder<DhtRepositoryBuilder, DhtRepository, MemoryDatabase>()
-                               .setInserterOptions(insOpt).setDatabase(db) //
-                               .setRepositoryName("test.git") //
-                               .setMustExist(false) //
-                               .build();
-               repo.create(true);
-
-               ObjectInserter.Formatter fmt = new ObjectInserter.Formatter();
-               TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(64 * 1024);
-               packHeader(pack, 3);
-
-               byte[] data3 = new byte[4];
-               Arrays.fill(data3, (byte) 0xf3);
-               ObjectId id3 = fmt.idFor(OBJ_BLOB, data3);
-               objectHeader(pack, OBJ_BLOB, data3.length);
-               deflate(pack, data3);
-
-               byte[] data0 = newArray(insOpt.getChunkSize() * 2);
-               ObjectId id0 = fmt.idFor(OBJ_BLOB, data0);
-               objectHeader(pack, OBJ_BLOB, data0.length);
-               store(pack, data0);
-               assertTrue(pack.length() > insOpt.getChunkSize());
-
-               byte[] data1 = clone(1, data0);
-               ObjectId id1 = fmt.idFor(OBJ_BLOB, data1);
-               byte[] delta1 = delta(data0, data1);
-               objectHeader(pack, OBJ_REF_DELTA, delta1.length);
-               id0.copyRawTo(pack);
-               deflate(pack, delta1);
-
-               digest(pack);
-
-               ObjectInserter ins = repo.newObjectInserter();
-               try {
-                       InputStream is = new ByteArrayInputStream(pack.toByteArray());
-                       DhtPackParser p = (DhtPackParser) ins.newPackParser(is);
-                       PackLock lock = p.parse(NullProgressMonitor.INSTANCE);
-                       assertNull(lock);
-               } finally {
-                       ins.release();
-               }
-
-               ObjectReader ctx = repo.newObjectReader();
-               try {
-                       assertTrue(ctx.has(id0, OBJ_BLOB));
-                       assertTrue(ctx.has(id1, OBJ_BLOB));
-                       assertTrue(ctx.has(id3, OBJ_BLOB));
-               } finally {
-                       ctx.release();
-               }
-       }
-
-       @Test
-       public void testLargeFragmentWithOfsDelta() throws IOException {
-               DhtInserterOptions insOpt = new DhtInserterOptions().setChunkSize(256);
-               @SuppressWarnings("unchecked")
-               DhtRepository repo = (DhtRepository) new DhtRepositoryBuilder<DhtRepositoryBuilder, DhtRepository, MemoryDatabase>()
-                               .setInserterOptions(insOpt).setDatabase(db) //
-                               .setRepositoryName("test.git") //
-                               .setMustExist(false) //
-                               .build();
-               repo.create(true);
-
-               ObjectInserter.Formatter fmt = new ObjectInserter.Formatter();
-               TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(64 * 1024);
-               packHeader(pack, 3);
-
-               byte[] data3 = new byte[4];
-               Arrays.fill(data3, (byte) 0xf3);
-               ObjectId id3 = fmt.idFor(OBJ_BLOB, data3);
-               objectHeader(pack, OBJ_BLOB, data3.length);
-               deflate(pack, data3);
-
-               byte[] data0 = newArray(insOpt.getChunkSize() * 2);
-               ObjectId id0 = fmt.idFor(OBJ_BLOB, data0);
-               long pos0 = pack.length();
-               objectHeader(pack, OBJ_BLOB, data0.length);
-               store(pack, data0);
-               assertTrue(pack.length() > insOpt.getChunkSize());
-
-               byte[] data1 = clone(1, data0);
-               ObjectId id1 = fmt.idFor(OBJ_BLOB, data1);
-               byte[] delta1 = delta(data0, data1);
-               long pos1 = pack.length();
-               objectHeader(pack, OBJ_OFS_DELTA, delta1.length);
-               writeOffset(pack, pos1 - pos0);
-               deflate(pack, delta1);
-
-               digest(pack);
-
-               ObjectInserter ins = repo.newObjectInserter();
-               try {
-                       InputStream is = new ByteArrayInputStream(pack.toByteArray());
-                       DhtPackParser p = (DhtPackParser) ins.newPackParser(is);
-                       PackLock lock = p.parse(NullProgressMonitor.INSTANCE);
-                       assertNull(lock);
-               } finally {
-                       ins.release();
-               }
-
-               ObjectReader ctx = repo.newObjectReader();
-               try {
-                       assertTrue(ctx.has(id0, OBJ_BLOB));
-                       assertTrue(ctx.has(id1, OBJ_BLOB));
-                       assertTrue(ctx.has(id3, OBJ_BLOB));
-               } finally {
-                       ctx.release();
-               }
-       }
-
-       private byte[] newArray(int size) {
-               byte[] r = new byte[size];
-               for (int i = 0; i < r.length; i++)
-                       r[i] = (byte) (42 + i);
-               return r;
-       }
-
-       private byte[] clone(int first, byte[] base) {
-               byte[] r = new byte[base.length];
-               System.arraycopy(base, 1, r, 1, r.length - 1);
-               r[0] = (byte) first;
-               return r;
-       }
-
-       private byte[] delta(byte[] base, byte[] dest) throws IOException {
-               ByteArrayOutputStream tmp = new ByteArrayOutputStream();
-               DeltaEncoder de = new DeltaEncoder(tmp, base.length, dest.length);
-               de.insert(dest, 0, 1);
-               de.copy(1, base.length - 1);
-               return tmp.toByteArray();
-       }
-
-       private void packHeader(TemporaryBuffer.Heap pack, int cnt)
-                       throws IOException {
-               final byte[] hdr = new byte[8];
-               NB.encodeInt32(hdr, 0, 2);
-               NB.encodeInt32(hdr, 4, cnt);
-               pack.write(PACK_SIGNATURE);
-               pack.write(hdr, 0, 8);
-       }
-
-       private void objectHeader(TemporaryBuffer.Heap pack, int type, int sz)
-                       throws IOException {
-               byte[] buf = new byte[8];
-               int nextLength = sz >>> 4;
-               buf[0] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (type << 4) | (sz & 0x0F));
-               sz = nextLength;
-               int n = 1;
-               while (sz > 0) {
-                       nextLength >>>= 7;
-                       buf[n++] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (sz & 0x7F));
-                       sz = nextLength;
-               }
-               pack.write(buf, 0, n);
-       }
-
-       private void writeOffset(TemporaryBuffer.Heap pack, long offsetDiff)
-                       throws IOException {
-               byte[] headerBuffer = new byte[32];
-               int n = headerBuffer.length - 1;
-               headerBuffer[n] = (byte) (offsetDiff & 0x7F);
-               while ((offsetDiff >>= 7) > 0)
-                       headerBuffer[--n] = (byte) (0x80 | (--offsetDiff & 0x7F));
-               pack.write(headerBuffer, n, headerBuffer.length - n);
-       }
-
-       private void deflate(TemporaryBuffer.Heap pack, byte[] content)
-                       throws IOException {
-               final Deflater deflater = new Deflater();
-               final byte[] buf = new byte[128];
-               deflater.setInput(content, 0, content.length);
-               deflater.finish();
-               do {
-                       final int n = deflater.deflate(buf, 0, buf.length);
-                       if (n > 0)
-                               pack.write(buf, 0, n);
-               } while (!deflater.finished());
-               deflater.end();
-       }
-
-       private void store(TemporaryBuffer.Heap pack, byte[] content)
-                       throws IOException {
-               final Deflater deflater = new Deflater(Deflater.NO_COMPRESSION);
-               final byte[] buf = new byte[128];
-               deflater.setInput(content, 0, content.length);
-               deflater.finish();
-               do {
-                       final int n = deflater.deflate(buf, 0, buf.length);
-                       if (n > 0)
-                               pack.write(buf, 0, n);
-               } while (!deflater.finished());
-               deflater.end();
-       }
-
-       private void digest(TemporaryBuffer.Heap buf) throws IOException {
-               MessageDigest md = newMessageDigest();
-               md.update(buf.toByteArray());
-               buf.write(md.digest());
-       }
-}
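
The packHeader()/objectHeader()/writeOffset() helpers above hand-build a Git pack stream for the parser. As a reference, here is a minimal standalone sketch (hypothetical class name, not part of the deleted code) of the variable-length object-header encoding those tests exercise: the first byte carries the object type and the low four size bits, and each following byte carries seven more size bits, with the high bit set while more bytes follow.

public class PackObjectHeaderSketch {
	static byte[] encode(int type, long size) {
		byte[] buf = new byte[10];
		int n = 0;
		long next = size >>> 4;
		// Low 4 bits of the size share the first byte with the 3-bit type.
		buf[n++] = (byte) ((next > 0 ? 0x80 : 0x00) | (type << 4) | (size & 0x0F));
		while (next > 0) {
			long following = next >>> 7;
			buf[n++] = (byte) ((following > 0 ? 0x80 : 0x00) | (next & 0x7F));
			next = following;
		}
		return java.util.Arrays.copyOf(buf, n);
	}

	public static void main(String[] args) {
		// A 512 byte blob (type 3, OBJ_BLOB) encodes to the two bytes b0 20.
		for (byte b : encode(3, 512))
			System.out.printf("%02x ", b);
		System.out.println();
	}
}
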
diff --git a/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/DhtRepositoryBuilderTest.java b/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/DhtRepositoryBuilderTest.java
deleted file mode 100644
index 0300004..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.Ref;
-import org.eclipse.jgit.storage.dht.spi.memory.MemoryDatabase;
-import org.junit.Before;
-import org.junit.Test;
-
-public class DhtRepositoryBuilderTest {
-       private MemoryDatabase db;
-
-       @Before
-       public void setUpDatabase() {
-               db = new MemoryDatabase();
-       }
-
-       @Test
-       public void testCreateAndOpen() throws IOException {
-               String name = "test.git";
-
-               DhtRepository repo1 = db.open(name);
-               assertSame(db, repo1.getDatabase());
-               assertSame(repo1, repo1.getRefDatabase().getRepository());
-               assertSame(repo1, repo1.getObjectDatabase().getRepository());
-
-               assertEquals(name, repo1.getRepositoryName().asString());
-               assertNull(repo1.getRepositoryKey());
-               assertFalse(repo1.getObjectDatabase().exists());
-
-               repo1.create(true);
-               assertNotNull(repo1.getRepositoryKey());
-               assertTrue(repo1.getObjectDatabase().exists());
-
-               DhtRepository repo2 = db.open(name);
-               assertNotNull(repo2.getRepositoryKey());
-               assertTrue(repo2.getObjectDatabase().exists());
-               assertEquals(0, repo2.getAllRefs().size());
-
-               Ref HEAD = repo2.getRef(Constants.HEAD);
-               assertTrue(HEAD.isSymbolic());
-               assertEquals(Constants.R_HEADS + Constants.MASTER, //
-                               HEAD.getLeaf().getName());
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/LargeNonDeltaObjectTest.java b/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/LargeNonDeltaObjectTest.java
deleted file mode 100644
index 9f1bbf1..0000000
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.zip.Deflater;
-
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectInserter;
-import org.eclipse.jgit.lib.ObjectLoader;
-import org.eclipse.jgit.lib.ObjectReader;
-import org.eclipse.jgit.lib.ObjectStream;
-import org.eclipse.jgit.lib.Repository;
-import org.eclipse.jgit.storage.dht.spi.memory.MemoryDatabase;
-import org.eclipse.jgit.util.IO;
-import org.junit.Before;
-import org.junit.Test;
-
-public class LargeNonDeltaObjectTest {
-       private MemoryDatabase db;
-
-       @Before
-       public void setUpDatabase() {
-               db = new MemoryDatabase();
-       }
-
-       @SuppressWarnings("unchecked")
-       @Test
-       public void testInsertRead() throws IOException {
-               DhtInserterOptions insopt = new DhtInserterOptions();
-               insopt.setChunkSize(128);
-               insopt.setCompression(Deflater.NO_COMPRESSION);
-
-               Repository repo = new DhtRepositoryBuilder() //
-                               .setDatabase(db) //
-                               .setInserterOptions(insopt) //
-                               .setRepositoryName("test.git") //
-                               .setMustExist(false) //
-                               .build();
-               repo.create(true);
-
-               byte[] data = new byte[insopt.getChunkSize() * 3];
-               Arrays.fill(data, (byte) 0x42);
-
-               ObjectInserter ins = repo.newObjectInserter();
-               ObjectId id = ins.insert(Constants.OBJ_BLOB, data);
-               ins.flush();
-               ins.release();
-
-               ObjectReader reader = repo.newObjectReader();
-               ObjectLoader ldr = reader.open(id);
-               assertEquals(Constants.OBJ_BLOB, ldr.getType());
-               assertEquals(data.length, ldr.getSize());
-               assertTrue(ldr.isLarge());
-
-               byte[] dst = new byte[data.length];
-               ObjectStream in = ldr.openStream();
-               IO.readFully(in, dst, 0, dst.length);
-               assertTrue(Arrays.equals(data, dst));
-               in.close();
-
-               // Reading should still work, even though initial chunk is gone.
-               dst = new byte[data.length];
-               in = ldr.openStream();
-               IO.readFully(in, dst, 0, dst.length);
-               assertTrue(Arrays.equals(data, dst));
-               in.close();
-
-               reader.release();
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/ObjectIndexKeyTest.java b/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/ObjectIndexKeyTest.java
deleted file mode 100644
index d3419bd..0000000
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.junit.Assert.assertEquals;
-
-import org.eclipse.jgit.lib.ObjectId;
-import org.junit.Test;
-
-public class ObjectIndexKeyTest {
-       @Test
-       public void testKey() {
-               RepositoryKey repo = RepositoryKey.fromInt(0x41234567);
-               ObjectId id = ObjectId
-                               .fromString("3e64b928d51b3a28e89cfe2a3f0eeae35ef07839");
-
-               ObjectIndexKey key1 = ObjectIndexKey.create(repo, id);
-               assertEquals(repo.asInt(), key1.getRepositoryId());
-               assertEquals(key1, id);
-               assertEquals("41234567.3e64b928d51b3a28e89cfe2a3f0eeae35ef07839",
-                               key1.asString());
-
-               ObjectIndexKey key2 = ObjectIndexKey.fromBytes(key1.asBytes());
-               assertEquals(repo.asInt(), key2.getRepositoryId());
-               assertEquals(key2, id);
-               assertEquals("41234567.3e64b928d51b3a28e89cfe2a3f0eeae35ef07839",
-                               key2.asString());
-
-               ObjectIndexKey key3 = ObjectIndexKey.fromString(key1.asString());
-               assertEquals(repo.asInt(), key3.getRepositoryId());
-               assertEquals(key3, id);
-               assertEquals("41234567.3e64b928d51b3a28e89cfe2a3f0eeae35ef07839",
-                               key3.asString());
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/RepositoryKeyTest.java b/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/RepositoryKeyTest.java
deleted file mode 100644
index 6dc7e0e..0000000
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.junit.Assert.assertEquals;
-
-import org.junit.Test;
-
-public class RepositoryKeyTest {
-       @Test
-       public void fromString() {
-               assertEquals(RepositoryKey.create(2), RepositoryKey
-                               .fromString("40000000"));
-
-               assertEquals(RepositoryKey.create(1), RepositoryKey
-                               .fromString("80000000"));
-       }
-}
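
The two assertions above capture a detail of the removed schema: a repository's integer id appears to be stored bit-reversed before being hex-formatted, so sequentially assigned ids (1, 2, 3, ...) scatter across the DHT key space instead of clustering. A minimal sketch of that mapping (helper name hypothetical, not the deleted API):

public class RepositoryKeySketch {
	// Integer.reverse(1) == 0x80000000 and Integer.reverse(2) == 0x40000000,
	// matching the "80000000" and "40000000" strings asserted above.
	static String keyOf(int sequenceNumber) {
		return String.format("%08x", Integer.reverse(sequenceNumber));
	}

	public static void main(String[] args) {
		System.out.println(keyOf(1)); // 80000000
		System.out.println(keyOf(2)); // 40000000
	}
}
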
diff --git a/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/TimeoutTest.java b/org.eclipse.jgit.storage.dht.test/tst/org/eclipse/jgit/storage/dht/TimeoutTest.java
deleted file mode 100644
index 188158b..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.junit.Assert.assertEquals;
-
-import java.util.concurrent.TimeUnit;
-
-import org.eclipse.jgit.lib.Config;
-import org.junit.Test;
-
-public class TimeoutTest {
-       @Test
-       public void testGetTimeout() {
-               Timeout def = Timeout.seconds(2);
-               Config cfg = new Config();
-               Timeout t;
-
-               cfg.setString("core", "dht", "timeout", "500 ms");
-               t = Timeout.getTimeout(cfg, "core", "dht", "timeout", def);
-               assertEquals(500, t.getTime());
-               assertEquals(TimeUnit.MILLISECONDS, t.getUnit());
-
-               cfg.setString("core", "dht", "timeout", "5.2 sec");
-               t = Timeout.getTimeout(cfg, "core", "dht", "timeout", def);
-               assertEquals(5200, t.getTime());
-               assertEquals(TimeUnit.MILLISECONDS, t.getUnit());
-
-               cfg.setString("core", "dht", "timeout", "1 min");
-               t = Timeout.getTimeout(cfg, "core", "dht", "timeout", def);
-               assertEquals(60, t.getTime());
-               assertEquals(TimeUnit.SECONDS, t.getUnit());
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/.classpath b/org.eclipse.jgit.storage.dht/.classpath
deleted file mode 100644
index d7edf52..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-       <classpathentry kind="src" path="src"/>
-       <classpathentry kind="src" path="resources"/>
-       <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
-       <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
-       <classpathentry kind="output" path="bin"/>
-</classpath>
diff --git a/org.eclipse.jgit.storage.dht/.fbprefs b/org.eclipse.jgit.storage.dht/.fbprefs
deleted file mode 100644
index 81a0767..0000000
+++ /dev/null
@@ -1,125 +0,0 @@
-#FindBugs User Preferences
-#Mon May 04 16:24:13 PDT 2009
-detectorAppendingToAnObjectOutputStream=AppendingToAnObjectOutputStream|true
-detectorBadAppletConstructor=BadAppletConstructor|false
-detectorBadResultSetAccess=BadResultSetAccess|true
-detectorBadSyntaxForRegularExpression=BadSyntaxForRegularExpression|true
-detectorBadUseOfReturnValue=BadUseOfReturnValue|true
-detectorBadlyOverriddenAdapter=BadlyOverriddenAdapter|true
-detectorBooleanReturnNull=BooleanReturnNull|true
-detectorCallToUnsupportedMethod=CallToUnsupportedMethod|true
-detectorCheckImmutableAnnotation=CheckImmutableAnnotation|true
-detectorCheckTypeQualifiers=CheckTypeQualifiers|true
-detectorCloneIdiom=CloneIdiom|false
-detectorComparatorIdiom=ComparatorIdiom|true
-detectorConfusedInheritance=ConfusedInheritance|true
-detectorConfusionBetweenInheritedAndOuterMethod=ConfusionBetweenInheritedAndOuterMethod|true
-detectorCrossSiteScripting=CrossSiteScripting|true
-detectorDoInsideDoPrivileged=DoInsideDoPrivileged|true
-detectorDontCatchIllegalMonitorStateException=DontCatchIllegalMonitorStateException|true
-detectorDontUseEnum=DontUseEnum|true
-detectorDroppedException=DroppedException|true
-detectorDumbMethodInvocations=DumbMethodInvocations|true
-detectorDumbMethods=DumbMethods|true
-detectorDuplicateBranches=DuplicateBranches|true
-detectorEmptyZipFileEntry=EmptyZipFileEntry|true
-detectorEqualsOperandShouldHaveClassCompatibleWithThis=EqualsOperandShouldHaveClassCompatibleWithThis|true
-detectorFinalizerNullsFields=FinalizerNullsFields|true
-detectorFindBadCast2=FindBadCast2|true
-detectorFindBadForLoop=FindBadForLoop|true
-detectorFindCircularDependencies=FindCircularDependencies|false
-detectorFindDeadLocalStores=FindDeadLocalStores|true
-detectorFindDoubleCheck=FindDoubleCheck|true
-detectorFindEmptySynchronizedBlock=FindEmptySynchronizedBlock|true
-detectorFindFieldSelfAssignment=FindFieldSelfAssignment|true
-detectorFindFinalizeInvocations=FindFinalizeInvocations|true
-detectorFindFloatEquality=FindFloatEquality|true
-detectorFindHEmismatch=FindHEmismatch|true
-detectorFindInconsistentSync2=FindInconsistentSync2|true
-detectorFindJSR166LockMonitorenter=FindJSR166LockMonitorenter|true
-detectorFindLocalSelfAssignment2=FindLocalSelfAssignment2|true
-detectorFindMaskedFields=FindMaskedFields|true
-detectorFindMismatchedWaitOrNotify=FindMismatchedWaitOrNotify|true
-detectorFindNakedNotify=FindNakedNotify|true
-detectorFindNonSerializableStoreIntoSession=FindNonSerializableStoreIntoSession|true
-detectorFindNonSerializableValuePassedToWriteObject=FindNonSerializableValuePassedToWriteObject|true
-detectorFindNonShortCircuit=FindNonShortCircuit|true
-detectorFindNullDeref=FindNullDeref|true
-detectorFindNullDerefsInvolvingNonShortCircuitEvaluation=FindNullDerefsInvolvingNonShortCircuitEvaluation|true
-detectorFindOpenStream=FindOpenStream|true
-detectorFindPuzzlers=FindPuzzlers|true
-detectorFindRefComparison=FindRefComparison|true
-detectorFindReturnRef=FindReturnRef|true
-detectorFindRunInvocations=FindRunInvocations|true
-detectorFindSelfComparison=FindSelfComparison|true
-detectorFindSelfComparison2=FindSelfComparison2|true
-detectorFindSleepWithLockHeld=FindSleepWithLockHeld|true
-detectorFindSpinLoop=FindSpinLoop|true
-detectorFindSqlInjection=FindSqlInjection|true
-detectorFindTwoLockWait=FindTwoLockWait|true
-detectorFindUncalledPrivateMethods=FindUncalledPrivateMethods|true
-detectorFindUnconditionalWait=FindUnconditionalWait|true
-detectorFindUninitializedGet=FindUninitializedGet|true
-detectorFindUnrelatedTypesInGenericContainer=FindUnrelatedTypesInGenericContainer|true
-detectorFindUnreleasedLock=FindUnreleasedLock|true
-detectorFindUnsatisfiedObligation=FindUnsatisfiedObligation|true
-detectorFindUnsyncGet=FindUnsyncGet|true
-detectorFindUselessControlFlow=FindUselessControlFlow|true
-detectorFormatStringChecker=FormatStringChecker|true
-detectorHugeSharedStringConstants=HugeSharedStringConstants|true
-detectorIDivResultCastToDouble=IDivResultCastToDouble|true
-detectorIncompatMask=IncompatMask|true
-detectorInconsistentAnnotations=InconsistentAnnotations|true
-detectorInefficientMemberAccess=InefficientMemberAccess|false
-detectorInefficientToArray=InefficientToArray|true
-detectorInfiniteLoop=InfiniteLoop|true
-detectorInfiniteRecursiveLoop=InfiniteRecursiveLoop|true
-detectorInfiniteRecursiveLoop2=InfiniteRecursiveLoop2|false
-detectorInheritanceUnsafeGetResource=InheritanceUnsafeGetResource|true
-detectorInitializationChain=InitializationChain|true
-detectorInstantiateStaticClass=InstantiateStaticClass|true
-detectorInvalidJUnitTest=InvalidJUnitTest|true
-detectorIteratorIdioms=IteratorIdioms|true
-detectorLazyInit=LazyInit|true
-detectorLoadOfKnownNullValue=LoadOfKnownNullValue|true
-detectorMethodReturnCheck=MethodReturnCheck|true
-detectorMultithreadedInstanceAccess=MultithreadedInstanceAccess|true
-detectorMutableLock=MutableLock|true
-detectorMutableStaticFields=MutableStaticFields|true
-detectorNaming=Naming|true
-detectorNumberConstructor=NumberConstructor|true
-detectorOverridingEqualsNotSymmetrical=OverridingEqualsNotSymmetrical|true
-detectorPreferZeroLengthArrays=PreferZeroLengthArrays|true
-detectorPublicSemaphores=PublicSemaphores|false
-detectorQuestionableBooleanAssignment=QuestionableBooleanAssignment|true
-detectorReadReturnShouldBeChecked=ReadReturnShouldBeChecked|true
-detectorRedundantInterfaces=RedundantInterfaces|true
-detectorRepeatedConditionals=RepeatedConditionals|true
-detectorRuntimeExceptionCapture=RuntimeExceptionCapture|true
-detectorSerializableIdiom=SerializableIdiom|true
-detectorStartInConstructor=StartInConstructor|true
-detectorStaticCalendarDetector=StaticCalendarDetector|true
-detectorStringConcatenation=StringConcatenation|true
-detectorSuperfluousInstanceOf=SuperfluousInstanceOf|true
-detectorSuspiciousThreadInterrupted=SuspiciousThreadInterrupted|true
-detectorSwitchFallthrough=SwitchFallthrough|true
-detectorSynchronizeAndNullCheckField=SynchronizeAndNullCheckField|true
-detectorSynchronizeOnClassLiteralNotGetClass=SynchronizeOnClassLiteralNotGetClass|true
-detectorSynchronizingOnContentsOfFieldToProtectField=SynchronizingOnContentsOfFieldToProtectField|true
-detectorURLProblems=URLProblems|true
-detectorUncallableMethodOfAnonymousClass=UncallableMethodOfAnonymousClass|true
-detectorUnnecessaryMath=UnnecessaryMath|true
-detectorUnreadFields=UnreadFields|true
-detectorUseObjectEquals=UseObjectEquals|false
-detectorUselessSubclassMethod=UselessSubclassMethod|false
-detectorVarArgsProblems=VarArgsProblems|true
-detectorVolatileUsage=VolatileUsage|true
-detectorWaitInLoop=WaitInLoop|true
-detectorWrongMapIterator=WrongMapIterator|true
-detectorXMLFactoryBypass=XMLFactoryBypass|true
-detector_threshold=2
-effort=default
-excludefilter0=findBugs/FindBugsExcludeFilter.xml
-filter_settings=Medium|BAD_PRACTICE,CORRECTNESS,MT_CORRECTNESS,PERFORMANCE,STYLE|false
-filter_settings_neg=MALICIOUS_CODE,NOISE,I18N,SECURITY,EXPERIMENTAL|
-run_at_full_build=true
diff --git a/org.eclipse.jgit.storage.dht/.gitignore b/org.eclipse.jgit.storage.dht/.gitignore
deleted file mode 100644
index 934e0e0..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/bin
-/target
diff --git a/org.eclipse.jgit.storage.dht/.project b/org.eclipse.jgit.storage.dht/.project
deleted file mode 100644
index db60c55..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-       <name>org.eclipse.jgit.storage.dht</name>
-       <comment></comment>
-       <projects>
-       </projects>
-       <buildSpec>
-               <buildCommand>
-                       <name>org.eclipse.jdt.core.javabuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.ManifestBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.SchemaBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.api.tools.apiAnalysisBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-       </buildSpec>
-       <natures>
-               <nature>org.eclipse.jdt.core.javanature</nature>
-               <nature>org.eclipse.pde.PluginNature</nature>
-               <nature>org.eclipse.pde.api.tools.apiAnalysisNature</nature>
-       </natures>
-</projectDescription>
diff --git a/org.eclipse.jgit.storage.dht/.settings/org.eclipse.core.resources.prefs b/org.eclipse.jgit.storage.dht/.settings/org.eclipse.core.resources.prefs
deleted file mode 100644
index 66ac15c..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#Mon Aug 11 16:46:12 PDT 2008
-eclipse.preferences.version=1
-encoding/<project>=UTF-8
diff --git a/org.eclipse.jgit.storage.dht/.settings/org.eclipse.core.runtime.prefs b/org.eclipse.jgit.storage.dht/.settings/org.eclipse.core.runtime.prefs
deleted file mode 100644
index 006e07e..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#Mon Mar 24 18:55:50 EDT 2008
-eclipse.preferences.version=1
-line.separator=\n
diff --git a/org.eclipse.jgit.storage.dht/.settings/org.eclipse.jdt.core.prefs b/org.eclipse.jgit.storage.dht/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 7655713..0000000
+++ /dev/null
@@ -1,334 +0,0 @@
-#Fri Oct 02 18:43:47 PDT 2009
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
-org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
-org.eclipse.jdt.core.compiler.compliance=1.5
-org.eclipse.jdt.core.compiler.debug.lineNumber=generate
-org.eclipse.jdt.core.compiler.debug.localVariable=generate
-org.eclipse.jdt.core.compiler.debug.sourceFile=generate
-org.eclipse.jdt.core.compiler.doc.comment.support=enabled
-org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=warning
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.autoboxing=warning
-org.eclipse.jdt.core.compiler.problem.deprecation=warning
-org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
-org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
-org.eclipse.jdt.core.compiler.problem.discouragedReference=warning
-org.eclipse.jdt.core.compiler.problem.emptyStatement=warning
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.fallthroughCase=warning
-org.eclipse.jdt.core.compiler.problem.fieldHiding=warning
-org.eclipse.jdt.core.compiler.problem.finalParameterBound=warning
-org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=error
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=error
-org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=error
-org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=warning
-org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=warning
-org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=error
-org.eclipse.jdt.core.compiler.problem.invalidJavadoc=error
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTags=enabled
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsDeprecatedRef=enabled
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsNotVisibleRef=enabled
-org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsVisibility=private
-org.eclipse.jdt.core.compiler.problem.localVariableHiding=warning
-org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=error
-org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingJavadocComments=error
-org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsVisibility=protected
-org.eclipse.jdt.core.compiler.problem.missingJavadocTagDescription=return_tag
-org.eclipse.jdt.core.compiler.problem.missingJavadocTags=error
-org.eclipse.jdt.core.compiler.problem.missingJavadocTagsOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.missingJavadocTagsVisibility=private
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingSerialVersion=warning
-org.eclipse.jdt.core.compiler.problem.noEffectAssignment=error
-org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=error
-org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
-org.eclipse.jdt.core.compiler.problem.nullReference=warning
-org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=warning
-org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=error
-org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning
-org.eclipse.jdt.core.compiler.problem.rawTypeReference=ignore
-org.eclipse.jdt.core.compiler.problem.redundantNullCheck=warning
-org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
-org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=error
-org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
-org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning
-org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=warning
-org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=warning
-org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=error
-org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=warning
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.unusedImport=error
-org.eclipse.jdt.core.compiler.problem.unusedLabel=error
-org.eclipse.jdt.core.compiler.problem.unusedLocal=error
-org.eclipse.jdt.core.compiler.problem.unusedParameter=warning
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
-org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=error
-org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=error
-org.eclipse.jdt.core.compiler.source=1.5
-org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_assignment=0
-org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
-org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
-org.eclipse.jdt.core.formatter.alignment_for_enum_constants=0
-org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
-org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
-org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_after_package=1
-org.eclipse.jdt.core.formatter.blank_lines_before_field=1
-org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
-org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
-org.eclipse.jdt.core.formatter.blank_lines_before_method=1
-org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
-org.eclipse.jdt.core.formatter.blank_lines_before_package=0
-org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
-org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
-org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines=false
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
-org.eclipse.jdt.core.formatter.comment.format_block_comments=true
-org.eclipse.jdt.core.formatter.comment.format_comments=true
-org.eclipse.jdt.core.formatter.comment.format_header=false
-org.eclipse.jdt.core.formatter.comment.format_html=true
-org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
-org.eclipse.jdt.core.formatter.comment.format_line_comments=true
-org.eclipse.jdt.core.formatter.comment.format_source_code=true
-org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
-org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
-org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
-org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
-org.eclipse.jdt.core.formatter.comment.line_length=80
-org.eclipse.jdt.core.formatter.compact_else_if=true
-org.eclipse.jdt.core.formatter.continuation_indentation=2
-org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
-org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
-org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_empty_lines=false
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false
-org.eclipse.jdt.core.formatter.indentation.size=4
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
-org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.lineSplit=80
-org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
-org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
-org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
-org.eclipse.jdt.core.formatter.tabulation.char=tab
-org.eclipse.jdt.core.formatter.tabulation.size=4
-org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
-org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
diff --git a/org.eclipse.jgit.storage.dht/.settings/org.eclipse.jdt.ui.prefs b/org.eclipse.jgit.storage.dht/.settings/org.eclipse.jdt.ui.prefs
deleted file mode 100644 (file)
index 7b2cdca..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
-#Thu Aug 26 12:30:58 CDT 2010
-eclipse.preferences.version=1
-editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
-formatter_profile=_JGit Format
-formatter_settings_version=11
-org.eclipse.jdt.ui.ignorelowercasenames=true
-org.eclipse.jdt.ui.importorder=java;javax;org;com;
-org.eclipse.jdt.ui.ondemandthreshold=99
-org.eclipse.jdt.ui.staticondemandthreshold=99
-org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?><templates/>
-sp_cleanup.add_default_serial_version_id=true
-sp_cleanup.add_generated_serial_version_id=false
-sp_cleanup.add_missing_annotations=false
-sp_cleanup.add_missing_deprecated_annotations=true
-sp_cleanup.add_missing_methods=false
-sp_cleanup.add_missing_nls_tags=false
-sp_cleanup.add_missing_override_annotations=true
-sp_cleanup.add_missing_override_annotations_interface_methods=false
-sp_cleanup.add_serial_version_id=false
-sp_cleanup.always_use_blocks=true
-sp_cleanup.always_use_parentheses_in_expressions=false
-sp_cleanup.always_use_this_for_non_static_field_access=false
-sp_cleanup.always_use_this_for_non_static_method_access=false
-sp_cleanup.convert_to_enhanced_for_loop=false
-sp_cleanup.correct_indentation=false
-sp_cleanup.format_source_code=true
-sp_cleanup.format_source_code_changes_only=true
-sp_cleanup.make_local_variable_final=false
-sp_cleanup.make_parameters_final=false
-sp_cleanup.make_private_fields_final=true
-sp_cleanup.make_type_abstract_if_missing_method=false
-sp_cleanup.make_variable_declarations_final=false
-sp_cleanup.never_use_blocks=false
-sp_cleanup.never_use_parentheses_in_expressions=true
-sp_cleanup.on_save_use_additional_actions=true
-sp_cleanup.organize_imports=false
-sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
-sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
-sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
-sp_cleanup.qualify_static_member_accesses_with_declaring_class=false
-sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
-sp_cleanup.remove_private_constructors=true
-sp_cleanup.remove_trailing_whitespaces=true
-sp_cleanup.remove_trailing_whitespaces_all=true
-sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
-sp_cleanup.remove_unnecessary_casts=false
-sp_cleanup.remove_unnecessary_nls_tags=false
-sp_cleanup.remove_unused_imports=false
-sp_cleanup.remove_unused_local_variables=false
-sp_cleanup.remove_unused_private_fields=true
-sp_cleanup.remove_unused_private_members=false
-sp_cleanup.remove_unused_private_methods=true
-sp_cleanup.remove_unused_private_types=true
-sp_cleanup.sort_members=false
-sp_cleanup.sort_members_all=false
-sp_cleanup.use_blocks=false
-sp_cleanup.use_blocks_only_for_return_and_throw=false
-sp_cleanup.use_parentheses_in_expressions=false
-sp_cleanup.use_this_for_non_static_field_access=false
-sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=true
-sp_cleanup.use_this_for_non_static_method_access=false
-sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=true
diff --git a/org.eclipse.jgit.storage.dht/.settings/org.eclipse.mylyn.tasks.ui.prefs b/org.eclipse.jgit.storage.dht/.settings/org.eclipse.mylyn.tasks.ui.prefs
deleted file mode 100644 (file)
index 823c0f5..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-#Tue Jul 19 20:11:28 CEST 2011
-eclipse.preferences.version=1
-project.repository.kind=bugzilla
-project.repository.url=https\://bugs.eclipse.org/bugs
diff --git a/org.eclipse.jgit.storage.dht/.settings/org.eclipse.mylyn.team.ui.prefs b/org.eclipse.jgit.storage.dht/.settings/org.eclipse.mylyn.team.ui.prefs
deleted file mode 100644 (file)
index 0cba949..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#Tue Jul 19 20:11:28 CEST 2011
-commit.comment.template=${task.description} \n\nBug\: ${task.key}
-eclipse.preferences.version=1
diff --git a/org.eclipse.jgit.storage.dht/.settings/org.eclipse.pde.api.tools.prefs b/org.eclipse.jgit.storage.dht/.settings/org.eclipse.pde.api.tools.prefs
deleted file mode 100644 (file)
index cd148d9..0000000
+++ /dev/null
@@ -1,94 +0,0 @@
-#Tue Oct 18 00:52:01 CEST 2011
-ANNOTATION_ELEMENT_TYPE_ADDED_METHOD_WITHOUT_DEFAULT_VALUE=Error
-ANNOTATION_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-ANNOTATION_ELEMENT_TYPE_REMOVED_FIELD=Error
-ANNOTATION_ELEMENT_TYPE_REMOVED_METHOD=Error
-ANNOTATION_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_API_TYPE=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_REEXPORTED_API_TYPE=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_REEXPORTED_TYPE=Error
-API_COMPONENT_ELEMENT_TYPE_REMOVED_TYPE=Error
-CLASS_ELEMENT_TYPE_ADDED_METHOD=Error
-CLASS_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
-CLASS_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-CLASS_ELEMENT_TYPE_CHANGED_CONTRACTED_SUPERINTERFACES_SET=Error
-CLASS_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-CLASS_ELEMENT_TYPE_CHANGED_NON_ABSTRACT_TO_ABSTRACT=Error
-CLASS_ELEMENT_TYPE_CHANGED_NON_FINAL_TO_FINAL=Error
-CLASS_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-CLASS_ELEMENT_TYPE_REMOVED_CONSTRUCTOR=Error
-CLASS_ELEMENT_TYPE_REMOVED_FIELD=Error
-CLASS_ELEMENT_TYPE_REMOVED_METHOD=Error
-CLASS_ELEMENT_TYPE_REMOVED_SUPERCLASS=Error
-CLASS_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-CLASS_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-CONSTRUCTOR_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-CONSTRUCTOR_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-CONSTRUCTOR_ELEMENT_TYPE_CHANGED_VARARGS_TO_ARRAY=Error
-CONSTRUCTOR_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-ENUM_ELEMENT_TYPE_CHANGED_CONTRACTED_SUPERINTERFACES_SET=Error
-ENUM_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-ENUM_ELEMENT_TYPE_REMOVED_ENUM_CONSTANT=Error
-ENUM_ELEMENT_TYPE_REMOVED_FIELD=Error
-ENUM_ELEMENT_TYPE_REMOVED_METHOD=Error
-ENUM_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-FIELD_ELEMENT_TYPE_ADDED_VALUE=Error
-FIELD_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-FIELD_ELEMENT_TYPE_CHANGED_FINAL_TO_NON_FINAL_STATIC_CONSTANT=Error
-FIELD_ELEMENT_TYPE_CHANGED_NON_FINAL_TO_FINAL=Error
-FIELD_ELEMENT_TYPE_CHANGED_NON_STATIC_TO_STATIC=Error
-FIELD_ELEMENT_TYPE_CHANGED_STATIC_TO_NON_STATIC=Error
-FIELD_ELEMENT_TYPE_CHANGED_TYPE=Error
-FIELD_ELEMENT_TYPE_CHANGED_VALUE=Error
-FIELD_ELEMENT_TYPE_REMOVED_TYPE_ARGUMENT=Error
-FIELD_ELEMENT_TYPE_REMOVED_VALUE=Error
-ILLEGAL_EXTEND=Warning
-ILLEGAL_IMPLEMENT=Warning
-ILLEGAL_INSTANTIATE=Warning
-ILLEGAL_OVERRIDE=Warning
-ILLEGAL_REFERENCE=Warning
-INTERFACE_ELEMENT_TYPE_ADDED_FIELD=Error
-INTERFACE_ELEMENT_TYPE_ADDED_METHOD=Error
-INTERFACE_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
-INTERFACE_ELEMENT_TYPE_ADDED_SUPER_INTERFACE_WITH_METHODS=Error
-INTERFACE_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-INTERFACE_ELEMENT_TYPE_CHANGED_CONTRACTED_SUPERINTERFACES_SET=Error
-INTERFACE_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_FIELD=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_METHOD=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
-INTERFACE_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-INVALID_JAVADOC_TAG=Ignore
-INVALID_REFERENCE_IN_SYSTEM_LIBRARIES=Error
-LEAK_EXTEND=Warning
-LEAK_FIELD_DECL=Warning
-LEAK_IMPLEMENT=Warning
-LEAK_METHOD_PARAM=Warning
-LEAK_METHOD_RETURN_TYPE=Warning
-METHOD_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
-METHOD_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
-METHOD_ELEMENT_TYPE_CHANGED_DECREASE_ACCESS=Error
-METHOD_ELEMENT_TYPE_CHANGED_NON_ABSTRACT_TO_ABSTRACT=Error
-METHOD_ELEMENT_TYPE_CHANGED_NON_FINAL_TO_FINAL=Error
-METHOD_ELEMENT_TYPE_CHANGED_NON_STATIC_TO_STATIC=Error
-METHOD_ELEMENT_TYPE_CHANGED_STATIC_TO_NON_STATIC=Error
-METHOD_ELEMENT_TYPE_CHANGED_VARARGS_TO_ARRAY=Error
-METHOD_ELEMENT_TYPE_REMOVED_ANNOTATION_DEFAULT_VALUE=Error
-METHOD_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
-TYPE_PARAMETER_ELEMENT_TYPE_ADDED_CLASS_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_ADDED_INTERFACE_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_CHANGED_CLASS_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_CHANGED_INTERFACE_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_REMOVED_CLASS_BOUND=Error
-TYPE_PARAMETER_ELEMENT_TYPE_REMOVED_INTERFACE_BOUND=Error
-UNUSED_PROBLEM_FILTERS=Warning
-automatically_removed_unused_problem_filters=false
-eclipse.preferences.version=1
-incompatible_api_component_version=Error
-incompatible_api_component_version_include_major_without_breaking_change=Disabled
-incompatible_api_component_version_include_minor_without_api_change=Disabled
-invalid_since_tag_version=Error
-malformed_since_tag=Error
-missing_since_tag=Error
-report_api_breakage_when_major_version_incremented=Disabled
-report_resolution_errors_api_component=Warning
diff --git a/org.eclipse.jgit.storage.dht/META-INF/MANIFEST.MF b/org.eclipse.jgit.storage.dht/META-INF/MANIFEST.MF
deleted file mode 100644 (file)
index 2093849..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-Manifest-Version: 1.0
-Bundle-ManifestVersion: 2
-Bundle-Name: %plugin_name
-Bundle-SymbolicName: org.eclipse.jgit.storage.dht
-Bundle-Version: 2.1.0.qualifier
-Bundle-Localization: plugin
-Bundle-Vendor: %provider_name
-Export-Package: org.eclipse.jgit.storage.dht;version="2.1.0",
- org.eclipse.jgit.storage.dht.spi;version="2.1.0",
- org.eclipse.jgit.storage.dht.spi.cache;version="2.1.0",
- org.eclipse.jgit.storage.dht.spi.util;version="2.1.0",
- org.eclipse.jgit.storage.dht.spi.memory;version="2.1.0"
-Bundle-ActivationPolicy: lazy
-Bundle-RequiredExecutionEnvironment: J2SE-1.5
-Import-Package: com.google.protobuf;version="[2.4.0,2.5.0)",
- org.eclipse.jgit.errors;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.generated.storage.dht.proto;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.lib;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.nls;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.revwalk;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.transport;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.treewalk;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.storage.file;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.storage.pack;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.util;version="[2.1.0,2.2.0)",
- org.eclipse.jgit.util.io;version="[2.1.0,2.2.0)"
diff --git a/org.eclipse.jgit.storage.dht/README b/org.eclipse.jgit.storage.dht/README
deleted file mode 100644 (file)
index 1e07d37..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
-JGit Storage on DHT
--------------------
-
-This implementation still has some pending issues:
-
-* DhtInserter must skip existing objects
-
-  DirCache writes all trees to the ObjectInserter, letting the
-  inserter figure out which trees we already have, and which are new.
-  DhtInserter should buffer trees into a chunk, then before writing
-  the chunk to the DHT do a batch lookup to find the existing
-  ObjectInfo (if any).  If any exist, the chunk should be compacted to
-  eliminate these objects, and if there is room in the chunk for more
-  objects, it should go back to the DhtInserter to be filled further
-  before flushing.
-
-  This implies the DhtInserter needs to work on multiple chunks at
-  once, and may need to combine chunks together when there is more
-  than one partial chunk.
-
-* DhtPackParser must check for collisions
-
-  Because ChunkCache blindly assumes any copy of an object is an OK
-  copy of an object, DhtPackParser needs to validate all new objects
-  at the end of its importing phase, before it links the objects into
-  the ObjectIndexTable.  Most objects won't already exist, but some
-  may, and those that do must either be removed from their chunk, or
-  have their content byte-for-byte validated.
-
-  Removal from a chunk just means deleting it from the chunk's local
-  index, and not writing it to the global ObjectIndexTable.  This
-  creates a hole in the chunk which is wasted space, and that isn't
-  very useful.  Fortunately objects that fit fully within one chunk
-  may be easy to inflate and double check, as they are small.  Objects
-  that are big span multiple chunks, and the new chunks can simply be
-  deleted from the ChunkTable, leaving the original chunks.
-
-  Deltas can be checked quickly by inflating the delta and checking
-  only the insertion point text, comparing that to the existing data
-  in the repository.  Unfortunately the repository is likely to use a
-  different delta representation, which means at least one of them
-  will need to be fully inflated to check the delta against.
-
-* DhtPackParser should handle small-huge-small-huge
-
-  Multiple chunks need to be open at once, in case we get a bad
-  pattern of small-object, huge-object, small-object, huge-object.  In
-  this case the small-objects should be put together into the same
-  chunk, to prevent having too many tiny chunks.  This is tricky to do
-  with OFS_DELTA.  A long OFS_DELTA requires all prior chunks to be
-  closed out so we know their lengths.
-
-* RepresentationSelector performance bad on Cassandra
-
-  The 1.8 million batch lookups done for linux-2.6 kill Cassandra; it
-  cannot handle this read load.
-
-* READ_REPAIR isn't fully accurate
-
-  There are a lot of places where the generic DHT code should be
-  helping to validate the local replica is consistent, and where it is
-  not, help the underlying storage system to heal the local replica by
-  reading from a remote replica and putting it back to the local one.
-  Most of this should be handled in the DHT SPI layer, but the generic
-  DHT code should be giving better hints during get() method calls.
-
-* LOCAL / WORLD writes
-
-  Many writes should be done locally first, before they replicate to
-  the other replicas, as they might be backed out on an abort.
-
-  Likewise some writes must take place across sufficient replicas to
-  ensure the write is not lost... and this may include ensuring that
-  earlier local-only writes have actually been committed to all
-  replicas.  This committing to replicas might be happening in the
-  background automatically after the local write (e.g. Cassandra will
-  start to send writes made by one node to other nodes, but doesn't
-  promise they finish).  But parts of the code may need to force this
-  replication to complete before the higher level git operation ends.
-
-* Forks/alternates
-
-  Forking is common, but we should avoid duplicating content into the
-  fork if the base repository has it.  This requires some sort of
-  change to the key structure so that chunks are owned by an object
-  pool, and the object pool owns the repositories that use it.  GC
-  proceeds at the object pool level, rather than the repository level,
-  but might want to take some of the reference namespace into account
-  to avoid placing forked less-common content near primary content.
diff --git a/org.eclipse.jgit.storage.dht/about.html b/org.eclipse.jgit.storage.dht/about.html
deleted file mode 100644 (file)
index 01a2671..0000000
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1" ?>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
-<html xmlns="http://www.w3.org/1999/xhtml">
-
-<head>
-<meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1" />
-<title>Eclipse Distribution License - Version 1.0</title>
-<style type="text/css">
-  body {
-    size: 8.5in 11.0in;
-    margin: 0.25in 0.5in 0.25in 0.5in;
-    tab-interval: 0.5in;
-    }
-  p {          
-    margin-left: auto;
-    margin-top:  0.5em;
-    margin-bottom: 0.5em;
-    }
-  p.list {
-       margin-left: 0.5in;
-    margin-top:  0.05em;
-    margin-bottom: 0.05em;
-    }
-  </style>
-
-</head>
-
-<body lang="EN-US">
-
-<p><b>Eclipse Distribution License - v 1.0</b></p>
-
-<p>Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors. </p>
-
-<p>All rights reserved.</p>
-<p>Redistribution and use in source and binary forms, with or without modification, 
-       are permitted provided that the following conditions are met:
-<ul><li>Redistributions of source code must retain the above copyright notice, 
-       this list of conditions and the following disclaimer. </li>
-<li>Redistributions in binary form must reproduce the above copyright notice, 
-       this list of conditions and the following disclaimer in the documentation 
-       and/or other materials provided with the distribution. </li>
-<li>Neither the name of the Eclipse Foundation, Inc. nor the names of its 
-       contributors may be used to endorse or promote products derived from 
-       this software without specific prior written permission. </li></ul>
-</p>
-<p>THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
-IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, 
-INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 
-NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
-POSSIBILITY OF SUCH DAMAGE.</p>
-
-</body>
-
-</html>
diff --git a/org.eclipse.jgit.storage.dht/build.properties b/org.eclipse.jgit.storage.dht/build.properties
deleted file mode 100644 (file)
index b67aba1..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-source.. = src/
-output.. = bin/
-bin.includes = META-INF/,\
-               .,\
-               plugin.properties,\
-               about.html
diff --git a/org.eclipse.jgit.storage.dht/plugin.properties b/org.eclipse.jgit.storage.dht/plugin.properties
deleted file mode 100644 (file)
index aff758f..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-plugin_name=JGit DHT Storage
-provider_name=Eclipse.org
diff --git a/org.eclipse.jgit.storage.dht/pom.xml b/org.eclipse.jgit.storage.dht/pom.xml
deleted file mode 100644 (file)
index e54b122..0000000
+++ /dev/null
@@ -1,181 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Copyright (C) 2011, Google Inc.
-   and other copyright owners as documented in the project's IP log.
-
-   This program and the accompanying materials are made available
-   under the terms of the Eclipse Distribution License v1.0 which
-   accompanies this distribution, is reproduced below, and is
-   available at http://www.eclipse.org/org/documents/edl-v10.php
-
-   All rights reserved.
-
-   Redistribution and use in source and binary forms, with or
-   without modification, are permitted provided that the following
-   conditions are met:
-
-   - Redistributions of source code must retain the above copyright
-     notice, this list of conditions and the following disclaimer.
-
-   - Redistributions in binary form must reproduce the above
-     copyright notice, this list of conditions and the following
-     disclaimer in the documentation and/or other materials provided
-     with the distribution.
-
-   - Neither the name of the Eclipse Foundation, Inc. nor the
-     names of its contributors may be used to endorse or promote
-     products derived from this software without specific prior
-     written permission.
-
-   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
-   CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
-   INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
-   OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-   ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
-   NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-   LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-   CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-   STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-   ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.eclipse.jgit</groupId>
-    <artifactId>org.eclipse.jgit-parent</artifactId>
-    <version>2.1.0-SNAPSHOT</version>
-  </parent>
-
-  <artifactId>org.eclipse.jgit.storage.dht</artifactId>
-  <name>JGit - DHT Storage</name>
-
-  <description>
-    Git repository storage on a distributed hashtable
-  </description>
-
-  <properties>
-    <translate-qualifier/>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.eclipse.jgit</groupId>
-      <artifactId>org.eclipse.jgit</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.eclipse.jgit</groupId>
-      <artifactId>org.eclipse.jgit.generated.storage.dht.proto</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <sourceDirectory>src/</sourceDirectory>
-
-    <resources>
-      <resource>
-        <directory>.</directory>
-        <includes>
-          <include>plugin.properties</include>
-          <include>about.html</include>
-        </includes>
-      </resource>
-      <resource>
-        <directory>resources/</directory>
-      </resource>
-    </resources>
-
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-        <inherited>true</inherited>
-        <executions>
-          <execution>
-            <id>attach-sources</id>
-            <phase>process-classes</phase>
-            <goals>
-              <goal>jar</goal>
-            </goals>
-            <configuration>
-              <archive>
-                <manifestFile>${source-bundle-manifest}</manifestFile>
-              </archive>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-
-      <plugin>
-        <artifactId>maven-jar-plugin</artifactId>
-        <configuration>
-          <archive>
-            <manifestFile>${bundle-manifest}</manifestFile>
-          </archive>
-        </configuration>
-      </plugin>
-         <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>findbugs-maven-plugin</artifactId>
-            <configuration>
-               <findbugsXmlOutput>true</findbugsXmlOutput>
-               <failOnError>false</failOnError>
-            </configuration>
-            <executions>
-               <execution>
-                  <goals>
-                     <goal>check</goal>
-                  </goals>
-               </execution>
-            </executions>
-         </plugin>
-
-         <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-pmd-plugin</artifactId>
-            <configuration>
-               <sourceEncoding>utf-8</sourceEncoding>
-               <minimumTokens>100</minimumTokens>
-               <targetJdk>1.5</targetJdk>
-               <format>xml</format>
-               <failOnViolation>false</failOnViolation>
-            </configuration>
-            <executions>
-               <execution>
-                  <goals>
-                     <goal>cpd-check</goal>
-                  </goals>
-               </execution>
-            </executions>
-         </plugin>
-
-         <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>clirr-maven-plugin</artifactId>
-         </plugin>
-    </plugins>
-  </build>
-
-  <reporting>
-    <plugins>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>clirr-maven-plugin</artifactId>
-        <version>${clirr-version}</version>
-        <configuration>
-          <comparisonVersion>${jgit-last-release-version}</comparisonVersion>
-          <minSeverity>info</minSeverity>
-        </configuration>
-      </plugin>
-    </plugins>
-  </reporting>
-</project>
diff --git a/org.eclipse.jgit.storage.dht/resources/org/eclipse/jgit/storage/dht/DhtText.properties b/org.eclipse.jgit.storage.dht/resources/org/eclipse/jgit/storage/dht/DhtText.properties
deleted file mode 100644 (file)
index d53147a..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-cannotInsertObject=Cannot insert any objects into a ChunkWriter
-corruptChunk=Chunk {0} is corrupt and does not match its name
-corruptCompressedObject=Corrupt deflate stream in {0} at {1}
-cycleInDeltaChain=Cycle in delta chain {0} offset {1}
-databaseRequired=Database is required
-expectedObjectSizeDuringCopyAsIs=Object {0} has size of 0
-invalidCachedPackInfo=Invalid CachedPackInfo on {0} {1}
-invalidChunkKey=Invalid ChunkKey {0}
-invalidChunkMeta=Invalid ChunkMeta on {0}
-invalidObjectIndexKey=Invalid ObjectIndexKey {0}
-invalidObjectInfo=Invalid ObjectInfo for {0} from {1}
-invalidRefData=Invalid RefData on {0}
-missingChunk=Missing {0}
-missingLongOffsetBase=Missing base for offset -{1} in meta of {0}
-nameRequired=Name or key is required
-noSavedTypeForBase=No type information for base object at {0}
-notTimeUnit=Invalid time unit value: {0}={1}
-objectListSelectingName=Selecting list name
-objectListCountingFrom=Counting objects in {0}
-objectTypeUnknown=unknown
-packParserInvalidPointer=Invalid pointer inside pack parser: {0}, chunk {1}, offset {2}.
-packParserRollbackFailed=DhtPackParser rollback failed
-recordingObjects=Recording objects
-repositoryAlreadyExists=Repository {0} already exists
-repositoryMustBeBare=Only bare repositories are supported
-shortCompressedObject=Short deflate stream in {0} at {1}
-timeoutChunkMeta=Timeout waiting for ChunkMeta
-timeoutLocatingRepository=Timeout locating {0}
-tooManyObjectsInPack={0} is too many objects in a pack file
-unsupportedChunkIndex=Unsupported index version {0} in {1}
-unsupportedObjectTypeInChunk=Unknown object type {0} in {1} at {2}
-wrongChunkPositionInCachedPack=Cached pack {0} put chunk {1} at {2} but delta in {3} expects it at {4}
diff --git a/org.eclipse.jgit.storage.dht/resources/org/eclipse/jgit/storage/dht/dht-schema.html b/org.eclipse.jgit.storage.dht/resources/org/eclipse/jgit/storage/dht/dht-schema.html
deleted file mode 100644 (file)
index c2c8b4c..0000000
+++ /dev/null
@@ -1,1151 +0,0 @@
-<html>
-<head>
-<title>Git on DHT Schema</title>
-
-<style type='text/css'>
-body { font-size: 10pt; }
-h1   { font-size: 16pt; }
-h2   { font-size: 12pt; }
-h3   { font-size: 10pt; }
-
-body {
-  margin-left: 8em;
-  margin-right: 8em;
-}
-h1 { margin-left: -3em; }
-h2 { margin-left: -2em; }
-h3 { margin-left: -1em; }
-hr { margin-left: -4em; margin-right: -4em; }
-
-.coltoc {
-  font-size: 8pt;
-  font-family: monospace;
-}
-
-.rowkey {
-  margin-left: 1em;
-  padding-top: 0.2em;
-  padding-left: 1em;
-  padding-right: 1em;
-  width: 54em;
-  border: 1px dotted red;
-  background-color: #efefef;
-  white-space: nowrap;
-}
-.rowkey .header {
-  font-weight: bold;
-  padding-right: 1em;
-}
-.rowkey .var {
-  font-style: italic;
-  font-family: monospace;
-}
-.rowkey .lit {
-  font-weight: bold;
-  font-family: monospace;
-}
-.rowkey .example {
-  font-family: monospace;
-}
-.rowkey p {
-  white-space: normal;
-}
-
-.colfamily {
-  margin-top: 0.5em;
-  padding-top: 0.2em;
-  padding-left: 1em;
-  padding-right: 1em;
-  width: 55em;
-  border: 1px dotted blue;
-  background-color: #efefef;
-  white-space: nowrap;
-}
-.colfamily .header {
-  font-weight: bold;
-  padding-right: 1em;
-}
-.colfamily .var {
-  font-style: italic;
-  font-family: monospace;
-}
-.colfamily .lit {
-  font-family: monospace;
-}
-.colfamily .example {
-  font-family: monospace;
-}
-.colfamily p {
-  white-space: normal;
-}
-
-.summary_table {
-  border-collapse: collapse;
-  border-spacing: 0;
-}
-.summary_table .desc {
-  font-size: 8pt;
-  white-space: nowrap;
-  text-align: right;
-  width: 20em;
-}
-.summary_table td {
-  border: 1px dotted lightgray;
-  padding-top: 2px;
-  padding-bottom: 2px;
-  padding-left: 5px;
-  padding-right: 5px;
-  vertical-align: top;
-}
-.summary_table tr.no_border td {
-  border: none;
-}
-</style>
-</head>
-<body>
-
-<h1>Git on DHT Schema</h1>
-
-<p>Storing Git repositories on a Distributed Hash Table (DHT) may
-improve scaling for large traffic, but also simplifies management when
-there are many small repositories.</p>
-
-<h2>Table of Contents</h2>
-<ul>
-  <li><a href="#concepts">Concepts</a></li>
-  <li><a href="#summary">Summary</a></li>
-  <li><a href="#security">Data Security</a></li>
-
-  <li>Tables:
-  <ul>
-    <li><a href="#REPOSITORY_INDEX">Table REPOSITORY_INDEX</a>
-      (
-        <a href="#REPOSITORY_INDEX.id" class="toccol">id</a>
-      )</li>
-
-    <li><a href="#REPOSITORY">Table REPOSITORY</a>
-      (
-        <a href="#REPOSITORY.chunk-info" class="toccol">chunk-info</a>,
-        <a href="#REPOSITORY.cached-pack" class="toccol">cached-pack</a>
-      )</li>
-
-    <li><a href="#REF">Table REF</a>
-      (
-        <a href="#REF.target" class="toccol">target</a>
-      )</li>
-
-    <li><a href="#OBJECT_INDEX">Table OBJECT_INDEX</a>
-      (
-        <a href="#OBJECT_INDEX.info" class="toccol">info</a>
-      )</li>
-
-    <li><a href="#CHUNK">Table CHUNK</a>
-      (
-        <a href="#CHUNK.chunk" class="toccol">chunk</a>,
-        <a href="#CHUNK.index" class="toccol">index</a>,
-        <a href="#CHUNK.meta" class="toccol">meta</a>
-      )</li>
-    </ul>
-  </li>
-
-  <li>Protocol Messages:
-    <ul>
-    <li><a href="#message_RefData">RefData</a></li>
-    <li><a href="#message_ObjectInfo">ObjectInfo</a></li>
-    <li><a href="#message_ChunkInfo">ChunkInfo</a></li>
-    <li><a href="#message_ChunkMeta">ChunkMeta</a></li>
-    <li><a href="#message_CachedPackInfo">CachedPackInfo</a></li>
-    </ul>
-  </li>
-</ul>
-
-<a name="concepts"><h2>Concepts</h2></a>
-
-<p><i>Git Repository</i>: Stores the version control history for a
-single project. Each repository is a directed acyclic graph (DAG)
-composed of objects. Revision history for a project is described by a
-commit object pointing to the complete set of files that make up that
-version of the project, and a pointer to the commit that came
-immediately before it. Repositories also contain references,
-associating a human readable branch or tag name to a specific commit
-object. Tommi Virtanen has a
-<a href="http://eagain.net/articles/git-for-computer-scientists/">more
-detailed description of the Git DAG</a>.</p>
-
-<p><i>Object</i>: Git objects are named by applying the SHA-1 hash
-algorithm to their contents. There are 4 object types: commit, tree,
-blob, tag. Objects are typically stored deflated using libz deflate,
-but may also be delta compressed against another similar object,
-further reducing the storage required. The big factor for Git
-repository size is usually object count, e.g. the linux-2.6 repository
-contains 1.8 million objects.</p>
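
As a concrete illustration of this naming scheme (the "blob <size>\0" header
prefix is standard Git behavior, not something defined by this schema; the
class name below is hypothetical), a blob's SHA-1 name can be computed as:

  import java.nio.charset.Charset;
  import java.security.MessageDigest;

  class GitBlobId {
    /** Compute the SHA-1 name of a blob, as "git hash-object" would. */
    static String of(byte[] content) throws Exception {
      MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
      sha1.update(("blob " + content.length + "\0").getBytes(Charset.forName("UTF-8")));
      sha1.update(content);
      StringBuilder hex = new StringBuilder();
      for (byte b : sha1.digest())
        hex.append(String.format("%02x", b));
      return hex.toString();
    }
  }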
-
-<p><i>Reference</i>: Associates a human readable symbolic name, such
-as <code>refs/heads/master</code> to a specific Git object, usually a
-commit or tag. References are updated to point to the most recent
-object whenever changes are committed to the repository.</p>
-
-<p><i>Git Pack File</i>: A container stream holding many objects in a
-highly compressed format. On the local filesystem, Git uses pack files
-to reduce both inode and space usage by combining millions of objects
-into a single data stream. On the network, Git uses pack files as the
-basic network protocol to transport objects from one system's
-repository to another.</p>
-
-<p><i>Garbage Collection</i>: Scanning the Git object graph to locate
-objects that are reachable, and others that are unreachable. Git also
-generally performs data recompression during this task to produce more
-optimal deltas between objects, reducing overall disk usage and data
-transfer sizes. This is independent of any GC that may be performed by
-the DHT to clean up old cells.</p>
-
-<p>The basic storage strategy employed by this schema is to break a
-standard Git pack file into chunks, approximately 1 MiB in size.  Each
-chunk is stored as one row in the <a href="#CHUNK">CHUNK</a> table.
-During reading, chunks are paged into the application on demand, but
-may also be prefetched using prefetch hints. Rules are used to break
-the standard pack into chunks, these rules help to improve reference
-locality and reduce the number of chunk loads required to service
-common operations. In a nutshell, the DHT is used as a virtual memory
-system for pages about 1 MiB in size.</p>
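
As a rough sketch of that strategy, ignoring the boundary rules mentioned
above and assuming a fixed 1 MiB target (the class below is illustrative,
not code from this package), a pack stream can be cut into chunk-sized
buffers like this:

  import java.io.IOException;
  import java.io.InputStream;
  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.List;

  class PackChunker {
    private static final int TARGET_CHUNK_SIZE = 1024 * 1024; // ~1 MiB per chunk

    /** Read a pack stream and cut it into roughly chunk-sized buffers. */
    static List<byte[]> split(InputStream pack) throws IOException {
      List<byte[]> chunks = new ArrayList<byte[]>();
      byte[] buf = new byte[TARGET_CHUNK_SIZE];
      int filled = 0;
      int n;
      while ((n = pack.read(buf, filled, buf.length - filled)) > 0) {
        filled += n;
        if (filled == buf.length) { // buffer full: emit it and start a new chunk
          chunks.add(buf);
          buf = new byte[TARGET_CHUNK_SIZE];
          filled = 0;
        }
      }
      if (filled > 0) // final, partially filled chunk
        chunks.add(Arrays.copyOf(buf, filled));
      return chunks;
    }
  }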
-
-<a name="summary"><h2>Summary</h2></a>
-
-<p>The schema consists of a handful of tables. Size estimates are
-given for one copy of the linux-2.6 Git repository, a relative torture
-test case that contains 1.8 million objects and is 425 MiB when stored
-on the local filesystem. All sizes are before any replication made by
-the DHT, or its underlying storage system.</p>
-
-<table style='margin-left: 2em' class='summary_table'>
-  <tr>
-    <th>Table</th>
-    <th>Rows</th>
-    <th>Cells/Row</th>
-    <th>Bytes</th>
-    <th>Bytes/Row</th>
-  </tr>
-
-  <tr>
-    <td><a href="#REPOSITORY_INDEX">REPOSITORY_INDEX</a>
-    <div class='desc'>Map host+path to surrogate key.</div></td>
-    <td align='right'>1</td>
-    <td align='right'>1</td>
-    <td align='right'>&lt; 100 bytes</td>
-    <td align='right'>&lt; 100 bytes</td>
-  </tr>
-
-  <tr>
-    <td><a href="#REPOSITORY">REPOSITORY</a>
-    <div class='desc'>Accounting and replica management.</div></td>
-    <td align='right'>1</td>
-    <td align='right'>403</td>
-    <td align='right'>65 KiB</td>
-    <td align='right'>65 KiB</td>
-  </tr>
-
-  <tr>
-    <td><a href="#REF">REF</a>
-    <div class='desc'>Bind branch/tag name to Git object.</div></td>
-    <td align='right'>211</td>
-    <td align='right'>1</td>
-    <td align='right'>14 KiB</td>
-    <td align='right'>67 bytes</td>
-  </tr>
-
-  <tr>
-    <td><a href="#OBJECT_INDEX">OBJECT_INDEX</a>
-    <div class='desc'>Locate Git object by SHA-1 name.</div></td>
-    <td align='right'>1,861,833</td>
-    <td align='right'>1</td>
-    <td align='right'>154 MiB</td>
-    <td align='right'>87 bytes</td>
-  </tr>
-
-  <tr>
-    <td><a href="#CHUNK">CHUNK</a>
-    <div class='desc'>Complete Git object storage.</div></td>
-    <td align='right'>402</td>
-    <td align='right'>3</td>
-    <td align='right'>417 MiB</td>
-    <td align='right'>~ 1 MiB</td>
-  </tr>
-
-  <tr class='no_border'>
-    <td align='right'><i>Total</i></td>
-    <td align='right'>1,862,448</td>
-    <td align='right'></td>
-    <td align='right'>571 MiB</td>
-    <td align='right'></td>
-  </tr>
-</table>
-
-<a name="security"><h2>Data Security</h2></a>
-
-<p>If data encryption is necessary to protect file contents, the <a
-href="#CHUNK.chunk">CHUNK.chunk</a> column can be encrypted with a
-block cipher such as AES. This column contains the revision commit
-messages, file paths, and file contents. By encrypting one column, the
-majority of the repository data is secured. As each cell value is
-about 1 MiB and contains a trailing 4 bytes of random data, an ECB
-mode of operation may be sufficient. Because the cells are already
-very highly compressed using the Git data compression algorithms,
-there is no increase in disk usage due to encryption.</p>
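
A minimal sketch of such column encryption, assuming a pre-shared 128-bit
AES key held by the application (the class and method names are
illustrative, not part of the schema):

  import javax.crypto.Cipher;
  import javax.crypto.spec.SecretKeySpec;

  class ChunkCrypto {
    private final SecretKeySpec key;

    ChunkCrypto(byte[] rawAes128Key) {
      key = new SecretKeySpec(rawAes128Key, "AES");
    }

    /** Encrypt a CHUNK.chunk cell value before it is written to the DHT. */
    byte[] encrypt(byte[] chunkData) throws Exception {
      Cipher c = Cipher.getInstance("AES/ECB/PKCS5Padding");
      c.init(Cipher.ENCRYPT_MODE, key);
      return c.doFinal(chunkData);
    }

    /** Decrypt a CHUNK.chunk cell value read back from the DHT. */
    byte[] decrypt(byte[] cell) throws Exception {
      Cipher c = Cipher.getInstance("AES/ECB/PKCS5Padding");
      c.init(Cipher.DECRYPT_MODE, key);
      return c.doFinal(cell);
    }
  }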
-
-<p>Branch and tag names (<a href="#REF">REF</a> row keys) are not
-encrypted. If these need to be secured the portion after the ':' would
-need to be encrypted with a block cipher. However these strings are
-very short and very common (HEAD, refs/heads/master, refs/tags/v1.0),
-making encryption difficult. A variation on the schema might move all
-rows for a repository into a single protocol message, then encrypt
-the protobuf into a single cell. Unfortunately this strategy has a
-high update cost, and references change frequently.</p>
-
-<p>Object SHA-1 names (<a href="#OBJECT_INDEX">OBJECT_INDEX</a> row
-keys and <a href="#CHUNK.index">CHUNK.index</a> values) are not
-encrypted. This allows a reader to determine if a repository contains
-a specific revision, but does not allow them to inspect the contents
-of the revision. The CHUNK.index column could also be encrypted with a
-block cipher when CHUNK.chunk is encrypted (see above), however the
-OBJECT_INDEX table row keys cannot be encrypted if abbreviation
-expansion is to be supported for end-users of the repository. The row
-keys must be unencrypted as abbreviation resolution is performed by a
-prefix range scan over the keys.</p>
-
-<p>The remaining tables and columns contain only statistics (e.g.
-object counts or cell sizes), or internal surrogate keys
-(repository_id, chunk_key) and do not require encryption.</p>
-
-<hr />
-<a name="REPOSITORY_INDEX"><h2>Table REPOSITORY_INDEX</h2></a>
-
-<p>Maps a repository name, as presented in the URL by an end-user or
-client application, into its internal repository_id value. This
-mapping allows the repository name to be quickly modified (e.g.
-renamed) without needing to update the larger data rows of the
-repository.</p>
-
-<p>The exact semantics of the repository_name format is left as a
-deployment decision, but DNS hostname, '/', repository name would be
-one common usage.</p>
-
-<h3>Row Key</h3>
-<div class='rowkey'>
-  <div>
-  <span class='header'>Row Key:</span>
-  <span class='var'>repository_name</span>
-  </div>
-
-  <p>Human readable name of the repository, typically derived from the
-  HTTP <code>Host</code> header and path in the URL.</p>
-
-  <p>Examples:</p>
-  <ul>
-  <li><span class='example'>com.example.git/pub/git/foo.git</span></li>
-  <li><span class='example'>com.example.git/~user/mystuff.git</span></li>
-  </ul>
-</div>
-
-<h3>Columns</h3>
-<div class='colfamily'>
-  <div>
-  <span class='header'>Column:</span>
-  <a name="REPOSITORY_INDEX.id"><span class='lit'>id:</span></a>
-  </div>
-
-  <p>The repository_id, as an 8-digit hex ASCII string.</p>
-</div>
-
-<h3>Size Estimate</h3>
-
-<p>Fewer than 100,000 rows; a more likely estimate is 1,000 rows.
-Total disk usage under 512 KiB, assuming 1,000 names and 256
-characters per name.</p>
-
-<h3>Updates</h3>
-
-<p>Only on repository creation or rename, which is infrequent (&lt;10
-rows/month). Updates are performed in a row-level transaction, to
-ensure a name is either assigned uniquely, or fails.</p>
-
-<h3>Reads</h3>
-
-<p>Reads are tried first against memcache, then against the DHT if the
-entry did not exist in memcache. Successful reads against the DHT are
-put back into memcache in the background.</p>
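
A sketch of that read path, where CacheClient and DhtClient are hypothetical
stand-ins for the memcache and DHT connections:

  interface CacheClient {
    String get(String key);
    void putAsync(String key, String value); // fire-and-forget background write
  }

  interface DhtClient {
    String get(String table, String rowKey, String column);
  }

  class RepositoryIndexLookup {
    static String lookupId(String repositoryName, CacheClient memcache, DhtClient dht) {
      String id = memcache.get(repositoryName);
      if (id != null)
        return id; // cache hit, no DHT read required
      id = dht.get("REPOSITORY_INDEX", repositoryName, "id");
      if (id != null)
        memcache.putAsync(repositoryName, id); // repopulate the cache in the background
      return id;
    }
  }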
-
-<a name="REPOSITORY"><h2>Table REPOSITORY</h2></a>
-
-<p>Tracks top-level information about each repository.</p>
-
-<h3>Row Key</h3>
-<div class='rowkey'>
-  <div>
-  <span class='header'>Row Key:</span>
-  <span class='var'>repository_id</span>
-  </div>
-
-  <p>The repository_id, as an 8-digit hex ASCII string.</p>
-</div>
-
-<p>Typically this is assigned sequentially, then has the bits reversed
-to evenly spread repositories throughout the DHT.  For example the
-first repository is <code>80000000</code>, and the second is
-<code>40000000</code>.</p>
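
For illustration, that bit-reversal scheme maps a sequence number to the row
key as follows (the allocator class is hypothetical):

  class RepositoryIdAllocator {
    /**
     * Map a sequentially assigned number to the 8-digit hex repository_id:
     * 1 becomes "80000000", 2 becomes "40000000".
     */
    static String toRepositoryId(int sequence) {
      return String.format("%08x", Integer.reverse(sequence));
    }
  }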
-
-<h3>Columns</h3>
-<div class='colfamily'>
-  <div>
-  <span class='header'>Column:</span>
-  <a name="REPOSITORY.chunk-info"><span class='lit'>chunk-info:</span></a>
-  <span class='var'>chunk_key[short]</span>
-  </div>
-
-  <p>Cell value is the protocol message <a
-  href="#message_ChunkInfo">ChunkInfo</a> describing the chunk's
-  contents. Most of the message's fields are only useful for quota
-  accounting and reporting.</p>
-</div>
-
-<p>This column exists to track all of the chunks that make up a
-repository's object set. Garbage collection and quota accounting tasks
-can primarily drive off this column, rather than scanning the much
-larger <a href="#CHUNK">CHUNK</a> table with a regular expression on
-the chunk row key.</p>
-
-<p>As each chunk averages 1 MiB in size, the linux-2.6 repository
-(at 373 MiB) has about 400 chunks and thus about 400 chunk-info
-cells. The chromium repository (at 1 GiB) has about 1000 chunk-info
-cells. It would not be uncommon to have 2000 chunk-info cells.</p>
-
-<div class='colfamily'>
-  <div>
-  <span class='header'>Column:</span>
-  <a name="REPOSITORY.cached-pack"><span class='lit'>cached-pack:</span></a>
-  <span class='var'>NNNNx38</span>
-  <span class='lit'>.</span>
-  <span class='var'>VVVVx38</span>
-  </div>
-
-  <p>Variables:</p>
-  <ul>
-  <li><span class='var'>NNNNx38</span> = 40 hex digit name of the cached pack</li>
-  <li><span class='var'>VVVVx38</span> = 40 hex digit version of the cached pack</li>
-  </ul>
-
-  <p>Examples:</p>
-  <ul>
-  <li><span class='example'>4e32fb97103981e7dd53dcc786640fa4fdb444b8.8975104a03d22e54f7060502e687599d1a2c2516</span></li>
-  </ul>
-
-  <p>Cell value is the protocol message <a
-  href="#message_CachedPackInfo">CachedPackInfo</a> describing the
-  chunks that make up a cached pack.</p>
-</div>
-
-<p>The <code>cached-pack</code> column family describes large lists of
-chunks that when combined together in a specific order create a valid
-Git pack file directly streamable to a client. This avoids needing to
-enumerate and pack the entire repository on each request.</p>
-
-<p>The cached-pack name (NNNNx38 above) is the SHA-1 of the objects
-contained within the pack, in binary, sorted. This is the standard
-naming convention for pack files on the local filesystem. The version
-(VVVVx38 above) is the SHA-1 of the chunk keys, sorted. The version
-makes the cached-pack cell unique, if any single bit in the compressed
-data is modified a different version will be generated, and a
-different cell will be used to describe the alternate version of the
-same data. The version is necessary to prevent repacks of the same
-object set (but with different compression settings or results) from
-stepping on active readers.</p>
-
-<h2>Size Estimate</h2>
-
-<p>1 row per repository (~1,000 repositories); however, the majority of
-the storage cost is in the <code>chunk-info</code> column family,
-which can have more than 2000 cells per repository.</p>
-
-<p>Each <code>chunk-info</code> cell is on average 147 bytes. For a
-large repository like chromium.git (over 1000 chunks) this is only 147
-KiB for the entire row.</p>
-
-<p>Each <code>cached-pack</code> cell is on average 5350 bytes. Most
-repositories have 1 of these cells, 2 while the repository is being
-repacked on the server side to update the cached-pack data.</p>
-
-<h2>Updates</h2>
-
-<p>Information about each ~1 MiB chunk of pack data received over the
-network is stored as a unique column in the <code>chunk-info</code>
-column family.</p>
-
-<p>Most pushes are at least 2 chunks (commit, tree), with 50 pushes
-per repository per day being possible (50,000 new cells/day).</p>
-
-<p><b>TODO:</b> Average push rates?</p>
-
-<h2>Reads</h2>
-
-<p><i>Serving clients:</i> Read all cells in the
-<code>cached-pack</code> column family, typically only 1-5 cells. The
-cells are cached in memcache and read from there first.</p>
-
-<p><i>Garbage collection:</i> Read all cells in the
-<code>chunk-info</code> column family to determine which chunks are
-owned by this repository, without scanning the <a href="#CHUNK">CHUNK</a> table.
-Delete <code>chunk-info</code> after the corresponding <a href="#CHUNK">CHUNK</a>
-row has been deleted. Unchanged chunks have their info left alone.</p>
-
-<a name="REF"><h2>Table REF</h2></a>
-
-<p>Associates a human readable branch (e.g.
-<code>refs/heads/master</code>) or tag (e.g.
-<code>refs/tags/v1.0</code>) name to the Git
-object that represents that current state of
-the repository.</p>
-
-<h3>Row Key</h3>
-<div class='rowkey'>
-  <div>
-  <span class='header'>Row Key:</span>
-  <span class='var'>repository_id</span>
-  <span class='lit'>:</span>
-  <span class='var'>ref_name</span>
-  </div>
-
-  <p>Variables:</p>
-  <ul>
-  <li><span class='var'>repository_id</span> = Repository owning the reference (see above)</li>
-  <li><span class='var'>ref_name</span> = Name of the reference, UTF-8 string</li>
-  </ul>
-
-  <p>Examples:</p>
-  <ul>
-  <li><span class='example'>80000000:HEAD</span></li>
-  <li><span class='example'>80000000:refs/heads/master</span></li>
-  <br />
-  <li><span class='example'>40000000:HEAD</span></li>
-  <li><span class='example'>40000000:refs/heads/master</span></li>
-  </ul>
-</div>
-
-<p>The separator <code>:</code> used in the row key was chosen because
-this character is not permitted in a Git reference name.</p>
-
-<h3>Columns</h3>
-<div class='colfamily'>
-  <div>
-  <span class='header'>Column:</span>
-  <a name="REF.target"><span class='lit'>target:</span></a>
-  </div>
-
-  <p>Cell value is the protocol message
-  <a href="#message_RefData">RefData</a> describing the
-  current SHA-1 the reference points to, and the chunk
-  it was last observed in. The chunk hint allows skipping
-  a read of <a href="#OBJECT_INDEX">OBJECT_INDEX</a>.</p>
-
-  <p>Several versions (5) are stored for emergency rollbacks.
-  Additional versions beyond 5 are cleaned up during table garbage
-  collection as managed by the DHT's cell GC.</p>
-</div>
-
-<h3>Size Estimate</h3>
-
-<p><i>Normal Git usage:</i> ~10 branches per repository, ~200 tags.
-For 1,000 repositories, about 200,000 rows total. Average row size is
-about 240 bytes/row before compression (67 after), or 48M total.</p>
-
-<p><i>Gerrit Code Review usage:</i> More than 300 new rows per day.
-Each snapshot of each change under review is one reference.</p>
-
-<h3>Updates</h3>
-
-<p>Writes are performed by doing an atomic compare-and-swap (through a
-transaction), changing the RefData protocol buffer.</p>
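-
-<p>A sketch of that update loop, under a hypothetical storage interface
-(the real provider SPI is not shown here):</p>
-
-<pre>
-class RefCasSketch {
-  interface RefTable {                   // hypothetical stand-in for the SPI
-    byte[] read(String rowKey);          // current RefData cell bytes, or null
-    boolean compareAndSwap(String rowKey, byte[] expect, byte[] update);
-  }
-
-  static boolean updateRef(RefTable refs, String rowKey, byte[] newRefData) {
-    int attemptsLeft = 5;
-    while (attemptsLeft-- > 0) {
-      byte[] prior = refs.read(rowKey);
-      if (refs.compareAndSwap(rowKey, prior, newRefData))
-        return true;                     // swap applied atomically
-      // lost the race to another writer; re-read and retry
-    }
-    return false;
-  }
-}
-</pre>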
-
-<h3>Reads</h3>
-
-<p>Reads perform a prefix scan for all rows starting with
-<code>repository_id:</code>. Plans exist to cache these reads within a
-custom service, avoiding most DHT queries.</p>
-
-<a name="OBJECT_INDEX"><h2>Table OBJECT_INDEX</h2></a>
-
-<p>The Git network protocol has clients sending object SHA-1s to the
-server, with no additional context or information. End-users may also
-type a SHA-1 into a web search box. This table provides a mapping of
-the object SHA-1 to which chunk(s) store the object's data. The table
-is sometimes also called the 'global index', since it names where
-every single object is stored.</p>
-
-<h3>Row Key</h3>
-<div class='rowkey'>
-  <div>
-  <span class='header'>Row Key:</span>
-  <span class='var'>NN</span>
-  <span class='lit'>.</span>
-  <span class='var'>repository_id</span>
-  <span class='lit'>.</span>
-  <span class='var'>NNx40</span>
-  </div>
-
-  <p>Variables:</p>
-  <ul>
-  <li><span class='var'>NN</span> = First 2 hex digits of object SHA-1</li>
-  <li><span class='var'>repository_id</span> = Repository owning the object (see above)</li>
-  <li><span class='var'>NNx40</span> = Complete object SHA-1 name, in hex</li>
-  </ul>
-
-  <p>Examples:</p>
-  <ul>
-  <li><span class='example'>2b.80000000.2b5c9037c81c38b3b9abc29a3a87a4abcd665ed4</span></li>
-  <li><span class='example'>8f.40000000.8f270a441569b127cc4af8a6ef601d94d9490efb</span></li>
-  </ul>
-</div>
-
-<p>The first 2 hex digits (<code>NN</code>) distribute object keys
-within the same repository around the DHT keyspace, preventing a busy
-repository from creating too much of a hot-spot within the DHT.  To
-simplify key generation, these 2 digits are repeated after the
-repository_id, as part of the 40 hex digit object name.</p>
-
-<p>Keys must be clustered by repository_id to support extending
-abbreviations. End-users may supply an abbreviated SHA-1 of 4 or more
-digits (up to 39) and ask the server to complete them to a full 40
-digit SHA-1 if the server has the relevant object within the
-repository's object set.</p>
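-
-<p>A sketch of the key construction and the abbreviation prefix,
-assuming <code>repository_id</code> is already in the 8 hex digit form
-used in the examples above:</p>
-
-<pre>
-import org.eclipse.jgit.lib.ObjectId;
-
-class ObjectIndexKeys {
-  // Full row key: "NN.repository_id.NNx40".
-  static String rowKey(String repositoryId, ObjectId objectId) {
-    String hex = objectId.name();        // 40 hex digits
-    return hex.substring(0, 2) + "." + repositoryId + "." + hex;
-  }
-
-  // Prefix scanned to complete an abbreviation of 4 or more hex digits.
-  static String abbreviationPrefix(String repositoryId, String abbrevHex) {
-    return abbrevHex.substring(0, 2) + "." + repositoryId + "." + abbrevHex;
-  }
-}
-</pre>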
-
-<p>A schema variant that did not include the repository_id as part of
-the row key was considered, but discarded because completing a short
-4-6 digit abbreviated SHA-1 would be impractical once there were
-billions of objects stored in the DHT. Git end-users expect to be able
-to use 4 or 6 digit abbreviations on very small repositories, as the
-number of objects is low and thus the number of bits required to
-uniquely name an object within that object set is small.</p>
-
-<h3>Columns</h3>
-<div class='colfamily'>
-  <div>
-  <span class='header'>Column:</span>
-  <a name="OBJECT_INDEX.info"><span class='lit'>info:</span></a>
-  <span class='var'>chunk_key[short]</span>
-  </div>
-
-  <p>Cell value is the protocol message
-  <a href="#message_ObjectInfo">ObjectInfo</a> describing how the object
-  named by the row key is stored in the chunk named by the column name.</p>
-
-  <p>Cell timestamp matters. The <b>oldest cell</b> within the
-  entire column family is favored during reads. As chunk_key is
-  unique, versions within a single column aren't relevant.</p>
-</div>
-
-<h3>Size Estimate</h3>
-
-<p>Average row size per object/chunk pair is 144 bytes uncompressed
-(87 compressed), based on the linux-2.6 repository. The linux-2.6
-repository has 1.8 million objects, and is growing at a rate of about
-300,000 objects/year. Total usage for linux-2.6 is above 154M.</p>
-
-<p>Most rows contain only 1 cell, as the object appears in only 1
-chunk within that repository.</p>
-
-<p><i>Worst case:</i> 1.8 million rows/repository * 1,000 repositories
-is around 1.8 billion rows and 182G.</p>
-
-<h3>Updates</h3>
-
-<p>One write per object received over the network; typically performed
-as part of an asynchronous batch. Each batch is sized around 512 KiB
-(about 3000 rows). Because of SHA-1's uniform distribution, row keys
-are first sorted and then batched into buckets of about 3000 rows. To
-prevent too much activity from going to one table segment at a time,
-the complete object list is segmented into up to 32 groups, which are
-written in round-robin order.</p>
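-
-<p>One possible reading of that batching scheme, with hypothetical
-names for the write API (the exact grouping policy is an assumption):</p>
-
-<pre>
-import java.util.Arrays;
-
-class ObjectIndexBatcher {
-  interface Sink {                       // hypothetical write API
-    void putBatch(int group, String[] batchOfRowKeys);
-  }
-
-  static void write(String[] rowKeys, Sink sink) {
-    Arrays.sort(rowKeys);                // SHA-1 keys spread uniformly
-    int batch = 0;
-    int start = 0;
-    while (start != rowKeys.length) {
-      int end = Math.min(start + 3000, rowKeys.length);
-      sink.putBatch(batch % 32, Arrays.copyOfRange(rowKeys, start, end));
-      batch++;
-      start = end;
-    }
-  }
-}
-</pre>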
-
-<p>A full push of the linux-2.6 repository writes 1.8 million
-rows as there are 1.8 million objects in the pack stream.</p>
-
-<p>During normal insert or receive operations, each received object is
-a blind write to add one new <code>info:chunk_key[short]</code> cell
-to the row.  During repack, all cells in the <code>info</code> column
-family are replaced with a single cell.</p>
-
-<h3>Reads</h3>
-
-<p>During common ancestor negotiation reads occur in batches of 64-128
-full row keys, uniformly distributed throughout the key space. Most of
-these reads are misses, as the OBJECT_INDEX table does not contain the
-key offered by the client. A successful negotiation for most developers
-requires at least two rounds of 64 objects back-to-back over HTTP. Due
-to the high miss rate on this table, an in-memory bloom filter may be
-important for performance.</p>
-
-<p>To support the high read-rate (and high miss-rate) during common
-ancestor negotiation, an alternative to an in-memory bloom filter
-within the DHT is to download the entire set of keys into an alternate
-service job for recently accessed repositories. This service can only
-be used if <i>all</i> of the keys for the same repository_id are
-hosted within the service. Given that this is under 36 GiB for the worst
-case 1.8 billion rows mentioned above, this may be feasible. Loading
-the table can be performed by fetching <a
-href="#REPOSITORY.chunk-info">REPOSITORY.chunk-info</a> and then
-performing parallel gets for the <a
-href="#CHUNK.index">CHUNK.index</a> column, and scanning the local
-indexes to construct the list of known objects.</p>
-
-<p>During repacking with no delta reuse, the worst-case scenario requires
-reading all records with the same repository_id (for linux-2.6 this
-is 1.8 million rows). Reads are made in a configurable batch size;
-right now this is set at 2048 keys/batch, with 4 concurrent batches in
-flight at a time.</p>
-
-<p>Reads are tried first against memcache, then against the DHT if the
-entry did not exist in memcache. Successful reads against the DHT are
-put back into memcache in the background.</p>
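-
-<p>The read path, sketched with hypothetical stand-ins for the cache
-and DHT services:</p>
-
-<pre>
-import java.util.concurrent.Executor;
-
-class CacheAsideReader {
-  interface Cache { byte[] get(String key); void put(String key, byte[] value); }
-  interface Dht   { byte[] get(String key); }
-
-  static byte[] read(final Cache memcache, Dht dht, Executor background,
-      final String rowKey) {
-    byte[] cached = memcache.get(rowKey);
-    if (cached != null)
-      return cached;                        // cache hit
-
-    final byte[] fromDht = dht.get(rowKey); // authoritative copy
-    if (fromDht != null)
-      background.execute(new Runnable() {
-        public void run() {
-          memcache.put(rowKey, fromDht);    // refill the cache asynchronously
-        }
-      });
-    return fromDht;
-  }
-}
-</pre>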
-
-<a name="CHUNK"><h2>Table CHUNK</h2></a>
-
-<p>Stores the object data for a repository, containing commit history,
-directory structure, and file revisions. Each chunk is typically 1 MiB
-in size, excluding the index and meta columns.</p>
-
-<h3>Row Key</h3>
-<div class='rowkey'>
-  <div>
-  <span class='header'>Row Key:</span>
-  <span class='var'>HH</span>
-  <span class='lit'>.</span>
-  <span class='var'>repository_id</span>
-  <span class='lit'>.</span>
-  <span class='var'>HHx40</span>
-  </div>
-
-  <p>Variables:</p>
-  <ul>
-  <li><span class='var'>HH</span> = First 2 hex digits of chunk SHA-1</li>
-  <li><span class='var'>repository_id</span> = Repository owning the chunk (see above)</li>
-  <li><span class='var'>HHx40</span> = Complete chunk SHA-1, in hex</li>
-  </ul>
-
-  <p>Examples:</p>
-  <ul>
-  <li><span class='example'>09.80000000.09e0eb57543be633b004b672cbebdf335aa4d53f</span> <i>(full key)</i></li>
-  </ul>
-</div>
-
-<p>Chunk keys are computed by first computing the SHA-1 of the
-<code>chunk:</code> column, which is the compressed object contents
-stored within the chunk. As the chunk data includes a 32 bit salt in
-the trailing 4 bytes, this value is random even for the exact same
-object input.</p>
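-
-<p>A simplified sketch of that derivation (the deleted
-<code>ChunkFormatter</code> in this package performs the equivalent
-steps when it closes a chunk):</p>
-
-<pre>
-import java.security.MessageDigest;
-
-import org.eclipse.jgit.lib.ObjectId;
-
-class ChunkKeys {
-  // chunkData already ends with the 4 byte salt described above.
-  static String rowKey(String repositoryId, byte[] chunkData) throws Exception {
-    MessageDigest md = MessageDigest.getInstance("SHA-1");
-    md.update(chunkData);
-    String hex = ObjectId.fromRaw(md.digest()).name();
-    return hex.substring(0, 2) + "." + repositoryId + "." + hex;
-  }
-}
-</pre>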
-
-<p>The leading 2 hex digit <code>HH</code> component distributes
-chunks for the same repository (and over the same time period) evenly
-around the DHT keyspace, preventing any portion from becoming too
-hot.</p>
-
-<h3>Columns</h3>
-<div class='colfamily'>
-  <div>
-  <span class='header'>Column:</span>
-  <a name="CHUNK.chunk"><span class='lit'>chunk:</span></a>
-  </div>
-
-  <p>Multiple objects in Git pack-file format, about 1 MiB in size.
-  The data is already very highly compressed by Git and is not further
-  compressible by the DHT.</p>
-</div>
-
-<p>This column is essentially the standard Git pack-file format,
-without the standard header or trailer. Objects can be stored in
-either whole format (object content is simply deflated inline)
-or in delta format (reference to a delta base is followed by
-deflated sequence of copy and/or insert instructions to recreate
-the object content). The OBJ_OFS_DELTA format is preferred
-for deltas, since it tends to use a shorter encoding than the
-OBJ_REF_DELTA format. Offsets that reach back past the start of the
-chunk actually point into other chunks, and must be resolved using the
-<code>meta.base_chunk.relative_start</code> field.</p>
-
-<p>Because the row key is derived from the SHA-1 of this column, the
-trailing 4 bytes are randomly generated at insertion time, to make it
-impractical for remote clients to predict the name of the chunk row.
-This allows the stream parser to blindly insert rows without first
-checking for row existence, or worrying about replacing an existing
-row and causing data corruption.</p>
-
-<p>This column value is essentially opaque to the DHT.</p>
-
-<div class='colfamily'>
-  <div>
-  <span class='header'>Column:</span>
-  <a name="CHUNK.index"><span class='lit'>index:</span></a>
-  </div>
-
-  <p>Binary searchable table listing object SHA-1 and starting offset
-  of that object within the <code>chunk:</code> data stream. The data
-  in this index is essentially random (due to the SHA-1s stored in
-  binary) and thus is not compressible.</p>
-</div>
-
-<p>Sorted list of SHA-1s of each object that is stored in this chunk,
-along with the offset. This column allows efficient random access to
-any object within the chunk, without needing to perform a remote read
-against <a href="#OBJECT_INDEX">OBJECT_INDEX</a> table. The column is
-very useful at read time, where pointers within Git objects will
-frequently reference other objects stored in the same chunk.</p>
-
-<p>This column is sometimes called the local index, since it is local
-only to the chunk and thus differs from the global index stored in the
-<a href="#OBJECT_INDEX">OBJECT_INDEX</a> table.</p>
-
-<p>The column size is 24 bytes per object stored in the chunk. Commit
-chunks store on average 2200 commits/chunk, so a commit chunk index is
-about 52,800 bytes.</p>
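-
-<p>A lookup sketch, assuming the layout implied above: 24 bytes per
-entry, a 20 byte object SHA-1 followed by a 4 byte big-endian offset
-into the <code>chunk:</code> column (the real encoding may differ in
-detail):</p>
-
-<pre>
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.util.NB;
-
-class LocalIndexLookup {
-  static int findOffset(byte[] index, ObjectId objId) {
-    byte[] want = new byte[20];
-    objId.copyRawTo(want, 0);
-
-    int lo = 0, hi = index.length / 24;
-    while (lo != hi) {
-      int mid = (lo + hi) >>> 1;
-      int cmp = compare(want, index, mid * 24);
-      if (cmp == 0)
-        return NB.decodeInt32(index, mid * 24 + 20);  // offset within chunk:
-      if (cmp > 0)
-        lo = mid + 1;
-      else
-        hi = mid;
-    }
-    return -1;                                        // not in this chunk
-  }
-
-  private static int compare(byte[] want, byte[] index, int pos) {
-    for (int i = 0; i != 20; i++) {
-      int a = want[i] & 0xff;
-      int b = index[pos + i] & 0xff;
-      if (a != b)
-        return a > b ? 1 : -1;
-    }
-    return 0;
-  }
-}
-</pre>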
-
-<p>This column value is essentially opaque to the DHT.</p>
-
-<div class='colfamily'>
-  <div>
-  <span class='header'>Column:</span>
-  <a name="CHUNK.meta"><span class='lit'>meta:</span></a>
-  </div>
-
-  <p>Cell value is the protocol message
-  <a href="#message_ChunkMeta">ChunkMeta</a> describing prefetch
-  hints, object fragmentation, and delta base locations. Unlike
-  <code>chunk:</code> and <code>index:</code>, this column is
-  somewhat compressible.</p>
-</div>
-
-<p>The meta column provides information critical for reading the
-chunk's data. (Unlike <a href="#message_ChunkInfo">ChunkInfo</a> in
-the <a href="#REPOSITORY">REPOSITORY</a> table, which is used only for
-accounting.)</p>
-
-<p>The most important element is the BaseChunk nested message,
-describing a chunk that contains a base object required to inflate
-an object that is stored in this chunk as a delta.</p>
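-
-<p>Under the field definitions given in the <a
-href="#message_ChunkMeta">ChunkMeta</a> message below, one way a reader
-could map such a delta base onto its containing chunk (illustrative,
-not the deleted PackChunk implementation):</p>
-
-<pre>
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-
-class BaseChunkLookup {
-  // basePos is the base's position in this chunk's coordinates and is
-  // negative, i.e. the base starts before this chunk.  Entries are
-  // sorted by relative_start ascending, so the first entry that
-  // reaches back far enough contains the base; its offset there is
-  // relative_start + basePos.
-  static ChunkMeta.BaseChunk find(ChunkMeta meta, long basePos) {
-    for (ChunkMeta.BaseChunk base : meta.getBaseChunkList()) {
-      if (base.getRelativeStart() + basePos >= 0)
-        return base;
-    }
-    return null;                         // no base chunk reaches far enough
-  }
-}
-</pre>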
-
-<h3>Chunk Contents</h3>
-
-<p>Chunks try to store only a single object type; however, mixed object
-type chunks are supported. The rule to store only one object type per
-chunk improves data locality, reducing the number of chunks that need
-to be accessed from the DHT in order to perform a particular Git
-operation. Clustering commits together into a 'commit chunk' improves
-data locality during log/history walking operations, while clustering
-trees together into a 'tree chunk' improves data locality during the
-early stages of packing or difference generation.</p>
-
-<p>Chunks reuse the standard Git pack data format to support direct
-streaming of a chunk's <code>chunk:</code> column to clients, without
-needing to perform any data manipulation on the server. This enables
-high speed data transfer from the DHT to the client.</p>
-
-<h3>Large Object Fragmentation</h3>
-
-<p>If a chunk contains more than one object, all objects within the
-chunk must store their complete compressed form within the chunk. This
-limits an object to less than 1 MiB of compressed data.</p>
-
-<p>Larger objects whose compressed size is bigger than 1 MiB are
-fragmented into multiple chunks. The first chunk contains the object's
-pack header, and the first 1 MiB of compressed data. Subsequent data
-is stored in additional chunks. The additional chunk keys are stored
-in the <code>meta.fragment</code> field. Each chunk that is part of
-the same large object redundantly stores the exact same meta
-value.</p>
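-
-<p>A sketch of reassembly, assuming each fragment's <code>chunk:</code>
-column carries only this object's data plus the 4 byte salt trailer
-described above (<code>ChunkSource</code> is a hypothetical read
-API):</p>
-
-<pre>
-import java.io.ByteArrayOutputStream;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-
-class FragmentReader {
-  interface ChunkSource {                // hypothetical read API
-    byte[] chunkColumn(String chunkKey);
-  }
-
-  static byte[] objectStream(ChunkSource chunks, ChunkMeta meta) {
-    ByteArrayOutputStream out = new ByteArrayOutputStream();
-    for (String key : meta.getFragmentList()) {   // includes the first chunk
-      byte[] data = chunks.chunkColumn(key);
-      out.write(data, 0, data.length - 4);        // drop the salt trailer
-    }
-    return out.toByteArray();            // pack header + compressed data
-  }
-}
-</pre>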
-
-<h3>Size Estimate</h3>
-
-<p>Approximately the same size as if the repository were stored on the
-local filesystem. For the linux-2.6 repository (373M / 1.8 million
-objects), about 417M (373.75M in <code>chunk:</code>, 42.64M in
-<code>index:</code>, 656K in <code>meta:</code>).</p>
-
-<p>Row count is close to size / 1M (373M / 1M = 373 rows), but may be
-slightly higher (e.g. 402) due to fractional chunks on the end of
-large fragmented objects, or where the single object type rule caused a
-chunk to close before it was full.</p>
-
-<p>For the complete Android repository set, disk usage is ~13G.</p>
-
-<h3>Updates</h3>
-
-<p>This table is (mostly) append-only. Write operations blast in ~1
-MiB chunks, as the key format assures writers the new row does not
-already exist. Chunks are randomly scattered by the hashing function,
-and are not buffered very deep by writers.</p>
-
-<p><i>Interactive writes:</i> Small operations impacting only 1-5
-chunks will write all columns in a single operation. Most chunks of
-this variety will be very small, 1-10 objects per chunk and about 1-10
-KiB worth of compressed data inside the <code>chunk:</code> column.
-This class of write represents a single change made by one developer
-that must be shared back out immediately.</p>
-
-<p><i>Large pushes:</i> Large operations impacting tens to hundreds of
-chunks will first write the <code>chunk:</code> column, then come back
-later and populate the <code>index:</code> and <code>meta:</code>
-columns once all chunks have been written. The delayed writing of
-index and meta during large operations is required because the
-information for these columns is not available until the entire data
-stream from the Git client has been received and scanned. As the Git
-server may not have sufficient memory to store all chunk data (373M or
-1G!), it is written out first to free up memory.</p>
-
-<p><i>Garbage collection:</i> Chunks that are not optimally sized
-(less than the target ~1 MiB), optimally localized (too many graph
-pointers outside of the chunk), or compressed (Git found a smaller way
-to store the same content) will be replaced by first writing new
-chunks, and then deleting the old chunks.</p>
-
-<p>Worst case, this could churn as many as 402 rows and 373M worth of
-data for the linux-2.6 repository. Special consideration will be made
-to try and avoid replacing chunks whose <code>WWWW</code> key
-component is 'sufficiently old' and whose content is already
-sufficiently sized and compressed. This will help to limit churn to
-only more recently dated chunks, which are smaller in size.</p>
-
-<h3>Reads</h3>
-
-<p>All columns are read together as a unit. Memcache is checked first,
-with reads falling back to the DHT if the cache does not have the
-chunk.</p>
-
-<p>Reasonably accurate prefetching is supported through background
-threads and prefetching metadata embedded in the <a
-href="#message_CachedPackInfo">CachedPackInfo</a> and <a
-href="#message_ChunkMeta">ChunkMeta</a> protocol messages used by
-readers.</p>
-
-<hr />
-<h2>Protocol Messages</h2>
-
-<pre>
-package git_store;
-option java_package = "org.eclipse.jgit.storage.dht.proto";
-
-
-    // Entry in RefTable describing the target of the reference.
-    // Either symref *OR* target must be populated, but never both.
-    //
-<a name="message_RefData">message RefData</a> {
-    // An ObjectId with an optional hint about where it can be found.
-    //
-  message Id {
-    required string object_name = 1;
-    optional string chunk_key = 2;
-  }
-
-    // Name of another reference this reference inherits its target
-    // from.  The target is inherited on-the-fly at runtime by reading
-    // the other reference.  Typically only "HEAD" uses symref.
-    //
-  optional string symref = 1;
-
-    // ObjectId this reference currently points at.
-    //
-  optional Id target = 2;
-
-    // True if the correct value for peeled is stored.
-    //
-  optional bool is_peeled = 3;
-
-    // If is_peeled is true, this field is accurate.  This field
-    // exists only if target points to an annotated tag object, in
-    // which case it stores the "object" field of that tag.
-    //
-  optional Id peeled = 4;
-}
-
-
-    // Entry in ObjectIndexTable, describes how an object appears in a chunk.
-    //
-<a name="message_ObjectInfo">message ObjectInfo</a> {
-    // Type of Git object.
-    //
-  enum ObjectType {
-    COMMIT = 1;
-    TREE = 2;
-    BLOB = 3;
-    TAG = 4;
-  }
-  optional ObjectType object_type = 1;
-
-    // Position of the object's header within its chunk.
-    //
-  required int32 offset = 2;
-
-    // Total number of compressed data bytes, not including the pack
-    // header. For fragmented objects this is the sum of all chunks.
-    //
-  required int64 packed_size = 3;
-
-    // Total number of bytes of the uncompressed object. For a
-    // delta this is the size after applying the delta onto its base.
-    //
-  required int64 inflated_size = 4;
-
-    // ObjectId of the delta base, if this object is stored as a delta.
-    // The base is stored in raw binary.
-    //
-  optional bytes delta_base = 5;
-}
-
-
-    // Describes at a high-level the information about a chunk.
-    // A repository can use this summary to determine how much
-    // data is stored, or when garbage collection should occur.
-    //
-<a name="message_ChunkInfo">message ChunkInfo</a> {
-    // Source of the chunk (what code path created it).
-    //
-  enum Source {
-    RECEIVE = 1;    // Came in over the network from external source.
-    INSERT = 2;     // Created in this repository (e.g. a merge).
-    REPACK = 3;     // Generated during a repack of this repository.
-  }
-  optional Source source = 1;
-
-    // Type of Git object stored in this chunk.
-    //
-  enum ObjectType {
-    MIXED = 0;
-    COMMIT = 1;
-    TREE = 2;
-    BLOB = 3;
-    TAG = 4;
-  }
-  optional ObjectType object_type = 2;
-
-    // True if this chunk is a member of a fragmented object.
-    //
-  optional bool is_fragment = 3;
-
-    // If present, key of the CachedPackInfo object
-    // that this chunk is a member of.
-    //
-  optional string cached_pack_key = 4;
-
-    // Summary description of the objects stored here.
-    //
-  message ObjectCounts {
-      // Number of objects stored in this chunk.
-      //
-    optional int32 total = 1;
-
-      // Number of objects stored in whole (non-delta) form.
-      //
-    optional int32 whole = 2;
-
-      // Number of objects stored in OFS_DELTA format.
-      // The delta base appears in the same chunk, or
-      // may appear in an earlier chunk through the
-      // ChunkMeta.base_chunk link.
-      //
-    optional int32 ofs_delta = 3;
-
-      // Number of objects stored in REF_DELTA format.
-      // The delta base is at an unknown location.
-      //
-    optional int32 ref_delta = 4;
-  }
-  optional ObjectCounts object_counts = 5;
-
-    // Size in bytes of the chunk's compressed data column.
-    //
-  optional int32 chunk_size = 6;
-
-    // Size in bytes of the chunk's index.
-    //
-  optional int32 index_size = 7;
-
-    // Size in bytes of the meta information.
-    //
-  optional int32 meta_size  = 8;
-}
-
-
-    // Describes meta information about a chunk, stored inline with it.
-    //
-<a name="message_ChunkMeta">message ChunkMeta</a> {
-    // Enumerates the other chunks this chunk depends upon by OFS_DELTA.
-    // Entries are sorted by relative_start ascending, enabling search.  Thus
-    // the earliest chunk is at the end of the list.
-    //
-  message BaseChunk {
-      // Bytes between start of the base chunk and start of this chunk.
-      // Although the value is positive, it is a negative offset.
-      //
-    required int64 relative_start = 1;
-    required string chunk_key = 2;
-  }
-  repeated BaseChunk base_chunk = 1;
-
-    // If this chunk is part of a fragment, key of every chunk that
-    // makes up the fragment, including this chunk.
-    //
-  repeated string fragment = 2;
-
-    // Chunks that should be prefetched if reading the current chunk.
-    //
-  message PrefetchHint {
-    repeated string edge = 1;
-    repeated string sequential = 2;
-  }
-  optional PrefetchHint commit_prefetch = 51;
-  optional PrefetchHint tree_prefetch = 52;
-}
-
-
-    // Describes a CachedPack, for efficient bulk clones.
-    //
-<a name="message_CachedPackInfo">message CachedPackInfo</a> {
-    // Unique name of the cached pack.  This is the SHA-1 hash of
-    // all of the objects that make up the cached pack, sorted and
-    // in binary form.  (Same rules as Git on the filesystem.)
-    //
-  required string name = 1;
-
-    // SHA-1 of all chunk keys, which are themselves SHA-1s of the
-    // raw chunk data. If any bit differs in compression (due to
-    // repacking) the version will differ.
-    //
-  required string version = 2;
-
-    // Total number of objects in the cached pack. This must be known
-    // in order to set the final resulting pack header correctly before it
-    // is sent to clients.
-    //
-  required int64 objects_total = 3;
-
-    // Number of objects stored as deltas, rather than deflated whole.
-    //
-  optional int64 objects_delta = 4;
-
-    // Total size of the chunks, in bytes, not including the chunk footer.
-    //
-  optional int64 bytes_total = 5;
-
-    // Objects this pack starts from.
-    //
-  message TipObjectList {
-    repeated string object_name = 1;
-  }
-  required TipObjectList tip_list = 6;
-
-    // Chunks, in order of occurrence in the stream.
-    //
-  message ChunkList {
-    repeated string chunk_key = 1;
-  }
-  required ChunkList chunk_list = 7;
-}
-</pre>
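-
-<p>For illustration, refs for the examples above could be encoded with
-the generated builders (a hedged sketch; values are taken from the
-sample rows earlier in this document):</p>
-
-<pre>
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
-
-class RefDataExamples {
-  static RefData master() {
-    return RefData.newBuilder()
-        .setTarget(RefData.Id.newBuilder()
-            .setObjectName("2b5c9037c81c38b3b9abc29a3a87a4abcd665ed4")
-            .setChunkKey("09.80000000.09e0eb57543be633b004b672cbebdf335aa4d53f"))
-        .setIsPeeled(false)
-        .build();
-  }
-
-  static RefData head() {                // HEAD is typically a symref
-    return RefData.newBuilder().setSymref("refs/heads/master").build();
-  }
-}
-</pre>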
-
-</body>
-</html>
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/AsyncCallback.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/AsyncCallback.java
deleted file mode 100644 (file)
index a59e47b..0000000
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-/**
- * Receives notification when an asynchronous operation has finished.
- * <p>
- * Many storage provider interface operations use this type to signal completion
- * or failure status of an operation that runs asynchronously to the caller.
- * <p>
- * Only one of {@link #onSuccess(Object)} or {@link #onFailure(DhtException)}
- * should be invoked.
- *
- * @param <T>
- *            type of object returned from the operation on success.
- */
-public interface AsyncCallback<T> {
-       /**
-        * Notification the operation completed.
-        *
-        * @param result
-        *            the result value from the operation.
-        */
-       public void onSuccess(T result);
-
-       /**
-        * Notification the operation failed.
-        *
-        * @param error
-        *            a description of the error.
-        */
-       public void onFailure(DhtException error);
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/BatchObjectLookup.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/BatchObjectLookup.java
deleted file mode 100644 (file)
index 218bffc..0000000
+++ /dev/null
@@ -1,264 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static java.util.concurrent.TimeUnit.MILLISECONDS;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Semaphore;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.locks.ReentrantLock;
-
-import org.eclipse.jgit.lib.NullProgressMonitor;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ProgressMonitor;
-import org.eclipse.jgit.lib.ThreadSafeProgressMonitor;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.Database;
-
-abstract class BatchObjectLookup<T extends ObjectId> {
-       private final RepositoryKey repo;
-
-       private final Database db;
-
-       private final DhtReader reader;
-
-       private final ThreadSafeProgressMonitor progress;
-
-       private final Semaphore batches;
-
-       private final ReentrantLock resultLock;
-
-       private final AtomicReference<DhtException> error;
-
-       private final int concurrentBatches;
-
-       private final List<T> retry;
-
-       private final ArrayList<ObjectInfo> tmp;
-
-       private boolean retryMissingObjects;
-
-       private boolean cacheLoadedInfo;
-
-       BatchObjectLookup(DhtReader reader) {
-               this(reader, null);
-       }
-
-       BatchObjectLookup(DhtReader reader, ProgressMonitor monitor) {
-               this.repo = reader.getRepositoryKey();
-               this.db = reader.getDatabase();
-               this.reader = reader;
-
-               if (monitor != null && monitor != NullProgressMonitor.INSTANCE)
-                       this.progress = new ThreadSafeProgressMonitor(monitor);
-               else
-                       this.progress = null;
-
-               this.concurrentBatches = reader.getOptions()
-                               .getObjectIndexConcurrentBatches();
-
-               this.batches = new Semaphore(concurrentBatches);
-               this.resultLock = new ReentrantLock();
-               this.error = new AtomicReference<DhtException>();
-               this.retry = new ArrayList<T>();
-               this.tmp = new ArrayList<ObjectInfo>(4);
-       }
-
-       void setRetryMissingObjects(boolean on) {
-               retryMissingObjects = on;
-       }
-
-       void setCacheLoadedInfo(boolean on) {
-               cacheLoadedInfo = on;
-       }
-
-       void select(Iterable<T> objects) throws IOException {
-               selectInBatches(Context.FAST_MISSING_OK, lookInCache(objects));
-
-               // Not all of the selection ran with fast options.
-               if (retryMissingObjects && !retry.isEmpty()) {
-                       batches.release(concurrentBatches);
-                       selectInBatches(Context.READ_REPAIR, retry);
-               }
-
-               if (progress != null)
-                       progress.pollForUpdates();
-       }
-
-       private Iterable<T> lookInCache(Iterable<T> objects) {
-               RecentInfoCache infoCache = reader.getRecentInfoCache();
-               List<T> missing = null;
-               for (T obj : objects) {
-                       List<ObjectInfo> info = infoCache.get(obj);
-                       if (info != null) {
-                               onResult(obj, info);
-                               if (progress != null)
-                                       progress.update(1);
-                       } else {
-                               if (missing == null) {
-                                       if (objects instanceof List<?>)
-                                               missing = new ArrayList<T>(((List<?>) objects).size());
-                                       else
-                                               missing = new ArrayList<T>();
-                               }
-                               missing.add(obj);
-                       }
-               }
-               if (missing != null)
-                       return missing;
-               return Collections.emptyList();
-       }
-
-       private void selectInBatches(Context options, Iterable<T> objects)
-                       throws DhtException {
-               final int batchSize = reader.getOptions()
-                               .getObjectIndexBatchSize();
-
-               Map<ObjectIndexKey, T> batch = new HashMap<ObjectIndexKey, T>();
-               Iterator<T> otpItr = objects.iterator();
-               while (otpItr.hasNext()) {
-                       T otp = otpItr.next();
-
-                       batch.put(ObjectIndexKey.create(repo, otp), otp);
-
-                       if (batch.size() < batchSize && otpItr.hasNext())
-                               continue;
-
-                       if (error.get() != null)
-                               break;
-
-                       try {
-                               if (progress != null) {
-                                       while (!batches.tryAcquire(500, MILLISECONDS))
-                                               progress.pollForUpdates();
-                                       progress.pollForUpdates();
-                               } else {
-                                       batches.acquire();
-                               }
-                       } catch (InterruptedException err) {
-                               error.compareAndSet(null, new DhtTimeoutException(err));
-                               break;
-                       }
-
-                       startQuery(options, batch);
-                       batch = new HashMap<ObjectIndexKey, T>();
-               }
-
-               try {
-                       if (progress != null) {
-                               while (!batches.tryAcquire(concurrentBatches, 500, MILLISECONDS))
-                                       progress.pollForUpdates();
-                               progress.pollForUpdates();
-                       } else {
-                               batches.acquire(concurrentBatches);
-                       }
-               } catch (InterruptedException err) {
-                       error.compareAndSet(null, new DhtTimeoutException(err));
-               }
-
-               if (error.get() != null)
-                       throw error.get();
-
-               // Make sure retry changes are visible to us.
-               resultLock.lock();
-               resultLock.unlock();
-       }
-
-       private void startQuery(final Context context,
-                       final Map<ObjectIndexKey, T> batch) {
-               final AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> cb;
-
-               cb = new AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>>() {
-                       public void onSuccess(Map<ObjectIndexKey, Collection<ObjectInfo>> r) {
-                               resultLock.lock();
-                               try {
-                                       processResults(context, batch, r);
-                               } finally {
-                                       resultLock.unlock();
-                                       batches.release();
-                               }
-                       }
-
-                       public void onFailure(DhtException e) {
-                               error.compareAndSet(null, e);
-                               batches.release();
-                       }
-               };
-               db.objectIndex().get(context, batch.keySet(), cb);
-       }
-
-       private void processResults(Context context, Map<ObjectIndexKey, T> batch,
-                       Map<ObjectIndexKey, Collection<ObjectInfo>> objects) {
-               for (T obj : batch.values()) {
-                       Collection<ObjectInfo> matches = objects.get(obj);
-
-                       if (matches == null || matches.isEmpty()) {
-                               if (retryMissingObjects && context == Context.FAST_MISSING_OK)
-                                       retry.add(obj);
-                               continue;
-                       }
-
-                       tmp.clear();
-                       tmp.addAll(matches);
-                       ObjectInfo.sort(tmp);
-                       if (cacheLoadedInfo)
-                               reader.getRecentInfoCache().put(obj, tmp);
-
-                       onResult(obj, tmp);
-               }
-
-               if (progress != null)
-                       progress.update(objects.size());
-       }
-
-       protected abstract void onResult(T obj, List<ObjectInfo> info);
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackKey.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackKey.java
deleted file mode 100644 (file)
index 274cc68..0000000
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.util.RawParseUtils.decode;
-
-import java.text.MessageFormat;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
-import org.eclipse.jgit.lib.ObjectId;
-
-/** Unique identifier of a {@link CachedPackInfo} in the DHT. */
-public final class CachedPackKey implements RowKey {
-       static final int KEYLEN = 81;
-
-       /**
-        * @param key
-        * @return the key
-        */
-       public static CachedPackKey fromBytes(byte[] key) {
-               return fromBytes(key, 0, key.length);
-       }
-
-       /**
-        * @param key
-        * @param ptr
-        * @param len
-        * @return the key
-        */
-       public static CachedPackKey fromBytes(byte[] key, int ptr, int len) {
-               if (len != KEYLEN)
-                       throw new IllegalArgumentException(MessageFormat.format(
-                                       DhtText.get().invalidChunkKey, decode(key, ptr, ptr + len)));
-
-               ObjectId name = ObjectId.fromString(key, ptr);
-               ObjectId vers = ObjectId.fromString(key, ptr + 41);
-               return new CachedPackKey(name, vers);
-       }
-
-       /**
-        * @param key
-        * @return the key
-        */
-       public static CachedPackKey fromString(String key) {
-               int d = key.indexOf('.');
-               ObjectId name = ObjectId.fromString(key.substring(0, d));
-               ObjectId vers = ObjectId.fromString(key.substring(d + 1));
-               return new CachedPackKey(name, vers);
-       }
-
-       /**
-        * @param info
-        * @return the key
-        */
-       public static CachedPackKey fromInfo(CachedPackInfo info) {
-               ObjectId name = ObjectId.fromString(info.getName());
-               ObjectId vers = ObjectId.fromString(info.getVersion());
-               return new CachedPackKey(name, vers);
-       }
-
-       private final ObjectId name;
-
-       private final ObjectId version;
-
-       CachedPackKey(ObjectId name, ObjectId version) {
-               this.name = name;
-               this.version = version;
-       }
-
-       /** @return unique SHA-1 name of the pack. */
-       public ObjectId getName() {
-               return name;
-       }
-
-       /** @return unique version of the pack. */
-       public ObjectId getVersion() {
-               return version;
-       }
-
-       public byte[] asBytes() {
-               byte[] r = new byte[KEYLEN];
-               name.copyTo(r, 0);
-               r[40] = '.';
-               version.copyTo(r, 41);
-               return r;
-       }
-
-       public String asString() {
-               return name.name() + "." + version.name();
-       }
-
-       @Override
-       public int hashCode() {
-               return name.hashCode();
-       }
-
-       @Override
-       public boolean equals(Object other) {
-               if (this == other)
-                       return true;
-               if (other instanceof CachedPackKey) {
-                       CachedPackKey key = (CachedPackKey) other;
-                       return name.equals(key.name) && version.equals(key.version);
-               }
-               return false;
-       }
-
-       @Override
-       public String toString() {
-               return "cached-pack:" + asString();
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkFormatter.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkFormatter.java
deleted file mode 100644 (file)
index 011cfb0..0000000
+++ /dev/null
@@ -1,497 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.security.MessageDigest;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.zip.Deflater;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType;
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.storage.dht.spi.Database;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-import org.eclipse.jgit.transport.PackedObjectInfo;
-import org.eclipse.jgit.util.NB;
-
-/**
- * Formats one {@link PackChunk} for storage in the DHT.
- * <p>
- * Each formatter instance can be used only once.
- */
-class ChunkFormatter {
-       static final int TRAILER_SIZE = 4;
-
-       private final RepositoryKey repo;
-
-       private final DhtInserterOptions options;
-
-       private final byte[] varIntBuf;
-
-       private final int maxObjects;
-
-       private Map<ChunkKey, BaseChunkInfo> baseChunks;
-
-       private List<StoredObject> objectList;
-
-       private byte[] chunkData;
-
-       private int ptr;
-
-       private int mark;
-
-       private int currentObjectType;
-
-       private BaseChunkInfo currentObjectBase;
-
-       private PackChunk.Members builder;
-
-       private GitStore.ChunkInfo.Source source;
-
-       private boolean fragment;
-
-       private int objectType;
-
-       private int objectsTotal, objectsWhole, objectsRefDelta, objectsOfsDelta;
-
-       private ChunkInfo chunkInfo;
-
-       ChunkFormatter(RepositoryKey repo, DhtInserterOptions options) {
-               this.repo = repo;
-               this.options = options;
-               this.varIntBuf = new byte[32];
-               this.chunkData = new byte[options.getChunkSize()];
-               this.maxObjects = options.getMaxObjectCount();
-               this.objectType = -1;
-       }
-
-       void setSource(GitStore.ChunkInfo.Source src) {
-               source = src;
-       }
-
-       void setObjectType(int type) {
-               objectType = type;
-       }
-
-       void setFragment() {
-               fragment = true;
-       }
-
-       ChunkKey getChunkKey() {
-               return getChunkInfo().getChunkKey();
-       }
-
-       ChunkInfo getChunkInfo() {
-               return chunkInfo;
-       }
-
-       ChunkMeta getChunkMeta() {
-               return builder.getMeta();
-       }
-
-       PackChunk getPackChunk() throws DhtException {
-               return builder.build();
-       }
-
-       void setChunkIndex(List<PackedObjectInfo> objs) {
-               builder.setChunkIndex(ChunkIndex.create(objs));
-       }
-
-       ChunkKey end(MessageDigest md) {
-               if (md == null)
-                       md = Constants.newMessageDigest();
-
-               // Embed a small amount of randomness into the chunk content,
-               // and thus impact its name. This prevents malicious clients from
-               // being able to predict what a chunk is called, which keeps them
-               // from replacing an existing chunk.
-               //
-               chunkData = cloneArray(chunkData, ptr + TRAILER_SIZE);
-               NB.encodeInt32(chunkData, ptr, options.nextChunkSalt());
-               ptr += 4;
-
-               md.update(chunkData, 0, ptr);
-               ChunkKey key = ChunkKey.create(repo, ObjectId.fromRaw(md.digest()));
-
-               GitStore.ChunkInfo.Builder info = GitStore.ChunkInfo.newBuilder();
-               info.setSource(source);
-               info.setObjectType(GitStore.ChunkInfo.ObjectType.valueOf(objectType));
-               if (fragment)
-                       info.setIsFragment(true);
-               info.setChunkSize(chunkData.length);
-
-               GitStore.ChunkInfo.ObjectCounts.Builder cnts = info.getObjectCountsBuilder();
-               cnts.setTotal(objectsTotal);
-               if (objectsWhole > 0)
-                       cnts.setWhole(objectsWhole);
-               if (objectsRefDelta > 0)
-                       cnts.setRefDelta(objectsRefDelta);
-               if (objectsOfsDelta > 0)
-                       cnts.setOfsDelta(objectsOfsDelta);
-
-               builder = new PackChunk.Members();
-               builder.setChunkKey(key);
-               builder.setChunkData(chunkData);
-
-               if (baseChunks != null) {
-                       List<BaseChunk> list = new ArrayList<BaseChunk>(baseChunks.size());
-                       for (BaseChunkInfo b : baseChunks.values()) {
-                               if (0 < b.useCount) {
-                                       BaseChunk.Builder c = BaseChunk.newBuilder();
-                                       c.setRelativeStart(b.relativeStart);
-                                       c.setChunkKey(b.key.asString());
-                                       list.add(c.build());
-                               }
-                       }
-                       Collections.sort(list, new Comparator<BaseChunk>() {
-                               public int compare(BaseChunk a, BaseChunk b) {
-                                       return Long.signum(a.getRelativeStart()
-                                                       - b.getRelativeStart());
-                               }
-                       });
-                       ChunkMeta.Builder b = ChunkMeta.newBuilder();
-                       b.addAllBaseChunk(list);
-                       ChunkMeta meta = b.build();
-                       builder.setMeta(meta);
-                       info.setMetaSize(meta.getSerializedSize());
-               }
-
-               if (objectList != null && !objectList.isEmpty()) {
-                       byte[] index = ChunkIndex.create(objectList);
-                       builder.setChunkIndex(index);
-                       info.setIndexSize(index.length);
-               }
-
-               chunkInfo = new ChunkInfo(key, info.build());
-               return getChunkKey();
-       }
-
-       /**
-        * Safely put the chunk to the database.
-        * <p>
-        * This method is slow. It first puts the chunk info, waits for success,
-        * then puts the chunk itself, waits for success, and finally queues up the
-        * object index with its chunk links in the supplied buffer.
-        *
-        * @param db
-        * @param dbWriteBuffer
-        * @throws DhtException
-        */
-       void safePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException {
-               WriteBuffer chunkBuf = db.newWriteBuffer();
-
-               db.repository().put(repo, getChunkInfo(), chunkBuf);
-               chunkBuf.flush();
-
-               db.chunk().put(builder, chunkBuf);
-               chunkBuf.flush();
-
-               linkObjects(db, dbWriteBuffer);
-       }
-
-       void unsafePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException {
-               db.repository().put(repo, getChunkInfo(), dbWriteBuffer);
-               db.chunk().put(builder, dbWriteBuffer);
-               linkObjects(db, dbWriteBuffer);
-       }
-
-       private void linkObjects(Database db, WriteBuffer dbWriteBuffer)
-                       throws DhtException {
-               if (objectList != null && !objectList.isEmpty()) {
-                       for (StoredObject obj : objectList) {
-                               db.objectIndex().add(ObjectIndexKey.create(repo, obj),
-                                               obj.link(getChunkKey()), dbWriteBuffer);
-                       }
-               }
-       }
-
-       boolean whole(Deflater def, int type, byte[] data, int off, final int size,
-                       ObjectId objId) {
-               if (free() < 10 || maxObjects <= objectsTotal)
-                       return false;
-
-               header(type, size);
-               objectsWhole++;
-               currentObjectType = type;
-
-               int endOfHeader = ptr;
-               def.setInput(data, off, size);
-               def.finish();
-               do {
-                       int left = free();
-                       if (left == 0) {
-                               rollback();
-                               return false;
-                       }
-
-                       int n = def.deflate(chunkData, ptr, left);
-                       if (n == 0) {
-                               rollback();
-                               return false;
-                       }
-
-                       ptr += n;
-               } while (!def.finished());
-
-               if (objectList == null)
-                       objectList = new ArrayList<StoredObject>();
-
-               final int packedSize = ptr - endOfHeader;
-               objectList.add(new StoredObject(objId, type, mark, packedSize, size));
-
-               if (objectType < 0)
-                       objectType = type;
-               else if (objectType != type)
-                       objectType = ChunkInfo.OBJ_MIXED;
-
-               return true;
-       }
-
-       boolean whole(int type, long inflatedSize) {
-               if (free() < 10 || maxObjects <= objectsTotal)
-                       return false;
-
-               header(type, inflatedSize);
-               objectsWhole++;
-               currentObjectType = type;
-               return true;
-       }
-
-       boolean ofsDelta(long inflatedSize, long negativeOffset) {
-               final int ofsPtr = encodeVarInt(negativeOffset);
-               final int ofsLen = varIntBuf.length - ofsPtr;
-               if (free() < 10 + ofsLen || maxObjects <= objectsTotal)
-                       return false;
-
-               header(Constants.OBJ_OFS_DELTA, inflatedSize);
-               objectsOfsDelta++;
-               currentObjectType = Constants.OBJ_OFS_DELTA;
-               currentObjectBase = null;
-
-               if (append(varIntBuf, ofsPtr, ofsLen))
-                       return true;
-
-               rollback();
-               return false;
-       }
-
-       boolean refDelta(long inflatedSize, AnyObjectId baseId) {
-               if (free() < 30 || maxObjects <= objectsTotal)
-                       return false;
-
-               header(Constants.OBJ_REF_DELTA, inflatedSize);
-               objectsRefDelta++;
-               currentObjectType = Constants.OBJ_REF_DELTA;
-
-               baseId.copyRawTo(chunkData, ptr);
-               ptr += 20;
-               return true;
-       }
-
-       void useBaseChunk(long relativeStart, ChunkKey baseChunkKey) {
-               if (baseChunks == null)
-                       baseChunks = new HashMap<ChunkKey, BaseChunkInfo>();
-
-               BaseChunkInfo base = baseChunks.get(baseChunkKey);
-               if (base == null) {
-                       base = new BaseChunkInfo(relativeStart, baseChunkKey);
-                       baseChunks.put(baseChunkKey, base);
-               }
-               base.useCount++;
-               currentObjectBase = base;
-       }
-
-       void appendDeflateOutput(Deflater def) {
-               while (!def.finished()) {
-                       int left = free();
-                       if (left == 0)
-                               return;
-                       int n = def.deflate(chunkData, ptr, left);
-                       if (n == 0)
-                               return;
-                       ptr += n;
-               }
-       }
-
-       boolean append(byte[] data, int off, int len) {
-               if (free() < len)
-                       return false;
-
-               System.arraycopy(data, off, chunkData, ptr, len);
-               ptr += len;
-               return true;
-       }
-
-       boolean isEmpty() {
-               return ptr == 0;
-       }
-
-       int getObjectCount() {
-               return objectsTotal;
-       }
-
-       int position() {
-               return ptr;
-       }
-
-       int size() {
-               return ptr;
-       }
-
-       int free() {
-               return (chunkData.length - TRAILER_SIZE) - ptr;
-       }
-
-       byte[] getRawChunkDataArray() {
-               return chunkData;
-       }
-
-       int getCurrentObjectType() {
-               return currentObjectType;
-       }
-
-       void rollback() {
-               ptr = mark;
-               adjustObjectCount(-1, currentObjectType);
-       }
-
-       void adjustObjectCount(int delta, int type) {
-               objectsTotal += delta;
-
-               switch (type) {
-               case Constants.OBJ_COMMIT:
-               case Constants.OBJ_TREE:
-               case Constants.OBJ_BLOB:
-               case Constants.OBJ_TAG:
-                       objectsWhole += delta;
-                       break;
-
-               case Constants.OBJ_OFS_DELTA:
-                       objectsOfsDelta += delta;
-                       if (currentObjectBase != null && --currentObjectBase.useCount == 0)
-                               baseChunks.remove(currentObjectBase.key);
-                       currentObjectBase = null;
-                       break;
-
-               case Constants.OBJ_REF_DELTA:
-                       objectsRefDelta += delta;
-                       break;
-               }
-       }
-
-       private void header(int type, long inflatedSize) {
-               mark = ptr;
-               objectsTotal++;
-
-               long nextLength = inflatedSize >>> 4;
-               chunkData[ptr++] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (type << 4) | (inflatedSize & 0x0F));
-               inflatedSize = nextLength;
-               while (inflatedSize > 0) {
-                       nextLength >>>= 7;
-                       chunkData[ptr++] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (inflatedSize & 0x7F));
-                       inflatedSize = nextLength;
-               }
-       }
-
-       private int encodeVarInt(long value) {
-               int n = varIntBuf.length - 1;
-               varIntBuf[n] = (byte) (value & 0x7F);
-               while ((value >>= 7) > 0)
-                       varIntBuf[--n] = (byte) (0x80 | (--value & 0x7F));
-               return n;
-       }
-
-       private static byte[] cloneArray(byte[] src, int len) {
-               byte[] dst = new byte[len];
-               System.arraycopy(src, 0, dst, 0, len);
-               return dst;
-       }
-
-       private static class BaseChunkInfo {
-               final long relativeStart;
-
-               final ChunkKey key;
-
-               int useCount;
-
-               BaseChunkInfo(long relativeStart, ChunkKey key) {
-                       this.relativeStart = relativeStart;
-                       this.key = key;
-               }
-       }
-
-       private static class StoredObject extends PackedObjectInfo {
-               private final int type;
-
-               private final int packed;
-
-               private final int inflated;
-
-               StoredObject(AnyObjectId id, int type, int offset, int packed, int size) {
-                       super(id);
-                       setOffset(offset);
-                       this.type = type;
-                       this.packed = packed;
-                       this.inflated = size;
-               }
-
-               ObjectInfo link(ChunkKey key) {
-                       GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
-                       b.setObjectType(ObjectType.valueOf(type));
-                       b.setOffset((int) getOffset());
-                       b.setPackedSize(packed);
-                       b.setInflatedSize(inflated);
-                       return new ObjectInfo(key, b.build());
-               }
-       }
-}
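
The formatter deleted above wrote objects with the two standard Git pack encodings: the object header carries the type in bits 4-6 of the first byte and the inflated size as a base-128 varint (low bits first), while OFS_DELTA back-references use the big-endian varint that subtracts one per continuation byte, as header() and encodeVarInt() did. A minimal standalone sketch of both, with illustrative names:

import java.io.ByteArrayOutputStream;

/** Illustrative sketch of the two pack encodings the deleted formatter wrote. */
class PackEncodingSketch {
	/** Object header: type in bits 4-6 of the first byte, inflated size as a base-128 varint, low bits first. */
	static byte[] objectHeader(int type, long inflatedSize) {
		ByteArrayOutputStream out = new ByteArrayOutputStream();
		long next = inflatedSize >>> 4;
		out.write((next > 0 ? 0x80 : 0x00) | (type << 4) | (int) (inflatedSize & 0x0F));
		inflatedSize = next;
		while (inflatedSize > 0) {
			next >>>= 7;
			out.write((next > 0 ? 0x80 : 0x00) | (int) (inflatedSize & 0x7F));
			inflatedSize = next;
		}
		return out.toByteArray();
	}

	/** OFS_DELTA offset: big-endian 7-bit groups, subtracting one on each continuation byte. */
	static byte[] ofsDeltaOffset(long value) {
		byte[] buf = new byte[10];
		int n = buf.length - 1;
		buf[n] = (byte) (value & 0x7F);
		while ((value >>= 7) > 0)
			buf[--n] = (byte) (0x80 | (--value & 0x7F));
		byte[] out = new byte[buf.length - n];
		System.arraycopy(buf, n, out, 0, out.length);
		return out;
	}
}
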
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkIndex.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkIndex.java
deleted file mode 100644 (file)
index 89029c0..0000000
+++ /dev/null
@@ -1,428 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.text.MessageFormat;
-import java.util.Collections;
-import java.util.List;
-
-import org.eclipse.jgit.lib.AnyObjectId;
-import static org.eclipse.jgit.lib.Constants.*;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.transport.PackedObjectInfo;
-import org.eclipse.jgit.util.NB;
-
-/** Index into a {@link PackChunk}. */
-public abstract class ChunkIndex {
-       private static final int V1 = 0x01;
-
-       static ChunkIndex fromBytes(ChunkKey key, byte[] index, int ptr, int len)
-                       throws DhtException {
-               int v = index[ptr] & 0xff;
-               switch (v) {
-               case V1: {
-                       final int offsetFormat = index[ptr + 1] & 7;
-                       switch (offsetFormat) {
-                       case 1:
-                               return new Offset1(index, ptr, len, key);
-                       case 2:
-                               return new Offset2(index, ptr, len, key);
-                       case 3:
-                               return new Offset3(index, ptr, len, key);
-                       case 4:
-                               return new Offset4(index, ptr, len, key);
-                       default:
-                               throw new DhtException(MessageFormat.format(
-                                               DhtText.get().unsupportedChunkIndex,
-                                               Integer.toHexString(NB.decodeUInt16(index, ptr)), key));
-                       }
-               }
-               default:
-                       throw new DhtException(MessageFormat.format(
-                                       DhtText.get().unsupportedChunkIndex,
-                                       Integer.toHexString(v), key));
-               }
-       }
-
-       /**
-        * Format the chunk index and return its binary representation.
-        *
-        * @param list
-        *            the list of objects that appear in the chunk. This list will
-        *            be sorted in-place if it has more than 1 element.
-        * @return binary representation of the chunk's objects and their starting
-        *         offsets. The format is private to this class.
-        */
-       @SuppressWarnings("null")
-       static byte[] create(List<? extends PackedObjectInfo> list) {
-               int cnt = list.size();
-               sortObjectList(list);
-
-               int fanoutFormat = 0;
-               int[] buckets = null;
-               if (64 < cnt) {
-                       buckets = new int[256];
-                       for (PackedObjectInfo oe : list)
-                               buckets[oe.getFirstByte()]++;
-                       fanoutFormat = selectFanoutFormat(buckets);
-               }
-
-               int offsetFormat = selectOffsetFormat(list);
-               byte[] index = new byte[2 // header
-                               + 256 * fanoutFormat // (optional) fanout
-                               + cnt * OBJECT_ID_LENGTH // ids
-                               + cnt * offsetFormat // offsets
-               ];
-               index[0] = V1;
-               index[1] = (byte) ((fanoutFormat << 3) | offsetFormat);
-
-               int ptr = 2;
-
-               switch (fanoutFormat) {
-               case 0:
-                       break;
-               case 1:
-                       for (int i = 0; i < 256; i++, ptr++)
-                               index[ptr] = (byte) buckets[i];
-                       break;
-               case 2:
-                       for (int i = 0; i < 256; i++, ptr += 2)
-                               NB.encodeInt16(index, ptr, buckets[i]);
-                       break;
-               case 3:
-                       for (int i = 0; i < 256; i++, ptr += 3)
-                               encodeUInt24(index, ptr, buckets[i]);
-                       break;
-               case 4:
-                       for (int i = 0; i < 256; i++, ptr += 4)
-                               NB.encodeInt32(index, ptr, buckets[i]);
-                       break;
-               }
-
-               for (PackedObjectInfo oe : list) {
-                       oe.copyRawTo(index, ptr);
-                       ptr += OBJECT_ID_LENGTH;
-               }
-
-               switch (offsetFormat) {
-               case 1:
-                       for (PackedObjectInfo oe : list)
-                               index[ptr++] = (byte) oe.getOffset();
-                       break;
-
-               case 2:
-                       for (PackedObjectInfo oe : list) {
-                               NB.encodeInt16(index, ptr, (int) oe.getOffset());
-                               ptr += 2;
-                       }
-                       break;
-
-               case 3:
-                       for (PackedObjectInfo oe : list) {
-                               encodeUInt24(index, ptr, (int) oe.getOffset());
-                               ptr += 3;
-                       }
-                       break;
-
-               case 4:
-                       for (PackedObjectInfo oe : list) {
-                               NB.encodeInt32(index, ptr, (int) oe.getOffset());
-                               ptr += 4;
-                       }
-                       break;
-               }
-
-               return index;
-       }
-
-       private static int selectFanoutFormat(int[] buckets) {
-               int fmt = 1;
-               int max = 1 << (8 * fmt);
-
-               for (int cnt : buckets) {
-                       while (max <= cnt && fmt < 4) {
-                               if (++fmt == 4)
-                                       return fmt;
-                               max = 1 << (8 * fmt);
-                       }
-               }
-               return fmt;
-       }
-
-       private static int selectOffsetFormat(List<? extends PackedObjectInfo> list) {
-               int fmt = 1;
-               int max = 1 << (8 * fmt);
-
-               for (PackedObjectInfo oe : list) {
-                       while (max <= oe.getOffset() && fmt < 4) {
-                               if (++fmt == 4)
-                                       return fmt;
-                               max = 1 << (8 * fmt);
-                       }
-               }
-               return fmt;
-       }
-
-       private static void sortObjectList(List<? extends PackedObjectInfo> list) {
-               Collections.sort(list);
-       }
-
-       private final byte[] indexBuf;
-
-       private final int indexPtr;
-
-       private final int indexLen;
-
-       private final int[] fanout;
-
-       private final int idTable;
-
-       private final int offsetTable;
-
-       private final int count;
-
-       ChunkIndex(byte[] indexBuf, int ptr, int len, ChunkKey key)
-                       throws DhtException {
-               final int ctl = indexBuf[ptr + 1];
-               final int fanoutFormat = (ctl >>> 3) & 7;
-               final int offsetFormat = ctl & 7;
-
-               switch (fanoutFormat) {
-               case 0:
-                       fanout = null; // no fanout, too small
-                       break;
-
-               case 1: {
-                       int last = 0;
-                       fanout = new int[256];
-                       for (int i = 0; i < 256; i++) {
-                               last += indexBuf[ptr + 2 + i] & 0xff;
-                               fanout[i] = last;
-                       }
-                       break;
-               }
-               case 2: {
-                       int last = 0;
-                       fanout = new int[256];
-                       for (int i = 0; i < 256; i++) {
-                               last += NB.decodeUInt16(indexBuf, ptr + 2 + i * 2);
-                               fanout[i] = last;
-                       }
-                       break;
-               }
-               case 3: {
-                       int last = 0;
-                       fanout = new int[256];
-                       for (int i = 0; i < 256; i++) {
-                               last += decodeUInt24(indexBuf, ptr + 2 + i * 3);
-                               fanout[i] = last;
-                       }
-                       break;
-               }
-               case 4: {
-                       int last = 0;
-                       fanout = new int[256];
-                       for (int i = 0; i < 256; i++) {
-                               last += NB.decodeInt32(indexBuf, ptr + 2 + i * 4);
-                               fanout[i] = last;
-                       }
-                       break;
-               }
-               default:
-                       throw new DhtException(MessageFormat.format(
-                                       DhtText.get().unsupportedChunkIndex,
-                                       Integer.toHexString(NB.decodeUInt16(indexBuf, ptr)), key));
-               }
-
-               this.indexBuf = indexBuf;
-               this.indexPtr = ptr;
-               this.indexLen = len;
-               this.idTable = indexPtr + 2 + 256 * fanoutFormat;
-
-               int recsz = OBJECT_ID_LENGTH + offsetFormat;
-               this.count = (indexLen - (idTable - indexPtr)) / recsz;
-               this.offsetTable = idTable + count * OBJECT_ID_LENGTH;
-       }
-
-       /**
-        * Get the total number of objects described by this index.
-        *
-        * @return number of objects in this index and its associated chunk.
-        */
-       public final int getObjectCount() {
-               return count;
-       }
-
-       /**
-        * Get an ObjectId from this index.
-        *
-        * @param nth
-        *            the object to return. Must be in range [0, getObjectCount).
-        * @return the object id.
-        */
-       public final ObjectId getObjectId(int nth) {
-               return ObjectId.fromRaw(indexBuf, idPosition(nth));
-       }
-
-       /**
-        * Get the offset of an object in the chunk.
-        *
-        * @param nth
-        *            offset to return. Must be in range [0, getObjectCount).
-        * @return the offset.
-        */
-       public final int getOffset(int nth) {
-               return getOffset(indexBuf, offsetTable, nth);
-       }
-
-       /** @return the size of this index, in bytes. */
-       int getIndexSize() {
-               int sz = indexBuf.length;
-               if (fanout != null)
-                       sz += 12 + 256 * 4;
-               return sz;
-       }
-
-       /**
-        * Search for an object in the index.
-        *
-        * @param objId
-        *            the object to locate.
-        * @return offset of the object in the corresponding chunk; -1 if not found.
-        */
-       final int findOffset(AnyObjectId objId) {
-               int hi, lo;
-
-               if (fanout != null) {
-                       int fb = objId.getFirstByte();
-                       lo = fb == 0 ? 0 : fanout[fb - 1];
-                       hi = fanout[fb];
-               } else {
-                       lo = 0;
-                       hi = count;
-               }
-
-               while (lo < hi) {
-                       final int mid = (lo + hi) >>> 1;
-                       final int cmp = objId.compareTo(indexBuf, idPosition(mid));
-                       if (cmp < 0)
-                               hi = mid;
-                       else if (cmp == 0)
-                               return getOffset(mid);
-                       else
-                               lo = mid + 1;
-               }
-               return -1;
-       }
-
-       abstract int getOffset(byte[] indexArray, int offsetTableStart, int nth);
-
-       private int idPosition(int nth) {
-               return idTable + (nth * OBJECT_ID_LENGTH);
-       }
-
-       private static class Offset1 extends ChunkIndex {
-               Offset1(byte[] index, int ptr, int len, ChunkKey key)
-                               throws DhtException {
-                       super(index, ptr, len, key);
-               }
-
-               int getOffset(byte[] index, int offsetTable, int nth) {
-                       return index[offsetTable + nth] & 0xff;
-               }
-       }
-
-       private static class Offset2 extends ChunkIndex {
-               Offset2(byte[] index, int ptr, int len, ChunkKey key)
-                               throws DhtException {
-                       super(index, ptr, len, key);
-               }
-
-               int getOffset(byte[] index, int offsetTable, int nth) {
-                       return NB.decodeUInt16(index, offsetTable + (nth * 2));
-               }
-       }
-
-       private static class Offset3 extends ChunkIndex {
-               Offset3(byte[] index, int ptr, int len, ChunkKey key)
-                               throws DhtException {
-                       super(index, ptr, len, key);
-               }
-
-               int getOffset(byte[] index, int offsetTable, int nth) {
-                       return decodeUInt24(index, offsetTable + (nth * 3));
-               }
-       }
-
-       private static class Offset4 extends ChunkIndex {
-               Offset4(byte[] index, int ptr, int len, ChunkKey key)
-                               throws DhtException {
-                       super(index, ptr, len, key);
-               }
-
-               int getOffset(byte[] index, int offsetTable, int nth) {
-                       return NB.decodeInt32(index, offsetTable + (nth * 4));
-               }
-       }
-
-       private static void encodeUInt24(byte[] intbuf, int offset, int v) {
-               intbuf[offset + 2] = (byte) v;
-               v >>>= 8;
-
-               intbuf[offset + 1] = (byte) v;
-               v >>>= 8;
-
-               intbuf[offset] = (byte) v;
-       }
-
-       private static int decodeUInt24(byte[] intbuf, int offset) {
-               int r = (intbuf[offset] & 0xff) << 8;
-
-               r |= intbuf[offset + 1] & 0xff;
-               r <<= 8;
-
-               r |= intbuf[offset + 2] & 0xff;
-               return r;
-       }
-}
\ No newline at end of file
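
The index written by create() above is compact: a two-byte header (version, then the fanout byte width in bits 3-5 and the offset byte width in bits 0-2), an optional 256-entry fanout, the sorted 20-byte object ids, and one fixed-width offset per object. A small sketch of the width selection and the 3-byte integer helper it depends on, with illustrative names:

class ChunkIndexSketch {
	/** Smallest byte width (1..4) able to hold every value in the array, as selectOffsetFormat() did. */
	static int selectWidth(long[] values) {
		int width = 1;
		long max = 1L << (8 * width);
		for (long v : values) {
			while (max <= v && width < 4) {
				if (++width == 4)
					return width;
				max = 1L << (8 * width);
			}
		}
		return width;
	}

	/** Big-endian 24-bit write, as used by the 3-byte fanout and offset formats. */
	static void encodeUInt24(byte[] buf, int off, int v) {
		buf[off + 2] = (byte) v;
		buf[off + 1] = (byte) (v >>> 8);
		buf[off] = (byte) (v >>> 16);
	}

	static int decodeUInt24(byte[] buf, int off) {
		return ((buf[off] & 0xff) << 16)
				| ((buf[off + 1] & 0xff) << 8)
				| (buf[off + 2] & 0xff);
	}
}
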
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkInfo.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkInfo.java
deleted file mode 100644 (file)
index 2c156c8..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
-
-/**
- * Summary information about a chunk owned by a repository.
- */
-public class ChunkInfo {
-       /** Mixed objects are stored in the chunk (instead of single type). */
-       public static final int OBJ_MIXED = 0;
-
-       private final ChunkKey chunkKey;
-
-       private final GitStore.ChunkInfo data;
-
-       /**
-        * Wrap a ChunkInfo message.
-        *
-        * @param key
-        *            associated chunk key.
-        * @param data
-        *            data.
-        */
-       public ChunkInfo(ChunkKey key, GitStore.ChunkInfo data) {
-               this.chunkKey = key;
-               this.data = data;
-       }
-
-       /** @return the repository that contains the chunk. */
-       public RepositoryKey getRepositoryKey() {
-               return chunkKey.getRepositoryKey();
-       }
-
-       /** @return the chunk this information describes. */
-       public ChunkKey getChunkKey() {
-               return chunkKey;
-       }
-
-       /** @return the underlying message containing all data. */
-       public GitStore.ChunkInfo getData() {
-               return data;
-       }
-
-       @Override
-       public String toString() {
-               StringBuilder b = new StringBuilder();
-               b.append("ChunkInfo:");
-               b.append(chunkKey);
-               b.append("\n");
-               b.append(data);
-               return b.toString();
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkKey.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkKey.java
deleted file mode 100644 (file)
index ced37b3..0000000
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.storage.dht.KeyUtils.format32;
-import static org.eclipse.jgit.storage.dht.KeyUtils.parse32;
-import static org.eclipse.jgit.util.RawParseUtils.decode;
-
-import java.io.Serializable;
-import java.text.MessageFormat;
-
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.ObjectId;
-
-/** Unique identifier of a {@link PackChunk} in the DHT. */
-public final class ChunkKey implements RowKey, Serializable {
-       private static final long serialVersionUID = 1L;
-
-       static final int KEYLEN = 49;
-
-       /**
-        * @param repo
-        * @param chunk
-        * @return the key
-        */
-       public static ChunkKey create(RepositoryKey repo, ObjectId chunk) {
-               return new ChunkKey(repo.asInt(), chunk);
-       }
-
-       /**
-        * @param key
-        * @return the key
-        */
-       public static ChunkKey fromBytes(byte[] key) {
-               return fromBytes(key, 0, key.length);
-       }
-
-       /**
-        * @param key
-        * @param ptr
-        * @param len
-        * @return the key
-        */
-       public static ChunkKey fromBytes(byte[] key, int ptr, int len) {
-               if (len != KEYLEN)
-                       throw new IllegalArgumentException(MessageFormat.format(
-                                       DhtText.get().invalidChunkKey, decode(key, ptr, ptr + len)));
-
-               int repo = parse32(key, ptr);
-               ObjectId chunk = ObjectId.fromString(key, ptr + 9);
-               return new ChunkKey(repo, chunk);
-       }
-
-       /**
-        * @param key
-        * @return the key
-        */
-       public static ChunkKey fromString(String key) {
-               return fromBytes(Constants.encodeASCII(key));
-       }
-
-       private final int repo;
-
-       private final ObjectId chunk;
-
-       ChunkKey(int repo, ObjectId chunk) {
-               this.repo = repo;
-               this.chunk = chunk;
-       }
-
-       /** @return the repository that contains the chunk. */
-       public RepositoryKey getRepositoryKey() {
-               return RepositoryKey.fromInt(repo);
-       }
-
-       int getRepositoryId() {
-               return repo;
-       }
-
-       /** @return unique SHA-1 describing the chunk. */
-       public ObjectId getChunkHash() {
-               return chunk;
-       }
-
-       public byte[] asBytes() {
-               byte[] r = new byte[KEYLEN];
-               format32(r, 0, repo);
-               r[8] = '.';
-               chunk.copyTo(r, 9);
-               return r;
-       }
-
-       public String asString() {
-               return decode(asBytes());
-       }
-
-       @Override
-       public int hashCode() {
-               return chunk.hashCode();
-       }
-
-       @Override
-       public boolean equals(Object other) {
-               if (this == other)
-                       return true;
-               if (other instanceof ChunkKey) {
-                       ChunkKey thisChunk = this;
-                       ChunkKey otherChunk = (ChunkKey) other;
-                       return thisChunk.repo == otherChunk.repo
-                                       && thisChunk.chunk.equals(otherChunk.chunk);
-               }
-               return false;
-       }
-
-       @Override
-       public String toString() {
-               return "chunk:" + asString();
-       }
-}
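
Per asBytes() and fromBytes() above, a chunk key is 49 ASCII bytes: eight hex digits of the repository id, a '.', and the 40 hex digits of the chunk SHA-1. A JDK-only round-trip sketch, assuming the deleted KeyUtils.format32/parse32 were plain 8-digit hex helpers (an assumption based on KEYLEN = 8 + 1 + 40):

class ChunkKeySketch {
	static final int KEYLEN = 49;

	static String format(int repoId, String chunkSha1Hex) {
		// 8 hex digits + '.' + 40 hex digits == 49 characters.
		return String.format("%08x.%s", repoId, chunkSha1Hex);
	}

	static int parseRepoId(String key) {
		if (key.length() != KEYLEN || key.charAt(8) != '.')
			throw new IllegalArgumentException("invalid chunk key: " + key);
		// Long.parseLong accepts values with the high bit set (e.g. "ffffffff").
		return (int) Long.parseLong(key.substring(0, 8), 16);
	}

	static String parseChunkSha1(String key) {
		return key.substring(9);
	}
}
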
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMetaUtil.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMetaUtil.java
deleted file mode 100644 (file)
index 7bc6439..0000000
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.text.MessageFormat;
-import java.util.List;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk;
-
-class ChunkMetaUtil {
-       static BaseChunk getBaseChunk(ChunkKey chunkKey, ChunkMeta meta,
-                       long position) throws DhtException {
-               // Chunks are sorted by ascending relative_start order.
-               // Thus for a pack sequence of: A B C, we have:
-               //
-               // -- C relative_start = 10,000
-               // -- B relative_start = 20,000
-               // -- A relative_start = 30,000
-               //
-               // Indicating that chunk C starts 10,000 bytes before us,
-               // chunk B starts 20,000 bytes before us (and 10,000 before C),
-               // chunk A starts 30,000 bytes before us (and 10,000 before B),
-               //
-               // If position falls within:
-               //
-               // -- C (10k), then position is between 0..10,000
-               // -- B (20k), then position is between 10,000 .. 20,000
-               // -- A (30k), then position is between 20,000 .. 30,000
-
-               List<BaseChunk> baseChunks = meta.getBaseChunkList();
-               int high = baseChunks.size();
-               int low = 0;
-               while (low < high) {
-                       final int mid = (low + high) >>> 1;
-                       final BaseChunk base = baseChunks.get(mid);
-
-                       if (position > base.getRelativeStart()) {
-                               low = mid + 1;
-
-                       } else if (mid == 0 || position == base.getRelativeStart()) {
-                               return base;
-
-                       } else if (baseChunks.get(mid - 1).getRelativeStart() < position) {
-                               return base;
-
-                       } else {
-                               high = mid;
-                       }
-               }
-
-               throw new DhtException(MessageFormat.format(
-                               DhtText.get().missingLongOffsetBase, chunkKey,
-                               Long.valueOf(position)));
-       }
-
-       static ChunkKey getNextFragment(ChunkMeta meta, ChunkKey chunkKey) {
-               int cnt = meta.getFragmentCount();
-               for (int i = 0; i < cnt - 1; i++) {
-                       ChunkKey key = ChunkKey.fromString(meta.getFragment(i));
-                       if (chunkKey.equals(key))
-                               return ChunkKey.fromString(meta.getFragment(i + 1));
-               }
-               return null;
-       }
-
-       private ChunkMetaUtil() {
-               // Static utilities only, do not create instances.
-       }
-}
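
The comment block in getBaseChunk() above describes the search: base chunks are sorted by ascending relative_start, and a position belongs to the first base whose relative_start is at or beyond it. A standalone sketch of the same binary search over plain longs; with starts 10000/20000/30000 (chunks C/B/A), position 15000 resolves to index 1 (chunk B):

import java.util.List;

class BaseChunkLookupSketch {
	/**
	 * relativeStarts must be sorted ascending (e.g. 10000, 20000, 30000 for
	 * chunks C, B, A). Returns the index of the base chunk covering position,
	 * e.g. position 15000 -> index 1 (B), since 10000 < 15000 <= 20000.
	 */
	static int find(List<Long> relativeStarts, long position) {
		int high = relativeStarts.size();
		int low = 0;
		while (low < high) {
			int mid = (low + high) >>> 1;
			long start = relativeStarts.get(mid);
			if (position > start) {
				low = mid + 1; // offset lies before this base's start; try bases further back
			} else if (mid == 0 || position == start) {
				return mid; // exact start, or the earliest base already covers it
			} else if (relativeStarts.get(mid - 1) < position) {
				return mid; // previous base starts after the offset, so this one covers it
			} else {
				high = mid;
			}
		}
		throw new IllegalStateException("no base chunk covers " + position);
	}
}
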
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DeltaBaseCache.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DeltaBaseCache.java
deleted file mode 100644 (file)
index 0bc1652..0000000
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.lang.ref.SoftReference;
-
-/**
- * Caches recently used objects for {@link DhtReader}.
- * <p>
- * This cache is not thread-safe. Each reader should have its own cache.
- */
-final class DeltaBaseCache {
-       private final DhtReader.Statistics stats;
-
-       private int maxByteCount;
-
-       private final Slot[] table;
-
-       private Slot lruHead;
-
-       private Slot lruTail;
-
-       private int curByteCount;
-
-       DeltaBaseCache(DhtReader reader) {
-               stats = reader.getStatistics();
-
-               DhtReaderOptions options = reader.getOptions();
-               maxByteCount = options.getDeltaBaseCacheLimit();
-               table = new Slot[options.getDeltaBaseCacheSize()];
-       }
-
-       Entry get(ChunkKey key, int position) {
-               Slot e = table[hash(key, position)];
-               for (; e != null; e = e.tableNext) {
-                       if (e.offset == position && key.equals(e.chunkKey)) {
-                               Entry buf = e.data.get();
-                               if (buf != null) {
-                                       moveToHead(e);
-                                       stats.deltaBaseCache_Hits++;
-                                       return buf;
-                               }
-                       }
-               }
-               stats.deltaBaseCache_Miss++;
-               return null;
-       }
-
-       void put(ChunkKey key, int offset, int objectType, byte[] data) {
-               if (data.length > maxByteCount)
-                       return; // Too large to cache.
-
-               curByteCount += data.length;
-               releaseMemory();
-
-               int tableIdx = hash(key, offset);
-               Slot e = new Slot(key, offset, data.length);
-               e.data = new SoftReference<Entry>(new Entry(data, objectType));
-               e.tableNext = table[tableIdx];
-               table[tableIdx] = e;
-               moveToHead(e);
-       }
-
-       private void releaseMemory() {
-               while (curByteCount > maxByteCount && lruTail != null) {
-                       Slot currOldest = lruTail;
-                       Slot nextOldest = currOldest.lruPrev;
-
-                       curByteCount -= currOldest.size;
-                       unlink(currOldest);
-                       removeFromTable(currOldest);
-
-                       if (nextOldest == null)
-                               lruHead = null;
-                       else
-                               nextOldest.lruNext = null;
-                       lruTail = nextOldest;
-               }
-       }
-
-       private void removeFromTable(Slot e) {
-               int tableIdx = hash(e.chunkKey, e.offset);
-               Slot p = table[tableIdx];
-
-               if (p == e) {
-                       table[tableIdx] = e.tableNext;
-                       return;
-               }
-
-               for (; p != null; p = p.tableNext) {
-                       if (p.tableNext == e) {
-                               p.tableNext = e.tableNext;
-                               return;
-                       }
-               }
-       }
-
-       private void moveToHead(final Slot e) {
-               unlink(e);
-               e.lruPrev = null;
-               e.lruNext = lruHead;
-               if (lruHead != null)
-                       lruHead.lruPrev = e;
-               else
-                       lruTail = e;
-               lruHead = e;
-       }
-
-       private void unlink(final Slot e) {
-               Slot prev = e.lruPrev;
-               Slot next = e.lruNext;
-
-               if (prev != null)
-                       prev.lruNext = next;
-               if (next != null)
-                       next.lruPrev = prev;
-       }
-
-       private int hash(ChunkKey key, int position) {
-               return (((key.hashCode() & 0xfffff000) + position) >>> 1) % table.length;
-       }
-
-       static class Entry {
-               final byte[] data;
-
-               final int type;
-
-               Entry(final byte[] aData, final int aType) {
-                       data = aData;
-                       type = aType;
-               }
-       }
-
-       private static class Slot {
-               final ChunkKey chunkKey;
-
-               final int offset;
-
-               final int size;
-
-               Slot tableNext;
-
-               Slot lruPrev;
-
-               Slot lruNext;
-
-               SoftReference<Entry> data;
-
-               Slot(ChunkKey key, int offset, int size) {
-                       this.chunkKey = key;
-                       this.offset = offset;
-                       this.size = size;
-               }
-       }
-}
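
The cache above pairs a small hash table with an intrusive LRU list and SoftReferences, evicting from the tail until curByteCount drops under the limit. A much simpler sketch of the same byte-bounded LRU policy using LinkedHashMap access ordering; this illustrates only the eviction idea, not the deleted design:

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

/** Byte-budgeted LRU sketch; keys and value types are illustrative. */
class ByteBoundedLruSketch<K> {
	private final int maxBytes;
	private int curBytes;
	private final LinkedHashMap<K, byte[]> map =
			new LinkedHashMap<K, byte[]>(16, 0.75f, true /* access order */);

	ByteBoundedLruSketch(int maxBytes) {
		this.maxBytes = maxBytes;
	}

	byte[] get(K key) {
		return map.get(key); // a hit refreshes the entry's LRU position
	}

	void put(K key, byte[] data) {
		if (data.length > maxBytes)
			return; // too large to cache at all
		byte[] old = map.put(key, data);
		if (old != null)
			curBytes -= old.length;
		curBytes += data.length;
		// Evict least recently used entries until back under the byte budget.
		Iterator<Map.Entry<K, byte[]>> it = map.entrySet().iterator();
		while (curBytes > maxBytes && it.hasNext()) {
			Map.Entry<K, byte[]> eldest = it.next();
			curBytes -= eldest.getValue().length;
			it.remove();
		}
	}
}
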
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtCachedPack.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtCachedPack.java
deleted file mode 100644 (file)
index 0fd253b..0000000
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.IOException;
-import java.text.MessageFormat;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.storage.pack.CachedPack;
-import org.eclipse.jgit.storage.pack.ObjectToPack;
-import org.eclipse.jgit.storage.pack.PackOutputStream;
-import org.eclipse.jgit.storage.pack.StoredObjectRepresentation;
-
-/** A cached pack stored by the DHT. */
-public class DhtCachedPack extends CachedPack {
-       private final CachedPackInfo info;
-
-       private Set<ObjectId> tips;
-
-       private Set<ChunkKey> keySet;
-
-       private ChunkKey[] keyList;
-
-       DhtCachedPack(CachedPackInfo info) {
-               this.info = info;
-       }
-
-       @Override
-       public Set<ObjectId> getTips() {
-               if (tips == null) {
-                       tips = new HashSet<ObjectId>();
-                       for (String idString : info.getTipList().getObjectNameList())
-                               tips.add(ObjectId.fromString(idString));
-                       tips = Collections.unmodifiableSet(tips);
-               }
-               return tips;
-       }
-
-       @Override
-       public long getObjectCount() {
-               return info.getObjectsTotal();
-       }
-
-       @Override
-       public long getDeltaCount() throws IOException {
-               return info.getObjectsDelta();
-       }
-
-       /** @return information describing this cached pack. */
-       public CachedPackInfo getCachedPackInfo() {
-               return info;
-       }
-
-       @Override
-       public boolean hasObject(ObjectToPack obj, StoredObjectRepresentation rep) {
-               DhtObjectRepresentation objrep = (DhtObjectRepresentation) rep;
-               if (keySet == null)
-                       init();
-               return keySet.contains(objrep.getChunkKey());
-       }
-
-       private void init() {
-               ChunkList chunkList = info.getChunkList();
-               int cnt = chunkList.getChunkKeyCount();
-               keySet = new HashSet<ChunkKey>();
-               keyList = new ChunkKey[cnt];
-               for (int i = 0; i < cnt; i++) {
-                       ChunkKey key = ChunkKey.fromString(chunkList.getChunkKey(i));
-                       keySet.add(key);
-                       keyList[i] = key;
-               }
-       }
-
-       void copyAsIs(PackOutputStream out, boolean validate, DhtReader ctx)
-                       throws IOException {
-               if (keyList == null)
-                       init();
-
-               // Clear the recent chunks because all of the reader's
-               // chunk limit should be made available for prefetch.
-               int cacheLimit = ctx.getOptions().getChunkLimit();
-               ctx.getRecentChunks().setMaxBytes(0);
-               try {
-                       Prefetcher p = new Prefetcher(ctx, 0, cacheLimit);
-                       p.push(Arrays.asList(keyList));
-                       copyPack(out, p, validate);
-               } finally {
-                       ctx.getRecentChunks().setMaxBytes(cacheLimit);
-               }
-       }
-
-       private void copyPack(PackOutputStream out, Prefetcher prefetcher,
-                       boolean validate) throws DhtException, DhtMissingChunkException,
-                       IOException {
-               Map<String, Long> startsAt = new HashMap<String, Long>();
-               for (ChunkKey key : keyList) {
-                       PackChunk chunk = prefetcher.get(key);
-
-                       // The prefetcher should always produce the chunk for us, if not
-                       // there is something seriously wrong with the ordering or
-                       // within the prefetcher code and aborting is more sane than
-                       // using slow synchronous lookups.
-                       //
-                       if (chunk == null)
-                               throw new DhtMissingChunkException(key);
-
-                       // Verify each long OFS_DELTA chunk appears at the right offset.
-                       // This is a cheap validation that the cached pack hasn't been
-                       // incorrectly created and would confuse the client.
-                       //
-                       long position = out.length();
-                       ChunkMeta meta = chunk.getMeta();
-                       if (meta != null && meta.getBaseChunkCount() != 0) {
-                               for (ChunkMeta.BaseChunk base : meta.getBaseChunkList()) {
-                                       Long act = startsAt.get(base.getChunkKey());
-                                       long exp = position - base.getRelativeStart();
-
-                                       if (act == null) {
-                                               throw new DhtException(MessageFormat.format(DhtText
-                                                               .get().wrongChunkPositionInCachedPack,
-                                                               rowKey(), base.getChunkKey(),
-                                                               "[not written]", key, Long.valueOf(exp)));
-                                       }
-
-                                       if (act.longValue() != exp) {
-                                               throw new DhtException(MessageFormat.format(DhtText
-                                                               .get().wrongChunkPositionInCachedPack,
-                                                               rowKey(), base.getChunkKey(),
-                                                               act, key, Long.valueOf(exp)));
-                                       }
-                               }
-                       }
-
-                       startsAt.put(key.asString(), Long.valueOf(position));
-                       chunk.copyEntireChunkAsIs(out, null, validate);
-               }
-       }
-
-       private String rowKey() {
-               return info.getName() + "." + info.getVersion();
-       }
-}
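
The position check in copyPack() above is plain arithmetic: each written chunk records its absolute output offset, and a chunk that delta-references a base expects that recorded offset to equal its own start minus the base's relative_start. A tiny sketch of that invariant, with illustrative names:

import java.util.HashMap;
import java.util.Map;

class CachedPackPositionCheckSketch {
	private final Map<String, Long> startsAt = new HashMap<String, Long>();

	/** Record where a chunk began in the output stream. */
	void wrote(String chunkKey, long offset) {
		startsAt.put(chunkKey, offset);
	}

	/** Verify a base referenced by the chunk starting at position was written where expected. */
	void check(long position, String baseKey, long relativeStart) {
		long expected = position - relativeStart;
		Long actual = startsAt.get(baseKey);
		if (actual == null || actual.longValue() != expected)
			throw new IllegalStateException(
					"base " + baseKey + " expected at " + expected + " but was " + actual);
	}
}
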
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtConfig.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtConfig.java
deleted file mode 100644 (file)
index 24963c7..0000000
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.IOException;
-
-import org.eclipse.jgit.errors.ConfigInvalidException;
-import org.eclipse.jgit.lib.StoredConfig;
-
-final class DhtConfig extends StoredConfig {
-       @Override
-       public void load() throws IOException, ConfigInvalidException {
-               clear();
-       }
-
-       @Override
-       public void save() throws IOException {
-               // TODO actually store this configuration.
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtException.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtException.java
deleted file mode 100644 (file)
index 7fdd662..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.IOException;
-
-import org.eclipse.jgit.storage.dht.spi.Database;
-
-/** Any error caused by a {@link Database} operation. */
-public class DhtException extends IOException {
-       private static final long serialVersionUID = 1L;
-
-       /**
-        * @param message
-        */
-       public DhtException(String message) {
-               super(message);
-       }
-
-       /**
-        * @param cause
-        */
-       public DhtException(Throwable cause) {
-               super(cause.getMessage());
-               initCause(cause);
-       }
-
-       /**
-        * @param message
-        * @param cause
-        */
-       public DhtException(String message, Throwable cause) {
-               super(message);
-               initCause(cause);
-       }
-
-       /** TODO: Remove this type and all of its locations. */
-       public static class TODO extends RuntimeException {
-               private static final long serialVersionUID = 1L;
-
-               /**
-                * @param what
-                */
-               public TODO(String what) {
-                       super(what);
-               }
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserter.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserter.java
deleted file mode 100644 (file)
index 4ae4cf5..0000000
+++ /dev/null
@@ -1,321 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.ByteArrayInputStream;
-import java.io.EOFException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.MessageDigest;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.zip.Deflater;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType;
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectInserter;
-import org.eclipse.jgit.storage.dht.spi.Database;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-import org.eclipse.jgit.transport.PackParser;
-import org.eclipse.jgit.transport.PackedObjectInfo;
-import org.eclipse.jgit.util.IO;
-
-class DhtInserter extends ObjectInserter {
-       private final DhtObjDatabase objdb;
-
-       private final RepositoryKey repo;
-
-       private final Database db;
-
-       private final DhtInserterOptions options;
-
-       private Deflater deflater;
-
-       private WriteBuffer dbWriteBuffer;
-
-       private ChunkFormatter activeChunk;
-
-       DhtInserter(DhtObjDatabase objdb) {
-               this.objdb = objdb;
-               this.repo = objdb.getRepository().getRepositoryKey();
-               this.db = objdb.getDatabase();
-               this.options = objdb.getInserterOptions();
-       }
-
-       @Override
-       public ObjectId insert(int type, long len, InputStream in)
-                       throws IOException {
-               if (Integer.MAX_VALUE < len || mustFragmentSize() < len)
-                       return insertStream(type, len, in);
-
-               byte[] tmp;
-               try {
-                       tmp = new byte[(int) len];
-               } catch (OutOfMemoryError tooLarge) {
-                       return insertStream(type, len, in);
-               }
-               IO.readFully(in, tmp, 0, tmp.length);
-               return insert(type, tmp, 0, tmp.length);
-       }
-
-       private ObjectId insertStream(final int type, final long inflatedSize,
-                       final InputStream in) throws IOException {
-
-               // TODO Permit multiple chunks to be buffered here at once.
-               // It might be possible to compress and hold all chunks for
-               // an object, which would then allow them to write their
-               // ChunkInfo and chunks in parallel, as well as avoid the
-               // rewrite with the ChunkFragments at the end.
-
-               MessageDigest chunkDigest = Constants.newMessageDigest();
-               LinkedList<ChunkKey> fragmentList = new LinkedList<ChunkKey>();
-
-               ChunkFormatter chunk = newChunk();
-               int position = chunk.position();
-               if (!chunk.whole(type, inflatedSize))
-                       throw new DhtException(DhtText.get().cannotInsertObject);
-
-               MessageDigest objDigest = digest();
-               objDigest.update(Constants.encodedTypeString(type));
-               objDigest.update((byte) ' ');
-               objDigest.update(Constants.encodeASCII(inflatedSize));
-               objDigest.update((byte) 0);
-
-               Deflater def = deflater();
-               byte[] inBuf = buffer();
-               long packedSize = 0;
-               long done = 0;
-               while (done < inflatedSize) {
-                       if (done == 0 || def.needsInput()) {
-                               int inAvail = in.read(inBuf);
-                               if (inAvail <= 0)
-                                       throw new EOFException();
-                               objDigest.update(inBuf, 0, inAvail);
-                               def.setInput(inBuf, 0, inAvail);
-                               done += inAvail;
-                       }
-
-                       if (chunk.free() == 0) {
-                               packedSize += chunk.size();
-                               chunk.setObjectType(type);
-                               chunk.setFragment();
-                               fragmentList.add(chunk.end(chunkDigest));
-                               chunk.safePut(db, dbBuffer());
-                               chunk = newChunk();
-                       }
-                       chunk.appendDeflateOutput(def);
-               }
-
-               def.finish();
-
-               while (!def.finished()) {
-                       if (chunk.free() == 0) {
-                               packedSize += chunk.size();
-                               chunk.setObjectType(type);
-                               chunk.setFragment();
-                               fragmentList.add(chunk.end(chunkDigest));
-                               chunk.safePut(db, dbBuffer());
-                               chunk = newChunk();
-                       }
-                       chunk.appendDeflateOutput(def);
-               }
-
-               ObjectId objId = ObjectId.fromRaw(objDigest.digest());
-               PackedObjectInfo oe = new PackedObjectInfo(objId);
-               oe.setOffset(position);
-
-               if (!chunk.isEmpty()) {
-                       packedSize += chunk.size();
-                       chunk.setObjectType(type);
-
-                       if (fragmentList.isEmpty()) {
-                               ChunkKey key = chunk.end(chunkDigest);
-                               chunk.setChunkIndex(Collections.singletonList(oe));
-                               chunk.safePut(db, dbBuffer());
-
-                               GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
-                               b.setObjectType(ObjectType.valueOf(type));
-                               b.setOffset(position);
-                               b.setPackedSize(packedSize);
-                               b.setInflatedSize(inflatedSize);
-                               ObjectInfo info = new ObjectInfo(key, b.build());
-                               ObjectIndexKey objKey = ObjectIndexKey.create(repo, objId);
-                               db.objectIndex().add(objKey, info, dbBuffer());
-                               return objId;
-                       }
-
-                       chunk.setFragment();
-                       fragmentList.add(chunk.end(chunkDigest));
-                       chunk.safePut(db, dbBuffer());
-               }
-               chunk = null;
-
-               ChunkKey firstChunkKey = fragmentList.get(0);
-
-               ChunkMeta.Builder metaBuilder = ChunkMeta.newBuilder();
-               for (ChunkKey k : fragmentList)
-                       metaBuilder.addFragment(k.asString());
-               ChunkMeta meta = metaBuilder.build();
-
-               for (ChunkKey key : fragmentList) {
-                       PackChunk.Members builder = new PackChunk.Members();
-                       builder.setChunkKey(key);
-                       builder.setMeta(meta);
-
-                       if (firstChunkKey.equals(key))
-                               builder.setChunkIndex(ChunkIndex.create(Arrays.asList(oe)));
-
-                       db.chunk().put(builder, dbBuffer());
-               }
-
-               GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
-               b.setObjectType(ObjectType.valueOf(type));
-               b.setOffset(position);
-               b.setPackedSize(packedSize);
-               b.setInflatedSize(inflatedSize);
-               ObjectInfo info = new ObjectInfo(firstChunkKey, b.build());
-               ObjectIndexKey objKey = ObjectIndexKey.create(repo, objId);
-               db.objectIndex().add(objKey, info, dbBuffer());
-
-               return objId;
-       }
-
-       @Override
-       public ObjectId insert(int type, byte[] data, int off, int len)
-                       throws IOException {
-               // TODO Is it important to avoid duplicate objects here?
-               // IIRC writing out a DirCache just blindly writes all of the
-               // tree objects to the inserter, relying on the inserter to
-               // strip out duplicates. We might need to buffer trees as
-               // long as possible, then collapse the buffer by looking up
-               // any existing objects and avoiding inserting those.
-
-               if (mustFragmentSize() < len)
-                       return insertStream(type, len, asStream(data, off, len));
-
-               ObjectId objId = idFor(type, data, off, len);
-
-               if (activeChunk == null)
-                       activeChunk = newChunk();
-
-               if (activeChunk.whole(deflater(), type, data, off, len, objId))
-                       return objId;
-
-               // TODO Allow more than one chunk pending at a time, this would
-               // permit batching puts of the ChunkInfo records.
-
-               if (!activeChunk.isEmpty()) {
-                       activeChunk.end(digest());
-                       activeChunk.safePut(db, dbBuffer());
-                       activeChunk = newChunk();
-                       if (activeChunk.whole(deflater(), type, data, off, len, objId))
-                               return objId;
-               }
-
-               return insertStream(type, len, asStream(data, off, len));
-       }
-
-       /** @return size above which an object, even after compression, is unlikely to fit into a single chunk. */
-       private int mustFragmentSize() {
-               return 4 * options.getChunkSize();
-       }
-
-       @Override
-       public PackParser newPackParser(InputStream in) throws IOException {
-               return new DhtPackParser(objdb, in);
-       }
-
-       @Override
-       public void flush() throws IOException {
-               if (activeChunk != null && !activeChunk.isEmpty()) {
-                       activeChunk.end(digest());
-                       activeChunk.safePut(db, dbBuffer());
-                       activeChunk = null;
-               }
-
-               if (dbWriteBuffer != null)
-                       dbWriteBuffer.flush();
-       }
-
-       @Override
-       public void release() {
-               if (deflater != null) {
-                       deflater.end();
-                       deflater = null;
-               }
-
-               dbWriteBuffer = null;
-               activeChunk = null;
-       }
-
-       private Deflater deflater() {
-               if (deflater == null)
-                       deflater = new Deflater(options.getCompression());
-               else
-                       deflater.reset();
-               return deflater;
-       }
-
-       private WriteBuffer dbBuffer() {
-               if (dbWriteBuffer == null)
-                       dbWriteBuffer = db.newWriteBuffer();
-               return dbWriteBuffer;
-       }
-
-       private ChunkFormatter newChunk() {
-               ChunkFormatter fmt;
-
-               fmt = new ChunkFormatter(repo, options);
-               fmt.setSource(GitStore.ChunkInfo.Source.INSERT);
-               return fmt;
-       }
-
-       private static ByteArrayInputStream asStream(byte[] data, int off, int len) {
-               return new ByteArrayInputStream(data, off, len);
-       }
-}
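
The inserter above is only reachable through JGit's generic ObjectInserter API; small objects are packed into the active chunk, while anything larger than mustFragmentSize() is streamed and fragmented across chunks. A short usage sketch (the Repository handle and blob payload are assumptions for illustration; this is not code from the deleted package):

    import java.io.IOException;

    import org.eclipse.jgit.lib.Constants;
    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.lib.ObjectInserter;
    import org.eclipse.jgit.lib.Repository;

    class InsertSketch {
        /** Store one small blob through whatever inserter the object database provides. */
        static ObjectId storeBlob(Repository repo, byte[] data) throws IOException {
            ObjectInserter ins = repo.getObjectDatabase().newInserter();
            try {
                ObjectId id = ins.insert(Constants.OBJ_BLOB, data); // small: goes into the active chunk
                ins.flush(); // pushes the active chunk and write buffer to the backend
                return id;
            } finally {
                ins.release(); // frees the Deflater and buffered state
            }
        }
    }
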
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserterOptions.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserterOptions.java
deleted file mode 100644 (file)
index 56b323b..0000000
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static java.util.zip.Deflater.DEFAULT_COMPRESSION;
-import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
-
-import java.security.SecureRandom;
-import java.util.zip.Deflater;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.CoreConfig;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-
-/** Options controlling how objects are inserted into a DHT stored repository. */
-public class DhtInserterOptions {
-       private static final SecureRandom prng = new SecureRandom();
-
-       /** 1024 (number of bytes in one kibibyte/kilobyte) */
-       public static final int KiB = 1024;
-
-       /** 1024 {@link #KiB} (number of bytes in one mebibyte/megabyte) */
-       public static final int MiB = 1024 * KiB;
-
-       private int chunkSize;
-
-       private int writeBufferSize;
-
-       private int compression;
-
-       private int prefetchDepth;
-
-       private long parserCacheLimit;
-
-       /** Create a default inserter configuration. */
-       public DhtInserterOptions() {
-               setChunkSize(1 * MiB);
-               setWriteBufferSize(1 * MiB);
-               setCompression(DEFAULT_COMPRESSION);
-               setPrefetchDepth(50);
-               setParserCacheLimit(512 * getChunkSize());
-       }
-
-       /** @return maximum size of a chunk, in bytes. */
-       public int getChunkSize() {
-               return chunkSize;
-       }
-
-       /**
-        * Set the maximum size of a chunk, in bytes.
-        *
-        * @param sizeInBytes
-        *            the maximum size. A chunk's data segment won't exceed this.
-        * @return {@code this}
-        */
-       public DhtInserterOptions setChunkSize(int sizeInBytes) {
-               chunkSize = Math.max(1024, sizeInBytes);
-               return this;
-       }
-
-       /** @return maximum number of outstanding write bytes. */
-       public int getWriteBufferSize() {
-               return writeBufferSize;
-       }
-
-       /**
-        * Set the maximum number of outstanding bytes in a {@link WriteBuffer}.
-        *
-        * @param sizeInBytes
-        *            maximum number of bytes.
-        * @return {@code this}
-        */
-       public DhtInserterOptions setWriteBufferSize(int sizeInBytes) {
-               writeBufferSize = Math.max(1024, sizeInBytes);
-               return this;
-       }
-
-       /** @return maximum number of objects to put into a chunk. */
-       public int getMaxObjectCount() {
-               // Do not allow the index to be larger than a chunk itself.
-               return getChunkSize() / (OBJECT_ID_LENGTH + 4);
-       }
-
-       /** @return compression level used when writing new objects into chunks. */
-       public int getCompression() {
-               return compression;
-       }
-
-       /**
-        * Set the compression level used when writing new objects.
-        *
-        * @param level
-        *            the compression level. Use
-        *            {@link Deflater#DEFAULT_COMPRESSION} to specify a default
-        *            compression setting.
-        * @return {@code this}
-        */
-       public DhtInserterOptions setCompression(int level) {
-               compression = level;
-               return this;
-       }
-
-       /**
-        * Maximum number of entries in a chunk's prefetch list.
-        * <p>
-        * Each commit or tree chunk stores an optional prefetch list containing the
-        * next X chunk keys that a reader would need if they were traversing the
-        * project history. This implies that chunk prefetch lists are overlapping.
-        * <p>
-        * The depth at insertion time needs to be deep enough to allow readers to
-        * have sufficient parallel prefetch to keep themselves busy without waiting
-        * on sequential loads. If the depth is not sufficient, readers will stall
-        * while they sequentially look up the next chunk they need.
-        *
-        * @return maximum number of entries in a {@link ChunkMeta} list.
-        */
-       public int getPrefetchDepth() {
-               return prefetchDepth;
-       }
-
-       /**
-        * Maximum number of entries in a chunk's prefetch list.
-        *
-        * @param depth
-        *            maximum depth of the prefetch list.
-        * @return {@code this}
-        */
-       public DhtInserterOptions setPrefetchDepth(int depth) {
-               prefetchDepth = Math.max(0, depth);
-               return this;
-       }
-
-       /**
-        * Number of chunks the parser can cache for delta resolution support.
-        *
-        * @return chunks to hold in memory to support delta resolution.
-        */
-       public int getParserCacheSize() {
-               return (int) (getParserCacheLimit() / getChunkSize());
-       }
-
-       /** @return number of bytes the PackParser can cache for delta resolution. */
-       public long getParserCacheLimit() {
-               return parserCacheLimit;
-       }
-
-       /**
-        * Set the number of bytes the PackParser can cache.
-        *
-        * @param limit
-        *            number of bytes the parser can cache.
-        * @return {@code this}
-        */
-       public DhtInserterOptions setParserCacheLimit(long limit) {
-               parserCacheLimit = Math.max(0, limit);
-               return this;
-       }
-
-       /** @return next random 32 bits to salt chunk keys. */
-       int nextChunkSalt() {
-               return prng.nextInt();
-       }
-
-       /**
-        * Update properties by setting fields from the configuration.
-        * <p>
-        * If a property is not defined in the configuration, then it is left
-        * unmodified.
-        *
-        * @param rc
-        *            configuration to read properties from.
-        * @return {@code this}
-        */
-       public DhtInserterOptions fromConfig(Config rc) {
-               setChunkSize(rc.getInt("core", "dht", "chunkSize", getChunkSize()));
-               setWriteBufferSize(rc.getInt("core", "dht", "writeBufferSize", getWriteBufferSize()));
-               setCompression(rc.get(CoreConfig.KEY).getCompression());
-               setPrefetchDepth(rc.getInt("core", "dht", "packParserPrefetchDepth", getPrefetchDepth()));
-               setParserCacheLimit(rc.getLong("core", "dht", "packParserCacheLimit", getParserCacheLimit()));
-               return this;
-       }
-}
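
The options above can be populated either through the setters or from the core.dht.* keys that fromConfig() reads. A hedged sketch of both paths (the specific values are arbitrary examples, not recommended defaults):

    import org.eclipse.jgit.lib.Config;
    import org.eclipse.jgit.storage.dht.DhtInserterOptions;

    class InserterOptionsSketch {
        static DhtInserterOptions viaSetters() {
            return new DhtInserterOptions()
                    .setChunkSize(2 * DhtInserterOptions.MiB) // cap chunk data at 2 MiB
                    .setPrefetchDepth(100) // deeper prefetch lists for readers
                    .setParserCacheLimit(1024L * DhtInserterOptions.MiB);
        }

        static DhtInserterOptions viaConfig(Config rc) {
            // rc is expected to carry keys such as:
            //   [core "dht"]
            //       chunkSize = 1048576
            //       packParserPrefetchDepth = 50
            return new DhtInserterOptions().fromConfig(rc);
        }
    }
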
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtMissingChunkException.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtMissingChunkException.java
deleted file mode 100644 (file)
index 4fc103b..0000000
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.text.MessageFormat;
-
-/** Indicates a {@link PackChunk} doesn't exist in the database. */
-public class DhtMissingChunkException extends DhtException {
-       private static final long serialVersionUID = 1L;
-
-       private final ChunkKey chunkKey;
-
-       /**
-        * Initialize a new missing chunk exception.
-        *
-        * @param key
-        *            the key of the chunk that is not found.
-        */
-       public DhtMissingChunkException(ChunkKey key) {
-               super(MessageFormat.format(DhtText.get().missingChunk, key));
-               chunkKey = key;
-       }
-
-       /**
-        * Initialize a new missing chunk exception.
-        *
-        * @param key
-        *            the key of the chunk that is not found.
-        * @param why
-        *            reason the chunk is missing. This may be an explanation about
-        *            low-level data corruption in the database.
-        */
-       public DhtMissingChunkException(ChunkKey key, Throwable why) {
-               this(key);
-               initCause(why);
-       }
-
-       /** @return key of the chunk that is missing. */
-       public ChunkKey getChunkKey() {
-               return chunkKey;
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjDatabase.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjDatabase.java
deleted file mode 100644 (file)
index 4261676..0000000
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import org.eclipse.jgit.lib.ObjectDatabase;
-import org.eclipse.jgit.lib.ObjectInserter;
-import org.eclipse.jgit.lib.ObjectReader;
-import org.eclipse.jgit.storage.dht.spi.Database;
-
-/** ObjectDatabase stored on top of the DHT database. */
-public class DhtObjDatabase extends ObjectDatabase {
-       private final DhtRepository repository;
-
-       private final Database db;
-
-       private final DhtReaderOptions readerOptions;
-
-       private final DhtInserterOptions inserterOptions;
-
-       DhtObjDatabase(DhtRepository repository, DhtRepositoryBuilder builder) {
-               this.repository = repository;
-               this.db = builder.getDatabase();
-               this.readerOptions = builder.getReaderOptions();
-               this.inserterOptions = builder.getInserterOptions();
-       }
-
-       DhtRepository getRepository() {
-               return repository;
-       }
-
-       Database getDatabase() {
-               return db;
-       }
-
-       DhtReaderOptions getReaderOptions() {
-               return readerOptions;
-       }
-
-       DhtInserterOptions getInserterOptions() {
-               return inserterOptions;
-       }
-
-       @Override
-       public boolean exists() {
-               return repository.getRepositoryKey() != null;
-       }
-
-       @Override
-       public void close() {
-               // Do nothing.
-       }
-
-       @Override
-       public ObjectReader newReader() {
-               return new DhtReader(this);
-       }
-
-       @Override
-       public ObjectInserter newInserter() {
-               return new DhtInserter(this);
-       }
-}
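
Reads against this object database go through the generic ObjectReader handed out by newReader(). A small sketch (the Repository handle and blob id are assumptions for illustration; not code from the deleted package):

    import java.io.IOException;

    import org.eclipse.jgit.lib.Constants;
    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.lib.ObjectLoader;
    import org.eclipse.jgit.lib.ObjectReader;
    import org.eclipse.jgit.lib.Repository;

    class ReadSketch {
        /** Load a small blob through whatever reader the object database provides. */
        static byte[] readBlob(Repository repo, ObjectId id) throws IOException {
            ObjectReader rdr = repo.getObjectDatabase().newReader();
            try {
                ObjectLoader ldr = rdr.open(id, Constants.OBJ_BLOB);
                return ldr.getCachedBytes(); // fine for small blobs; use openStream() for large ones
            } finally {
                rdr.release();
            }
        }
    }
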
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectRepresentation.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectRepresentation.java
deleted file mode 100644 (file)
index f6d55c1..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.storage.pack.StoredObjectRepresentation;
-
-final class DhtObjectRepresentation extends StoredObjectRepresentation {
-       private ObjectInfo info;
-
-       void set(ObjectInfo link) {
-               this.info = link;
-       }
-
-       ChunkKey getChunkKey() {
-               return info.getChunkKey();
-       }
-
-       int getOffset() {
-               return info.getOffset();
-       }
-
-       long getPackedSize() {
-               return info.getPackedSize();
-       }
-
-       boolean isFragmented() {
-               return info.isFragmented();
-       }
-
-       @Override
-       public ObjectId getDeltaBase() {
-               return info.getDeltaBase();
-       }
-
-       @Override
-       public int getFormat() {
-               if (info.isDelta())
-                       return PACK_DELTA;
-               return PACK_WHOLE;
-       }
-
-       @Override
-       public int getWeight() {
-               long size = info.getPackedSize();
-               return (int) Math.min(size, Integer.MAX_VALUE);
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectToPack.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectToPack.java
deleted file mode 100644 (file)
index 9816180..0000000
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import org.eclipse.jgit.revwalk.RevObject;
-import org.eclipse.jgit.storage.pack.ObjectToPack;
-import org.eclipse.jgit.storage.pack.StoredObjectRepresentation;
-
-final class DhtObjectToPack extends ObjectToPack {
-       private static final int FRAGMENTED = 1 << 0;
-
-       /** Chunk that contains this object. */
-       ChunkKey chunk;
-
-       /** Offset of this object within its chunk. */
-       int offset;
-
-       /** Number of bytes in the object's compressed form, excluding pack header. */
-       int size;
-
-       /** Order in which this object's chunk occurs in the {@link Prefetcher}. */
-       int visitOrder;
-
-       DhtObjectToPack(RevObject obj) {
-               super(obj);
-       }
-
-       boolean isFragmented() {
-               return isExtendedFlag(FRAGMENTED);
-       }
-
-       @Override
-       public void select(StoredObjectRepresentation ref) {
-               DhtObjectRepresentation rep = (DhtObjectRepresentation) ref;
-               chunk = rep.getChunkKey();
-               offset = rep.getOffset();
-
-               final long sz = rep.getPackedSize();
-               if (sz <= Integer.MAX_VALUE)
-                       size = (int) sz;
-               else
-                       size = -1;
-
-               if (rep.isFragmented())
-                       setExtendedFlag(FRAGMENTED);
-               else
-                       clearExtendedFlag(FRAGMENTED);
-       }
-}
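
FRAGMENTED above is a single bit kept in ObjectToPack's extended-flags word. The same bit-flag pattern, reduced to a standalone sketch with plain ints (illustrative names, not the protected ObjectToPack helpers):

    class FlagSketch {
        static final int FRAGMENTED = 1 << 0;

        private int flags;

        void setFragmented(boolean on) {
            if (on)
                flags |= FRAGMENTED; // set the bit
            else
                flags &= ~FRAGMENTED; // clear the bit
        }

        boolean isFragmented() {
            return (flags & FRAGMENTED) != 0; // test the bit
        }
    }
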
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtPackParser.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtPackParser.java
deleted file mode 100644 (file)
index a397247..0000000
+++ /dev/null
@@ -1,1442 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
-import static org.eclipse.jgit.lib.Constants.OBJ_COMMIT;
-import static org.eclipse.jgit.lib.Constants.OBJ_OFS_DELTA;
-import static org.eclipse.jgit.lib.Constants.OBJ_REF_DELTA;
-import static org.eclipse.jgit.lib.Constants.OBJ_TAG;
-import static org.eclipse.jgit.lib.Constants.OBJ_TREE;
-import static org.eclipse.jgit.storage.dht.ChunkInfo.OBJ_MIXED;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.MessageDigest;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.ListIterator;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.MutableObjectId;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectIdSubclassMap;
-import org.eclipse.jgit.lib.ProgressMonitor;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.Database;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-import org.eclipse.jgit.storage.file.PackLock;
-import org.eclipse.jgit.transport.PackParser;
-import org.eclipse.jgit.transport.PackedObjectInfo;
-import org.eclipse.jgit.treewalk.CanonicalTreeParser;
-import org.eclipse.jgit.util.LongList;
-
-import com.google.protobuf.ByteString;
-
-/** Parses the pack stream into chunks, and indexes the chunks for lookup. */
-public class DhtPackParser extends PackParser {
-       private final DhtObjDatabase objdb;
-
-       private final RepositoryKey repo;
-
-       private final Database db;
-
-       private final DhtInserterOptions options;
-
-       private final MessageDigest chunkKeyDigest;
-
-       /** Number of objects to write to the global index at once. */
-       private final int linkBatchSize;
-
-       private Boolean saveAsCachedPack;
-
-       private WriteBuffer dbWriteBuffer;
-
-       /** Chunk writers for the 4 major object types, keyed by object type code. */
-       private ChunkFormatter[] openChunks;
-
-       /** Edges for current chunks. */
-       private Edges[] openEdges;
-
-       /** Prior chunks that were written, keyed by object type code. */
-       private List<ChunkKey>[] chunkByOrder;
-
-       /** Information on chunks already written out. */
-       private Map<ChunkKey, ChunkInfo> infoByKey;
-
-       /** Information on chunks already written out. */
-       private Map<ChunkKey, ChunkMeta> chunkMeta;
-
-       /** ChunkMeta that needs to be written out again, as it was modified. */
-       private Map<ChunkKey, ChunkMeta> dirtyMeta;
-
-       private Map<ChunkKey, Edges> chunkEdges;
-
-       // Correlated lists, sorted by object stream position.
-       private LongList objStreamPos;
-
-       private LongList objChunkPtrs;
-
-       /** Formatter handling the current object's data stream. */
-       private ChunkFormatter currChunk;
-
-       /** Current type of the object, if known. */
-       private int currType;
-
-       /** Position of the current object in the chunks we create. */
-       private long currChunkPtr;
-
-       /** If using OFS_DELTA, location of the base object in chunk space. */
-       private long currBasePtr;
-
-       /** Starting byte of the object data (aka end of the object header). */
-       private int currDataPos;
-
-       /** Total number of bytes in the object representation. */
-       private long currPackedSize;
-
-       /** Total number of bytes in the entire inflated object. */
-       private long currInflatedSize;
-
-       /** If the current object is fragmented, the list of chunks holding it. */
-       private List<ChunkKey> currFragments;
-
-       /** Previously written chunk that is being re-read during delta resolution. */
-       private PackChunk dbChunk;
-
-       /** Current read position in {@link #dbChunk}. */
-       private int dbPtr;
-
-       /** Recent chunks that were written, or recently read. */
-       private LinkedHashMap<ChunkKey, PackChunk> chunkReadBackCache;
-
-       /** Objects parsed from the stream, sorted by SHA-1. */
-       private List<DhtInfo> objectListByName;
-
-       /** Objects parsed from the stream, sorted by chunk (aka offset). */
-       private List<DhtInfo> objectListByChunk;
-
-       /** Iterators to write {@link #objectListByName} into the global index. */
-       private ListIterator<DhtInfo>[] linkIterators;
-
-       /** If the pack stream was self-contained, the cached pack info record key. */
-       private CachedPackKey cachedPackKey;
-
-       private CanonicalTreeParser treeParser;
-
-       private final MutableObjectId idBuffer;
-
-       private ObjectIdSubclassMap<DhtInfo> objectMap;
-
-       DhtPackParser(DhtObjDatabase objdb, InputStream in) {
-               super(objdb, in);
-
-               // Disable collision checking. DhtReader performs some magic to look
-               // only at old objects, so a colliding replacement will be ignored until
-               // it's removed during garbage collection.
-               //
-               setCheckObjectCollisions(false);
-
-               this.objdb = objdb;
-               this.repo = objdb.getRepository().getRepositoryKey();
-               this.db = objdb.getDatabase();
-               this.options = objdb.getInserterOptions();
-               this.chunkKeyDigest = Constants.newMessageDigest();
-
-               dbWriteBuffer = db.newWriteBuffer();
-               openChunks = new ChunkFormatter[5];
-               openEdges = new Edges[5];
-               chunkByOrder = newListArray(5);
-               infoByKey = new HashMap<ChunkKey, ChunkInfo>();
-               dirtyMeta = new HashMap<ChunkKey, ChunkMeta>();
-               chunkMeta = new HashMap<ChunkKey, ChunkMeta>();
-               chunkEdges = new HashMap<ChunkKey, Edges>();
-               treeParser = new CanonicalTreeParser();
-               idBuffer = new MutableObjectId();
-               objectMap = new ObjectIdSubclassMap<DhtInfo>();
-
-               final int max = options.getParserCacheSize();
-               chunkReadBackCache = new LinkedHashMap<ChunkKey, PackChunk>(max, 0.75f, true) {
-                       private static final long serialVersionUID = 1L;
-
-                       @Override
-                       protected boolean removeEldestEntry(Entry<ChunkKey, PackChunk> e) {
-                               return max < size();
-                       }
-               };
-
-               // The typical WriteBuffer flushes at 512 KiB increments, and
-               // the typical ObjectInfo record is around 180 bytes. Use these
-               // figures to come up with a rough estimate for how many links
-               // to construct in one region of the DHT before moving onto a
-               // different region in order to increase parallelism on large
-               // object imports.
-               //
-               linkBatchSize = 512 * 1024 / 180;
-       }
-
-       @SuppressWarnings("unchecked")
-       private static <T> List<T>[] newListArray(int size) {
-               return new List[size];
-       }
-
-       /** @return true if the pack stream will be saved as a cached pack. */
-       public boolean isSaveAsCachedPack() {
-               return saveAsCachedPack != null && saveAsCachedPack.booleanValue();
-       }
-
-       /**
-        * Enable saving the pack stream as a cached pack.
-        *
-        * @param save
-        *            if true, the stream is saved.
-        */
-       public void setSaveAsCachedPack(boolean save) {
-               saveAsCachedPack = Boolean.valueOf(save);
-       }
-
-       @Override
-       public PackLock parse(ProgressMonitor receiving, ProgressMonitor resolving)
-                       throws IOException {
-               boolean success = false;
-               try {
-                       PackLock lock = super.parse(receiving, resolving);
-
-                       chunkReadBackCache = null;
-                       openChunks = null;
-                       openEdges = null;
-                       treeParser = null;
-
-                       final int objCnt = getObjectCount();
-                       if (objCnt == 0) {
-                               // If no objects were received, no chunks were created. Leaving
-                               // success to false and doing a rollback is a good way to make
-                               // sure this is true.
-                               //
-                               return lock;
-                       }
-
-                       createObjectLists();
-
-                       if (isSaveAsCachedPack())
-                               putCachedPack();
-                       computeChunkEdges();
-                       putChunkIndexes();
-                       putDirtyMeta();
-
-                       chunkMeta = null;
-                       chunkEdges = null;
-                       dirtyMeta = null;
-                       objectMap = null;
-                       objectListByChunk = null;
-                       dbWriteBuffer.flush();
-
-                       putGlobalIndex(resolving);
-                       dbWriteBuffer.flush();
-
-                       success = true;
-                       return lock;
-               } finally {
-                       openChunks = null;
-                       openEdges = null;
-                       objStreamPos = null;
-                       objChunkPtrs = null;
-                       currChunk = null;
-                       currFragments = null;
-                       dbChunk = null;
-                       chunkReadBackCache = null;
-                       infoByKey = null;
-                       chunkMeta = null;
-                       chunkEdges = null;
-                       treeParser = null;
-
-                       if (!success)
-                               rollback();
-
-                       chunkByOrder = null;
-                       objectListByName = null;
-                       objectListByChunk = null;
-                       linkIterators = null;
-                       dbWriteBuffer = null;
-               }
-       }
-
-       @SuppressWarnings("unchecked")
-       private void createObjectLists() {
-               List objs = getSortedObjectList(null /* by name */);
-               objectListByName = objs;
-
-               int cnt = objectListByName.size();
-               DhtInfo[] copy = objectListByName.toArray(new DhtInfo[cnt]);
-               Arrays.sort(copy, new Comparator<PackedObjectInfo>() {
-                       public int compare(PackedObjectInfo o1, PackedObjectInfo o2) {
-                               DhtInfo a = (DhtInfo) o1;
-                               DhtInfo b = (DhtInfo) o2;
-                               return Long.signum(a.chunkPtr - b.chunkPtr);
-                       }
-               });
-               objectListByChunk = Arrays.asList(copy);
-       }
-
-       private void putCachedPack() throws DhtException {
-               CachedPackInfo.Builder info = CachedPackInfo.newBuilder();
-
-               for (DhtInfo obj : objectMap) {
-                       if (!obj.isInPack())
-                               return;
-
-                       if (!obj.isReferenced())
-                               info.getTipListBuilder().addObjectName(obj.name());
-               }
-
-               MessageDigest version = Constants.newMessageDigest();
-               addChunkList(info, version, chunkByOrder[OBJ_TAG]);
-               addChunkList(info, version, chunkByOrder[OBJ_COMMIT]);
-               addChunkList(info, version, chunkByOrder[OBJ_TREE]);
-               addChunkList(info, version, chunkByOrder[OBJ_BLOB]);
-
-               info.setName(computePackName().name());
-               info.setVersion(ObjectId.fromRaw(version.digest()).name());
-
-               cachedPackKey = CachedPackKey.fromInfo(info.build());
-               for (List<ChunkKey> list : chunkByOrder) {
-                       if (list == null)
-                               continue;
-                       for (ChunkKey key : list) {
-                               ChunkInfo oldInfo = infoByKey.get(key);
-                               GitStore.ChunkInfo.Builder b =
-                                       GitStore.ChunkInfo.newBuilder(oldInfo.getData());
-                               b.setCachedPackKey(cachedPackKey.asString());
-                               ChunkInfo newInfo = new ChunkInfo(key, b.build());
-                               infoByKey.put(key, newInfo);
-
-                               // A fragment was already put, and has to be re-put.
-                               // Non-fragments will be put later, so they are not put now.
-                               if (newInfo.getData().getIsFragment())
-                                       db.repository().put(repo, newInfo, dbWriteBuffer);
-                       }
-               }
-
-               db.repository().put(repo, info.build(), dbWriteBuffer);
-       }
-
-       private void addChunkList(CachedPackInfo.Builder info,
-                       MessageDigest version, List<ChunkKey> list) {
-               if (list == null)
-                       return;
-
-               long bytesTotal = info.getBytesTotal();
-               long objectsTotal = info.getObjectsTotal();
-               long objectsDelta = info.getObjectsDelta();
-
-               byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
-               for (ChunkKey key : list) {
-                       ChunkInfo chunkInfo = infoByKey.get(key);
-                       GitStore.ChunkInfo c = chunkInfo.getData();
-                       int len = c.getChunkSize() - ChunkFormatter.TRAILER_SIZE;
-                       bytesTotal += len;
-                       objectsTotal += c.getObjectCounts().getTotal();
-                       objectsDelta += c.getObjectCounts().getOfsDelta();
-                       objectsDelta += c.getObjectCounts().getRefDelta();
-                       info.getChunkListBuilder().addChunkKey(
-                                       chunkInfo.getChunkKey().asString());
-                       chunkInfo.getChunkKey().getChunkHash().copyRawTo(buf, 0);
-                       version.update(buf);
-               }
-
-               info.setBytesTotal(bytesTotal);
-               info.setObjectsTotal(objectsTotal);
-               info.setObjectsDelta(objectsDelta);
-       }
-
-       private ObjectId computePackName() {
-               byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
-               MessageDigest md = Constants.newMessageDigest();
-               for (DhtInfo otp : objectListByName) {
-                       otp.copyRawTo(buf, 0);
-                       md.update(buf);
-               }
-               return ObjectId.fromRaw(md.digest());
-       }
-
-       private void rollback() throws DhtException {
-               try {
-                       dbWriteBuffer.abort();
-                       dbWriteBuffer = db.newWriteBuffer();
-
-                       if (cachedPackKey != null)
-                               db.repository().remove(repo, cachedPackKey, dbWriteBuffer);
-
-                       if (linkIterators != null) {
-                               boolean removed = true;
-                               while (removed) {
-                                       removed = false;
-                                       for (ListIterator<DhtInfo> itr : linkIterators) {
-                                               int cnt = 0;
-                                               while (itr.hasPrevious() && cnt < linkBatchSize) {
-                                                       DhtInfo oe = itr.previous();
-                                                       db.objectIndex().remove( //
-                                                                       ObjectIndexKey.create(repo, oe), //
-                                                                       chunkOf(oe.chunkPtr), //
-                                                                       dbWriteBuffer);
-                                                       cnt++;
-                                               }
-                                               if (0 < cnt)
-                                                       removed = true;
-                                       }
-                               }
-                       }
-
-                       deleteChunks(chunkByOrder[OBJ_COMMIT]);
-                       deleteChunks(chunkByOrder[OBJ_TREE]);
-                       deleteChunks(chunkByOrder[OBJ_BLOB]);
-                       deleteChunks(chunkByOrder[OBJ_TAG]);
-
-                       dbWriteBuffer.flush();
-               } catch (Throwable err) {
-                       throw new DhtException(DhtText.get().packParserRollbackFailed, err);
-               }
-       }
-
-       private void deleteChunks(List<ChunkKey> list) throws DhtException {
-               if (list != null) {
-                       for (ChunkKey key : list) {
-                               db.chunk().remove(key, dbWriteBuffer);
-                               db.repository().remove(repo, key, dbWriteBuffer);
-                       }
-               }
-       }
-
-       private void putGlobalIndex(ProgressMonitor pm) throws DhtException {
-               int objcnt = objectListByName.size();
-               pm.beginTask(DhtText.get().recordingObjects, objcnt);
-
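-               // Split the object list into segments and add the global index
-               // entries in round-robin batches of linkBatchSize per segment;
-               // rollback() walks the same iterators backwards if the pack fails.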
-               int segments = Math.max(1, Math.min(objcnt / linkBatchSize, 32));
-               linkIterators = newListIteratorArray(segments);
-
-               int objsPerSegment = objcnt / segments;
-               int beginIdx = 0;
-               for (int i = 0; i < segments - 1; i++) {
-                       int endIdx = Math.min(beginIdx + objsPerSegment, objcnt);
-                       linkIterators[i] = objectListByName.subList(beginIdx, endIdx)
-                                       .listIterator();
-                       beginIdx = endIdx;
-               }
-               linkIterators[segments - 1] = objectListByName
-                               .subList(beginIdx, objcnt).listIterator();
-
-               boolean inserted = true;
-               while (inserted) {
-                       inserted = false;
-                       for (ListIterator<DhtInfo> itr : linkIterators) {
-                               int cnt = 0;
-                               while (itr.hasNext() && cnt < linkBatchSize) {
-                                       DhtInfo oe = itr.next();
-                                       db.objectIndex().add( //
-                                                       ObjectIndexKey.create(repo, oe), //
-                                                       oe.info(chunkOf(oe.chunkPtr)), //
-                                                       dbWriteBuffer);
-                                       cnt++;
-                               }
-                               if (0 < cnt) {
-                                       pm.update(cnt);
-                                       inserted = true;
-                               }
-                       }
-               }
-
-               pm.endTask();
-       }
-
-       @SuppressWarnings("unchecked")
-       private static ListIterator<DhtInfo>[] newListIteratorArray(int size) {
-               return new ListIterator[size];
-       }
-
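-       // objectListByChunk groups objects by their containing chunk; each
-       // time the chunk key changes, edges are finalized for the completed
-       // run, downgrading its type to OBJ_MIXED if more than one type was seen.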
-       private void computeChunkEdges() throws DhtException {
-               List<DhtInfo> objs = objectListByChunk;
-               int beginIdx = 0;
-               ChunkKey key = chunkOf(objs.get(0).chunkPtr);
-               int type = typeOf(objs.get(0).chunkPtr);
-
-               int objIdx = 1;
-               for (; objIdx < objs.size(); objIdx++) {
-                       DhtInfo oe = objs.get(objIdx);
-                       ChunkKey oeKey = chunkOf(oe.chunkPtr);
-                       if (!key.equals(oeKey)) {
-                               computeEdges(objs.subList(beginIdx, objIdx), key, type);
-                               beginIdx = objIdx;
-
-                               key = oeKey;
-                               type = typeOf(oe.chunkPtr);
-                       }
-                       if (type != OBJ_MIXED && type != typeOf(oe.chunkPtr))
-                               type = OBJ_MIXED;
-               }
-               computeEdges(objs.subList(beginIdx, objs.size()), key, type);
-       }
-
-       private void computeEdges(List<DhtInfo> objs, ChunkKey key, int type)
-                       throws DhtException {
-               Edges edges = chunkEdges.get(key);
-               if (edges == null)
-                       return;
-
-               for (DhtInfo obj : objs)
-                       edges.remove(obj);
-
-               switch (type) {
-               case OBJ_COMMIT:
-                       edges.commitEdges = toChunkList(edges.commitIds);
-                       break;
-               case OBJ_TREE:
-                       // TODO prefetch tree edges
-                       break;
-               }
-
-               edges.commitIds = null;
-       }
-
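-       // Collapse the referenced objects into the chunks that contain them,
-       // ordered by each chunk's position within its type's chunk list.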
-       private List<ChunkKey> toChunkList(Set<DhtInfo> objects)
-                       throws DhtException {
-               if (objects == null || objects.isEmpty())
-                       return null;
-
-               Map<ChunkKey, ChunkOrderingEntry> map = new HashMap<ChunkKey, ChunkOrderingEntry>();
-               for (DhtInfo obj : objects) {
-                       if (!obj.isInPack())
-                               continue;
-
-                       long chunkPtr = obj.chunkPtr;
-                       ChunkKey key = chunkOf(chunkPtr);
-                       ChunkOrderingEntry e = map.get(key);
-                       if (e == null) {
-                               e = new ChunkOrderingEntry();
-                               e.key = key;
-                               e.order = chunkIdx(chunkPtr);
-                               map.put(key, e);
-                       } else {
-                               e.order = Math.min(e.order, chunkIdx(chunkPtr));
-                       }
-               }
-
-               ChunkOrderingEntry[] tmp = map.values().toArray(
-                               new ChunkOrderingEntry[map.size()]);
-               Arrays.sort(tmp);
-
-               ChunkKey[] out = new ChunkKey[tmp.length];
-               for (int i = 0; i < tmp.length; i++)
-                       out[i] = tmp[i].key;
-               return Arrays.asList(out);
-       }
-
-       private static final class ChunkOrderingEntry implements
-                       Comparable<ChunkOrderingEntry> {
-               ChunkKey key;
-
-               int order;
-
-               public int compareTo(ChunkOrderingEntry o) {
-                       return order - o.order;
-               }
-       }
-
-       private void putChunkIndexes() throws DhtException {
-               List<DhtInfo> objs = objectListByChunk;
-               int sIdx = 0;
-               DhtInfo oe = objs.get(0);
-               oe.setOffset(offsetOf(oe.chunkPtr));
-
-               ChunkKey key = chunkOf(oe.chunkPtr);
-               int type = typeOf(oe.chunkPtr);
-
-               int objIdx = 1;
-               for (; objIdx < objs.size(); objIdx++) {
-                       oe = objs.get(objIdx);
-                       oe.setOffset(offsetOf(oe.chunkPtr));
-
-                       ChunkKey oeKey = chunkOf(oe.chunkPtr);
-                       if (!key.equals(oeKey)) {
-                               putChunkIndex(objs.subList(sIdx, objIdx), key, type);
-                               sIdx = objIdx;
-
-                               key = oeKey;
-                               type = typeOf(oe.chunkPtr);
-                       }
-                       if (type != OBJ_MIXED && type != typeOf(oe.chunkPtr))
-                               type = OBJ_MIXED;
-               }
-               putChunkIndex(objs.subList(sIdx, objs.size()), key, type);
-       }
-
-       private void putChunkIndex(List<DhtInfo> objectList, ChunkKey key, int type)
-                       throws DhtException {
-               ChunkInfo oldInfo = infoByKey.get(key);
-               GitStore.ChunkInfo.Builder info
-                       = GitStore.ChunkInfo.newBuilder(oldInfo.getData());
-
-               PackChunk.Members builder = new PackChunk.Members();
-               builder.setChunkKey(key);
-
-               byte[] index = ChunkIndex.create(objectList);
-               info.setIndexSize(index.length);
-               builder.setChunkIndex(index);
-
-               ChunkMeta meta = dirtyMeta.remove(key);
-               if (meta == null)
-                       meta = chunkMeta.get(key);
-
-               switch (type) {
-               case OBJ_COMMIT: {
-                       Edges edges = chunkEdges.get(key);
-                       List<ChunkKey> e = edges != null ? edges.commitEdges : null;
-                       List<ChunkKey> s = sequentialHint(key, OBJ_COMMIT);
-                       if (e == null)
-                               e = Collections.emptyList();
-                       if (s == null)
-                               s = Collections.emptyList();
-                       if (!e.isEmpty() || !s.isEmpty()) {
-                               ChunkMeta.Builder m = edit(meta);
-                               ChunkMeta.PrefetchHint.Builder h = m.getCommitPrefetchBuilder();
-                               for (ChunkKey k : e)
-                                       h.addEdge(k.asString());
-                               for (ChunkKey k : s)
-                                       h.addSequential(k.asString());
-                               meta = m.build();
-                       }
-                       break;
-               }
-               case OBJ_TREE: {
-                       List<ChunkKey> s = sequentialHint(key, OBJ_TREE);
-                       if (s == null)
-                               s = Collections.emptyList();
-                       if (!s.isEmpty()) {
-                               ChunkMeta.Builder m = edit(meta);
-                               ChunkMeta.PrefetchHint.Builder h = m.getTreePrefetchBuilder();
-                               for (ChunkKey k : s)
-                                       h.addSequential(k.asString());
-                               meta = m.build();
-                       }
-                       break;
-               }
-               }
-
-               if (meta != null) {
-                       info.setMetaSize(meta.getSerializedSize());
-                       builder.setMeta(meta);
-               }
-
-               ChunkInfo newInfo = new ChunkInfo(key, info.build());
-               infoByKey.put(key, newInfo);
-               db.repository().put(repo, newInfo, dbWriteBuffer);
-               db.chunk().put(builder, dbWriteBuffer);
-       }
-
-       private static ChunkMeta.Builder edit(ChunkMeta meta) {
-               if (meta != null)
-                       return ChunkMeta.newBuilder(meta);
-               return ChunkMeta.newBuilder();
-       }
-
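-       // Suggest up to prefetchDepth chunks that immediately follow the
-       // given chunk in creation order, as a sequential prefetch hint.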
-       private List<ChunkKey> sequentialHint(ChunkKey key, int typeCode) {
-               List<ChunkKey> all = chunkByOrder[typeCode];
-               if (all == null)
-                       return null;
-               int idx = all.indexOf(key);
-               if (0 <= idx) {
-                       int max = options.getPrefetchDepth();
-                       int end = Math.min(idx + 1 + max, all.size());
-                       return all.subList(idx + 1, end);
-               }
-               return null;
-       }
-
-       private void putDirtyMeta() throws DhtException {
-               for (Map.Entry<ChunkKey, ChunkMeta> meta : dirtyMeta.entrySet()) {
-                       PackChunk.Members builder = new PackChunk.Members();
-                       builder.setChunkKey(meta.getKey());
-                       builder.setMeta(meta.getValue());
-                       db.chunk().put(builder, dbWriteBuffer);
-               }
-       }
-
-       @Override
-       protected PackedObjectInfo newInfo(AnyObjectId id, UnresolvedDelta delta,
-                       ObjectId baseId) {
-               DhtInfo obj = objectMap.addIfAbsent(new DhtInfo(id));
-               if (delta != null) {
-                       DhtDelta d = (DhtDelta) delta;
-                       obj.chunkPtr = d.chunkPtr;
-                       obj.packedSize = d.packedSize;
-                       obj.inflatedSize = d.inflatedSize;
-                       obj.base = baseId;
-                       obj.setType(d.getType());
-                       if (d.isFragmented())
-                               obj.setFragmented();
-               }
-               return obj;
-       }
-
-       @Override
-       protected void onPackHeader(long objCnt) throws IOException {
-               if (Integer.MAX_VALUE < objCnt) {
-                       throw new DhtException(MessageFormat.format(
-                                       DhtText.get().tooManyObjectsInPack, Long.valueOf(objCnt)));
-               }
-
-               objStreamPos = new LongList((int) objCnt);
-               objChunkPtrs = new LongList((int) objCnt);
-
-               if (saveAsCachedPack == null)
-                       setSaveAsCachedPack(1000 < objCnt);
-       }
-
-       @Override
-       protected void onBeginWholeObject(long streamPosition, int type,
-                       long inflatedSize) throws IOException {
-               ChunkFormatter w = begin(type);
-               if (!w.whole(type, inflatedSize)) {
-                       endChunk(type);
-                       w = begin(type);
-                       if (!w.whole(type, inflatedSize))
-                               throw panicCannotInsert();
-               }
-
-               currType = type;
-               currDataPos = w.position();
-               currPackedSize = 0;
-               currInflatedSize = inflatedSize;
-               objStreamPos.add(streamPosition);
-       }
-
-       @Override
-       protected void onEndWholeObject(PackedObjectInfo info) throws IOException {
-               boolean fragmented = currFragments != null;
-               endOneObject();
-
-               DhtInfo oe = (DhtInfo) info;
-               oe.chunkPtr = currChunkPtr;
-               oe.packedSize = currPackedSize;
-               oe.inflatedSize = currInflatedSize;
-               oe.setType(currType);
-               if (fragmented)
-                       oe.setFragmented();
-       }
-
-       private void endOneObject() throws DhtException {
-               if (currFragments != null)
-                       endFragmentedObject();
-               objChunkPtrs.add(currChunkPtr);
-       }
-
-       @Override
-       protected void onBeginOfsDelta(long deltaPos, long basePos,
-                       long inflatedSize) throws IOException {
-               long basePtr = objChunkPtrs.get(findStreamIndex(basePos));
-               int type = typeOf(basePtr);
-
-               currType = type;
-               currPackedSize = 0;
-               currInflatedSize = inflatedSize;
-               currBasePtr = basePtr;
-               objStreamPos.add(deltaPos);
-
-               ChunkFormatter w = begin(type);
-               if (isInCurrentChunk(basePtr)) {
-                       if (w.ofsDelta(inflatedSize, w.position() - offsetOf(basePtr))) {
-                               currDataPos = w.position();
-                               return;
-                       }
-
-                       endChunk(type);
-                       w = begin(type);
-               }
-
-               if (!longOfsDelta(w, inflatedSize, basePtr)) {
-                       endChunk(type);
-                       w = begin(type);
-                       if (!longOfsDelta(w, inflatedSize, basePtr))
-                               throw panicCannotInsert();
-               }
-
-               currDataPos = w.position();
-       }
-
-       @Override
-       protected void onBeginRefDelta(long deltaPos, AnyObjectId baseId,
-                       long inflatedSize) throws IOException {
-               // Try to get the base type, but only if it was seen before in this
-               // pack stream. If not, assume the worst case of BLOB type.
-               //
-               int typeCode;
-               DhtInfo baseInfo = objectMap.get(baseId);
-               if (baseInfo != null && baseInfo.isInPack()) {
-                       typeCode = baseInfo.getType();
-                       currType = typeCode;
-               } else {
-                       typeCode = OBJ_BLOB;
-                       currType = -1;
-               }
-
-               ChunkFormatter w = begin(typeCode);
-               if (!w.refDelta(inflatedSize, baseId)) {
-                       endChunk(typeCode);
-                       w = begin(typeCode);
-                       if (!w.refDelta(inflatedSize, baseId))
-                               throw panicCannotInsert();
-               }
-
-               currDataPos = w.position();
-               currPackedSize = 0;
-               currInflatedSize = inflatedSize;
-               objStreamPos.add(deltaPos);
-       }
-
-       @Override
-       protected DhtDelta onEndDelta() throws IOException {
-               boolean fragmented = currFragments != null;
-               endOneObject();
-
-               DhtDelta delta = new DhtDelta();
-               delta.chunkPtr = currChunkPtr;
-               delta.packedSize = currPackedSize;
-               delta.inflatedSize = currInflatedSize;
-               if (0 < currType)
-                       delta.setType(currType);
-               if (fragmented)
-                       delta.setFragmented();
-               return delta;
-       }
-
-       @Override
-       protected void onObjectData(Source src, byte[] raw, int pos, int len)
-                       throws IOException {
-               if (src != Source.INPUT)
-                       return;
-
-               if (currChunk.append(raw, pos, len)) {
-                       currPackedSize += len;
-                       return;
-               }
-
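-               // The data no longer fits in the current chunk. If this object is
-               // the only one in the chunk, spill it across fragments; otherwise
-               // move what has been written so far into a fresh chunk and retry.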
-               if (currFragments == null && currChunk.getObjectCount() == 1)
-                       currFragments = new LinkedList<ChunkKey>();
-               if (currFragments != null) {
-                       appendToFragment(raw, pos, len);
-                       return;
-               }
-
-               // Everything between dataPos and dataEnd must be saved.
-               //
-               final int dataPos = currDataPos;
-               final int dataEnd = currChunk.position();
-               final int hdrPos = offsetOf(currChunkPtr);
-               final int hdrLen = dataPos - hdrPos;
-               final int type = typeOf(currChunkPtr);
-               byte[] dataOld = currChunk.getRawChunkDataArray();
-               final int typeOld = currChunk.getCurrentObjectType();
-
-               currChunk.rollback();
-               endChunk(type);
-
-               final ChunkFormatter w = begin(type);
-               switch (typeOld) {
-               case OBJ_COMMIT:
-               case OBJ_BLOB:
-               case OBJ_TREE:
-               case OBJ_TAG:
-               case OBJ_REF_DELTA:
-                       w.adjustObjectCount(1, typeOld);
-                       if (!w.append(dataOld, hdrPos, hdrLen))
-                               throw panicCannotInsert();
-                       break;
-
-               case OBJ_OFS_DELTA:
-                       if (!longOfsDelta(w, currInflatedSize, currBasePtr))
-                               throw panicCannotInsert();
-                       break;
-
-               default:
-                       throw new DhtException("Internal programming error: " + typeOld);
-               }
-
-               currDataPos = w.position();
-               if (dataPos < dataEnd && !w.append(dataOld, dataPos, dataEnd - dataPos))
-                       throw panicCannotInsert();
-               dataOld = null;
-
-               if (w.append(raw, pos, len)) {
-                       currPackedSize += len;
-               } else {
-                       currFragments = new LinkedList<ChunkKey>();
-                       appendToFragment(raw, pos, len);
-               }
-       }
-
-       private boolean longOfsDelta(ChunkFormatter w, long infSize, long basePtr) {
-               final int type = typeOf(basePtr);
-               final List<ChunkKey> infoList = chunkByOrder[type];
-               final int baseIdx = chunkIdx(basePtr);
-               final ChunkInfo baseInfo = infoByKey.get(infoList.get(baseIdx));
-
-               // Go backwards to the start of the base's chunk.
-               long relativeChunkStart = 0;
-               for (int i = infoList.size() - 1; baseIdx <= i; i--) {
-                       GitStore.ChunkInfo info = infoByKey.get(infoList.get(i)).getData();
-                       int packSize = info.getChunkSize() - ChunkFormatter.TRAILER_SIZE;
-                       relativeChunkStart += packSize;
-               }
-
-               // The offset to the base goes back to the start of our chunk, then
-               // to the start of the base chunk, and then slides forward by the
-               // distance of the base within its own chunk.
-               //
-               long ofs = w.position() + relativeChunkStart - offsetOf(basePtr);
-               if (w.ofsDelta(infSize, ofs)) {
-                       w.useBaseChunk(relativeChunkStart, baseInfo.getChunkKey());
-                       return true;
-               }
-               return false;
-       }
-
-       private void appendToFragment(byte[] raw, int pos, int len)
-                       throws DhtException {
-               while (0 < len) {
-                       if (currChunk.free() == 0) {
-                               int typeCode = typeOf(currChunkPtr);
-                               currChunk.setFragment();
-                               currFragments.add(endChunk(typeCode));
-                               currChunk = openChunk(typeCode);
-                       }
-
-                       int n = Math.min(len, currChunk.free());
-                       currChunk.append(raw, pos, n);
-                       currPackedSize += n;
-                       pos += n;
-                       len -= n;
-               }
-       }
-
-       private void endFragmentedObject() throws DhtException {
-               currChunk.setFragment();
-               ChunkKey lastKey = endChunk(typeOf(currChunkPtr));
-               if (lastKey != null)
-                       currFragments.add(lastKey);
-
-               ChunkMeta.Builder protoBuilder = ChunkMeta.newBuilder();
-               for (ChunkKey key : currFragments)
-                       protoBuilder.addFragment(key.asString());
-               ChunkMeta protoMeta = protoBuilder.build();
-
-               for (ChunkKey key : currFragments) {
-                       ChunkMeta oldMeta = chunkMeta.get(key);
-                       if (oldMeta != null) {
-                               ChunkMeta.Builder newMeta = ChunkMeta.newBuilder(oldMeta);
-                               newMeta.clearFragment();
-                               newMeta.mergeFrom(protoMeta);
-                               ChunkMeta meta = newMeta.build();
-                               dirtyMeta.put(key, meta);
-                               chunkMeta.put(key, meta);
-                       } else {
-                               dirtyMeta.put(key, protoMeta);
-                               chunkMeta.put(key, protoMeta);
-                       }
-               }
-               currFragments = null;
-       }
-
-       @Override
-       protected void onInflatedObjectData(PackedObjectInfo obj, int typeCode,
-                       byte[] data) throws IOException {
-               DhtInfo info = (DhtInfo) obj;
-               info.inflatedSize = data.length;
-               info.setType(typeCode);
-
-               switch (typeCode) {
-               case OBJ_COMMIT:
-                       onCommit(info, data);
-                       break;
-
-               case OBJ_TREE:
-                       onTree(data);
-                       break;
-
-               case OBJ_TAG:
-                       onTag(data);
-                       break;
-               }
-       }
-
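-       // Parse the raw commit header directly: the tree name begins at
-       // offset 5 ("tree "), and each "parent <sha1>\n" line is 48 bytes
-       // starting at offset 46.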
-       private void onCommit(DhtInfo obj, byte[] raw) throws DhtException {
-               Edges edges = edges(obj.chunkPtr);
-               edges.remove(obj);
-
-               // TODO compute hints for trees.
-               if (isSaveAsCachedPack()) {
-                       idBuffer.fromString(raw, 5);
-                       lookupByName(idBuffer).setReferenced();
-               }
-
-               int ptr = 46;
-               while (raw[ptr] == 'p') {
-                       idBuffer.fromString(raw, ptr + 7);
-                       DhtInfo p = lookupByName(idBuffer);
-                       p.setReferenced();
-                       edges.commit(p);
-                       ptr += 48;
-               }
-       }
-
-       private void onTree(byte[] data) {
-               if (isSaveAsCachedPack()) {
-                       treeParser.reset(data);
-                       while (!treeParser.eof()) {
-                               idBuffer.fromRaw(treeParser.idBuffer(), treeParser.idOffset());
-                               lookupByName(idBuffer).setReferenced();
-                               treeParser.next();
-                       }
-               }
-       }
-
-       private void onTag(byte[] data) {
-               if (isSaveAsCachedPack()) {
-                       idBuffer.fromString(data, 7); // "object $sha1"
-                       lookupByName(idBuffer).setReferenced();
-               }
-       }
-
-       private DhtInfo lookupByName(AnyObjectId obj) {
-               DhtInfo info = objectMap.get(obj);
-               if (info == null) {
-                       info = new DhtInfo(obj);
-                       objectMap.add(info);
-               }
-               return info;
-       }
-
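-       // Edges for the chunk still being written are kept in openEdges by
-       // type; once a chunk is closed, its edges move to chunkEdges by key.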
-       private Edges edges(long chunkPtr) throws DhtException {
-               if (isInCurrentChunk(chunkPtr)) {
-                       int type = typeOf(chunkPtr);
-                       Edges s = openEdges[type];
-                       if (s == null) {
-                               s = new Edges();
-                               openEdges[type] = s;
-                       }
-                       return s;
-               } else {
-                       ChunkKey key = chunkOf(chunkPtr);
-                       Edges s = chunkEdges.get(key);
-                       if (s == null) {
-                               s = new Edges();
-                               chunkEdges.put(key, s);
-                       }
-                       return s;
-               }
-       }
-
-       private static class Edges {
-               Set<DhtInfo> commitIds;
-
-               List<ChunkKey> commitEdges;
-
-               void commit(DhtInfo id) {
-                       if (commitIds == null)
-                               commitIds = new HashSet<DhtInfo>();
-                       commitIds.add(id);
-               }
-
-               void remove(DhtInfo id) {
-                       if (commitIds != null)
-                               commitIds.remove(id);
-               }
-       }
-
-       @Override
-       protected ObjectTypeAndSize seekDatabase(PackedObjectInfo obj,
-                       ObjectTypeAndSize info) throws IOException {
-               return seekDatabase(((DhtInfo) obj).chunkPtr, info);
-       }
-
-       @Override
-       protected ObjectTypeAndSize seekDatabase(UnresolvedDelta delta,
-                       ObjectTypeAndSize info) throws IOException {
-               return seekDatabase(((DhtDelta) delta).chunkPtr, info);
-       }
-
-       private ObjectTypeAndSize seekDatabase(long chunkPtr, ObjectTypeAndSize info)
-                       throws DhtException {
-               seekChunk(chunkOf(chunkPtr), true);
-               dbPtr = dbChunk.readObjectTypeAndSize(offsetOf(chunkPtr), info);
-               return info;
-       }
-
-       @Override
-       protected int readDatabase(byte[] dst, int pos, int cnt) throws IOException {
-               int n = dbChunk.read(dbPtr, dst, pos, cnt);
-               if (0 < n) {
-                       dbPtr += n;
-                       return n;
-               }
-
-               // ChunkMeta for fragments is written late, so it isn't available
-               // on the chunk if the chunk was read back from the database. Use
-               // our copy of the ChunkMeta instead of the PackChunk's copy.
-
-               ChunkMeta meta = chunkMeta.get(dbChunk.getChunkKey());
-               if (meta == null)
-                       return 0;
-
-               ChunkKey next = ChunkMetaUtil.getNextFragment(meta, dbChunk.getChunkKey());
-               if (next == null)
-                       return 0;
-
-               seekChunk(next, false);
-               n = dbChunk.read(0, dst, pos, cnt);
-               dbPtr = n;
-               return n;
-       }
-
-       private void seekChunk(ChunkKey key, boolean cache) throws DhtException,
-                       DhtTimeoutException {
-               if (dbChunk == null || !dbChunk.getChunkKey().equals(key)) {
-                       dbChunk = chunkReadBackCache.get(key);
-                       if (dbChunk == null) {
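-                               // Flush pending writes so a chunk that was only queued in
-                               // the write buffer can be read back from the database.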
-                               dbWriteBuffer.flush();
-
-                               Collection<PackChunk.Members> found;
-                               Context opt = Context.READ_REPAIR;
-                               Sync<Collection<PackChunk.Members>> sync = Sync.create();
-                               db.chunk().get(opt, Collections.singleton(key), sync);
-                               try {
-                                       found = sync.get(objdb.getReaderOptions().getTimeout());
-                               } catch (InterruptedException e) {
-                                       throw new DhtTimeoutException(e);
-                               } catch (TimeoutException e) {
-                                       throw new DhtTimeoutException(e);
-                               }
-
-                               if (found.isEmpty()) {
-                                       throw new DhtException(MessageFormat.format(
-                                                       DhtText.get().missingChunk, key));
-                               }
-
-                               dbChunk = found.iterator().next().build();
-                               if (cache)
-                                       chunkReadBackCache.put(key, dbChunk);
-                       }
-               }
-       }
-
-       @Override
-       protected boolean onAppendBase(int typeCode, byte[] data,
-                       PackedObjectInfo info) throws IOException {
-               return false; // This implementation does not copy base objects.
-       }
-
-       @Override
-       protected void onEndThinPack() throws IOException {
-               // Do nothing, this event is not relevant.
-       }
-
-       @Override
-       protected void onPackFooter(byte[] hash) throws IOException {
-               // TODO Combine fractional chunks together to reduce overhead.
-               // Fractional chunks are common for single-commit pushes since
-               // they are broken out by object type.
-
-               // TODO Try to combine the chunk data and its index into a single
-               // put call for the last chunk of each type. This would break the
-               // read back we do in seekDatabase during delta resolution.
-
-               // If there are deltas to be resolved, the pending chunks
-               // will need to be reloaded later. Ensure they are stored.
-               //
-               endChunk(OBJ_COMMIT);
-               endChunk(OBJ_TREE);
-               endChunk(OBJ_BLOB);
-               endChunk(OBJ_TAG);
-
-               // These are only necessary during initial parsing. Drop them now.
-               //
-               objStreamPos = null;
-               objChunkPtrs = null;
-       }
-
-       @Override
-       protected void onObjectHeader(Source src, byte[] raw, int pos, int len)
-                       throws IOException {
-               // Do nothing, the original stream headers are not used.
-       }
-
-       @Override
-       protected void onStoreStream(byte[] raw, int pos, int len)
-                       throws IOException {
-               // Do nothing, the stream is being sliced and cannot be stored as-is.
-       }
-
-       @Override
-       protected boolean checkCRC(int oldCRC) {
-               return true; // Don't bother to check CRCs, assume the chunk is OK.
-       }
-
-       private ChunkFormatter begin(int typeCode) throws DhtException {
-               ChunkFormatter w = openChunk(typeCode);
-               currChunk = w;
-               currChunkPtr = makeObjectPointer(w, typeCode);
-               return w;
-       }
-
-       private ChunkFormatter openChunk(int typeCode) throws DhtException {
-               if (typeCode == 0)
-                       throw new DhtException("Invalid internal typeCode 0");
-
-               ChunkFormatter w = openChunks[typeCode];
-               if (w == null) {
-                       w = new ChunkFormatter(repo, options);
-                       w.setSource(GitStore.ChunkInfo.Source.RECEIVE);
-                       w.setObjectType(typeCode);
-                       openChunks[typeCode] = w;
-               }
-               return w;
-       }
-
-       private ChunkKey endChunk(int typeCode) throws DhtException {
-               ChunkFormatter w = openChunks[typeCode];
-               if (w == null)
-                       return null;
-
-               openChunks[typeCode] = null;
-               currChunk = null;
-
-               if (w.isEmpty())
-                       return null;
-
-               ChunkKey key = w.end(chunkKeyDigest);
-               ChunkInfo info = w.getChunkInfo();
-
-               if (chunkByOrder[typeCode] == null)
-                       chunkByOrder[typeCode] = new ArrayList<ChunkKey>();
-               chunkByOrder[typeCode].add(key);
-               infoByKey.put(key, info);
-
-               if (w.getChunkMeta() != null)
-                       chunkMeta.put(key, w.getChunkMeta());
-
-               Edges e = openEdges[typeCode];
-               if (e != null) {
-                       chunkEdges.put(key, e);
-                       openEdges[typeCode] = null;
-               }
-
-               if (currFragments == null)
-                       chunkReadBackCache.put(key, w.getPackChunk());
-
-               w.unsafePut(db, dbWriteBuffer);
-               return key;
-       }
-
-       private int findStreamIndex(long streamPosition) throws DhtException {
-               int high = objStreamPos.size();
-               int low = 0;
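-               // Binary search for the exact stream position; every delta base
-               // must have been recorded earlier via objStreamPos.add().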
-               do {
-                       final int mid = (low + high) >>> 1;
-                       final long pos = objStreamPos.get(mid);
-                       if (streamPosition < pos)
-                               high = mid;
-                       else if (streamPosition == pos)
-                               return mid;
-                       else
-                               low = mid + 1;
-               } while (low < high);
-               throw new DhtException(MessageFormat.format(
-                               DhtText.get().noSavedTypeForBase, Long.valueOf(streamPosition)));
-       }
-
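-       // An object pointer packs three fields into one long: the object
-       // type in the top 3 bits, the chunk index within that type's chunk
-       // list in the next 29 bits, and the offset within the chunk in the
-       // low 32 bits.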
-       private long makeObjectPointer(ChunkFormatter w, int typeCode) {
-               List<ChunkKey> list = chunkByOrder[typeCode];
-               int idx = list == null ? 0 : list.size();
-               int ptr = w.position();
-               return (((long) typeCode) << 61) | (((long) idx) << 32) | ptr;
-       }
-
-       private static int typeOf(long objectPtr) {
-               return (int) (objectPtr >>> 61);
-       }
-
-       private static int chunkIdx(long objectPtr) {
-               return ((int) ((objectPtr << 3) >>> (32 + 3)));
-       }
-
-       private static int offsetOf(long objectPtr) {
-               return (int) objectPtr;
-       }
-
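-       // The chunk currently being written has not been added to
-       // chunkByOrder yet, so its index equals the size of the completed
-       // list (or 0 if no chunk of this type has been completed).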
-       private boolean isInCurrentChunk(long objectPtr) {
-               List<ChunkKey> list = chunkByOrder[typeOf(objectPtr)];
-               if (list == null)
-                       return chunkIdx(objectPtr) == 0;
-               return chunkIdx(objectPtr) == list.size();
-       }
-
-       private ChunkKey chunkOf(long objectPtr) throws DhtException {
-               List<ChunkKey> list = chunkByOrder[typeOf(objectPtr)];
-               int idx = chunkIdx(objectPtr);
-               if (list == null || list.size() <= idx) {
-                       throw new DhtException(MessageFormat.format(
-                                       DhtText.get().packParserInvalidPointer, //
-                                       Constants.typeString(typeOf(objectPtr)), //
-                                       Integer.valueOf(idx), //
-                                       Integer.valueOf(offsetOf(objectPtr))));
-               }
-               return list.get(idx);
-       }
-
-       private static DhtException panicCannotInsert() {
-               // This exception should never be thrown.
-               return new DhtException(DhtText.get().cannotInsertObject);
-       }
-
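-       // DhtInfo reuses the inherited CRC32 slot as a small bit field: the
-       // object type in bits 0-2, plus the REFERENCED and FRAGMENTED flags.
-       // This is safe because checkCRC() above never validates CRCs.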
-       static class DhtInfo extends PackedObjectInfo {
-               private static final int REFERENCED = 1 << 3;
-
-               static final int FRAGMENTED = 1 << 4;
-
-               long chunkPtr;
-
-               long packedSize;
-
-               long inflatedSize;
-
-               ObjectId base;
-
-               DhtInfo(AnyObjectId id) {
-                       super(id);
-               }
-
-               boolean isInPack() {
-                       return chunkPtr != 0;
-               }
-
-               boolean isReferenced() {
-                       return (getCRC() & REFERENCED) != 0;
-               }
-
-               void setReferenced() {
-                       setCRC(getCRC() | REFERENCED);
-               }
-
-               boolean isFragmented() {
-                       return (getCRC() & FRAGMENTED) != 0;
-               }
-
-               void setFragmented() {
-                       setCRC(getCRC() | FRAGMENTED);
-               }
-
-               int getType() {
-                       return getCRC() & 7;
-               }
-
-               void setType(int type) {
-                       setCRC((getCRC() & ~7) | type);
-               }
-
-               ObjectInfo info(ChunkKey chunkKey) {
-                       GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
-                       b.setObjectType(GitStore.ObjectInfo.ObjectType.valueOf(getType()));
-                       b.setOffset(offsetOf(chunkPtr));
-                       b.setPackedSize(packedSize);
-                       b.setInflatedSize(inflatedSize);
-                       if (base != null) {
-                               byte[] t = new byte[Constants.OBJECT_ID_LENGTH];
-                               base.copyRawTo(t, 0);
-                               b.setDeltaBase(ByteString.copyFrom(t));
-                       }
-                       if (isFragmented())
-                               b.setIsFragmented(true);
-                       return new ObjectInfo(chunkKey, b.build());
-               }
-       }
-
-       static class DhtDelta extends UnresolvedDelta {
-               long chunkPtr;
-
-               long packedSize;
-
-               long inflatedSize;
-
-               int getType() {
-                       return getCRC() & 7;
-               }
-
-               void setType(int type) {
-                       setCRC((getCRC() & ~7) | type);
-               }
-
-               boolean isFragmented() {
-                       return (getCRC() & DhtInfo.FRAGMENTED) != 0;
-               }
-
-               void setFragmented() {
-                       setCRC(getCRC() | DhtInfo.FRAGMENTED);
-               }
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReader.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReader.java
deleted file mode 100644 (file)
index 330b5c0..0000000
+++ /dev/null
@@ -1,710 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.lib.Constants.OBJ_COMMIT;
-import static org.eclipse.jgit.lib.Constants.OBJ_TREE;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeoutException;
-import java.util.zip.Inflater;
-
-import org.eclipse.jgit.errors.IncorrectObjectTypeException;
-import org.eclipse.jgit.errors.MissingObjectException;
-import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
-import org.eclipse.jgit.lib.AbbreviatedObjectId;
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.AsyncObjectLoaderQueue;
-import org.eclipse.jgit.lib.AsyncObjectSizeQueue;
-import org.eclipse.jgit.lib.InflaterCache;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectLoader;
-import org.eclipse.jgit.lib.ObjectReader;
-import org.eclipse.jgit.lib.ProgressMonitor;
-import org.eclipse.jgit.revwalk.ObjectWalk;
-import org.eclipse.jgit.revwalk.RevCommit;
-import org.eclipse.jgit.revwalk.RevObject;
-import org.eclipse.jgit.revwalk.RevWalk;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.Database;
-import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
-import org.eclipse.jgit.storage.pack.CachedPack;
-import org.eclipse.jgit.storage.pack.ObjectReuseAsIs;
-import org.eclipse.jgit.storage.pack.ObjectToPack;
-import org.eclipse.jgit.storage.pack.PackOutputStream;
-import org.eclipse.jgit.storage.pack.PackWriter;
-
-/**
- * ObjectReader implementation for DHT-based repositories.
- * <p>
- * This class is public only to expose its unique statistics for runtime
- * performance reporting. Applications should always prefer to use the more
- * generic base class, {@link ObjectReader}.
- */
-public class DhtReader extends ObjectReader implements ObjectReuseAsIs {
-       private final DhtRepository repository;
-
-       private final RepositoryKey repo;
-
-       private final Database db;
-
-       private final DhtReaderOptions readerOptions;
-
-       private final DhtInserterOptions inserterOptions;
-
-       private final Statistics stats;
-
-       private final RecentInfoCache recentInfo;
-
-       private final RecentChunks recentChunks;
-
-       private final DeltaBaseCache deltaBaseCache;
-
-       private Collection<CachedPack> cachedPacks;
-
-       private Inflater inflater;
-
-       private Prefetcher prefetcher;
-
-       DhtReader(DhtObjDatabase objdb) {
-               this.repository = objdb.getRepository();
-               this.repo = objdb.getRepository().getRepositoryKey();
-               this.db = objdb.getDatabase();
-               this.readerOptions = objdb.getReaderOptions();
-               this.inserterOptions = objdb.getInserterOptions();
-
-               this.stats = new Statistics();
-               this.recentInfo = new RecentInfoCache(getOptions());
-               this.recentChunks = new RecentChunks(this);
-               this.deltaBaseCache = new DeltaBaseCache(this);
-       }
-
-       /** @return describes how this DhtReader has performed. */
-       public Statistics getStatistics() {
-               return stats;
-       }
-
-       Database getDatabase() {
-               return db;
-       }
-
-       RepositoryKey getRepositoryKey() {
-               return repo;
-       }
-
-       DhtReaderOptions getOptions() {
-               return readerOptions;
-       }
-
-       DhtInserterOptions getInserterOptions() {
-               return inserterOptions;
-       }
-
-       RecentInfoCache getRecentInfoCache() {
-               return recentInfo;
-       }
-
-       RecentChunks getRecentChunks() {
-               return recentChunks;
-       }
-
-       DeltaBaseCache getDeltaBaseCache() {
-               return deltaBaseCache;
-       }
-
-       Inflater inflater() {
-               if (inflater == null)
-                       inflater = InflaterCache.get();
-               else
-                       inflater.reset();
-               return inflater;
-       }
-
-       @Override
-       public void release() {
-               recentChunks.clear();
-               endPrefetch();
-
-               InflaterCache.release(inflater);
-               inflater = null;
-
-               super.release();
-       }
-
-       @Override
-       public ObjectReader newReader() {
-               return new DhtReader(repository.getObjectDatabase());
-       }
-
-       @Override
-       public boolean has(AnyObjectId objId, int typeHint) throws IOException {
-               if (objId instanceof RefDataUtil.IdWithChunk)
-                       return true;
-
-               if (recentChunks.has(repo, objId))
-                       return true;
-
-               if (repository.getRefDatabase().findChunk(objId) != null)
-                       return true;
-
-               return !find(objId).isEmpty();
-       }
-
-       @Override
-       public ObjectLoader open(AnyObjectId objId, int typeHint)
-                       throws MissingObjectException, IncorrectObjectTypeException,
-                       IOException {
-               ObjectLoader ldr = recentChunks.open(repo, objId, typeHint);
-               if (ldr != null)
-                       return ldr;
-
-               ChunkAndOffset p = getChunk(objId, typeHint, false);
-               ldr = PackChunk.read(p.chunk, p.offset, this, typeHint);
-               recentChunk(p.chunk);
-               return ldr;
-       }
-
-       @Override
-       public <T extends ObjectId> AsyncObjectLoaderQueue<T> open(
-                       Iterable<T> objectIds, boolean reportMissing) {
-               return new OpenQueue<T>(this, objectIds, reportMissing);
-       }
-
-       @Override
-       public long getObjectSize(AnyObjectId objectId, int typeHint)
-                       throws MissingObjectException, IncorrectObjectTypeException,
-                       IOException {
-               for (ObjectInfo info : find(objectId))
-                       return info.getSize();
-               throw missing(objectId, typeHint);
-       }
-
-       @Override
-       public <T extends ObjectId> AsyncObjectSizeQueue<T> getObjectSize(
-                       Iterable<T> objectIds, boolean reportMissing) {
-               return new SizeQueue<T>(this, objectIds, reportMissing);
-       }
-
-       @Override
-       public void walkAdviceBeginCommits(RevWalk rw, Collection<RevCommit> roots)
-                       throws IOException {
-               endPrefetch();
-
-               // Don't assign the prefetcher right away. Delay until it's
-               // configured, as push might invoke our own methods that may
-               // try to call back into the active prefetcher.
-               //
-               Prefetcher p = prefetch(OBJ_COMMIT, readerOptions.getWalkCommitsPrefetchRatio());
-               p.push(this, roots);
-               prefetcher = p;
-       }
-
-       @Override
-       public void walkAdviceBeginTrees(ObjectWalk ow, RevCommit min, RevCommit max)
-                       throws IOException {
-               endPrefetch();
-
-               // Don't assign the prefetcher right away. Delay until it's
-               // configured, as push might invoke our own methods that may
-               // try to call back into the active prefetcher.
-               //
-               Prefetcher p = prefetch(OBJ_TREE, readerOptions.getWalkTreesPrefetchRatio());
-               p.push(this, min.getTree(), max.getTree());
-               prefetcher = p;
-       }
-
-       @Override
-       public void walkAdviceEnd() {
-               endPrefetch();
-       }
-
-       void recentChunk(PackChunk chunk) {
-               recentChunks.put(chunk);
-       }
-
-       ChunkAndOffset getChunkGently(AnyObjectId objId) {
-               return recentChunks.find(repo, objId);
-       }
-
-       ChunkAndOffset getChunk(AnyObjectId objId, int typeHint, boolean checkRecent)
-                       throws DhtException, MissingObjectException {
-               if (checkRecent) {
-                       ChunkAndOffset r = recentChunks.find(repo, objId);
-                       if (r != null)
-                               return r;
-               }
-
-               ChunkKey key;
-               if (objId instanceof RefDataUtil.IdWithChunk)
-                       key = ((RefDataUtil.IdWithChunk) objId).getChunkKey();
-               else
-                       key = repository.getRefDatabase().findChunk(objId);
-
-               if (key != null) {
-                       PackChunk chunk = load(key);
-                       if (chunk != null && chunk.hasIndex()) {
-                               int pos = chunk.findOffset(repo, objId);
-                               if (0 <= pos)
-                                       return new ChunkAndOffset(chunk, pos);
-                       }
-
-                       // The hint above is stale. Fall through and do a
-                       // more exhaustive lookup to find the object.
-               }
-
-               if (prefetcher != null) {
-                       ChunkAndOffset r = prefetcher.find(repo, objId);
-                       if (r != null)
-                               return r;
-               }
-
-               for (ObjectInfo link : find(objId)) {
-                       PackChunk chunk;
-
-                       if (prefetcher != null) {
-                               chunk = prefetcher.get(link.getChunkKey());
-                               if (chunk == null) {
-                                       chunk = load(link.getChunkKey());
-                                       if (chunk == null)
-                                               continue;
-                                       if (prefetcher.isType(typeHint))
-                                               prefetcher.push(chunk.getMeta());
-                               }
-                       } else {
-                               chunk = load(link.getChunkKey());
-                               if (chunk == null)
-                                       continue;
-                       }
-
-                       return new ChunkAndOffset(chunk, link.getOffset());
-               }
-
-               throw missing(objId, typeHint);
-       }
-
-       ChunkKey findChunk(AnyObjectId objId) throws DhtException {
-               if (objId instanceof RefDataUtil.IdWithChunk)
-                       return ((RefDataUtil.IdWithChunk) objId).getChunkKey();
-
-               ChunkKey key = repository.getRefDatabase().findChunk(objId);
-               if (key != null)
-                       return key;
-
-               ChunkAndOffset r = recentChunks.find(repo, objId);
-               if (r != null)
-                       return r.chunk.getChunkKey();
-
-               for (ObjectInfo link : find(objId))
-                       return link.getChunkKey();
-
-               return null;
-       }
-
-       static MissingObjectException missing(AnyObjectId objId, int typeHint) {
-               ObjectId id = objId.copy();
-               if (typeHint != OBJ_ANY)
-                       return new MissingObjectException(id, typeHint);
-               return new MissingObjectException(id, DhtText.get().objectTypeUnknown);
-       }
-
-       PackChunk getChunk(ChunkKey key) throws DhtException {
-               PackChunk chunk = recentChunks.get(key);
-               if (chunk != null)
-                       return chunk;
-
-               chunk = load(key);
-               if (chunk != null)
-                       return chunk;
-
-               throw new DhtMissingChunkException(key);
-       }
-
-       @Override
-       public Collection<ObjectId> resolve(AbbreviatedObjectId id)
-                       throws IOException {
-               // Because ObjectIndexKey requires at least 4 leading digits,
-               // don't resolve anything that is shorter than 4 digits.
-               //
-               if (id.length() < 4)
-                       return Collections.emptySet();
-
-               throw new DhtException.TODO("resolve abbreviations");
-       }
-
-       public DhtObjectToPack newObjectToPack(RevObject obj) {
-               return new DhtObjectToPack(obj);
-       }
-
-       @SuppressWarnings("unchecked")
-       public void selectObjectRepresentation(PackWriter packer,
-                       ProgressMonitor monitor, Iterable<ObjectToPack> objects)
-                       throws IOException, MissingObjectException {
-               Iterable itr = objects;
-               new RepresentationSelector(packer, this, monitor).select(itr);
-       }
-
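-       // The ratio is a percentage of the chunk cache limit reserved for
-       // prefetching; whatever remains stays available for recent chunks.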
-       private Prefetcher prefetch(final int type, final int ratio) {
-               int limit = readerOptions.getChunkLimit();
-               int prefetchLimit = (int) (limit * (ratio / 100.0));
-               recentChunks.setMaxBytes(limit - prefetchLimit);
-               return new Prefetcher(this, type, prefetchLimit);
-       }
-
-       private void endPrefetch() {
-               recentChunks.setMaxBytes(getOptions().getChunkLimit());
-               prefetcher = null;
-       }
-
-       @SuppressWarnings("unchecked")
-       public void writeObjects(PackOutputStream out, List<ObjectToPack> objects)
-                       throws IOException {
-               prefetcher = prefetch(0, readerOptions.getWriteObjectsPrefetchRatio());
-               try {
-                       List itr = objects;
-                       new ObjectWriter(this, prefetcher).plan(itr);
-                       for (ObjectToPack otp : objects)
-                               out.writeObject(otp);
-               } finally {
-                       endPrefetch();
-               }
-       }
-
-       public void copyObjectAsIs(PackOutputStream out, ObjectToPack otp,
-                       boolean validate) throws IOException,
-                       StoredObjectRepresentationNotAvailableException {
-               DhtObjectToPack obj = (DhtObjectToPack) otp;
-               try {
-                       PackChunk chunk = recentChunks.get(obj.chunk);
-                       if (chunk == null) {
-                               chunk = prefetcher.get(obj.chunk);
-                               if (chunk == null) {
-                                       // This should never happen during packing; it implies
-                                       // the fetch plan was incorrect. Unfortunately that can
-                                       // occur if objects need to be recompressed on the fly.
-                                       //
-                                       stats.access(obj.chunk).cntCopyObjectAsIs_PrefetchMiss++;
-                                       chunk = getChunk(obj.chunk);
-                               }
-                               if (!chunk.isFragment())
-                                       recentChunk(chunk);
-                       }
-                       chunk.copyObjectAsIs(out, obj, validate, this);
-               } catch (DhtMissingChunkException missingChunk) {
-                       stats.access(missingChunk.getChunkKey()).cntCopyObjectAsIs_InvalidChunk++;
-                       throw new StoredObjectRepresentationNotAvailableException(otp);
-               }
-       }
-
-       public Collection<CachedPack> getCachedPacks() throws IOException {
-               if (cachedPacks == null) {
-                       Collection<CachedPackInfo> info;
-                       Collection<CachedPack> packs;
-
-                       try {
-                               info = db.repository().getCachedPacks(repo);
-                       } catch (TimeoutException e) {
-                               throw new DhtTimeoutException(e);
-                       }
-
-                       packs = new ArrayList<CachedPack>(info.size());
-                       for (CachedPackInfo i : info)
-                               packs.add(new DhtCachedPack(i));
-                       cachedPacks = packs;
-               }
-               return cachedPacks;
-       }
-
-       public void copyPackAsIs(PackOutputStream out, CachedPack pack,
-                       boolean validate) throws IOException {
-               ((DhtCachedPack) pack).copyAsIs(out, validate, this);
-       }
-
-       private List<ObjectInfo> find(AnyObjectId obj) throws DhtException {
-               List<ObjectInfo> info = recentInfo.get(obj);
-               if (info != null)
-                       return info;
-
-               stats.cntObjectIndex_Load++;
-               ObjectIndexKey idxKey = ObjectIndexKey.create(repo, obj);
-               Context opt = Context.READ_REPAIR;
-               Sync<Map<ObjectIndexKey, Collection<ObjectInfo>>> sync = Sync.create();
-               db.objectIndex().get(opt, Collections.singleton(idxKey), sync);
-               try {
-                       Collection<ObjectInfo> m;
-
-                       m = sync.get(getOptions().getTimeout()).get(idxKey);
-                       if (m == null || m.isEmpty())
-                               return Collections.emptyList();
-
-                       info = new ArrayList<ObjectInfo>(m);
-                       ObjectInfo.sort(info);
-                       recentInfo.put(obj, info);
-                       return info;
-               } catch (InterruptedException e) {
-                       throw new DhtTimeoutException(e);
-               } catch (TimeoutException e) {
-                       throw new DhtTimeoutException(e);
-               }
-       }
-
-       private PackChunk load(ChunkKey chunkKey) throws DhtException {
-               if (0 == stats.access(chunkKey).cntReader_Load++
-                               && readerOptions.isTrackFirstChunkLoad())
-                       stats.access(chunkKey).locReader_Load = new Throwable("first");
-               Context opt = Context.READ_REPAIR;
-               Sync<Collection<PackChunk.Members>> sync = Sync.create();
-               db.chunk().get(opt, Collections.singleton(chunkKey), sync);
-               try {
-                       Collection<PackChunk.Members> c = sync.get(getOptions()
-                                       .getTimeout());
-                       if (c.isEmpty())
-                               return null;
-                       if (c instanceof List)
-                               return ((List<PackChunk.Members>) c).get(0).build();
-                       return c.iterator().next().build();
-               } catch (InterruptedException e) {
-                       throw new DhtTimeoutException(e);
-               } catch (TimeoutException e) {
-                       throw new DhtTimeoutException(e);
-               }
-       }
-
-       static class ChunkAndOffset {
-               final PackChunk chunk;
-
-               final int offset;
-
-               ChunkAndOffset(PackChunk chunk, int offset) {
-                       this.chunk = chunk;
-                       this.offset = offset;
-               }
-       }
-
-       /** How this DhtReader has performed since creation. */
-       public static class Statistics {
-               private final Map<ChunkKey, ChunkAccess> chunkAccess = new LinkedHashMap<ChunkKey, ChunkAccess>();
-
-               ChunkAccess access(ChunkKey chunkKey) {
-                       ChunkAccess ca = chunkAccess.get(chunkKey);
-                       if (ca == null) {
-                               ca = new ChunkAccess(chunkKey);
-                               chunkAccess.put(chunkKey, ca);
-                       }
-                       return ca;
-               }
-
-               /**
-                * Number of sequential {@link ObjectIndexTable} lookups made by the
-                * reader. These were made without the support of batch lookups.
-                */
-               public int cntObjectIndex_Load;
-
-               /** Cycles detected in delta chains during OBJ_REF_DELTA reads. */
-               public int deltaChainCycles;
-
-               int recentChunks_Hits;
-
-               int recentChunks_Miss;
-
-               int deltaBaseCache_Hits;
-
-               int deltaBaseCache_Miss;
-
-               /** @return ratio of recent chunk hits, [0.00,1.00]. */
-               public double getRecentChunksHitRatio() {
-                       int total = recentChunks_Hits + recentChunks_Miss;
-                       return ((double) recentChunks_Hits) / total;
-               }
-
-               /** @return ratio of delta base cache hits, [0.00,1.00]. */
-               public double getDeltaBaseCacheHitRatio() {
-                       int total = deltaBaseCache_Hits + deltaBaseCache_Miss;
-                       return ((double) deltaBaseCache_Hits) / total;
-               }
-
-               /**
-                * @return collection of chunk accesses made by the application code
-                *         against this reader. The collection's iterator has no
-                *         relevant order.
-                */
-               public Collection<ChunkAccess> getChunkAccess() {
-                       return chunkAccess.values();
-               }
-
-               @Override
-               public String toString() {
-                       StringBuilder b = new StringBuilder();
-                       b.append("DhtReader.Statistics:\n");
-                       b.append(" ");
-                       if (recentChunks_Hits != 0 || recentChunks_Miss != 0)
-                               ratio(b, "recentChunks", getRecentChunksHitRatio());
-                       if (deltaBaseCache_Hits != 0 || deltaBaseCache_Miss != 0)
-                               ratio(b, "deltaBaseCache", getDeltaBaseCacheHitRatio());
-                       appendFields(this, b);
-                       b.append("\n");
-                       for (ChunkAccess ca : getChunkAccess()) {
-                               b.append("  ");
-                               b.append(ca.toString());
-                               b.append("\n");
-                       }
-                       return b.toString();
-               }
-
-               @SuppressWarnings("boxing")
-               static void ratio(StringBuilder b, String name, double value) {
-                       b.append(String.format(" %s=%.2f%%", name, value * 100.0));
-               }
-
-               static void appendFields(Object obj, StringBuilder b) {
-                       try {
-                               for (Field field : obj.getClass().getDeclaredFields()) {
-                                       String n = field.getName();
-
-                                       if (field.getType() == Integer.TYPE
-                                                       && (field.getModifiers() & Modifier.PUBLIC) != 0) {
-                                               int v = field.getInt(obj);
-                                               if (0 < v)
-                                                       b.append(' ').append(n).append('=').append(v);
-                                       }
-                               }
-                       } catch (IllegalArgumentException e) {
-                               throw new RuntimeException(e);
-                       } catch (IllegalAccessException e) {
-                               throw new RuntimeException(e);
-                       }
-               }
-
-               /** Summary describing how a chunk was accessed. */
-               public static final class ChunkAccess {
-                       /** Chunk this access block describes. */
-                       public final ChunkKey chunkKey;
-
-                       /**
-                        * Number of times chunk was loaded sequentially. Incremented when
-                        * the reader had to load the chunk on demand with no cache or
-                        * prefetcher support.
-                        */
-                       public int cntReader_Load;
-
-                       Throwable locReader_Load;
-
-                       /**
-                        * Number of times the prefetcher loaded from the database.
-                        * Incremented each time the prefetcher asked for the chunk from the
-                        * underlying database (which might have its own distributed cache,
-                        * or not).
-                        */
-                       public int cntPrefetcher_Load;
-
-                       /**
-                        * Number of times the prefetcher ordering was wrong. Incremented if
-                        * a reader wants a chunk but the prefetcher didn't have it ready at
-                        * the time of request. This indicates a bad prefetching plan as the
-                        * chunk should have been listed earlier in the prefetcher's list.
-                        */
-                       public int cntPrefetcher_OutOfOrder;
-
-                       /**
-                        * Number of times the reader had to stall to wait for a chunk that
-                        * is currently being prefetched to finish loading and become ready.
-                        * This indicates the prefetcher may have fetched other chunks first
-                        * (had the wrong order), or does not have a deep enough window to
-                        * hide these loads from the application.
-                        */
-                       public int cntPrefetcher_WaitedForLoad;
-
-                       /**
-                        * Number of times the reader asked the prefetcher for the same
-                        * chunk after it was already consumed from the prefetcher. This
-                        * indicates the reader has walked back on itself and revisited a
-                        * chunk again.
-                        */
-                       public int cntPrefetcher_Revisited;
-
-                       /**
-                        * Number of times the reader needed this chunk to copy an object
-                        * as-is into a pack stream, but the prefetcher didn't have it
-                        * ready. This correlates with {@link #cntPrefetcher_OutOfOrder} or
-                        * {@link #cntPrefetcher_Revisited}.
-                        */
-                       public int cntCopyObjectAsIs_PrefetchMiss;
-
-                       /**
-                        * Number of times the reader tried to copy an object from this
-                        * chunk, but discovered the chunk was corrupt or did not contain
-                        * the object as expected.
-                        */
-                       public int cntCopyObjectAsIs_InvalidChunk;
-
-                       ChunkAccess(ChunkKey key) {
-                               chunkKey = key;
-                       }
-
-                       @Override
-                       public String toString() {
-                               StringBuilder b = new StringBuilder();
-                               b.append(chunkKey).append('[');
-                               appendFields(this, b);
-                               b.append(" ]");
-                               if (locReader_Load != null) {
-                                       StringWriter sw = new StringWriter();
-                                       locReader_Load.printStackTrace(new PrintWriter(sw));
-                                       b.append(sw);
-                               }
-                               return b.toString();
-                       }
-               }
-       }
-}
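
For context on the DhtReader just removed: its prefetch() helper divides the reader's chunk byte budget between the recent-chunks cache and a Prefetcher according to a percentage ratio taken from DhtReaderOptions. The standalone Java sketch below only illustrates that arithmetic under assumed values; the class name, the 5 MiB limit and the 90 percent ratio are hypothetical defaults, not part of the deleted sources.

// Sketch of the budget split computed in DhtReader.prefetch(); illustrative only.
public class PrefetchBudgetSketch {
	public static void main(String[] args) {
		int chunkLimit = 5 * 1024 * 1024; // assumed: the 5 MiB default chunk limit
		int ratio = 90;                   // assumed: writeObjectsPrefetchRatio, in percent
		int prefetchLimit = (int) (chunkLimit * (ratio / 100.0)); // bytes for the prefetcher
		int recentLimit = chunkLimit - prefetchLimit;             // bytes left for recentChunks
		// Prints 4718592 (4.5 MiB) and 524288 (0.5 MiB) for these inputs.
		System.out.println(prefetchLimit + " " + recentLimit);
	}
}
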
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReaderOptions.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReaderOptions.java
deleted file mode 100644 (file)
index db3f510..0000000
+++ /dev/null
@@ -1,353 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import org.eclipse.jgit.lib.Config;
-
-/** Options controlling how objects are read from a DHT stored repository. */
-public class DhtReaderOptions {
-       /** 1024 (number of bytes in one kibibyte/kilobyte) */
-       public static final int KiB = 1024;
-
-       /** 1024 {@link #KiB} (number of bytes in one mebibyte/megabyte) */
-       public static final int MiB = 1024 * KiB;
-
-       private Timeout timeout;
-
-       private boolean prefetchFollowEdgeHints;
-
-       private int chunkLimit;
-
-       private int openQueuePrefetchRatio;
-
-       private int walkCommitsPrefetchRatio;
-
-       private int walkTreesPrefetchRatio;
-
-       private int writeObjectsPrefetchRatio;
-
-       private int objectIndexConcurrentBatches;
-
-       private int objectIndexBatchSize;
-
-       private int deltaBaseCacheSize;
-
-       private int deltaBaseCacheLimit;
-
-       private int recentInfoCacheSize;
-
-       private boolean trackFirstChunkLoad;
-
-       /** Create a default reader configuration. */
-       public DhtReaderOptions() {
-               setTimeout(Timeout.seconds(5));
-               setPrefetchFollowEdgeHints(true);
-
-               setChunkLimit(5 * MiB);
-               setOpenQueuePrefetchRatio(20 /* percent */);
-               setWalkCommitsPrefetchRatio(20 /* percent */);
-               setWalkTreesPrefetchRatio(20 /* percent */);
-               setWriteObjectsPrefetchRatio(90 /* percent */);
-
-               setObjectIndexConcurrentBatches(2);
-               setObjectIndexBatchSize(512);
-
-               setDeltaBaseCacheSize(1024);
-               setDeltaBaseCacheLimit(10 * MiB);
-
-               setRecentInfoCacheSize(4096);
-       }
-
-       /** @return default timeout to wait on long operations before aborting. */
-       public Timeout getTimeout() {
-               return timeout;
-       }
-
-       /**
-        * Set the default timeout to wait on long operations.
-        *
-        * @param maxWaitTime
-        *            new wait time.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setTimeout(Timeout maxWaitTime) {
-               if (maxWaitTime == null || maxWaitTime.getTime() < 0)
-                       throw new IllegalArgumentException();
-               timeout = maxWaitTime;
-               return this;
-       }
-
-       /** @return if the prefetcher should follow edge hints (experimental) */
-       public boolean isPrefetchFollowEdgeHints() {
-               return prefetchFollowEdgeHints;
-       }
-
-       /**
-        * Enable (or disable) the experimental edge following feature.
-        *
-        * @param follow
-        *            true to follow the edge hints.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setPrefetchFollowEdgeHints(boolean follow) {
-               prefetchFollowEdgeHints = follow;
-               return this;
-       }
-
-       /** @return number of bytes to hold within a DhtReader. */
-       public int getChunkLimit() {
-               return chunkLimit;
-       }
-
-       /**
-        * Set the number of bytes to hold within a DhtReader.
-        *
-        * @param maxBytes
-        * @return {@code this}
-        */
-       public DhtReaderOptions setChunkLimit(int maxBytes) {
-               chunkLimit = Math.max(1024, maxBytes);
-               return this;
-       }
-
-       /** @return percentage of {@link #getChunkLimit()} used for prefetch, 0..100. */
-       public int getOpenQueuePrefetchRatio() {
-               return openQueuePrefetchRatio;
-       }
-
-       /**
-        * Set the prefetch ratio used by the open object queue.
-        *
-        * @param ratio 0..100.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setOpenQueuePrefetchRatio(int ratio) {
-               openQueuePrefetchRatio = Math.max(0, Math.min(ratio, 100));
-               return this;
-       }
-
-       /** @return percentage of {@link #getChunkLimit()} used for prefetch, 0..100. */
-       public int getWalkCommitsPrefetchRatio() {
-               return walkCommitsPrefetchRatio;
-       }
-
-       /**
-        * Set the prefetch ratio used when walking commits.
-        *
-        * @param ratio 0..100.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setWalkCommitsPrefetchRatio(int ratio) {
-               walkCommitsPrefetchRatio = Math.max(0, Math.min(ratio, 100));
-               return this;
-       }
-
-       /** @return percentage of {@link #getChunkLimit()} used for prefetch, 0..100. */
-       public int getWalkTreesPrefetchRatio() {
-               return walkTreesPrefetchRatio;
-       }
-
-       /**
-        * Set the prefetch ratio used when walking trees.
-        *
-        * @param ratio 0..100.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setWalkTreesPrefetchRatio(int ratio) {
-               walkTreesPrefetchRatio = Math.max(0, Math.min(ratio, 100));
-               return this;
-       }
-
-       /** @return percentage of {@link #getChunkLimit()} used for prefetch, 0..100. */
-       public int getWriteObjectsPrefetchRatio() {
-               return writeObjectsPrefetchRatio;
-       }
-
-       /**
-        * Set the prefetch ratio used when writing objects.
-        *
-        * @param ratio 0..100.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setWriteObjectsPrefetchRatio(int ratio) {
-               writeObjectsPrefetchRatio = Math.max(0, Math.min(ratio, 100));
-               return this;
-       }
-
-       /** @return number of concurrent reads against ObjectIndexTable. */
-       public int getObjectIndexConcurrentBatches() {
-               return objectIndexConcurrentBatches;
-       }
-
-       /**
-        * Set the number of concurrent readers on ObjectIndexTable.
-        *
-        * @param batches
-        *            number of batches.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setObjectIndexConcurrentBatches(int batches) {
-               objectIndexConcurrentBatches = Math.max(1, batches);
-               return this;
-       }
-
-       /** @return number of objects to lookup in one batch. */
-       public int getObjectIndexBatchSize() {
-               return objectIndexBatchSize;
-       }
-
-       /**
-        * Set the number of objects to lookup at once.
-        *
-        * @param objectCnt
-        *            the number of objects in a lookup batch.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setObjectIndexBatchSize(int objectCnt) {
-               objectIndexBatchSize = Math.max(1, objectCnt);
-               return this;
-       }
-
-       /** @return size of the delta base cache hash table, in object entries. */
-       public int getDeltaBaseCacheSize() {
-               return deltaBaseCacheSize;
-       }
-
-       /**
-        * Set the size of the delta base cache hash table.
-        *
-        * @param slotCnt
-        *            number of slots in the hash table.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setDeltaBaseCacheSize(int slotCnt) {
-               deltaBaseCacheSize = Math.max(1, slotCnt);
-               return this;
-       }
-
-       /** @return maximum number of bytes to hold in per-reader DeltaBaseCache. */
-       public int getDeltaBaseCacheLimit() {
-               return deltaBaseCacheLimit;
-       }
-
-       /**
-        * Set the maximum number of bytes in the DeltaBaseCache.
-        *
-        * @param maxBytes
-        *            the new limit.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setDeltaBaseCacheLimit(int maxBytes) {
-               deltaBaseCacheLimit = Math.max(0, maxBytes);
-               return this;
-       }
-
-       /** @return number of objects to cache information on. */
-       public int getRecentInfoCacheSize() {
-               return recentInfoCacheSize;
-       }
-
-       /**
-        * Set the number of objects to cache information on.
-        *
-        * @param objectCnt
-        *            the number of objects to cache.
-        * @return {@code this}
-        */
-       public DhtReaderOptions setRecentInfoCacheSize(int objectCnt) {
-               recentInfoCacheSize = Math.max(0, objectCnt);
-               return this;
-       }
-
-       /**
-        * @return true if {@link DhtReader.Statistics} includes the stack trace for
-        *         the first time a chunk is loaded. Supports debugging DHT code.
-        */
-       public boolean isTrackFirstChunkLoad() {
-               return trackFirstChunkLoad;
-       }
-
-       /**
-        * Set whether or not the initial load of each chunk should be tracked.
-        *
-        * @param track
-        *            true to track the stack trace of the first load.
-        * @return {@code this}.
-        */
-       public DhtReaderOptions setTrackFirstChunkLoad(boolean track) {
-               trackFirstChunkLoad = track;
-               return this;
-       }
-
-       /**
-        * Update properties by setting fields from the configuration.
-        * <p>
-        * If a property is not defined in the configuration, then it is left
-        * unmodified.
-        *
-        * @param rc
-        *            configuration to read properties from.
-        * @return {@code this}
-        */
-       public DhtReaderOptions fromConfig(Config rc) {
-               setTimeout(Timeout.getTimeout(rc, "core", "dht", "timeout", getTimeout()));
-               setPrefetchFollowEdgeHints(rc.getBoolean("core", "dht", "prefetchFollowEdgeHints", isPrefetchFollowEdgeHints()));
-               setChunkLimit(rc.getInt("core", "dht", "chunkLimit", getChunkLimit()));
-               setOpenQueuePrefetchRatio(rc.getInt("core", "dht", "openQueuePrefetchRatio", getOpenQueuePrefetchRatio()));
-               setWalkCommitsPrefetchRatio(rc.getInt("core", "dht", "walkCommitsPrefetchRatio", getWalkCommitsPrefetchRatio()));
-               setWalkTreesPrefetchRatio(rc.getInt("core", "dht", "walkTreesPrefetchRatio", getWalkTreesPrefetchRatio()));
-               setWriteObjectsPrefetchRatio(rc.getInt("core", "dht", "writeObjectsPrefetchRatio", getWriteObjectsPrefetchRatio()));
-
-               setObjectIndexConcurrentBatches(rc.getInt("core", "dht", "objectIndexConcurrentBatches", getObjectIndexConcurrentBatches()));
-               setObjectIndexBatchSize(rc.getInt("core", "dht", "objectIndexBatchSize", getObjectIndexBatchSize()));
-
-               setDeltaBaseCacheSize(rc.getInt("core", "dht", "deltaBaseCacheSize", getDeltaBaseCacheSize()));
-               setDeltaBaseCacheLimit(rc.getInt("core", "dht", "deltaBaseCacheLimit", getDeltaBaseCacheLimit()));
-
-               setRecentInfoCacheSize(rc.getInt("core", "dht", "recentInfoCacheSize", getRecentInfoCacheSize()));
-
-               setTrackFirstChunkLoad(rc.getBoolean("core", "dht", "debugTrackFirstChunkLoad", isTrackFirstChunkLoad()));
-               return this;
-       }
-}
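
The fromConfig() method above reads every option from the core.dht section of a JGit Config. The short sketch below is a hypothetical illustration of a few of those keys; the class name is invented for the example and the values simply repeat the built-in defaults.

// Illustrative only: builds a Config carrying core.dht keys read by fromConfig().
import org.eclipse.jgit.lib.Config;

public class DhtConfigSketch {
	public static void main(String[] args) throws Exception {
		Config rc = new Config();
		rc.fromText("[core \"dht\"]\n"
				+ "\tchunkLimit = 5242880\n"
				+ "\twriteObjectsPrefetchRatio = 90\n"
				+ "\tobjectIndexBatchSize = 512\n"
				+ "\tdebugTrackFirstChunkLoad = false\n");
		// A reader options object would then be populated with:
		//   new DhtReaderOptions().fromConfig(rc);
		System.out.println(rc.getInt("core", "dht", "chunkLimit", 0)); // prints 5242880
	}
}
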
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefDatabase.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefDatabase.java
deleted file mode 100644 (file)
index b439449..0000000
+++ /dev/null
@@ -1,524 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.lib.Ref.Storage.LOOSE;
-import static org.eclipse.jgit.lib.Ref.Storage.NEW;
-import static org.eclipse.jgit.storage.dht.RefDataUtil.NONE;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicReference;
-
-import org.eclipse.jgit.errors.MissingObjectException;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectIdSubclassMap;
-import org.eclipse.jgit.lib.Ref;
-import org.eclipse.jgit.lib.RefDatabase;
-import org.eclipse.jgit.lib.RefRename;
-import org.eclipse.jgit.lib.SymbolicRef;
-import org.eclipse.jgit.revwalk.RevObject;
-import org.eclipse.jgit.revwalk.RevTag;
-import org.eclipse.jgit.revwalk.RevWalk;
-import org.eclipse.jgit.storage.dht.RefDataUtil.IdWithChunk;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.Database;
-import org.eclipse.jgit.util.RefList;
-import org.eclipse.jgit.util.RefMap;
-
-/** Repository references stored on top of a DHT database. */
-public class DhtRefDatabase extends RefDatabase {
-       private final DhtRepository repository;
-
-       private final Database db;
-
-       private final AtomicReference<RefCache> cache;
-
-       DhtRefDatabase(DhtRepository repository, Database db) {
-               this.repository = repository;
-               this.db = db;
-               this.cache = new AtomicReference<RefCache>();
-       }
-
-       DhtRepository getRepository() {
-               return repository;
-       }
-
-       ChunkKey findChunk(AnyObjectId id) {
-               RefCache c = cache.get();
-               if (c != null) {
-                       IdWithChunk i = c.hints.get(id);
-                       if (i != null)
-                               return i.getChunkKey();
-               }
-               return null;
-       }
-
-       @Override
-       public Ref getRef(String needle) throws IOException {
-               RefCache curr = readRefs();
-               for (String prefix : SEARCH_PATH) {
-                       DhtRef ref = curr.ids.get(prefix + needle);
-                       if (ref != null) {
-                               ref = resolve(ref, 0, curr.ids);
-                               return ref;
-                       }
-               }
-               return null;
-       }
-
-       private DhtRef getOneRef(String refName) throws IOException {
-               RefCache curr = readRefs();
-               DhtRef ref = curr.ids.get(refName);
-               if (ref != null)
-                       return resolve(ref, 0, curr.ids);
-               return ref;
-       }
-
-       @Override
-       public List<Ref> getAdditionalRefs() {
-               return Collections.emptyList();
-       }
-
-       @Override
-       public Map<String, Ref> getRefs(String prefix) throws IOException {
-               RefCache curr = readRefs();
-               RefList<DhtRef> packed = RefList.emptyList();
-               RefList<DhtRef> loose = curr.ids;
-               RefList.Builder<DhtRef> sym = new RefList.Builder<DhtRef>(curr.sym.size());
-
-               for (int idx = 0; idx < curr.sym.size(); idx++) {
-                       DhtRef ref = curr.sym.get(idx);
-                       String name = ref.getName();
-                       ref = resolve(ref, 0, loose);
-                       if (ref != null && ref.getObjectId() != null) {
-                               sym.add(ref);
-                       } else {
-                               // A broken symbolic reference; we have to drop it from the
-                               // collections the client is about to receive. Should be a
-                               // rare occurrence, so pay a copy penalty.
-                               int toRemove = loose.find(name);
-                               if (0 <= toRemove)
-                                       loose = loose.remove(toRemove);
-                       }
-               }
-
-               return new RefMap(prefix, packed, loose, sym.toRefList());
-       }
-
-       private DhtRef resolve(DhtRef ref, int depth, RefList<DhtRef> loose)
-                       throws IOException {
-               if (!ref.isSymbolic())
-                       return ref;
-
-               DhtRef dst = (DhtRef) ref.getTarget();
-
-               if (MAX_SYMBOLIC_REF_DEPTH <= depth)
-                       return null; // claim it doesn't exist
-
-               dst = loose.get(dst.getName());
-               if (dst == null)
-                       return ref;
-
-               dst = resolve(dst, depth + 1, loose);
-               if (dst == null)
-                       return null;
-
-               return new DhtSymbolicRef(
-                               ref.getName(),
-                               dst,
-                               ((DhtSymbolicRef) ref).getRefData());
-       }
-
-       @Override
-       public Ref peel(Ref ref) throws IOException {
-               final Ref oldLeaf = ref.getLeaf();
-               if (oldLeaf.isPeeled() || oldLeaf.getObjectId() == null)
-                       return ref;
-
-               DhtRef newLeaf = doPeel(oldLeaf);
-
-               RefCache cur = readRefs();
-               int idx = cur.ids.find(oldLeaf.getName());
-               if (0 <= idx && cur.ids.get(idx) == oldLeaf) {
-                       RefList<DhtRef> newList = cur.ids.set(idx, newLeaf);
-                       if (cache.compareAndSet(cur, new RefCache(newList, cur)))
-                               cachePeeledState(oldLeaf, newLeaf);
-               }
-
-               return recreate(ref, newLeaf);
-       }
-
-       private void cachePeeledState(Ref oldLeaf, Ref newLeaf) {
-               // TODO(spearce) Use an ExecutorService here
-               try {
-                       RepositoryKey repo = repository.getRepositoryKey();
-                       RefKey key = RefKey.create(repo, newLeaf.getName());
-                       RefData oldData = ((DhtRef) oldLeaf).getRefData();
-                       RefData newData = ((DhtRef) newLeaf).getRefData();
-                       db.ref().compareAndPut(key, oldData, newData);
-               } catch (TimeoutException e) {
-                       // Ignore a timeout here; we were only trying to update
-                       // a cached value to save peeling costs in the future.
-
-               } catch (DhtException e) {
-                       // Ignore a database error, this was only an attempt to
-                       // fix a value that could be cached to save time later.
-               }
-       }
-
-       private DhtRef doPeel(final Ref leaf) throws MissingObjectException,
-                       IOException {
-               RevWalk rw = new RevWalk(getRepository());
-               try {
-                       DhtReader ctx = (DhtReader) rw.getObjectReader();
-                       RevObject obj = rw.parseAny(leaf.getObjectId());
-                       RefData.Builder d = RefData.newBuilder(((DhtRef) leaf).getRefData());
-
-                       ChunkKey oKey = ctx.findChunk(leaf.getObjectId());
-                       if (oKey != null)
-                               d.getTargetBuilder().setChunkKey(oKey.asString());
-                       else
-                               d.getTargetBuilder().clearChunkKey();
-
-                       if (obj instanceof RevTag) {
-                               ObjectId pId = rw.peel(obj);
-                               d.getPeeledBuilder().setObjectName(pId.name());
-
-                               ChunkKey pKey = ctx.findChunk(pId);
-                               if (pKey != null)
-                                       d.getPeeledBuilder().setChunkKey(pKey.asString());
-                               else
-                                       d.getPeeledBuilder().clearChunkKey();
-                       } else {
-                               d.clearPeeled();
-                       }
-
-                       d.setIsPeeled(true);
-                       d.setSequence(d.getSequence() + 1);
-                       return new DhtObjectIdRef(leaf.getName(), d.build());
-               } finally {
-                       rw.release();
-               }
-       }
-
-       private static Ref recreate(final Ref old, final Ref leaf) {
-               if (old.isSymbolic()) {
-                       Ref dst = recreate(old.getTarget(), leaf);
-                       return new SymbolicRef(old.getName(), dst);
-               }
-               return leaf;
-       }
-
-       @Override
-       public DhtRefUpdate newUpdate(String refName, boolean detach)
-                       throws IOException {
-               boolean detachingSymbolicRef = false;
-               DhtRef ref = getOneRef(refName);
-               if (ref == null)
-                       ref = new DhtObjectIdRef(refName, NONE);
-               else
-                       detachingSymbolicRef = detach && ref.isSymbolic();
-
-               if (detachingSymbolicRef) {
-                       RefData src = ((DhtRef) ref.getLeaf()).getRefData();
-                       RefData.Builder b = RefData.newBuilder(ref.getRefData());
-                       b.clearSymref();
-                       b.setTarget(src.getTarget());
-                       ref = new DhtObjectIdRef(refName, b.build());
-               }
-
-               RepositoryKey repo = repository.getRepositoryKey();
-               DhtRefUpdate update = new DhtRefUpdate(this, repo, db, ref);
-               if (detachingSymbolicRef)
-                       update.setDetachingSymbolicRef();
-               return update;
-       }
-
-       @Override
-       public RefRename newRename(String fromName, String toName)
-                       throws IOException {
-               DhtRefUpdate src = newUpdate(fromName, true);
-               DhtRefUpdate dst = newUpdate(toName, true);
-               return new DhtRefRename(src, dst);
-       }
-
-       @Override
-       public boolean isNameConflicting(String refName) throws IOException {
-               RefList<DhtRef> all = readRefs().ids;
-
-               // Cannot be nested within an existing reference.
-               int lastSlash = refName.lastIndexOf('/');
-               while (0 < lastSlash) {
-                       String needle = refName.substring(0, lastSlash);
-                       if (all.contains(needle))
-                               return true;
-                       lastSlash = refName.lastIndexOf('/', lastSlash - 1);
-               }
-
-               // Cannot be the container of an existing reference.
-               String prefix = refName + '/';
-               int idx = -(all.find(prefix) + 1);
-               if (idx < all.size() && all.get(idx).getName().startsWith(prefix))
-                       return true;
-               return false;
-       }
-
-       @Override
-       public void create() {
-               // Nothing to do.
-       }
-
-       @Override
-       public void close() {
-               clearCache();
-       }
-
-       void clearCache() {
-               cache.set(null);
-       }
-
-       void stored(String refName, RefData newData) {
-               DhtRef ref = fromData(refName, newData);
-               RefCache oldCache, newCache;
-               do {
-                       oldCache = cache.get();
-                       if (oldCache == null)
-                               return;
-
-                       RefList<DhtRef> ids = oldCache.ids.put(ref);
-                       RefList<DhtRef> sym = oldCache.sym;
-
-                       if (ref.isSymbolic()) {
-                               sym = sym.put(ref);
-                       } else {
-                               int p = sym.find(refName);
-                               if (0 <= p)
-                                       sym = sym.remove(p);
-                       }
-
-                       newCache = new RefCache(ids, sym, oldCache.hints);
-               } while (!cache.compareAndSet(oldCache, newCache));
-       }
-
-       void removed(String refName) {
-               RefCache oldCache, newCache;
-               do {
-                       oldCache = cache.get();
-                       if (oldCache == null)
-                               return;
-
-                       int p;
-
-                       RefList<DhtRef> ids = oldCache.ids;
-                       p = ids.find(refName);
-                       if (0 <= p)
-                               ids = ids.remove(p);
-
-                       RefList<DhtRef> sym = oldCache.sym;
-                       p = sym.find(refName);
-                       if (0 <= p)
-                               sym = sym.remove(p);
-
-                       newCache = new RefCache(ids, sym, oldCache.hints);
-               } while (!cache.compareAndSet(oldCache, newCache));
-       }
-
-       private RefCache readRefs() throws DhtException {
-               RefCache c = cache.get();
-               if (c == null) {
-                       try {
-                               c = read();
-                       } catch (TimeoutException e) {
-                               throw new DhtTimeoutException(e);
-                       }
-                       cache.set(c);
-               }
-               return c;
-       }
-
-       private RefCache read() throws DhtException, TimeoutException {
-               RefList.Builder<DhtRef> id = new RefList.Builder<DhtRef>();
-               RefList.Builder<DhtRef> sym = new RefList.Builder<DhtRef>();
-               ObjectIdSubclassMap<IdWithChunk> hints = new ObjectIdSubclassMap<IdWithChunk>();
-
-               for (Map.Entry<RefKey, RefData> e : scan()) {
-                       DhtRef ref = fromData(e.getKey().getName(), e.getValue());
-
-                       if (ref.isSymbolic())
-                               sym.add(ref);
-                       id.add(ref);
-
-                       if (ref.getObjectId() instanceof IdWithChunk
-                                       && !hints.contains(ref.getObjectId()))
-                               hints.add((IdWithChunk) ref.getObjectId());
-                       if (ref.getPeeledObjectId() instanceof IdWithChunk
-                                       && !hints.contains(ref.getPeeledObjectId()))
-                               hints.add((IdWithChunk) ref.getPeeledObjectId());
-               }
-
-               id.sort();
-               sym.sort();
-
-               return new RefCache(id.toRefList(), sym.toRefList(), hints);
-       }
-
-       static DhtRef fromData(String name, RefData data) {
-               if (data.hasSymref())
-                       return new DhtSymbolicRef(name, data);
-               else
-                       return new DhtObjectIdRef(name, data);
-       }
-
-       private static ObjectId idFrom(RefData.Id src) {
-               ObjectId id = ObjectId.fromString(src.getObjectName());
-               if (!src.hasChunkKey())
-                       return id;
-               return new IdWithChunk(id, ChunkKey.fromString(src.getChunkKey()));
-       }
-
-       private Set<Map.Entry<RefKey, RefData>> scan() throws DhtException,
-                       TimeoutException {
-               // TODO(spearce) Do we need to perform READ_REPAIR here?
-               RepositoryKey repo = repository.getRepositoryKey();
-               return db.ref().getAll(Context.LOCAL, repo).entrySet();
-       }
-
-       private static class RefCache {
-               final RefList<DhtRef> ids;
-
-               final RefList<DhtRef> sym;
-
-               final ObjectIdSubclassMap<IdWithChunk> hints;
-
-               RefCache(RefList<DhtRef> ids, RefList<DhtRef> sym,
-                               ObjectIdSubclassMap<IdWithChunk> hints) {
-                       this.ids = ids;
-                       this.sym = sym;
-                       this.hints = hints;
-               }
-
-               RefCache(RefList<DhtRef> ids, RefCache old) {
-                       this(ids, old.sym, old.hints);
-               }
-       }
-
-       static interface DhtRef extends Ref {
-               RefData getRefData();
-       }
-
-       private static class DhtSymbolicRef extends SymbolicRef implements DhtRef {
-               private final RefData data;
-
-               DhtSymbolicRef(String refName, RefData data) {
-                       super(refName, new DhtObjectIdRef(data.getSymref(), NONE));
-                       this.data = data;
-               }
-
-               DhtSymbolicRef(String refName, Ref target, RefData data) {
-                       super(refName, target);
-                       this.data = data;
-               }
-
-               public RefData getRefData() {
-                       return data;
-               }
-       }
-
-       private static class DhtObjectIdRef implements DhtRef {
-               private final String name;
-               private final RefData data;
-               private final ObjectId objectId;
-               private final ObjectId peeledId;
-
-               DhtObjectIdRef(String name, RefData data) {
-                       this.name = name;
-                       this.data = data;
-                       this.objectId = data.hasTarget() ? idFrom(data.getTarget()) : null;
-                       this.peeledId = data.hasPeeled() ? idFrom(data.getPeeled()) : null;
-               }
-
-               public String getName() {
-                       return name;
-               }
-
-               public boolean isSymbolic() {
-                       return false;
-               }
-
-               public Ref getLeaf() {
-                       return this;
-               }
-
-               public Ref getTarget() {
-                       return this;
-               }
-
-               public ObjectId getObjectId() {
-                       return objectId;
-               }
-
-               public Ref.Storage getStorage() {
-                       return data.hasTarget() ? LOOSE : NEW;
-               }
-
-               public boolean isPeeled() {
-                       return data.getIsPeeled();
-               }
-
-               public ObjectId getPeeledObjectId() {
-                       return peeledId;
-               }
-
-               public RefData getRefData() {
-                       return data;
-               }
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefRename.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefRename.java
deleted file mode 100644 (file)
index 4df3bde..0000000
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.IOException;
-
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.RefRename;
-import org.eclipse.jgit.lib.RefUpdate.Result;
-
-class DhtRefRename extends RefRename {
-       DhtRefRename(DhtRefUpdate src, DhtRefUpdate dst) {
-               super(src, dst);
-       }
-
-       @Override
-       protected Result doRename() throws IOException {
-               // TODO(spearce) Correctly handle renaming foo/bar to foo.
-
-               destination.setExpectedOldObjectId(ObjectId.zeroId());
-               destination.setNewObjectId(source.getRef().getObjectId());
-               switch (destination.update()) {
-               case NEW:
-                       source.delete();
-                       return Result.RENAMED;
-
-               default:
-                       return destination.getResult();
-               }
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefUpdate.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefUpdate.java
deleted file mode 100644 (file)
index cb363d0..0000000
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.IOException;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.errors.MissingObjectException;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.Ref;
-import org.eclipse.jgit.lib.RefUpdate;
-import org.eclipse.jgit.revwalk.RevObject;
-import org.eclipse.jgit.revwalk.RevTag;
-import org.eclipse.jgit.revwalk.RevWalk;
-import org.eclipse.jgit.storage.dht.DhtRefDatabase.DhtRef;
-import org.eclipse.jgit.storage.dht.spi.Database;
-
-class DhtRefUpdate extends RefUpdate {
-       private final DhtRefDatabase refdb;
-
-       private final RepositoryKey repo;
-
-       private final Database db;
-
-       private RefKey refKey;
-
-       private RefData oldData;
-
-       private RefData newData;
-
-       private Ref dstRef;
-
-       private RevWalk rw;
-
-       DhtRefUpdate(DhtRefDatabase refdb, RepositoryKey repo, Database db, Ref ref) {
-               super(ref);
-               this.refdb = refdb;
-               this.repo = repo;
-               this.db = db;
-       }
-
-       @Override
-       protected DhtRefDatabase getRefDatabase() {
-               return refdb;
-       }
-
-       @Override
-       protected DhtRepository getRepository() {
-               return refdb.getRepository();
-       }
-
-       @Override
-       public Result update(RevWalk walk) throws IOException {
-               try {
-                       rw = walk;
-                       return super.update(walk);
-               } finally {
-                       rw = null;
-               }
-       }
-
-       @Override
-       protected boolean tryLock(boolean deref) throws IOException {
-               dstRef = getRef();
-               if (deref)
-                       dstRef = dstRef.getLeaf();
-
-               refKey = RefKey.create(repo, dstRef.getName());
-               oldData = ((DhtRef) dstRef).getRefData();
-
-               if (dstRef.isSymbolic())
-                       setOldObjectId(null);
-               else
-                       setOldObjectId(dstRef.getObjectId());
-
-               return true;
-       }
-
-       @Override
-       protected void unlock() {
-               // No state is held while "locked".
-       }
-
-       @Override
-       protected Result doUpdate(Result desiredResult) throws IOException {
-               try {
-                       newData = newData();
-                       boolean r = db.ref().compareAndPut(refKey, oldData, newData);
-                       if (r) {
-                               getRefDatabase().stored(dstRef.getName(), newData);
-                               return desiredResult;
-                       } else {
-                               getRefDatabase().clearCache();
-                               return Result.LOCK_FAILURE;
-                       }
-               } catch (TimeoutException e) {
-                       return Result.IO_FAILURE;
-               }
-       }
-
-       @Override
-       protected Result doDelete(Result desiredResult) throws IOException {
-               try {
-                       boolean r = db.ref().compareAndRemove(refKey, oldData);
-                       if (r) {
-                               getRefDatabase().removed(dstRef.getName());
-                               return desiredResult;
-                       } else {
-                               getRefDatabase().clearCache();
-                               return Result.LOCK_FAILURE;
-                       }
-               } catch (TimeoutException e) {
-                       return Result.IO_FAILURE;
-               }
-       }
-
-       @Override
-       protected Result doLink(String target) throws IOException {
-               try {
-                       RefData.Builder d = RefData.newBuilder(oldData);
-                       clearRefData(d);
-                       updateSequence(d);
-                       d.setSymref(target);
-                       newData = d.build();
-                       boolean r = db.ref().compareAndPut(refKey, oldData, newData);
-                       if (r) {
-                               getRefDatabase().stored(dstRef.getName(), newData);
-                               if (getRef().getStorage() == Ref.Storage.NEW)
-                                       return Result.NEW;
-                               return Result.FORCED;
-                       } else {
-                               getRefDatabase().clearCache();
-                               return Result.LOCK_FAILURE;
-                       }
-               } catch (TimeoutException e) {
-                       return Result.IO_FAILURE;
-               }
-       }
-
-       private RefData newData() throws IOException {
-               RefData.Builder d = RefData.newBuilder(oldData);
-               clearRefData(d);
-               updateSequence(d);
-
-               ObjectId newId = getNewObjectId();
-               d.getTargetBuilder().setObjectName(newId.name());
-               try {
-                       DhtReader ctx = (DhtReader) rw.getObjectReader();
-                       RevObject obj = rw.parseAny(newId);
-
-                       ChunkKey oKey = ctx.findChunk(newId);
-                       if (oKey != null)
-                               d.getTargetBuilder().setChunkKey(oKey.asString());
-
-                       if (obj instanceof RevTag) {
-                               ObjectId pId = rw.peel(obj);
-                               ChunkKey pKey = ctx.findChunk(pId);
-                               if (pKey != null)
-                                       d.getPeeledBuilder().setChunkKey(pKey.asString());
-                               d.getPeeledBuilder().setObjectName(pId.name());
-                       }
-               } catch (MissingObjectException e) {
-                       // Automatic peeling failed. Ignore the problem and deal with it
-                       // during reading later, this is the classical Git behavior on disk.
-               }
-               return d.build();
-       }
-
-       private static void clearRefData(RefData.Builder d) {
-               // Clear fields individually rather than discarding the RefData.
-               // This way implementation specific extensions are carried
-               // through from the old version to the new version.
-               d.clearSymref();
-               d.clearTarget();
-               d.clearPeeled();
-               d.clearIsPeeled();
-       }
-
-       private static void updateSequence(RefData.Builder d) {
-               d.setSequence(d.getSequence() + 1);
-       }
-}
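
The ref update above never takes a filesystem lock: doUpdate(), doDelete() and doLink() hand the previously read RefData together with the new value to compareAndPut()/compareAndRemove(), and a lost race simply comes back as LOCK_FAILURE. A minimal sketch of that compare-and-swap pattern, using a ConcurrentHashMap of strings as a stand-in for the deleted spi.Database ref table (RefStoreDemo and its values are illustrative only, not the deleted API):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    public class RefStoreDemo {
        // Stand-in for the deleted Database.ref() table.
        private static final ConcurrentMap<String, String> refs = new ConcurrentHashMap<>();

        // Succeeds only if the stored value still equals what the caller read earlier,
        // mirroring compareAndPut(refKey, oldData, newData) returning false on a lost race.
        static boolean compareAndPut(String name, String oldValue, String newValue) {
            if (oldValue == null)
                return refs.putIfAbsent(name, newValue) == null;
            return refs.replace(name, oldValue, newValue);
        }

        public static void main(String[] args) {
            refs.put("refs/heads/master", "value-1");
            System.out.println(compareAndPut("refs/heads/master", "value-1", "value-2")); // true
            System.out.println(compareAndPut("refs/heads/master", "value-1", "value-3")); // false: stale read
        }
    }
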
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRepository.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRepository.java
deleted file mode 100644 (file)
index faff469..0000000
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.IOException;
-import java.text.MessageFormat;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.RefUpdate;
-import org.eclipse.jgit.lib.Repository;
-import org.eclipse.jgit.lib.StoredConfig;
-import org.eclipse.jgit.storage.dht.spi.Database;
-import org.eclipse.jgit.storage.file.ReflogReader;
-
-/**
- * A Git repository storing its objects and references in a DHT.
- * <p>
- * With the exception of repository creation, this class is thread-safe, but
- * readers created from it are not. When creating a new repository using the
- * {@link #create(boolean)} method, the newly constructed repository object does
- * not ensure the assigned {@link #getRepositoryKey()} will be visible to all
- * threads. Applications are encouraged to use their own synchronization when
- * sharing a Repository instance that was used to create a new repository.
- */
-public class DhtRepository extends Repository {
-       private final RepositoryName name;
-
-       private final Database db;
-
-       private final DhtRefDatabase refdb;
-
-       private final DhtObjDatabase objdb;
-
-       private final DhtConfig config;
-
-       private RepositoryKey key;
-
-       /**
-        * Initialize an in-memory representation of a DHT backed repository.
-        *
-        * @param builder
-        *            description of the repository and its data storage.
-        */
-       public DhtRepository(DhtRepositoryBuilder builder) {
-               super(builder);
-               this.name = RepositoryName.create(builder.getRepositoryName());
-               this.key = builder.getRepositoryKey();
-               this.db = builder.getDatabase();
-
-               this.refdb = new DhtRefDatabase(this, db);
-               this.objdb = new DhtObjDatabase(this, builder);
-               this.config = new DhtConfig();
-       }
-
-       /** @return database cluster that houses this repository (among others). */
-       public Database getDatabase() {
-               return db;
-       }
-
-       /** @return human readable name used to open this repository. */
-       public RepositoryName getRepositoryName() {
-               return name;
-       }
-
-       /** @return unique identity of the repository in the {@link #getDatabase()}. */
-       public RepositoryKey getRepositoryKey() {
-               return key;
-       }
-
-       @Override
-       public StoredConfig getConfig() {
-               return config;
-       }
-
-       @Override
-       public DhtRefDatabase getRefDatabase() {
-               return refdb;
-       }
-
-       @Override
-       public DhtObjDatabase getObjectDatabase() {
-               return objdb;
-       }
-
-       @Override
-       public void create(boolean bare) throws IOException {
-               if (!bare)
-                       throw new IllegalArgumentException(
-                                       DhtText.get().repositoryMustBeBare);
-
-               if (getObjectDatabase().exists())
-                       throw new DhtException(MessageFormat.format(
-                                       DhtText.get().repositoryAlreadyExists, name.asString()));
-
-               try {
-                       key = db.repository().nextKey();
-                       db.repositoryIndex().putUnique(name, key);
-               } catch (TimeoutException err) {
-                       throw new DhtTimeoutException(MessageFormat.format(
-                                       DhtText.get().timeoutLocatingRepository, name), err);
-               }
-
-               String master = Constants.R_HEADS + Constants.MASTER;
-               RefUpdate.Result result = updateRef(Constants.HEAD, true).link(master);
-               if (result != RefUpdate.Result.NEW)
-                       throw new IOException(result.name());
-       }
-
-       @Override
-       public void scanForRepoChanges() {
-               refdb.clearCache();
-       }
-
-       @Override
-       public void notifyIndexChanged() {
-               // we do not support non-bare repositories yet
-       }
-
-       @Override
-       public String toString() {
-               return "DhtRepository[" + key + " / " + name + "]";
-       }
-
-       // TODO This method should be removed from the JGit API.
-       @Override
-       public ReflogReader getReflogReader(String refName) {
-               throw new UnsupportedOperationException();
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRepositoryBuilder.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRepositoryBuilder.java
deleted file mode 100644 (file)
index a02b313..0000000
+++ /dev/null
@@ -1,236 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.File;
-import java.text.MessageFormat;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.errors.RepositoryNotFoundException;
-import org.eclipse.jgit.lib.BaseRepositoryBuilder;
-import org.eclipse.jgit.storage.dht.spi.Database;
-
-/**
- * Constructs a {@link DhtRepository}.
- *
- * @param <B>
- *            type of builder used by the DHT system.
- * @param <R>
- *            type of repository used by the DHT system.
- * @param <D>
- *            type of database used by the DHT system.
- */
-public class DhtRepositoryBuilder<B extends DhtRepositoryBuilder, R extends DhtRepository, D extends Database>
-               extends BaseRepositoryBuilder<B, R> {
-       private D database;
-
-       private DhtReaderOptions readerOptions;
-
-       private DhtInserterOptions inserterOptions;
-
-       private String name;
-
-       private RepositoryKey key;
-
-       /** Initializes an empty builder with no values set. */
-       public DhtRepositoryBuilder() {
-               setBare();
-               setMustExist(true);
-       }
-
-       /** @return the database that stores the repositories. */
-       public D getDatabase() {
-               return database;
-       }
-
-       /**
-        * Set the cluster used to store the repositories.
-        *
-        * @param database
-        *            the database supplier.
-        * @return {@code this}
-        */
-       public B setDatabase(D database) {
-               this.database = database;
-               return self();
-       }
-
-       /** @return options used by readers accessing the repository. */
-       public DhtReaderOptions getReaderOptions() {
-               return readerOptions;
-       }
-
-       /**
-        * Set the reader options.
-        *
-        * @param opt
-        *            new reader options object.
-        * @return {@code this}
-        */
-       public B setReaderOptions(DhtReaderOptions opt) {
-               readerOptions = opt;
-               return self();
-       }
-
-       /** @return options used by writers accessing the repository. */
-       public DhtInserterOptions getInserterOptions() {
-               return inserterOptions;
-       }
-
-       /**
-        * Set the inserter options.
-        *
-        * @param opt
-        *            new inserter options object.
-        * @return {@code this}
-        */
-       public B setInserterOptions(DhtInserterOptions opt) {
-               inserterOptions = opt;
-               return self();
-       }
-
-       /** @return name of the repository in the DHT. */
-       public String getRepositoryName() {
-               return name;
-       }
-
-       /**
-        * Set the name of the repository to open.
-        *
-        * @param name
-        *            the name.
-        * @return {@code this}.
-        */
-       public B setRepositoryName(String name) {
-               this.name = name;
-               return self();
-       }
-
-       /** @return the repository's key. */
-       public RepositoryKey getRepositoryKey() {
-               return key;
-       }
-
-       /**
-        * @param key
-        * @return {@code this}
-        */
-       public B setRepositoryKey(RepositoryKey key) {
-               this.key = key;
-               return self();
-       }
-
-       @Override
-       public B setup() throws IllegalArgumentException, DhtException,
-                       RepositoryNotFoundException {
-               if (getDatabase() == null)
-                       throw new IllegalArgumentException(DhtText.get().databaseRequired);
-
-               if (getReaderOptions() == null)
-                       setReaderOptions(new DhtReaderOptions());
-               if (getInserterOptions() == null)
-                       setInserterOptions(new DhtInserterOptions());
-
-               if (getRepositoryKey() == null) {
-                       if (getRepositoryName() == null)
-                               throw new IllegalArgumentException(DhtText.get().nameRequired);
-
-                       RepositoryKey r;
-                       try {
-                               r = getDatabase().repositoryIndex().get(
-                                               RepositoryName.create(name));
-                       } catch (TimeoutException e) {
-                               throw new DhtTimeoutException(MessageFormat.format(
-                                               DhtText.get().timeoutLocatingRepository, name), e);
-                       }
-                       if (isMustExist() && r == null)
-                               throw new RepositoryNotFoundException(getRepositoryName());
-                       if (r != null)
-                               setRepositoryKey(r);
-               }
-               return self();
-       }
-
-       @Override
-       @SuppressWarnings("unchecked")
-       public R build() throws IllegalArgumentException, DhtException,
-                       RepositoryNotFoundException {
-               return (R) new DhtRepository(setup());
-       }
-
-       // We don't support local file IO and thus shouldn't permit these to be set.
-
-       @Override
-       public B setGitDir(File gitDir) {
-               if (gitDir != null)
-                       throw new IllegalArgumentException();
-               return self();
-       }
-
-       @Override
-       public B setObjectDirectory(File objectDirectory) {
-               if (objectDirectory != null)
-                       throw new IllegalArgumentException();
-               return self();
-       }
-
-       @Override
-       public B addAlternateObjectDirectory(File other) {
-               throw new UnsupportedOperationException("Alternates not supported");
-       }
-
-       @Override
-       public B setWorkTree(File workTree) {
-               if (workTree != null)
-                       throw new IllegalArgumentException();
-               return self();
-       }
-
-       @Override
-       public B setIndexFile(File indexFile) {
-               if (indexFile != null)
-                       throw new IllegalArgumentException();
-               return self();
-       }
-}
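
The builder above needs a Database and either a repository name or key before build(); all local-file setters are rejected because the backend keeps nothing on disk. A sketch of how it was meant to be driven, assuming some spi.Database implementation exists (the createMyDatabase() call below is hypothetical; no open source implementation was ever published):

    // Hypothetical: obtain a spi.Database implementation from somewhere.
    Database db = createMyDatabase();

    DhtRepositoryBuilder builder = new DhtRepositoryBuilder();
    builder.setDatabase(db);
    builder.setRepositoryName("sandbox/demo.git");
    builder.setMustExist(false);           // allow creating a repository that does not exist yet

    DhtRepository repo = builder.build();  // build() runs setup() and resolves the RepositoryKey
    repo.create(true);                     // the DHT backend only supported bare repositories
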
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtText.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtText.java
deleted file mode 100644 (file)
index 4fb520b..0000000
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import org.eclipse.jgit.nls.NLS;
-import org.eclipse.jgit.nls.TranslationBundle;
-
-/** Translation bundle for the DHT storage provider. */
-public class DhtText extends TranslationBundle {
-       /** @return an instance of this translation bundle. */
-       public static DhtText get() {
-               return NLS.getBundleFor(DhtText.class);
-       }
-
-       /***/ public String cannotInsertObject;
-       /***/ public String corruptChunk;
-       /***/ public String corruptCompressedObject;
-       /***/ public String cycleInDeltaChain;
-       /***/ public String databaseRequired;
-       /***/ public String expectedObjectSizeDuringCopyAsIs;
-       /***/ public String invalidCachedPackInfo;
-       /***/ public String invalidChunkKey;
-       /***/ public String invalidChunkMeta;
-       /***/ public String invalidObjectIndexKey;
-       /***/ public String invalidObjectInfo;
-       /***/ public String invalidRefData;
-       /***/ public String missingChunk;
-       /***/ public String missingLongOffsetBase;
-       /***/ public String nameRequired;
-       /***/ public String noSavedTypeForBase;
-       /***/ public String notTimeUnit;
-       /***/ public String objectListSelectingName;
-       /***/ public String objectListCountingFrom;
-       /***/ public String objectTypeUnknown;
-       /***/ public String packParserInvalidPointer;
-       /***/ public String packParserRollbackFailed;
-       /***/ public String recordingObjects;
-       /***/ public String repositoryAlreadyExists;
-       /***/ public String repositoryMustBeBare;
-       /***/ public String shortCompressedObject;
-       /***/ public String timeoutChunkMeta;
-       /***/ public String timeoutLocatingRepository;
-       /***/ public String tooManyObjectsInPack;
-       /***/ public String unsupportedChunkIndex;
-       /***/ public String unsupportedObjectTypeInChunk;
-       /***/ public String wrongChunkPositionInCachedPack;
-}
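
Each public String field above is filled in from the bundle's properties file via NLS.getBundleFor() and is then used as a MessageFormat pattern, as DhtRepository.create() does with timeoutLocatingRepository. A tiny standalone illustration of that formatting step (the pattern text is a made-up sample; the real wording lived in the resource bundle):

    import java.text.MessageFormat;

    public class DhtTextFormatDemo {
        public static void main(String[] args) {
            String pattern = "Timeout locating repository {0}";   // sample pattern only
            System.out.println(MessageFormat.format(pattern, "sandbox/demo.git"));
        }
    }
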
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtTimeoutException.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtTimeoutException.java
deleted file mode 100644 (file)
index 32d52f0..0000000
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.storage.dht.spi.Database;
-
-/** A {@link Database} operation timed out or was interrupted. */
-public class DhtTimeoutException extends DhtException {
-       private static final long serialVersionUID = 1L;
-
-       /**
-        * @param message
-        */
-       public DhtTimeoutException(String message) {
-               super(message);
-       }
-
-       /**
-        * @param message
-        * @param cause
-        */
-       public DhtTimeoutException(String message, TimeoutException cause) {
-               super(message);
-               initCause(cause);
-       }
-
-       /**
-        * @param cause
-        */
-       public DhtTimeoutException(TimeoutException cause) {
-               super(cause.getMessage());
-               initCause(cause);
-       }
-
-       /**
-        * @param cause
-        */
-       public DhtTimeoutException(InterruptedException cause) {
-               super(cause.getMessage());
-               initCause(cause);
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/KeyUtils.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/KeyUtils.java
deleted file mode 100644 (file)
index 6608a38..0000000
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import org.eclipse.jgit.util.RawParseUtils;
-
-final class KeyUtils {
-       static short parse16(byte[] src, int pos) {
-               return (short) RawParseUtils.parseHexInt16(src, pos);
-       }
-
-       static int parse32(byte[] src, int pos) {
-               return RawParseUtils.parseHexInt32(src, pos);
-       }
-
-       static void format16(byte[] dst, int p, short w) {
-               int o = p + 3;
-               while (o >= p && w != 0) {
-                       dst[o--] = hexbyte[w & 0xf];
-                       w >>>= 4;
-               }
-               while (o >= p)
-                       dst[o--] = '0';
-       }
-
-       static void format32(byte[] dst, int p, int w) {
-               int o = p + 7;
-               while (o >= p && w != 0) {
-                       dst[o--] = hexbyte[w & 0xf];
-                       w >>>= 4;
-               }
-               while (o >= p)
-                       dst[o--] = '0';
-       }
-
-       private static final byte[] hexbyte = { '0', '1', '2', '3', '4', '5', '6',
-                       '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' };
-
-       private KeyUtils() {
-               // Do not create instances of this class.
-       }
-}
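
format16/format32 and parse16/parse32 above implement a fixed-width, zero-padded, lowercase hex encoding of short and int values. A standalone round trip with plain JDK calls, shown only to illustrate the encoding rather than the deleted helpers themselves:

    public class HexKeyDemo {
        public static void main(String[] args) {
            int repoId = 0x1a2b;

            // Same output shape as format32(dst, 0, repoId): 8 lowercase hex digits, zero padded.
            String formatted = String.format("%08x", repoId);      // "00001a2b"

            // Same value parse32(src, 0) recovers from those 8 bytes.
            int parsed = (int) Long.parseLong(formatted, 16);

            System.out.println(formatted + " -> 0x" + Integer.toHexString(parsed));
        }
    }
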
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/LargeNonDeltaObject.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/LargeNonDeltaObject.java
deleted file mode 100644 (file)
index e6afd73..0000000
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.zip.InflaterInputStream;
-
-import org.eclipse.jgit.errors.LargeObjectException;
-import org.eclipse.jgit.errors.MissingObjectException;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.lib.ObjectLoader;
-import org.eclipse.jgit.lib.ObjectStream;
-
-/** Loader for a large non-delta object. */
-class LargeNonDeltaObject extends ObjectLoader {
-       private final int type;
-
-       private final long sz;
-
-       private final int pos;
-
-       private final DhtReader ctx;
-
-       private final ChunkMeta meta;
-
-       private PackChunk firstChunk;
-
-       LargeNonDeltaObject(int type, long sz, PackChunk pc, int pos, DhtReader ctx) {
-               this.type = type;
-               this.sz = sz;
-               this.pos = pos;
-               this.ctx = ctx;
-               this.meta = pc.getMeta();
-               firstChunk = pc;
-       }
-
-       @Override
-       public boolean isLarge() {
-               return true;
-       }
-
-       @Override
-       public byte[] getCachedBytes() throws LargeObjectException {
-               throw new LargeObjectException.ExceedsByteArrayLimit();
-       }
-
-       @Override
-       public int getType() {
-               return type;
-       }
-
-       @Override
-       public long getSize() {
-               return sz;
-       }
-
-       @Override
-       public ObjectStream openStream() throws MissingObjectException, IOException {
-               PackChunk pc = firstChunk;
-               if (pc != null)
-                       firstChunk = null;
-               else
-                       pc = ctx.getChunk(ChunkKey.fromString(meta.getFragment(0)));
-
-               InputStream in = new ChunkInputStream(meta, ctx, pos, pc);
-               in = new BufferedInputStream(new InflaterInputStream(in), 8192);
-               return new ObjectStream.Filter(type, sz, in);
-       }
-
-       private static class ChunkInputStream extends InputStream {
-               private final ChunkMeta meta;
-
-               private final DhtReader ctx;
-
-               private int ptr;
-
-               private PackChunk pc;
-
-               private int fragment;
-
-               ChunkInputStream(ChunkMeta meta, DhtReader ctx, int pos, PackChunk pc) {
-                       this.ctx = ctx;
-                       this.meta = meta;
-                       this.ptr = pos;
-                       this.pc = pc;
-               }
-
-               @Override
-               public int read(byte[] dstbuf, int dstptr, int dstlen)
-                               throws IOException {
-                       if (0 == dstlen)
-                               return 0;
-
-                       int n = pc.read(ptr, dstbuf, dstptr, dstlen);
-                       if (n == 0) {
-                               if (fragment == meta.getFragmentCount())
-                                       return -1;
-
-                               pc = ctx.getChunk(ChunkKey.fromString(
-                                               meta.getFragment(++fragment)));
-                               ptr = 0;
-                               n = pc.read(ptr, dstbuf, dstptr, dstlen);
-                               if (n == 0)
-                                       return -1;
-                       }
-                       ptr += n;
-                       return n;
-               }
-
-               @Override
-               public int read() throws IOException {
-                       byte[] tmp = new byte[1];
-                       int n = read(tmp, 0, 1);
-                       return n == 1 ? tmp[0] & 0xff : -1;
-               }
-       }
-}
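
openStream() above never materializes the object: it concatenates the chunk fragments through ChunkInputStream, inflates them, and buffers the inflated side. The wrapping order is plain java.util.zip; a self-contained sketch of the same pattern (the payload is deflated in memory purely so the example runs on its own):

    import java.io.BufferedInputStream;
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.zip.DeflaterOutputStream;
    import java.util.zip.InflaterInputStream;

    public class InflateStreamDemo {
        public static void main(String[] args) throws IOException {
            // Stand-in for the deflated bytes a PackChunk fragment would hold.
            ByteArrayOutputStream deflated = new ByteArrayOutputStream();
            try (DeflaterOutputStream out = new DeflaterOutputStream(deflated)) {
                out.write("example object payload".getBytes("UTF-8"));
            }

            // Same wrapping order as openStream(): inflate first, then buffer the inflated stream.
            InputStream raw = new ByteArrayInputStream(deflated.toByteArray());
            try (InputStream in = new BufferedInputStream(new InflaterInputStream(raw), 8192)) {
                ByteArrayOutputStream inflated = new ByteArrayOutputStream();
                byte[] buf = new byte[64];
                for (int n; (n = in.read(buf)) != -1;)
                    inflated.write(buf, 0, n);
                System.out.println(inflated.toString("UTF-8"));
            }
        }
    }
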
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectIndexKey.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectIndexKey.java
deleted file mode 100644 (file)
index ab8f835..0000000
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.storage.dht.KeyUtils.format32;
-import static org.eclipse.jgit.storage.dht.KeyUtils.parse32;
-import static org.eclipse.jgit.util.RawParseUtils.decode;
-
-import java.text.MessageFormat;
-
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.lib.ObjectId;
-
-/** Identifies an ObjectId in the DHT. */
-public final class ObjectIndexKey extends ObjectId implements RowKey {
-       private static final int KEYLEN = 49;
-
-       /**
-        * @param repo
-        * @param objId
-        * @return the key
-        */
-       public static ObjectIndexKey create(RepositoryKey repo, AnyObjectId objId) {
-               return new ObjectIndexKey(repo.asInt(), objId);
-       }
-
-       /**
-        * @param key
-        * @return the key
-        */
-       public static ObjectIndexKey fromBytes(byte[] key) {
-               if (key.length != KEYLEN)
-                       throw new IllegalArgumentException(MessageFormat.format(
-                                       DhtText.get().invalidChunkKey, decode(key)));
-
-               int repo = parse32(key, 0);
-               ObjectId id = ObjectId.fromString(key, 9);
-               return new ObjectIndexKey(repo, id);
-       }
-
-       /**
-        * @param key
-        * @return the key
-        */
-       public static ObjectIndexKey fromString(String key) {
-               return fromBytes(Constants.encodeASCII(key));
-       }
-
-       private final int repo;
-
-       ObjectIndexKey(int repo, AnyObjectId objId) {
-               super(objId);
-               this.repo = repo;
-       }
-
-       /** @return the repository that contains the object. */
-       public RepositoryKey getRepositoryKey() {
-               return RepositoryKey.fromInt(repo);
-       }
-
-       int getRepositoryId() {
-               return repo;
-       }
-
-       public byte[] asBytes() {
-               byte[] r = new byte[KEYLEN];
-               format32(r, 0, repo);
-               r[8] = '.';
-               copyTo(r, 9);
-               return r;
-       }
-
-       public String asString() {
-               return decode(asBytes());
-       }
-
-       @Override
-       public String toString() {
-               return "object-index:" + asString();
-       }
-}
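
asBytes() above lays the key out as 8 hex digits of the repository id, an ASCII '.', then the 40 hex digit object name, which is where KEYLEN = 49 comes from. A quick illustration of that layout with sample values (the object name below is just an example SHA-1 string):

    public class ObjectIndexKeyLayoutDemo {
        public static void main(String[] args) {
            String repo = String.format("%08x", 0x1a2b);                     // 8 chars
            String objectName = "8d9bfc3153cbde1c416b1b5fe5bfdeb9a719ba9f";  // 40 chars, sample value
            String key = repo + "." + objectName;

            System.out.println(key);
            System.out.println(key.length());   // 49, matching KEYLEN
        }
    }
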
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectInfo.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectInfo.java
deleted file mode 100644 (file)
index 9123a8b..0000000
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.List;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
-import org.eclipse.jgit.lib.ObjectId;
-
-/** Connects an object to the chunk it is stored in. */
-public class ObjectInfo {
-       /** Orders ObjectInfo by their time member, oldest first. */
-       public static final Comparator<ObjectInfo> BY_TIME = new Comparator<ObjectInfo>() {
-               public int compare(ObjectInfo a, ObjectInfo b) {
-                       return Long.signum(a.getTime() - b.getTime());
-               }
-       };
-
-       /**
-        * Sort the info list according to time, oldest member first.
-        *
-        * @param toSort
-        *            list to sort.
-        */
-       public static void sort(List<ObjectInfo> toSort) {
-               Collections.sort(toSort, BY_TIME);
-       }
-
-       private final ChunkKey chunk;
-
-       private final long time;
-
-       private final GitStore.ObjectInfo data;
-
-       /**
-        * Wrap an ObjectInfo from the storage system.
-        *
-        * @param chunkKey
-        *            the chunk the object points to.
-        * @param data
-        *            the data of the ObjectInfo.
-        */
-       public ObjectInfo(ChunkKey chunkKey, GitStore.ObjectInfo data) {
-               this.chunk = chunkKey;
-               this.time = 0;
-               this.data = data;
-       }
-
-       /**
-        * Wrap an ObjectInfo from the storage system.
-        *
-        * @param chunkKey
-        *            the chunk the object points to.
-        * @param time
-        *            timestamp of the ObjectInfo.
-        * @param data
-        *            the data of the ObjectInfo.
-        */
-       public ObjectInfo(ChunkKey chunkKey, long time, GitStore.ObjectInfo data) {
-               this.chunk = chunkKey;
-               this.time = time < 0 ? 0 : time;
-               this.data = data;
-       }
-
-       /** @return the chunk this link points to. */
-       public ChunkKey getChunkKey() {
-               return chunk;
-       }
-
-       /** @return approximate time the object was created, in milliseconds. */
-       public long getTime() {
-               return time;
-       }
-
-       /** @return GitStore.ObjectInfo to embed in the database. */
-       public GitStore.ObjectInfo getData() {
-               return data;
-       }
-
-       /** @return type of the object, in OBJ_* constants. */
-       public int getType() {
-               return data.getObjectType().getNumber();
-       }
-
-       /** @return size of the object when fully inflated. */
-       public long getSize() {
-               return data.getInflatedSize();
-       }
-
-       /** @return true if the object storage uses delta compression. */
-       public boolean isDelta() {
-               return data.hasDeltaBase();
-       }
-
-       /** @return true if the object has been fragmented across chunks. */
-       public boolean isFragmented() {
-               return data.getIsFragmented();
-       }
-
-       int getOffset() {
-               return data.getOffset();
-       }
-
-       long getPackedSize() {
-               return data.getPackedSize();
-       }
-
-       ObjectId getDeltaBase() {
-               if (data.hasDeltaBase())
-                       return ObjectId.fromRaw(data.getDeltaBase().toByteArray(), 0);
-               return null;
-       }
-
-       @Override
-       public String toString() {
-               StringBuilder b = new StringBuilder();
-               b.append("ObjectInfo:");
-               b.append(chunk);
-               if (0 < time)
-                       b.append(" @ ").append(new Date(time));
-               b.append("\n");
-               b.append(data.toString());
-               return b.toString();
-       }
-}
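
BY_TIME above orders entries oldest first with Long.signum of the timestamp difference; that subtraction cannot overflow because the constructor clamps negative times to zero. A standalone illustration of the same comparator shape with plain timestamps:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class OldestFirstDemo {
        public static void main(String[] args) {
            List<Long> times = new ArrayList<>();
            Collections.addAll(times, 1700000000000L, 1600000000000L, 1650000000000L);

            // Same shape as BY_TIME: signum of the difference, oldest entry first.
            times.sort((a, b) -> Long.signum(a - b));

            System.out.println(times);   // [1600000000000, 1650000000000, 1700000000000]
        }
    }
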
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectWriter.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectWriter.java
deleted file mode 100644 (file)
index d36b03b..0000000
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.Semaphore;
-import java.util.concurrent.atomic.AtomicReference;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.util.BlockList;
-
-/**
- * Re-orders objects destined for a pack stream by chunk locality.
- * <p>
- * By re-ordering objects according to chunk locality, and then the original
- * order the PackWriter intended to use, objects can be copied quickly from
- * chunks, and each chunk is visited at most once. A {@link Prefetcher} for the
- * {@link DhtReader} is used to fetch chunks in the order they will be used,
- * improving throughput by reducing the number of round-trips required to the
- * storage system.
- */
-final class ObjectWriter {
-       private final DhtReader ctx;
-
-       private final Prefetcher prefetch;
-
-       private final int batchSize;
-
-       private final Semaphore metaBatches;
-
-       private final AtomicReference<DhtException> metaError;
-
-       private final LinkedHashMap<ChunkKey, Integer> allVisits;
-
-       private final Map<ChunkKey, ChunkMeta> allMeta;
-
-       private final Set<ChunkKey> metaMissing;
-
-       private Set<ChunkKey> metaToRead;
-
-       private int curVisit;
-
-       ObjectWriter(DhtReader ctx, Prefetcher prefetch) {
-               this.ctx = ctx;
-               this.prefetch = prefetch;
-
-               batchSize = ctx.getOptions().getObjectIndexBatchSize();
-               metaBatches = new Semaphore(batchSize);
-               metaError = new AtomicReference<DhtException>();
-
-               allVisits = new LinkedHashMap<ChunkKey, Integer>();
-               allMeta = new HashMap<ChunkKey, ChunkMeta>();
-               metaMissing = new HashSet<ChunkKey>();
-               metaToRead = new HashSet<ChunkKey>();
-               curVisit = 1;
-       }
-
-       void plan(List<DhtObjectToPack> list) throws DhtException {
-               try {
-                       for (DhtObjectToPack obj : list)
-                               visit(obj);
-
-                       if (!metaToRead.isEmpty())
-                               startBatch(Context.FAST_MISSING_OK);
-                       awaitPendingBatches();
-
-                       synchronized (metaMissing) {
-                               if (!metaMissing.isEmpty()) {
-                                       metaBatches.release(batchSize);
-                                       resolveMissing();
-                                       awaitPendingBatches();
-                               }
-                       }
-               } catch (InterruptedException err) {
-                       throw new DhtTimeoutException(err);
-               }
-
-               Iterable<ChunkKey> order;
-               synchronized (allMeta) {
-                       if (allMeta.isEmpty()) {
-                               order = allVisits.keySet();
-                       } else {
-                               BlockList<ChunkKey> keys = new BlockList<ChunkKey>();
-                               for (ChunkKey key : allVisits.keySet()) {
-                                       keys.add(key);
-
-                                       ChunkMeta meta = allMeta.remove(key);
-                                       if (meta != null) {
-                                               for (int i = 1; i < meta.getFragmentCount(); i++)
-                                                       keys.add(ChunkKey.fromString(meta.getFragment(i)));
-                                       }
-                               }
-                               order = keys;
-                       }
-               }
-               prefetch.push(order);
-
-               Collections.sort(list, new Comparator<DhtObjectToPack>() {
-                       public int compare(DhtObjectToPack a, DhtObjectToPack b) {
-                               return a.visitOrder - b.visitOrder;
-                       }
-               });
-       }
-
-       private void visit(DhtObjectToPack obj) throws InterruptedException,
-                       DhtTimeoutException {
-               // Plan the visit to the delta base before the object. This
-               // ensures the base is in the stream first, and OFS_DELTA can
-               // be used for the delta.
-               //
-               DhtObjectToPack base = (DhtObjectToPack) obj.getDeltaBase();
-               if (base != null && base.visitOrder == 0) {
-                       // Use the current visit, even if it's wrong. This will
-                       // prevent infinite recursion when there is a cycle in the
-                       // delta chain. Cycles are broken during writing, not in
-                       // the earlier planning phases.
-                       //
-                       obj.visitOrder = curVisit;
-                       visit(base);
-               }
-
-               ChunkKey key = obj.chunk;
-               if (key != null) {
-                       Integer i = allVisits.get(key);
-                       if (i == null) {
-                               i = Integer.valueOf(1 + allVisits.size());
-                               allVisits.put(key, i);
-                       }
-                       curVisit = i.intValue();
-               }
-
-               if (obj.isFragmented()) {
-                       metaToRead.add(key);
-                       if (metaToRead.size() == batchSize)
-                               startBatch(Context.FAST_MISSING_OK);
-               }
-               obj.visitOrder = curVisit;
-       }
-
-       private void resolveMissing() throws DhtTimeoutException,
-                       InterruptedException {
-               metaToRead = new HashSet<ChunkKey>();
-               for (ChunkKey key : metaMissing) {
-                       metaToRead.add(key);
-                       if (metaToRead.size() == batchSize)
-                               startBatch(Context.LOCAL);
-               }
-               if (!metaToRead.isEmpty())
-                       startBatch(Context.LOCAL);
-       }
-
-       private void startBatch(Context context) throws InterruptedException,
-                       DhtTimeoutException {
-               Timeout to = ctx.getOptions().getTimeout();
-               if (!metaBatches.tryAcquire(1, to.getTime(), to.getUnit()))
-                       throw new DhtTimeoutException(DhtText.get().timeoutChunkMeta);
-
-               Set<ChunkKey> keys = metaToRead;
-               ctx.getDatabase().chunk().getMeta(
-                               context,
-                               keys,
-                               new MetaLoader(context, keys));
-               metaToRead = new HashSet<ChunkKey>();
-       }
-
-       private void awaitPendingBatches() throws InterruptedException,
-                       DhtTimeoutException, DhtException {
-               Timeout to = ctx.getOptions().getTimeout();
-               if (!metaBatches.tryAcquire(batchSize, to.getTime(), to.getUnit()))
-                       throw new DhtTimeoutException(DhtText.get().timeoutChunkMeta);
-               if (metaError.get() != null)
-                       throw metaError.get();
-       }
-
-       private class MetaLoader implements AsyncCallback<Map<ChunkKey, ChunkMeta>> {
-               private final Context context;
-
-               private final Set<ChunkKey> keys;
-
-               MetaLoader(Context context, Set<ChunkKey> keys) {
-                       this.context = context;
-                       this.keys = keys;
-               }
-
-               public void onSuccess(Map<ChunkKey, ChunkMeta> result) {
-                       try {
-                               synchronized (allMeta) {
-                                       allMeta.putAll(result);
-                                       keys.removeAll(result.keySet());
-                               }
-                               if (context == Context.FAST_MISSING_OK && !keys.isEmpty()) {
-                                       synchronized (metaMissing) {
-                                               metaMissing.addAll(keys);
-                                       }
-                               }
-                       } finally {
-                               metaBatches.release(1);
-                       }
-               }
-
-               public void onFailure(DhtException error) {
-                       metaError.compareAndSet(null, error);
-                       metaBatches.release(1);
-               }
-       }
-}
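
The startBatch()/awaitPendingBatches()/MetaLoader code above throttles asynchronous
metadata reads with a counting semaphore and remembers only the first failure. A minimal
standalone sketch of that pattern follows; BatchThrottle, AsyncStore and Callback are
hypothetical names and are not part of the deleted JGit API.

import java.util.Set;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

final class BatchThrottle {
	private static final int MAX_BATCHES = 4; // assumed cap on outstanding reads
	private final Semaphore slots = new Semaphore(MAX_BATCHES);
	private final AtomicReference<Exception> error = new AtomicReference<Exception>();

	interface AsyncStore { // hypothetical asynchronous backend
		void getMeta(Set<String> keys, Callback cb);
	}

	interface Callback {
		void onSuccess(Set<String> found);
		void onFailure(Exception err);
	}

	void startBatch(AsyncStore db, Set<String> keys) throws Exception {
		// Block until fewer than MAX_BATCHES reads are in flight, as the
		// deleted code does with metaBatches.tryAcquire().
		if (!slots.tryAcquire(1, 500, TimeUnit.MILLISECONDS))
			throw new Exception("timed out waiting for a batch slot");
		db.getMeta(keys, new Callback() {
			public void onSuccess(Set<String> found) {
				slots.release(1); // slot is freed when the batch completes
			}

			public void onFailure(Exception err) {
				error.compareAndSet(null, err); // keep only the first error
				slots.release(1);
			}
		});
	}

	void awaitPendingBatches() throws Exception {
		// Reacquiring every permit proves all outstanding batches finished.
		if (!slots.tryAcquire(MAX_BATCHES, 500, TimeUnit.MILLISECONDS))
			throw new Exception("timed out draining batches");
		if (error.get() != null)
			throw error.get();
	}
}
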
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/OpenQueue.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/OpenQueue.java
deleted file mode 100644 (file)
index 32b2234..0000000
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-package org.eclipse.jgit.storage.dht;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.eclipse.jgit.errors.MissingObjectException;
-import org.eclipse.jgit.lib.AsyncObjectLoaderQueue;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectLoader;
-import org.eclipse.jgit.lib.ObjectReader;
-
-/**
- * Locates objects in large batches, then opens them clustered by chunk.
- * <p>
- * To simplify the implementation, this class performs lookups for the
- * {@link ObjectInfo} in large batches, clusters those by ChunkKey, and loads
- * the chunks with a {@link Prefetcher}.
- * <p>
- * The lookup queue is completely spun out during the first invocation of
- * {@link #next()}, ensuring all chunks are known before any single chunk is
- * accessed. This is necessary to improve access locality and prevent thrashing
- * of the local ChunkCache. It also causes {@link MissingObjectException} to be
- * thrown at the start of traversal, until the lookup queue is exhausted.
- *
- * @param <T>
- *            type of object to associate with the loader.
- */
-final class OpenQueue<T extends ObjectId> extends QueueObjectLookup<T>
-               implements AsyncObjectLoaderQueue<T> {
-       private Map<ChunkKey, Collection<ObjectWithInfo<T>>> byChunk;
-
-       private Iterator<Collection<ObjectWithInfo<T>>> chunkItr;
-
-       private Iterator<ObjectWithInfo<T>> objectItr;
-
-       private Prefetcher prefetcher;
-
-       private ObjectWithInfo<T> current;
-
-       private PackChunk currChunk;
-
-       OpenQueue(DhtReader reader, Iterable<T> objectIds, boolean reportMissing) {
-               super(reader, reportMissing);
-               setCacheLoadedInfo(true);
-               setNeedChunkOnly(true);
-               init(objectIds);
-
-               byChunk = new LinkedHashMap<ChunkKey, Collection<ObjectWithInfo<T>>>();
-               objectItr = Collections.<ObjectWithInfo<T>> emptyList().iterator();
-       }
-
-       public boolean next() throws MissingObjectException, IOException {
-               if (chunkItr == null)
-                       init();
-
-               if (!objectItr.hasNext()) {
-                       currChunk = null;
-                       if (!chunkItr.hasNext()) {
-                               release();
-                               return false;
-                       }
-                       objectItr = chunkItr.next().iterator();
-               }
-
-               current = objectItr.next();
-               return true;
-       }
-
-       public T getCurrent() {
-               return current.object;
-       }
-
-       public ObjectId getObjectId() {
-               return getCurrent();
-       }
-
-       public ObjectLoader open() throws IOException {
-               ChunkKey chunkKey = current.chunkKey;
-
-               // Objects returned by the queue are clustered by chunk. This object
-               // is either in the current chunk, or is in the next chunk ready on the
-               // prefetcher. Anything else is a programming error.
-               //
-               PackChunk chunk;
-               if (currChunk != null && chunkKey.equals(currChunk.getChunkKey()))
-                       chunk = currChunk;
-               else {
-                       chunk = prefetcher.get(chunkKey);
-                       if (chunk == null)
-                               throw new DhtMissingChunkException(chunkKey);
-                       currChunk = chunk;
-                       reader.recentChunk(chunk);
-               }
-
-               if (current.info != null) {
-                       int ptr = current.info.getOffset();
-                       int type = current.info.getType();
-                       return PackChunk.read(chunk, ptr, reader, type);
-               } else {
-                       int ptr = chunk.findOffset(repo, current.object);
-                       if (ptr < 0)
-                               throw DhtReader.missing(current.object, ObjectReader.OBJ_ANY);
-                       return PackChunk.read(chunk, ptr, reader, ObjectReader.OBJ_ANY);
-               }
-       }
-
-       @Override
-       public boolean cancel(boolean mayInterruptIfRunning) {
-               release();
-               return true;
-       }
-
-       @Override
-       public void release() {
-               reader.getRecentChunks().setMaxBytes(reader.getOptions().getChunkLimit());
-               prefetcher = null;
-               currChunk = null;
-       }
-
-       private void init() throws IOException {
-               ObjectWithInfo<T> c;
-
-               while ((c = nextObjectWithInfo()) != null) {
-                       ChunkKey chunkKey = c.chunkKey;
-                       Collection<ObjectWithInfo<T>> list = byChunk.get(chunkKey);
-                       if (list == null) {
-                               list = new ArrayList<ObjectWithInfo<T>>();
-                               byChunk.put(chunkKey, list);
-
-                               if (prefetcher == null) {
-                                       int limit = reader.getOptions().getChunkLimit();
-                                       int ratio = reader.getOptions().getOpenQueuePrefetchRatio();
-                                       int prefetchLimit = (int) (limit * (ratio / 100.0));
-                                       reader.getRecentChunks().setMaxBytes(limit - prefetchLimit);
-                                       prefetcher = new Prefetcher(reader, 0, prefetchLimit);
-                               }
-                               prefetcher.push(chunkKey);
-                       }
-                       list.add(c);
-               }
-
-               chunkItr = byChunk.values().iterator();
-       }
-}
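
OpenQueue.init() above groups every pending lookup by the chunk that holds it before any
chunk is opened, so each chunk is fetched once and all of its objects are served together.
A minimal sketch of that grouping step, with plain String stand-ins for ChunkKey and
ObjectId (hypothetical names, not the deleted API):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

final class ClusterByChunk {
	/** Group object names by chunk key, preserving the order chunks were first seen. */
	static Map<String, List<String>> cluster(List<String[]> lookups) {
		Map<String, List<String>> byChunk = new LinkedHashMap<String, List<String>>();
		for (String[] pair : lookups) {
			String chunkKey = pair[0]; // pair = { chunkKey, objectId }
			String objectId = pair[1];
			List<String> list = byChunk.get(chunkKey);
			if (list == null) {
				list = new ArrayList<String>();
				byChunk.put(chunkKey, list);
				// First sighting of this chunk: the real code would also
				// push the key into the Prefetcher at this point.
			}
			list.add(objectId);
		}
		return byChunk;
	}
}

Iterating byChunk.values() then yields every object of one chunk before moving to the
next, which is the ordering next() relies on above.
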
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/PackChunk.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/PackChunk.java
deleted file mode 100644 (file)
index 57d357e..0000000
+++ /dev/null
@@ -1,795 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.lib.Constants.OBJ_BAD;
-import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
-import static org.eclipse.jgit.lib.Constants.OBJ_COMMIT;
-import static org.eclipse.jgit.lib.Constants.OBJ_OFS_DELTA;
-import static org.eclipse.jgit.lib.Constants.OBJ_REF_DELTA;
-import static org.eclipse.jgit.lib.Constants.OBJ_TAG;
-import static org.eclipse.jgit.lib.Constants.OBJ_TREE;
-import static org.eclipse.jgit.lib.Constants.newMessageDigest;
-import static org.eclipse.jgit.storage.dht.ChunkFormatter.TRAILER_SIZE;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.security.MessageDigest;
-import java.text.MessageFormat;
-import java.util.zip.DataFormatException;
-import java.util.zip.Inflater;
-
-import org.eclipse.jgit.errors.CorruptObjectException;
-import org.eclipse.jgit.errors.LargeObjectException;
-import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.ObjectLoader;
-import org.eclipse.jgit.storage.pack.BinaryDelta;
-import org.eclipse.jgit.storage.pack.PackOutputStream;
-import org.eclipse.jgit.transport.PackParser;
-
-/**
- * Chunk of object data, stored under a {@link ChunkKey}.
- * <p>
- * A chunk typically contains thousands of objects, compressed in the Git native
- * pack file format. Its associated {@link ChunkIndex} provides offsets for each
- * object's header and compressed data.
- * <p>
- * Chunks (and their indexes) are opaque binary blobs meant only to be read by
- * the Git implementation.
- */
-public final class PackChunk {
-       /** Constructs a {@link PackChunk} while reading from the DHT. */
-       public static class Members {
-               private ChunkKey chunkKey;
-
-               private byte[] dataBuf;
-
-               private int dataPtr;
-
-               private int dataLen;
-
-               private byte[] indexBuf;
-
-               private int indexPtr;
-
-               private int indexLen;
-
-               private ChunkMeta meta;
-
-               /** @return the chunk key. Never null. */
-               public ChunkKey getChunkKey() {
-                       return chunkKey;
-               }
-
-               /**
-                * @param key
-                * @return {@code this}
-                */
-               public Members setChunkKey(ChunkKey key) {
-                       this.chunkKey = key;
-                       return this;
-               }
-
-               /** @return true if there is chunk data present. */
-               public boolean hasChunkData() {
-                       return dataBuf != null;
-               }
-
-               /** @return the chunk data, or null if not available. */
-               public byte[] getChunkData() {
-                       return asArray(dataBuf, dataPtr, dataLen);
-               }
-
-               /** @return the chunk data, or null if not available. */
-               public ByteBuffer getChunkDataAsByteBuffer() {
-                       return asByteBuffer(dataBuf, dataPtr, dataLen);
-               }
-
-               private static byte[] asArray(byte[] buf, int ptr, int len) {
-                       if (buf == null)
-                               return null;
-                       if (ptr == 0 && buf.length == len)
-                               return buf;
-                       byte[] r = new byte[len];
-                       System.arraycopy(buf, ptr, r, 0, len);
-                       return r;
-               }
-
-               private static ByteBuffer asByteBuffer(byte[] buf, int ptr, int len) {
-                       return buf != null ? ByteBuffer.wrap(buf, ptr, len) : null;
-               }
-
-               /**
-                * @param chunkData
-                * @return {@code this}
-                */
-               public Members setChunkData(byte[] chunkData) {
-                       return setChunkData(chunkData, 0, chunkData.length);
-               }
-
-               /**
-                * @param chunkData
-                * @param ptr
-                * @param len
-                * @return {@code this}
-                */
-               public Members setChunkData(byte[] chunkData, int ptr, int len) {
-                       this.dataBuf = chunkData;
-                       this.dataPtr = ptr;
-                       this.dataLen = len;
-                       return this;
-               }
-
-               /** @return true if there is a chunk index present. */
-               public boolean hasChunkIndex() {
-                       return indexBuf != null;
-               }
-
-               /** @return the chunk index, or null if not available. */
-               public byte[] getChunkIndex() {
-                       return asArray(indexBuf, indexPtr, indexLen);
-               }
-
-               /** @return the chunk index, or null if not available. */
-               public ByteBuffer getChunkIndexAsByteBuffer() {
-                       return asByteBuffer(indexBuf, indexPtr, indexLen);
-               }
-
-               /**
-                * @param chunkIndex
-                * @return {@code this}
-                */
-               public Members setChunkIndex(byte[] chunkIndex) {
-                       return setChunkIndex(chunkIndex, 0, chunkIndex.length);
-               }
-
-               /**
-                * @param chunkIndex
-                * @param ptr
-                * @param len
-                * @return {@code this}
-                */
-               public Members setChunkIndex(byte[] chunkIndex, int ptr, int len) {
-                       this.indexBuf = chunkIndex;
-                       this.indexPtr = ptr;
-                       this.indexLen = len;
-                       return this;
-               }
-
-               /** @return true if there is meta information present. */
-               public boolean hasMeta() {
-                       return meta != null;
-               }
-
-               /** @return the inline meta data, or null if not available. */
-               public ChunkMeta getMeta() {
-                       return meta;
-               }
-
-               /**
-                * @param meta
-                * @return {@code this}
-                */
-               public Members setMeta(ChunkMeta meta) {
-                       this.meta = meta;
-                       return this;
-               }
-
-               /**
-                * @return the PackChunk instance.
-                * @throws DhtException
-                *             if early validation indicates the chunk data is corrupt
-                *             or not recognized by this version of the library.
-                */
-               public PackChunk build() throws DhtException {
-                       ChunkIndex i;
-                       if (indexBuf != null)
-                               i = ChunkIndex.fromBytes(chunkKey, indexBuf, indexPtr, indexLen);
-                       else
-                               i = null;
-
-                       return new PackChunk(chunkKey, dataBuf, dataPtr, dataLen, i, meta);
-               }
-       }
-
-       private static final int INFLATE_STRIDE = 512;
-
-       private final ChunkKey key;
-
-       private final byte[] dataBuf;
-
-       private final int dataPtr;
-
-       private final int dataLen;
-
-       private final ChunkIndex index;
-
-       private final ChunkMeta meta;
-
-       private volatile Boolean valid;
-
-       PackChunk(ChunkKey key, byte[] dataBuf, int dataPtr, int dataLen,
-                       ChunkIndex index, ChunkMeta meta) {
-               this.key = key;
-               this.dataBuf = dataBuf;
-               this.dataPtr = dataPtr;
-               this.dataLen = dataLen;
-               this.index = index;
-               this.meta = meta;
-       }
-
-       /** @return unique name of this chunk in the database. */
-       public ChunkKey getChunkKey() {
-               return key;
-       }
-
-       /** @return index describing the objects stored within this chunk. */
-       public ChunkIndex getIndex() {
-               return index;
-       }
-
-       /** @return inline meta information, or null if no data was necessary. */
-       public ChunkMeta getMeta() {
-               return meta;
-       }
-
-       @Override
-       public String toString() {
-               return "PackChunk[" + getChunkKey() + "]";
-       }
-
-       boolean hasIndex() {
-               return index != null;
-       }
-
-       boolean isFragment() {
-               return meta != null && 0 < meta.getFragmentCount();
-       }
-
-       int findOffset(RepositoryKey repo, AnyObjectId objId) {
-               if (key.getRepositoryId() == repo.asInt() && index != null)
-                       return index.findOffset(objId);
-               return -1;
-       }
-
-       boolean contains(RepositoryKey repo, AnyObjectId objId) {
-               return 0 <= findOffset(repo, objId);
-       }
-
-       static ObjectLoader read(PackChunk pc, int pos, final DhtReader ctx,
-                       final int typeHint) throws IOException {
-               try {
-                       return read1(pc, pos, ctx, typeHint, true /* use recentChunks */);
-               } catch (DeltaChainCycleException cycleFound) {
-                       // A cycle can occur if the recentChunks cache was used by the reader
-                       // to satisfy an OBJ_REF_DELTA, but the chunk that was chosen has
-                       // a reverse delta back onto an object already being read during
-                       // this invocation. It's not as uncommon as it sounds, as the Git
-                       // wire protocol can sometimes copy an object the repository already
-                       // has when dealing with reverts or cherry-picks.
-                       //
-                       // Work around the cycle by disabling the recentChunks cache for
-                       // this resolution only. This will force the DhtReader to re-read
-                       // OBJECT_INDEX and consider only the oldest chunk for any given
-                       // object. There cannot be a cycle if the method only walks along
-                       // the oldest chunks.
-                       try {
-                               ctx.getStatistics().deltaChainCycles++;
-                               return read1(pc, pos, ctx, typeHint, false /* no recentChunks */);
-                       } catch (DeltaChainCycleException cannotRecover) {
-                               throw new DhtException(MessageFormat.format(
-                                               DhtText.get().cycleInDeltaChain, pc.getChunkKey(),
-                                               Integer.valueOf(pos)));
-                       }
-               }
-       }
-
-       @SuppressWarnings("null")
-       private static ObjectLoader read1(PackChunk pc, int pos,
-                       final DhtReader ctx, final int typeHint, final boolean recent)
-                       throws IOException, DeltaChainCycleException {
-               try {
-                       Delta delta = null;
-                       byte[] data = null;
-                       int type = OBJ_BAD;
-                       boolean cached = false;
-
-                       SEARCH: for (;;) {
-                               final byte[] dataBuf = pc.dataBuf;
-                               final int dataPtr = pc.dataPtr;
-                               final int posPtr = dataPtr + pos;
-                               int c = dataBuf[posPtr] & 0xff;
-                               int typeCode = (c >> 4) & 7;
-                               long sz = c & 15;
-                               int shift = 4;
-                               int p = 1;
-                               while ((c & 0x80) != 0) {
-                                       c = dataBuf[posPtr + p++] & 0xff;
-                                       sz += ((long) (c & 0x7f)) << shift;
-                                       shift += 7;
-                               }
-
-                               switch (typeCode) {
-                               case OBJ_COMMIT:
-                               case OBJ_TREE:
-                               case OBJ_BLOB:
-                               case OBJ_TAG: {
-                                       if (delta != null) {
-                                               data = inflate(sz, pc, pos + p, ctx);
-                                               type = typeCode;
-                                               break SEARCH;
-                                       }
-
-                                       if (sz < Integer.MAX_VALUE && !pc.isFragment()) {
-                                               try {
-                                                       data = pc.inflateOne(sz, pos + p, ctx);
-                                                       return new ObjectLoader.SmallObject(typeCode, data);
-                                               } catch (LargeObjectException tooBig) {
-                                                       // Fall through and stream.
-                                               }
-                                       }
-
-                                       return new LargeNonDeltaObject(typeCode, sz, pc, pos + p, ctx);
-                               }
-
-                               case OBJ_OFS_DELTA: {
-                                       c = dataBuf[posPtr + p++] & 0xff;
-                                       long base = c & 127;
-                                       while ((c & 128) != 0) {
-                                               base += 1;
-                                               c = dataBuf[posPtr + p++] & 0xff;
-                                               base <<= 7;
-                                               base += (c & 127);
-                                       }
-
-                                       ChunkKey baseChunkKey;
-                                       int basePosInChunk;
-
-                                       if (base <= pos) {
-                                               // Base occurs in the same chunk, just earlier.
-                                               baseChunkKey = pc.getChunkKey();
-                                               basePosInChunk = pos - (int) base;
-                                       } else {
-                                               // Long offset delta, base occurs in another chunk.
-                                               // Adjust distance to be from our chunk start.
-                                               base = base - pos;
-
-                                               ChunkMeta.BaseChunk baseChunk;
-                                               baseChunk = ChunkMetaUtil.getBaseChunk(
-                                                               pc.key,
-                                                               pc.meta,
-                                                               base);
-                                               baseChunkKey = ChunkKey.fromString(baseChunk.getChunkKey());
-                                               basePosInChunk = (int) (baseChunk.getRelativeStart() - base);
-                                       }
-
-                                       delta = new Delta(delta, //
-                                                       pc.key, pos, (int) sz, p, //
-                                                       baseChunkKey, basePosInChunk);
-                                       if (sz != delta.deltaSize)
-                                               break SEARCH;
-
-                                       DeltaBaseCache.Entry e = delta.getBase(ctx);
-                                       if (e != null) {
-                                               type = e.type;
-                                               data = e.data;
-                                               cached = true;
-                                               break SEARCH;
-                                       }
-                                       if (baseChunkKey != pc.getChunkKey())
-                                               pc = ctx.getChunk(baseChunkKey);
-                                       pos = basePosInChunk;
-                                       continue SEARCH;
-                               }
-
-                               case OBJ_REF_DELTA: {
-                                       ObjectId id = ObjectId.fromRaw(dataBuf, posPtr + p);
-                                       PackChunk nc = pc;
-                                       int base = pc.index.findOffset(id);
-                                       if (base < 0) {
-                                               DhtReader.ChunkAndOffset n;
-                                               n = ctx.getChunk(id, typeHint, recent);
-                                               nc = n.chunk;
-                                               base = n.offset;
-                                       }
-                                       checkCycle(delta, pc.key, pos);
-                                       delta = new Delta(delta, //
-                                                       pc.key, pos, (int) sz, p + 20, //
-                                                       nc.getChunkKey(), base);
-                                       if (sz != delta.deltaSize)
-                                               break SEARCH;
-
-                                       DeltaBaseCache.Entry e = delta.getBase(ctx);
-                                       if (e != null) {
-                                               type = e.type;
-                                               data = e.data;
-                                               cached = true;
-                                               break SEARCH;
-                                       }
-                                       pc = nc;
-                                       pos = base;
-                                       continue SEARCH;
-                               }
-
-                               default:
-                                       throw new DhtException(MessageFormat.format(
-                                                       DhtText.get().unsupportedObjectTypeInChunk, //
-                                                       Integer.valueOf(typeCode), //
-                                                       pc.getChunkKey(), //
-                                                       Integer.valueOf(pos)));
-                               }
-                       }
-
-                       // At this point there is at least one delta to apply to data.
-                       // (Whole objects with no deltas to apply return early above.)
-
-                       do {
-                               if (!delta.deltaChunk.equals(pc.getChunkKey()))
-                                       pc = ctx.getChunk(delta.deltaChunk);
-                               pos = delta.deltaPos;
-
-                               // Cache only the base immediately before the desired object.
-                               if (cached)
-                                       cached = false;
-                               else if (delta.next == null)
-                                       delta.putBase(ctx, type, data);
-
-                               final byte[] cmds = delta.decompress(pc, ctx);
-                               final long sz = BinaryDelta.getResultSize(cmds);
-                               final byte[] result = newResult(sz);
-                               BinaryDelta.apply(data, cmds, result);
-                               data = result;
-                               delta = delta.next;
-                       } while (delta != null);
-
-                       return new ObjectLoader.SmallObject(type, data);
-
-               } catch (DataFormatException dfe) {
-                       CorruptObjectException coe = new CorruptObjectException(
-                                       MessageFormat.format(DhtText.get().corruptCompressedObject,
-                                                       pc.getChunkKey(), Integer.valueOf(pos)));
-                       coe.initCause(dfe);
-                       throw coe;
-               }
-       }
-
-       private static byte[] inflate(long sz, PackChunk pc, int pos,
-                       DhtReader reader) throws DataFormatException, DhtException {
-               if (pc.isFragment())
-                       return inflateFragment(sz, pc, pos, reader);
-               return pc.inflateOne(sz, pos, reader);
-       }
-
-       private byte[] inflateOne(long sz, int pos, DhtReader reader)
-                       throws DataFormatException {
-               // Because the chunk ends in a 4-byte CRC, there is always
-               // more data available for input than the inflater needs.
-               // This also helps with an optimization in libz where it
-               // wants at least 1 extra byte of input beyond the end.
-
-               final byte[] dstbuf = newResult(sz);
-               final Inflater inf = reader.inflater();
-               final int offset = pos;
-               int dstoff = 0;
-
-               int bs = Math.min(dataLen - pos, INFLATE_STRIDE);
-               inf.setInput(dataBuf, dataPtr + pos, bs);
-               pos += bs;
-
-               while (dstoff < dstbuf.length) {
-                       int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
-                       if (n == 0) {
-                               if (inf.needsInput()) {
-                                       bs = Math.min(dataLen - pos, INFLATE_STRIDE);
-                                       inf.setInput(dataBuf, dataPtr + pos, bs);
-                                       pos += bs;
-                                       continue;
-                               }
-                               break;
-                       }
-                       dstoff += n;
-               }
-
-               if (dstoff != sz) {
-                       throw new DataFormatException(MessageFormat.format(
-                                       DhtText.get().shortCompressedObject,
-                                       getChunkKey(),
-                                       Integer.valueOf(offset)));
-               }
-               return dstbuf;
-       }
-
-       private static byte[] inflateFragment(long sz, PackChunk pc, final int pos,
-                       DhtReader reader) throws DataFormatException, DhtException {
-               byte[] dstbuf = newResult(sz);
-               int dstoff = 0;
-
-               final Inflater inf = reader.inflater();
-               final ChunkMeta meta = pc.meta;
-               int nextChunk = 1;
-
-               int bs = pc.dataLen - pos - TRAILER_SIZE;
-               inf.setInput(pc.dataBuf, pc.dataPtr + pos, bs);
-
-               while (dstoff < dstbuf.length) {
-                       int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
-                       if (n == 0) {
-                               if (inf.needsInput()) {
-                                       if (meta.getFragmentCount() <= nextChunk)
-                                               break;
-                                       pc = reader.getChunk(ChunkKey.fromString(
-                                                       meta.getFragment(nextChunk++)));
-                                       if (meta.getFragmentCount() == nextChunk)
-                                               bs = pc.dataLen; // Include trailer on last chunk.
-                                       else
-                                               bs = pc.dataLen - TRAILER_SIZE;
-                                       inf.setInput(pc.dataBuf, pc.dataPtr, bs);
-                                       continue;
-                               }
-                               break;
-                       }
-                       dstoff += n;
-               }
-
-               if (dstoff != sz) {
-                       throw new DataFormatException(MessageFormat.format(
-                                       DhtText.get().shortCompressedObject,
-                                       ChunkKey.fromString(meta.getFragment(0)),
-                                       Integer.valueOf(pos)));
-               }
-               return dstbuf;
-       }
-
-       private static byte[] newResult(long sz) {
-               if (Integer.MAX_VALUE < sz)
-                       throw new LargeObjectException.ExceedsByteArrayLimit();
-               try {
-                       return new byte[(int) sz];
-               } catch (OutOfMemoryError noMemory) {
-                       throw new LargeObjectException.OutOfMemory(noMemory);
-               }
-       }
-
-       int readObjectTypeAndSize(int ptr, PackParser.ObjectTypeAndSize info) {
-               ptr += dataPtr;
-
-               int c = dataBuf[ptr++] & 0xff;
-               int typeCode = (c >> 4) & 7;
-               long sz = c & 15;
-               int shift = 4;
-               while ((c & 0x80) != 0) {
-                       c = dataBuf[ptr++] & 0xff;
-                       sz += ((long) (c & 0x7f)) << shift;
-                       shift += 7;
-               }
-
-               switch (typeCode) {
-               case OBJ_OFS_DELTA:
-                       c = dataBuf[ptr++] & 0xff;
-                       while ((c & 128) != 0)
-                               c = dataBuf[ptr++] & 0xff;
-                       break;
-
-               case OBJ_REF_DELTA:
-                       ptr += 20;
-                       break;
-               }
-
-               info.type = typeCode;
-               info.size = sz;
-               return ptr - dataPtr;
-       }
-
-       int read(int ptr, byte[] dst, int dstPos, int cnt) {
-               // Do not allow readers to read the CRC-32 from the tail.
-               int n = Math.min(cnt, (dataLen - TRAILER_SIZE) - ptr);
-               System.arraycopy(dataBuf, dataPtr + ptr, dst, dstPos, n);
-               return n;
-       }
-
-       void copyObjectAsIs(PackOutputStream out, DhtObjectToPack obj,
-                       boolean validate, DhtReader ctx) throws IOException,
-                       StoredObjectRepresentationNotAvailableException {
-               if (validate && !isValid()) {
-                       StoredObjectRepresentationNotAvailableException gone;
-
-                       gone = new StoredObjectRepresentationNotAvailableException(obj);
-                       gone.initCause(new DhtException(MessageFormat.format(
-                                       DhtText.get().corruptChunk, getChunkKey())));
-                       throw gone;
-               }
-
-               int ptr = dataPtr + obj.offset;
-               int c = dataBuf[ptr++] & 0xff;
-               int typeCode = (c >> 4) & 7;
-               long inflatedSize = c & 15;
-               int shift = 4;
-               while ((c & 0x80) != 0) {
-                       c = dataBuf[ptr++] & 0xff;
-                       inflatedSize += ((long) (c & 0x7f)) << shift;
-                       shift += 7;
-               }
-
-               switch (typeCode) {
-               case OBJ_OFS_DELTA:
-                       do {
-                               c = dataBuf[ptr++] & 0xff;
-                       } while ((c & 128) != 0);
-                       break;
-
-               case OBJ_REF_DELTA:
-                       ptr += 20;
-                       break;
-               }
-
-               // If the size is positive, it's accurate. If it's -1, this is a
-               // fragmented object that will need more handling below,
-               // so copy all of the chunk, minus the trailer.
-
-               final int maxAvail = (dataLen - TRAILER_SIZE) - (ptr - dataPtr);
-               final int copyLen;
-               if (0 < obj.size)
-                       copyLen = Math.min(obj.size, maxAvail);
-               else if (-1 == obj.size)
-                       copyLen = maxAvail;
-               else
-                       throw new DhtException(MessageFormat.format(
-                                       DhtText.get().expectedObjectSizeDuringCopyAsIs, obj));
-               out.writeHeader(obj, inflatedSize);
-               out.write(dataBuf, ptr, copyLen);
-
-               // If the object was fragmented, send all of the other fragments.
-               if (isFragment()) {
-                       int cnt = meta.getFragmentCount();
-                       for (int fragId = 1; fragId < cnt; fragId++) {
-                               PackChunk pc = ctx.getChunk(ChunkKey.fromString(
-                                               meta.getFragment(fragId)));
-                               pc.copyEntireChunkAsIs(out, obj, validate);
-                       }
-               }
-       }
-
-       void copyEntireChunkAsIs(PackOutputStream out, DhtObjectToPack obj,
-                       boolean validate) throws IOException {
-               if (validate && !isValid()) {
-                       if (obj != null)
-                               throw new CorruptObjectException(obj, MessageFormat.format(
-                                               DhtText.get().corruptChunk, getChunkKey()));
-                       else
-                               throw new DhtException(MessageFormat.format(
-                                               DhtText.get().corruptChunk, getChunkKey()));
-               }
-
-               // Do not copy the trailer onto the output stream.
-               out.write(dataBuf, dataPtr, dataLen - TRAILER_SIZE);
-       }
-
-       @SuppressWarnings("boxing")
-       private boolean isValid() {
-               Boolean v = valid;
-               if (v == null) {
-                       MessageDigest m = newMessageDigest();
-                       m.update(dataBuf, dataPtr, dataLen);
-                       v = key.getChunkHash().compareTo(m.digest(), 0) == 0;
-                       valid = v;
-               }
-               return v.booleanValue();
-       }
-
-       /** @return the complete size of this chunk, in memory. */
-       int getTotalSize() {
-               // Assume the index is part of the buffer, and report its total size.
-               if (dataPtr != 0 || dataLen != dataBuf.length)
-                       return dataBuf.length;
-
-               int sz = dataLen;
-               if (index != null)
-                       sz += index.getIndexSize();
-               return sz;
-       }
-
-       private static class Delta {
-               /** Child that applies onto this object. */
-               final Delta next;
-
-               /** The chunk the delta is stored in. */
-               final ChunkKey deltaChunk;
-
-               /** Offset of the delta object. */
-               final int deltaPos;
-
-               /** Size of the inflated delta stream. */
-               final int deltaSize;
-
-               /** Total size of the delta's pack entry header (including base). */
-               final int hdrLen;
-
-               /** The chunk the base is stored in. */
-               final ChunkKey baseChunk;
-
-               /** Offset of the base object. */
-               final int basePos;
-
-               Delta(Delta next, ChunkKey dc, int ofs, int sz, int hdrLen,
-                               ChunkKey bc, int bp) {
-                       this.next = next;
-                       this.deltaChunk = dc;
-                       this.deltaPos = ofs;
-                       this.deltaSize = sz;
-                       this.hdrLen = hdrLen;
-                       this.baseChunk = bc;
-                       this.basePos = bp;
-               }
-
-               byte[] decompress(PackChunk chunk, DhtReader reader)
-                               throws DataFormatException, DhtException {
-                       return inflate(deltaSize, chunk, deltaPos + hdrLen, reader);
-               }
-
-               DeltaBaseCache.Entry getBase(DhtReader ctx) {
-                       return ctx.getDeltaBaseCache().get(baseChunk, basePos);
-               }
-
-               void putBase(DhtReader ctx, int type, byte[] data) {
-                       ctx.getDeltaBaseCache().put(baseChunk, basePos, type, data);
-               }
-       }
-
-       private static void checkCycle(Delta delta, ChunkKey key, int ofs)
-                       throws DeltaChainCycleException {
-               for (; delta != null; delta = delta.next) {
-                       if (delta.deltaPos == ofs && delta.deltaChunk.equals(key))
-                               throw DeltaChainCycleException.INSTANCE;
-               }
-       }
-
-       private static class DeltaChainCycleException extends Exception {
-               private static final long serialVersionUID = 1L;
-
-               static final DeltaChainCycleException INSTANCE = new DeltaChainCycleException();
-       }
-}
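
Both PackChunk.read1() and readObjectTypeAndSize() above decode the standard pack entry
header: bits 4-6 of the first byte carry the object type, its low 4 bits start the size,
and each continuation byte (high bit set) contributes 7 more size bits. A small standalone
sketch of that decoding; PackHeader is a hypothetical name and uses no JGit types.

final class PackHeader {
	final int type;      // 1=commit, 2=tree, 3=blob, 4=tag, 6=ofs-delta, 7=ref-delta
	final long size;     // inflated size of the object
	final int headerLen; // number of header bytes consumed

	private PackHeader(int type, long size, int headerLen) {
		this.type = type;
		this.size = size;
		this.headerLen = headerLen;
	}

	static PackHeader decode(byte[] buf, int ptr) {
		int p = 0;
		int c = buf[ptr + p++] & 0xff;
		int type = (c >> 4) & 7;  // bits 4-6 hold the object type
		long size = c & 15;       // low 4 bits are the first size nibble
		int shift = 4;
		while ((c & 0x80) != 0) { // high bit set: another size byte follows
			c = buf[ptr + p++] & 0xff;
			size += ((long) (c & 0x7f)) << shift;
			shift += 7;
		}
		return new PackHeader(type, size, p);
	}
}
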
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Prefetcher.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Prefetcher.java
deleted file mode 100644 (file)
index fef2b4f..0000000
+++ /dev/null
@@ -1,423 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.lib.Constants.OBJ_COMMIT;
-import static org.eclipse.jgit.lib.Constants.OBJ_TREE;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.errors.MissingObjectException;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.revwalk.RevCommit;
-import org.eclipse.jgit.revwalk.RevTree;
-import org.eclipse.jgit.storage.dht.DhtReader.ChunkAndOffset;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.Database;
-
-class Prefetcher implements StreamingCallback<Collection<PackChunk.Members>> {
-       private static enum Status {
-               ON_QUEUE, LOADING, WAITING, READY, DONE;
-       }
-
-       private final Database db;
-
-       private final DhtReader.Statistics stats;
-
-       private final int objectType;
-
-       private final HashMap<ChunkKey, PackChunk> ready;
-
-       private final HashMap<ChunkKey, Status> status;
-
-       private final LinkedList<ChunkKey> queue;
-
-       private final boolean followEdgeHints;
-
-       private final int averageChunkSize;
-
-       private final int highWaterMark;
-
-       private final int lowWaterMark;
-
-       private boolean first = true;
-
-       private boolean automaticallyPushHints = true;
-
-       private ChunkKey stopAt;
-
-       private int bytesReady;
-
-       private int bytesLoading;
-
-       private DhtException error;
-
-       Prefetcher(DhtReader reader, int objectType, int prefetchLimitInBytes) {
-               this.db = reader.getDatabase();
-               this.stats = reader.getStatistics();
-               this.objectType = objectType;
-               this.ready = new HashMap<ChunkKey, PackChunk>();
-               this.status = new HashMap<ChunkKey, Status>();
-               this.queue = new LinkedList<ChunkKey>();
-               this.followEdgeHints = reader.getOptions().isPrefetchFollowEdgeHints();
-               this.averageChunkSize = reader.getInserterOptions().getChunkSize();
-               this.highWaterMark = prefetchLimitInBytes;
-
-               int lwm = (highWaterMark / averageChunkSize) - 4;
-               if (lwm <= 0)
-                       lwm = (highWaterMark / averageChunkSize) / 2;
-               lowWaterMark = lwm * averageChunkSize;
-       }
-
-       boolean isType(int type) {
-               return objectType == type;
-       }
-
-       void push(DhtReader ctx, Collection<RevCommit> roots) {
-               // Approximate walk by using hints from the most recent commit.
-               // Since the commits were recently parsed by the reader, we can
-               // ask the reader for their chunk locations and most likely get
-               // cache hits.
-
-               int time = -1;
-               PackChunk chunk = null;
-
-               for (RevCommit cmit : roots) {
-                       if (time < cmit.getCommitTime()) {
-                               ChunkAndOffset p = ctx.getChunkGently(cmit);
-                               if (p != null && p.chunk.getMeta() != null) {
-                                       time = cmit.getCommitTime();
-                                       chunk = p.chunk;
-                               }
-                       }
-               }
-
-               if (chunk != null) {
-                       synchronized (this) {
-                               status.put(chunk.getChunkKey(), Status.DONE);
-                               push(chunk.getMeta());
-                       }
-               }
-       }
-
-       void push(DhtReader ctx, RevTree start, RevTree end) throws DhtException,
-                       MissingObjectException {
-               // Unlike commits, trees aren't likely to be loaded when they
-               // are pushed into the prefetcher. Find the tree and load it
-               // as necessary to get the prefetch meta established.
-               //
-               Sync<Map<ObjectIndexKey, Collection<ObjectInfo>>> sync = Sync.create();
-               Set<ObjectIndexKey> toFind = new HashSet<ObjectIndexKey>();
-               toFind.add(ObjectIndexKey.create(ctx.getRepositoryKey(), start));
-               toFind.add(ObjectIndexKey.create(ctx.getRepositoryKey(), end));
-               db.objectIndex().get(Context.READ_REPAIR, toFind, sync);
-
-               Map<ObjectIndexKey, Collection<ObjectInfo>> trees;
-               try {
-                       trees = sync.get(ctx.getOptions().getTimeout());
-               } catch (InterruptedException e) {
-                       throw new DhtTimeoutException(e);
-               } catch (TimeoutException e) {
-                       throw new DhtTimeoutException(e);
-               }
-
-               ChunkKey startKey = chunk(trees.get(start));
-               if (startKey == null)
-                       throw DhtReader.missing(start, OBJ_TREE);
-
-               ChunkKey endKey = chunk(trees.get(end));
-               if (endKey == null)
-                       throw DhtReader.missing(end, OBJ_TREE);
-
-               synchronized (this) {
-                       stopAt = endKey;
-                       push(startKey);
-                       maybeStartGet();
-               }
-       }
-
-       private static ChunkKey chunk(Collection<ObjectInfo> info) {
-               if (info == null || info.isEmpty())
-                       return null;
-
-               List<ObjectInfo> infoList = new ArrayList<ObjectInfo>(info);
-               ObjectInfo.sort(infoList);
-               return infoList.get(0).getChunkKey();
-       }
-
-       void push(ChunkKey key) {
-               push(Collections.singleton(key));
-       }
-
-       void push(ChunkMeta meta) {
-               if (meta == null)
-                       return;
-
-               ChunkMeta.PrefetchHint hint;
-               switch (objectType) {
-               case OBJ_COMMIT:
-                       hint = meta.getCommitPrefetch();
-                       break;
-               case OBJ_TREE:
-                       hint = meta.getTreePrefetch();
-                       break;
-               default:
-                       return;
-               }
-
-               if (hint != null) {
-                       synchronized (this) {
-                               if (followEdgeHints && 0 < hint.getEdgeCount())
-                                       push(hint.getEdgeList());
-                               else
-                                       push(hint.getSequentialList());
-                       }
-               }
-       }
-
-       private void push(List<String> list) {
-               List<ChunkKey> keys = new ArrayList<ChunkKey>(list.size());
-               for (String keyString : list)
-                       keys.add(ChunkKey.fromString(keyString));
-               push(keys);
-       }
-
-       void push(Iterable<ChunkKey> list) {
-               synchronized (this) {
-                       for (ChunkKey key : list) {
-                               if (status.containsKey(key))
-                                       continue;
-
-                               status.put(key, Status.ON_QUEUE);
-                               queue.add(key);
-
-                               if (key.equals(stopAt)) {
-                                       automaticallyPushHints = false;
-                                       break;
-                               }
-                       }
-
-                       if (!first)
-                               maybeStartGet();
-               }
-       }
-
-       synchronized ChunkAndOffset find(RepositoryKey repo, AnyObjectId objId) {
-               for (PackChunk c : ready.values()) {
-                       int p = c.findOffset(repo, objId);
-                       if (0 <= p)
-                               return new ChunkAndOffset(useReadyChunk(c.getChunkKey()), p);
-               }
-               return null;
-       }
-
-       synchronized PackChunk get(ChunkKey key) throws DhtException {
-               GET: for (;;) {
-                       if (error != null)
-                               throw error;
-
-                       Status chunkStatus = status.get(key);
-                       if (chunkStatus == null)
-                               return null;
-
-                       switch (chunkStatus) {
-                       case ON_QUEUE:
-                               if (queue.isEmpty()) {
-                                       // Should never happen, but let the caller load.
-                                       status.put(key, Status.DONE);
-                                       return null;
-
-                               } else if (bytesReady + bytesLoading < highWaterMark) {
-                                       // Make sure it's first in the queue, start, and wait.
-                                       if (!queue.getFirst().equals(key)) {
-                                               int idx = queue.indexOf(key);
-                                               if (first && objectType == OBJ_COMMIT) {
-                                                       // If the prefetcher has not started yet, skip all
-                                                       // chunks up to this first request. Assume this
-                                                       // initial out-of-order get occurred because the
-                                                       // RevWalk has already parsed all of the commits
-                                                       // up to this point and does not need them again.
-                                                       //
-                                                       for (; 0 < idx; idx--)
-                                                               status.put(queue.removeFirst(), Status.DONE);
-                                                       forceStartGet();
-                                                       continue GET;
-                                               }
-
-                                               stats.access(key).cntPrefetcher_OutOfOrder++;
-                                               queue.remove(idx);
-                                               queue.addFirst(key);
-                                       }
-                                       forceStartGet();
-                                       continue GET;
-
-                               } else {
-                                       // It cannot be moved up to the front of the queue
-                                       // without violating the prefetch size. Let the
-                                       // caller load the chunk out of order.
-                                       stats.access(key).cntPrefetcher_OutOfOrder++;
-                                       status.put(key, Status.DONE);
-                                       return null;
-                               }
-
-                       case LOADING: // Wait for a prefetch that is already started.
-                               status.put(key, Status.WAITING);
-                               //$FALL-THROUGH$
-                       case WAITING:
-                               stats.access(key).cntPrefetcher_WaitedForLoad++;
-                               try {
-                                       wait();
-                               } catch (InterruptedException e) {
-                                       throw new DhtTimeoutException(e);
-                               }
-                               continue GET;
-
-                       case READY:
-                               return useReadyChunk(key);
-
-                       case DONE:
-                               stats.access(key).cntPrefetcher_Revisited++;
-                               return null;
-
-                       default:
-                               throw new IllegalStateException(key + " " + chunkStatus);
-                       }
-               }
-       }
-
-       private PackChunk useReadyChunk(ChunkKey key) {
-               PackChunk chunk = ready.remove(key);
-
-               status.put(chunk.getChunkKey(), Status.DONE);
-               bytesReady -= chunk.getTotalSize();
-
-               if (automaticallyPushHints) {
-                       push(chunk.getMeta());
-                       maybeStartGet();
-               }
-
-               return chunk;
-       }
-
-       private void maybeStartGet() {
-               if (!queue.isEmpty() && bytesReady + bytesLoading <= lowWaterMark)
-                       forceStartGet();
-       }
-
-       private void forceStartGet() {
-               // Use a LinkedHashSet so insertion order is iteration order.
-               // This may help a provider that loads sequentially in the
-               // set's iterator order to load in the order we want data.
-               //
-               LinkedHashSet<ChunkKey> toLoad = new LinkedHashSet<ChunkKey>();
-
-               while (bytesReady + bytesLoading < highWaterMark && !queue.isEmpty()) {
-                       ChunkKey key = queue.removeFirst();
-
-                       stats.access(key).cntPrefetcher_Load++;
-                       toLoad.add(key);
-                       status.put(key, Status.LOADING);
-                       bytesLoading += averageChunkSize;
-
-                       // For the first chunk, start immediately to reduce the
-                       // startup latency associated with additional chunks.
-                       if (first)
-                               break;
-               }
-
-               if (!toLoad.isEmpty() && error == null)
-                       db.chunk().get(Context.LOCAL, toLoad, this);
-
-               if (first) {
-                       first = false;
-                       maybeStartGet();
-               }
-       }
-
-       public synchronized void onPartialResult(Collection<PackChunk.Members> res) {
-               try {
-                       bytesLoading -= averageChunkSize * res.size();
-                       for (PackChunk.Members builder : res)
-                               chunkIsReady(builder.build());
-               } catch (DhtException loadError) {
-                       onError(loadError);
-               }
-       }
-
-       private void chunkIsReady(PackChunk chunk) {
-               ChunkKey key = chunk.getChunkKey();
-               ready.put(key, chunk);
-               bytesReady += chunk.getTotalSize();
-
-               if (status.put(key, Status.READY) == Status.WAITING)
-                       notifyAll();
-       }
-
-       public synchronized void onSuccess(Collection<PackChunk.Members> result) {
-               if (result != null && !result.isEmpty())
-                       onPartialResult(result);
-       }
-
-       public synchronized void onFailure(DhtException asyncError) {
-               onError(asyncError);
-       }
-
-       private void onError(DhtException asyncError) {
-               if (error == null) {
-                       error = asyncError;
-                       notifyAll();
-               }
-       }
-}
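
The prefetcher deleted above is driven by two water marks: loading starts whenever the bytes that are ready plus the bytes still in flight drop to lowWaterMark, chunks keep being queued for load until the estimate reaches highWaterMark, and averageChunkSize stands in for sizes that are not yet known. A minimal, self-contained sketch of that accounting pattern (hypothetical class, field and method names; not the JGit API) could look like:

    import java.util.ArrayDeque;
    import java.util.Deque;

    // Hypothetical sketch of water-mark driven prefetching; not part of the JGit API.
    class WaterMarkQueue<K> {
        private final Deque<K> queue = new ArrayDeque<K>();
        private final int lowWaterMark;    // start loading at or below this level
        private final int highWaterMark;   // stop issuing loads at this level
        private final int averageItemSize; // estimate used while the real size is unknown
        private int bytesReady;
        private int bytesLoading;

        WaterMarkQueue(int low, int high, int avg) {
            this.lowWaterMark = low;
            this.highWaterMark = high;
            this.averageItemSize = avg;
        }

        void push(K key) {
            queue.add(key);
            maybeStartLoad();
        }

        private void maybeStartLoad() {
            if (queue.isEmpty() || bytesReady + bytesLoading > lowWaterMark)
                return;
            // Issue loads until the estimated in-flight bytes reach the high water mark.
            while (!queue.isEmpty() && bytesReady + bytesLoading < highWaterMark) {
                bytesLoading += averageItemSize;
                startAsyncLoad(queue.removeFirst());
            }
        }

        void onLoaded(K key, int actualSize) {
            bytesLoading -= averageItemSize; // replace the estimate with the real size
            bytesReady += actualSize;
        }

        void onConsumed(int actualSize) {
            bytesReady -= actualSize;
            maybeStartLoad(); // consuming data may drop us back under the low water mark
        }

        private void startAsyncLoad(K key) {
            // Placeholder: a real prefetcher would hand the key to a storage backend here.
        }
    }
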
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/QueueObjectLookup.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/QueueObjectLookup.java
deleted file mode 100644 (file)
index 9cf513d..0000000
+++ /dev/null
@@ -1,295 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import org.eclipse.jgit.errors.MissingObjectException;
-import org.eclipse.jgit.lib.AsyncOperation;
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.Database;
-
-class QueueObjectLookup<T extends ObjectId> implements AsyncOperation {
-       protected final RepositoryKey repo;
-
-       protected final Database db;
-
-       protected final DhtReader reader;
-
-       private final DhtReaderOptions options;
-
-       private final boolean reportMissing;
-
-       private final ArrayList<ObjectInfo> tmp;
-
-       private final int concurrentBatches;
-
-       private int runningBatches;
-
-       private Context context;
-
-       private Iterator<T> toFind;
-
-       private List<T> toRetry;
-
-       private ObjectWithInfo<T> nextResult;
-
-       private DhtException error;
-
-       private boolean needChunkOnly;
-
-       private boolean cacheLoadedInfo;
-
-       QueueObjectLookup(DhtReader reader, boolean reportMissing) {
-               this.repo = reader.getRepositoryKey();
-               this.db = reader.getDatabase();
-               this.reader = reader;
-               this.options = reader.getOptions();
-               this.reportMissing = reportMissing;
-               this.tmp = new ArrayList<ObjectInfo>(4);
-               this.context = Context.FAST_MISSING_OK;
-               this.toRetry = new ArrayList<T>();
-
-               this.concurrentBatches = options.getObjectIndexConcurrentBatches();
-       }
-
-       void setCacheLoadedInfo(boolean on) {
-               cacheLoadedInfo = on;
-       }
-
-       void setNeedChunkOnly(boolean on) {
-               needChunkOnly = on;
-       }
-
-       void init(Iterable<T> objectIds) {
-               toFind = lookInCache(objectIds).iterator();
-       }
-
-       private Iterable<T> lookInCache(Iterable<T> objects) {
-               RecentInfoCache infoCache = reader.getRecentInfoCache();
-               List<T> missing = null;
-               for (T obj : objects) {
-                       if (needChunkOnly && obj instanceof RefDataUtil.IdWithChunk) {
-                               push(obj, ((RefDataUtil.IdWithChunk) obj).getChunkKey());
-                               continue;
-                       }
-
-                       List<ObjectInfo> info = infoCache.get(obj);
-                       if (info != null && !info.isEmpty()) {
-                               push(obj, info.get(0));
-                       } else {
-                               if (missing == null) {
-                                       if (objects instanceof List<?>)
-                                               missing = new ArrayList<T>(((List<?>) objects).size());
-                                       else
-                                               missing = new ArrayList<T>();
-                               }
-                               missing.add(obj);
-                       }
-               }
-               if (missing != null)
-                       return missing;
-               return Collections.emptyList();
-       }
-
-       synchronized ObjectWithInfo<T> nextObjectWithInfo()
-                       throws MissingObjectException, IOException {
-               for (;;) {
-                       if (error != null)
-                               throw error;
-
-                       // Consider starting another batch before popping a result.
-                       // This ensures lookup is running while results are being
-                       // consumed by the calling application.
-                       //
-                       while (runningBatches < concurrentBatches) {
-                               if (!toFind.hasNext() // reached end of original input
-                                               && runningBatches == 0 // all batches finished
-                                               && toRetry != null // haven't yet retried
-                                               && !toRetry.isEmpty()) {
-                                       toFind = toRetry.iterator();
-                                       toRetry = null;
-                                       context = Context.READ_REPAIR;
-                               }
-
-                               if (toFind.hasNext())
-                                       startBatch(context);
-                               else
-                                       break;
-                       }
-
-                       ObjectWithInfo<T> c = pop();
-                       if (c != null) {
-                               if (c.chunkKey != null)
-                                       return c;
-                               else
-                                       throw missing(c.object);
-
-                       } else if (!toFind.hasNext() && runningBatches == 0)
-                               return null;
-
-                       try {
-                               wait();
-                       } catch (InterruptedException e) {
-                               throw new DhtTimeoutException(e);
-                       }
-               }
-       }
-
-       private synchronized void startBatch(final Context ctx) {
-               final int batchSize = options.getObjectIndexBatchSize();
-               final Map<ObjectIndexKey, T> batch = new HashMap<ObjectIndexKey, T>();
-               while (toFind.hasNext() && batch.size() < batchSize) {
-                       T obj = toFind.next();
-                       batch.put(ObjectIndexKey.create(repo, obj), obj);
-               }
-
-               final AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> cb;
-
-               cb = new AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>>() {
-                       public void onSuccess(Map<ObjectIndexKey, Collection<ObjectInfo>> r) {
-                               processResults(ctx, batch, r);
-                       }
-
-                       public void onFailure(DhtException e) {
-                               processFailure(e);
-                       }
-               };
-               db.objectIndex().get(ctx, batch.keySet(), cb);
-               runningBatches++;
-       }
-
-       private synchronized void processResults(Context ctx,
-                       Map<ObjectIndexKey, T> batch,
-                       Map<ObjectIndexKey, Collection<ObjectInfo>> objects) {
-               for (T obj : batch.values()) {
-                       Collection<ObjectInfo> matches = objects.get(obj);
-
-                       if (matches == null || matches.isEmpty()) {
-                               if (ctx == Context.FAST_MISSING_OK)
-                                       toRetry.add(obj);
-                               else if (reportMissing)
-                                       push(obj, (ChunkKey) null);
-                               continue;
-                       }
-
-                       tmp.clear();
-                       tmp.addAll(matches);
-                       ObjectInfo.sort(tmp);
-                       if (cacheLoadedInfo)
-                               reader.getRecentInfoCache().put(obj, tmp);
-
-                       push(obj, tmp.get(0));
-               }
-
-               runningBatches--;
-               notify();
-       }
-
-       private synchronized void processFailure(DhtException e) {
-               runningBatches--;
-               error = e;
-               notify();
-       }
-
-       private void push(T obj, ChunkKey chunkKey) {
-               nextResult = new ObjectWithInfo<T>(obj, chunkKey, nextResult);
-       }
-
-       private void push(T obj, ObjectInfo info) {
-               nextResult = new ObjectWithInfo<T>(obj, info, nextResult);
-       }
-
-       private ObjectWithInfo<T> pop() {
-               ObjectWithInfo<T> r = nextResult;
-               if (r == null)
-                       return null;
-               nextResult = r.next;
-               return r;
-       }
-
-       public boolean cancel(boolean mayInterruptIfRunning) {
-               return true;
-       }
-
-       public void release() {
-               // Do nothing, there is nothing to abort or discard.
-       }
-
-       private static <T extends ObjectId> MissingObjectException missing(T id) {
-               return new MissingObjectException(id, DhtText.get().objectTypeUnknown);
-       }
-
-       static class ObjectWithInfo<T extends ObjectId> {
-               final T object;
-
-               final ObjectInfo info;
-
-               final ChunkKey chunkKey;
-
-               final ObjectWithInfo<T> next;
-
-               ObjectWithInfo(T object, ObjectInfo info, ObjectWithInfo<T> next) {
-                       this.object = object;
-                       this.info = info;
-                       this.chunkKey = info.getChunkKey();
-                       this.next = next;
-               }
-
-               ObjectWithInfo(T object, ChunkKey chunkKey, ObjectWithInfo<T> next) {
-                       this.object = object;
-                       this.info = null;
-                       this.chunkKey = chunkKey;
-                       this.next = next;
-               }
-       }
-}
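
QueueObjectLookup above runs index lookups in bounded batches, collects misses from the fast first pass, and retries them once under the stronger READ_REPAIR context before reporting anything as missing. A simplified, synchronous sketch of that two-pass batching idea (Backend, find and nextBatch are invented names; the deleted class does the same thing asynchronously with a bounded number of in-flight batches):

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    // Hypothetical, synchronous two-pass batched lookup; Backend is an invented interface.
    class TwoPassLookup<T> {
        interface Backend<E> {
            // Returns the subset of the batch that was found; the rest counts as a miss.
            List<E> find(List<E> batch, boolean readRepair);
        }

        List<T> lookup(Iterator<T> toFind, Backend<T> backend, int batchSize) {
            List<T> found = new ArrayList<T>();
            List<T> retry = new ArrayList<T>();

            // Pass 1: fast reads; collect misses instead of failing immediately.
            while (toFind.hasNext()) {
                List<T> batch = nextBatch(toFind, batchSize);
                List<T> hits = backend.find(batch, false);
                found.addAll(hits);
                for (T obj : batch)
                    if (!hits.contains(obj))
                        retry.add(obj);
            }

            // Pass 2: retry the misses with read-repair semantics before giving up.
            Iterator<T> again = retry.iterator();
            while (again.hasNext())
                found.addAll(backend.find(nextBatch(again, batchSize), true));
            return found;
        }

        private List<T> nextBatch(Iterator<T> it, int batchSize) {
            List<T> batch = new ArrayList<T>(batchSize);
            while (it.hasNext() && batch.size() < batchSize)
                batch.add(it.next());
            return batch;
        }
    }
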
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentChunks.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentChunks.java
deleted file mode 100644 (file)
index 22608ee..0000000
+++ /dev/null
@@ -1,237 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.IOException;
-import java.util.HashMap;
-
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.ObjectLoader;
-import org.eclipse.jgit.storage.dht.DhtReader.ChunkAndOffset;
-import org.eclipse.jgit.storage.dht.RefDataUtil.IdWithChunk;
-
-final class RecentChunks {
-       private final DhtReader reader;
-
-       private final DhtReader.Statistics stats;
-
-       private final HashMap<ChunkKey, Node> byKey;
-
-       private int maxBytes;
-
-       private int curBytes;
-
-       private Node lruHead;
-
-       private Node lruTail;
-
-       RecentChunks(DhtReader reader) {
-               this.reader = reader;
-               this.stats = reader.getStatistics();
-               this.byKey = new HashMap<ChunkKey, Node>();
-               this.maxBytes = reader.getOptions().getChunkLimit();
-       }
-
-       void setMaxBytes(int newMax) {
-               maxBytes = Math.max(0, newMax);
-               if (0 < maxBytes)
-                       prune();
-               else
-                       clear();
-       }
-
-       PackChunk get(ChunkKey key) {
-               Node n = byKey.get(key);
-               if (n != null) {
-                       hit(n);
-                       stats.recentChunks_Hits++;
-                       return n.chunk;
-               }
-               stats.recentChunks_Miss++;
-               return null;
-       }
-
-       void put(PackChunk chunk) {
-               Node n = byKey.get(chunk.getChunkKey());
-               if (n != null && n.chunk == chunk) {
-                       hit(n);
-                       return;
-               }
-
-               curBytes += chunk.getTotalSize();
-               prune();
-
-               n = new Node();
-               n.chunk = chunk;
-               byKey.put(chunk.getChunkKey(), n);
-               first(n);
-       }
-
-       private void prune() {
-               while (maxBytes < curBytes) {
-                       Node n = lruTail;
-                       if (n == null)
-                               break;
-
-                       PackChunk c = n.chunk;
-                       curBytes -= c.getTotalSize();
-                       byKey.remove(c.getChunkKey());
-                       remove(n);
-               }
-       }
-
-       ObjectLoader open(RepositoryKey repo, AnyObjectId objId, int typeHint)
-                       throws IOException {
-               if (objId instanceof IdWithChunk) {
-                       PackChunk chunk = get(((IdWithChunk) objId).getChunkKey());
-                       if (chunk != null) {
-                               int pos = chunk.findOffset(repo, objId);
-                               if (0 <= pos)
-                                       return PackChunk.read(chunk, pos, reader, typeHint);
-                       }
-
-                       // IdWithChunk is only a hint, and can be wrong. Searching
-                       // locally is faster than looking in the Database.
-               }
-
-               for (Node n = lruHead; n != null; n = n.next) {
-                       int pos = n.chunk.findOffset(repo, objId);
-                       if (0 <= pos) {
-                               hit(n);
-                               stats.recentChunks_Hits++;
-                               return PackChunk.read(n.chunk, pos, reader, typeHint);
-                       }
-               }
-
-               return null;
-       }
-
-       ChunkAndOffset find(RepositoryKey repo, AnyObjectId objId) {
-               if (objId instanceof IdWithChunk) {
-                       PackChunk chunk = get(((IdWithChunk) objId).getChunkKey());
-                       if (chunk != null) {
-                               int pos = chunk.findOffset(repo, objId);
-                               if (0 <= pos)
-                                       return new ChunkAndOffset(chunk, pos);
-                       }
-
-                       // IdWithChunk is only a hint, and can be wrong. Searching
-                       // locally is faster than looking in the Database.
-               }
-
-               for (Node n = lruHead; n != null; n = n.next) {
-                       int pos = n.chunk.findOffset(repo, objId);
-                       if (0 <= pos) {
-                               hit(n);
-                               stats.recentChunks_Hits++;
-                               return new ChunkAndOffset(n.chunk, pos);
-                       }
-               }
-
-               return null;
-       }
-
-       boolean has(RepositoryKey repo, AnyObjectId objId) {
-               for (Node n = lruHead; n != null; n = n.next) {
-                       int pos = n.chunk.findOffset(repo, objId);
-                       if (0 <= pos) {
-                               hit(n);
-                               stats.recentChunks_Hits++;
-                               return true;
-                       }
-               }
-               return false;
-       }
-
-       void clear() {
-               curBytes = 0;
-               lruHead = null;
-               lruTail = null;
-               byKey.clear();
-       }
-
-       private void hit(Node n) {
-               if (lruHead != n) {
-                       remove(n);
-                       first(n);
-               }
-       }
-
-       private void remove(Node node) {
-               Node p = node.prev;
-               Node n = node.next;
-
-               if (p != null)
-                       p.next = n;
-               if (n != null)
-                       n.prev = p;
-
-               if (lruHead == node)
-                       lruHead = n;
-               if (lruTail == node)
-                       lruTail = p;
-       }
-
-       private void first(Node node) {
-               Node h = lruHead;
-
-               node.prev = null;
-               node.next = h;
-
-               if (h != null)
-                       h.prev = node;
-               else
-                       lruTail = node;
-
-               lruHead = node;
-       }
-
-       private static class Node {
-               PackChunk chunk;
-
-               Node prev;
-
-               Node next;
-       }
-}
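
RecentChunks implements a byte-bounded LRU: a HashMap for O(1) lookup plus a hand-rolled doubly linked list so a hit can be moved to the front of the recency order, with pruning from the tail until the cached bytes fit under the configured limit. The same eviction policy can be sketched with an access-ordered LinkedHashMap (ByteBoundedLru and Sizer are invented names; this only illustrates the byte-bounded eviction idea, not a drop-in replacement):

    import java.util.Iterator;
    import java.util.LinkedHashMap;
    import java.util.Map;

    // Hypothetical byte-bounded LRU built on an access-ordered LinkedHashMap.
    class ByteBoundedLru<K, V> {
        interface Sizer<E> {
            int sizeOf(E value);
        }

        private final LinkedHashMap<K, V> map = new LinkedHashMap<K, V>(16, 0.75f, true);
        private final Sizer<V> sizer;
        private final int maxBytes;
        private int curBytes;

        ByteBoundedLru(int maxBytes, Sizer<V> sizer) {
            this.maxBytes = maxBytes;
            this.sizer = sizer;
        }

        V get(K key) {
            return map.get(key); // the access-ordered map moves a hit to the newest end
        }

        void put(K key, V value) {
            V old = map.put(key, value);
            if (old != null)
                curBytes -= sizer.sizeOf(old);
            curBytes += sizer.sizeOf(value);
            prune();
        }

        private void prune() {
            // Iteration starts at the least recently used entry; evict until under budget.
            Iterator<Map.Entry<K, V>> it = map.entrySet().iterator();
            while (maxBytes < curBytes && it.hasNext()) {
                curBytes -= sizer.sizeOf(it.next().getValue());
                it.remove();
            }
        }
    }
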
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentInfoCache.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentInfoCache.java
deleted file mode 100644 (file)
index cb5882a..0000000
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.ObjectId;
-
-final class RecentInfoCache {
-       private final Map<ObjectId, List<ObjectInfo>> infoCache;
-
-       RecentInfoCache(DhtReaderOptions options) {
-               final int sz = options.getRecentInfoCacheSize();
-               infoCache = new LinkedHashMap<ObjectId, List<ObjectInfo>>(sz, 0.75f, true) {
-                       private static final long serialVersionUID = 1L;
-
-                       @Override
-                       protected boolean removeEldestEntry(Entry<ObjectId, List<ObjectInfo>> e) {
-                               return sz < size();
-                       }
-               };
-       }
-
-       List<ObjectInfo> get(AnyObjectId id) {
-               return infoCache.get(id);
-       }
-
-       void put(AnyObjectId id, List<ObjectInfo> info) {
-               infoCache.put(id.copy(), copyList(info));
-       }
-
-       private static List<ObjectInfo> copyList(List<ObjectInfo> info) {
-               int cnt = info.size();
-               if (cnt == 1)
-                       return Collections.singletonList(info.get(0));
-
-               ObjectInfo[] tmp = info.toArray(new ObjectInfo[cnt]);
-               return Collections.unmodifiableList(Arrays.asList(tmp));
-       }
-
-       void clear() {
-               infoCache.clear();
-       }
-}
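
RecentInfoCache relies on the standard JDK idiom for a fixed-size LRU: a LinkedHashMap constructed with accessOrder = true plus an overridden removeEldestEntry. A tiny stand-alone demonstration of that eviction behaviour (hypothetical keys and values):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class LruDemo {
        public static void main(String[] args) {
            final int max = 2;
            Map<String, String> lru = new LinkedHashMap<String, String>(max, 0.75f, true) {
                @Override
                protected boolean removeEldestEntry(Map.Entry<String, String> eldest) {
                    return size() > max; // same shape as the "sz < size()" check above
                }
            };
            lru.put("a", "1");
            lru.put("b", "2");
            lru.get("a");      // touch "a"; "b" becomes the eldest entry
            lru.put("c", "3"); // exceeds max, so "b" is evicted
            System.out.println(lru.keySet()); // prints [a, c]
        }
    }
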
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefDataUtil.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefDataUtil.java
deleted file mode 100644 (file)
index b0d4a68..0000000
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
-import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.ObjectId;
-
-/** Tools to work with {@link RefData}. */
-public class RefDataUtil {
-       /** Magic constant meaning the reference does not exist. */
-       public static final RefData NONE = RefData.newBuilder().buildPartial();
-
-       static class IdWithChunk extends ObjectId {
-               private final ChunkKey chunkKey;
-
-               IdWithChunk(AnyObjectId id, ChunkKey key) {
-                       super(id);
-                       this.chunkKey = key;
-               }
-
-               ChunkKey getChunkKey() {
-                       return chunkKey;
-               }
-
-               @Override
-               public String toString() {
-                       return name() + "->" + chunkKey;
-               }
-       }
-
-       private RefDataUtil() {
-               // Utility class, do not create instances.
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefKey.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefKey.java
deleted file mode 100644 (file)
index b4d378f..0000000
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.lib.Constants.encode;
-import static org.eclipse.jgit.storage.dht.KeyUtils.format32;
-import static org.eclipse.jgit.storage.dht.KeyUtils.parse32;
-import static org.eclipse.jgit.util.RawParseUtils.decode;
-
-import org.eclipse.jgit.lib.Constants;
-
-/** Unique identifier of a reference in the DHT. */
-public final class RefKey implements RowKey {
-       /**
-        * @param repo
-        * @param name
-        * @return the key
-        */
-       public static RefKey create(RepositoryKey repo, String name) {
-               return new RefKey(repo.asInt(), name);
-       }
-
-       /**
-        * @param key
-        * @return the key
-        */
-       public static RefKey fromBytes(byte[] key) {
-               int repo = parse32(key, 0);
-               String name = decode(key, 9, key.length);
-               return new RefKey(repo, name);
-       }
-
-       /**
-        * @param key
-        * @return the key
-        */
-       public static RefKey fromString(String key) {
-               int c = key.indexOf(':');
-               int repo = parse32(Constants.encodeASCII(key.substring(0, c)), 0);
-               String name = key.substring(c + 1);
-               return new RefKey(repo, name);
-       }
-
-       private final int repo;
-
-       private final String name;
-
-       RefKey(int repo, String name) {
-               this.repo = repo;
-               this.name = name;
-       }
-
-       /** @return the repository this reference lives within. */
-       public RepositoryKey getRepositoryKey() {
-               return RepositoryKey.fromInt(repo);
-       }
-
-       /** @return the name of the reference. */
-       public String getName() {
-               return name;
-       }
-
-       public byte[] asBytes() {
-               byte[] nameRaw = encode(name);
-               byte[] r = new byte[9 + nameRaw.length];
-               format32(r, 0, repo);
-               r[8] = ':';
-               System.arraycopy(nameRaw, 0, r, 9, nameRaw.length);
-               return r;
-       }
-
-       public String asString() {
-               return getRepositoryKey().asString() + ":" + name;
-       }
-
-       @Override
-       public int hashCode() {
-               return name.hashCode();
-       }
-
-       @Override
-       public boolean equals(Object other) {
-               if (this == other)
-                       return true;
-               if (other instanceof RefKey) {
-                       RefKey thisRef = this;
-                       RefKey otherRef = (RefKey) other;
-                       return thisRef.repo == otherRef.repo
-                                       && thisRef.name.equals(otherRef.name);
-               }
-               return false;
-       }
-
-       @Override
-       public String toString() {
-               return "ref:" + asString();
-       }
-}
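
As asBytes and fromBytes above imply, a RefKey row is laid out as eight characters for the 32-bit repository id, a ':' separator at offset 8, and the reference name from offset 9 onward. A small sketch of that layout using plain JDK formatting (format32/parse32 from KeyUtils are assumed here to be hex helpers; encode and decodeName below are invented for this sketch):

    import java.nio.charset.StandardCharsets;

    // Hypothetical stand-in for the RefKey byte layout; illustration only.
    public class RefKeyFormatDemo {
        static byte[] encode(int repo, String name) {
            byte[] nameRaw = name.getBytes(StandardCharsets.UTF_8);
            byte[] r = new byte[9 + nameRaw.length];
            System.arraycopy(String.format("%08x", repo)
                    .getBytes(StandardCharsets.US_ASCII), 0, r, 0, 8);
            r[8] = ':';
            System.arraycopy(nameRaw, 0, r, 9, nameRaw.length);
            return r;
        }

        static String decodeName(byte[] key) {
            return new String(key, 9, key.length - 9, StandardCharsets.UTF_8);
        }

        public static void main(String[] args) {
            byte[] key = encode(42, "refs/heads/master");
            System.out.println(new String(key, StandardCharsets.UTF_8));
            // prints: 0000002a:refs/heads/master
            System.out.println(decodeName(key));
            // prints: refs/heads/master
        }
    }
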
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RepositoryKey.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RepositoryKey.java
deleted file mode 100644 (file)
index 2835d62..0000000
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.storage.dht.KeyUtils.format32;
-import static org.eclipse.jgit.storage.dht.KeyUtils.parse32;
-import static org.eclipse.jgit.util.RawParseUtils.decode;
-
-import org.eclipse.jgit.lib.Constants;
-
-/** Unique identifier of a repository in the DHT. */
-public final class RepositoryKey implements RowKey {
-       /**
-        * @param sequentialId
-        * @return the key
-        */
-       public static RepositoryKey create(int sequentialId) {
-               return new RepositoryKey(Integer.reverse(sequentialId));
-       }
-
-       /**
-        * @param key
-        * @return the key
-        */
-       public static RepositoryKey fromBytes(byte[] key) {
-               return new RepositoryKey(parse32(key, 0));
-       }
-
-       /**
-        * @param key
-        * @return the key
-        */
-       public static RepositoryKey fromString(String key) {
-               return new RepositoryKey(parse32(Constants.encodeASCII(key), 0));
-       }
-
-       /**
-        * @param reverseId
-        * @return the key
-        */
-       public static RepositoryKey fromInt(int reverseId) {
-               return new RepositoryKey(reverseId);
-       }
-
-       private final int id;
-
-       RepositoryKey(int id) {
-               this.id = id;
-       }
-
-       /** @return 32 bit value describing the repository. */
-       public int asInt() {
-               return id;
-       }
-
-       public byte[] asBytes() {
-               byte[] r = new byte[8];
-               format32(r, 0, asInt());
-               return r;
-       }
-
-       public String asString() {
-               return decode(asBytes());
-       }
-
-       @Override
-       public int hashCode() {
-               return id;
-       }
-
-       @Override
-       public boolean equals(Object other) {
-               if (this == other)
-                       return true;
-               if (other instanceof RepositoryKey)
-                       return id == ((RepositoryKey) other).id;
-               return false;
-       }
-
-       @Override
-       public String toString() {
-               return "repository:" + asString();
-       }
-}
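
The one non-obvious step above is create(int): Integer.reverse spreads sequential repository ids across the 32-bit key space, presumably so that consecutive repositories land far apart in the row-key space rather than hot-spotting one region of the DHT. A quick illustration of the effect using plain JDK calls (the ids are hypothetical):

    public class ReverseIdDemo {
        public static void main(String[] args) {
            for (int id = 1; id <= 4; id++)
                System.out.printf("%d -> %08x%n", id, Integer.reverse(id));
            // 1 -> 80000000
            // 2 -> 40000000
            // 3 -> c0000000
            // 4 -> 20000000
        }
    }
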
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RepositoryName.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RepositoryName.java
deleted file mode 100644 (file)
index 18443fa..0000000
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import static org.eclipse.jgit.lib.Constants.encode;
-import static org.eclipse.jgit.util.RawParseUtils.decode;
-
-/** Unique name of a repository, as specified by the URL. */
-public class RepositoryName implements RowKey {
-       /**
-        * @param name
-        * @return the key
-        */
-       public static RepositoryName create(String name) {
-               return new RepositoryName(name);
-       }
-
-       /**
-        * @param name
-        * @return the key
-        */
-       public static RepositoryName fromBytes(byte[] name) {
-               return new RepositoryName(decode(name));
-       }
-
-       /**
-        * @param name
-        * @return the key
-        */
-       public static RepositoryName fromString(String name) {
-               return new RepositoryName(name);
-       }
-
-       private final String name;
-
-       RepositoryName(String name) {
-               this.name = name;
-       }
-
-       public byte[] asBytes() {
-               return encode(name);
-       }
-
-       public String asString() {
-               return name;
-       }
-
-       @Override
-       public int hashCode() {
-               return name.hashCode();
-       }
-
-       @Override
-       public boolean equals(Object other) {
-               if (this == other)
-                       return true;
-               if (other instanceof RepositoryName)
-                       return name.equals(((RepositoryName) other).name);
-               return false;
-       }
-
-       @Override
-       public String toString() {
-               return "repository:" + asString();
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RepresentationSelector.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RepresentationSelector.java
deleted file mode 100644 (file)
index 8c14d30..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.util.List;
-
-import org.eclipse.jgit.lib.ProgressMonitor;
-import org.eclipse.jgit.storage.pack.PackWriter;
-
-final class RepresentationSelector extends BatchObjectLookup<DhtObjectToPack> {
-       private final PackWriter packer;
-
-       private final DhtObjectRepresentation rep;
-
-       RepresentationSelector(PackWriter packer, DhtReader reader,
-                       ProgressMonitor monitor) {
-               super(reader, monitor);
-               setRetryMissingObjects(true);
-
-               this.packer = packer;
-               this.rep = new DhtObjectRepresentation();
-       }
-
-       protected void onResult(DhtObjectToPack obj, List<ObjectInfo> info) {
-               // Go through the objects backwards. This is necessary because
-               // info is sorted oldest->newest but PackWriter wants the reverse
-               // order to try and prevent delta chain cycles.
-               //
-               for (int i = info.size() - 1; 0 <= i; i--) {
-                       rep.set(info.get(i));
-                       packer.select(obj, rep);
-               }
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RowKey.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RowKey.java
deleted file mode 100644 (file)
index e088b36..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-/**
- * Key for any row that the DHT will be asked to store.
- * <p>
- * Implementations of this interface know how to encode and decode themselves
- * from a byte array format, expecting the DHT to use the byte array as the row
- * key within the database.
- * <p>
- * It is strongly encouraged to use only row keys that are valid UTF-8 strings,
- * as most DHT systems have client tools that can interact with rows using the
- * UTF-8 encoding.
- */
-public interface RowKey {
-       /** @return key formatted as byte array for storage in the DHT. */
-       public byte[] asBytes();
-
-       /** @return key formatted as a String for storage in the DHT. */
-       public String asString();
-
-       /** @return relatively unique hash code value for in-memory compares. */
-       public int hashCode();
-
-       /**
-        * Compare this key to another key for equality.
-        *
-        * @param other
-        *            the other key instance, may be null.
-        * @return true if these keys reference the same row.
-        */
-       public boolean equals(Object other);
-
-       /** @return pretty printable string for debugging/reporting only. */
-       public String toString();
-}
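
RowKey is the contract shared by every key class in this package. A minimal, hypothetical implementation showing what the interface asks for (a UTF-8 friendly byte form, value-based equals and hashCode, and a printable toString); ExampleKey is invented for illustration, presumes the RowKey interface above, and is not one of the deleted key classes:

    import java.nio.charset.StandardCharsets;

    // Hypothetical RowKey implementation, for illustration only.
    final class ExampleKey implements RowKey {
        private final String value;

        ExampleKey(String value) {
            this.value = value;
        }

        public byte[] asBytes() {
            // Valid UTF-8, as the interface documentation encourages.
            return value.getBytes(StandardCharsets.UTF_8);
        }

        public String asString() {
            return value;
        }

        @Override
        public int hashCode() {
            return value.hashCode();
        }

        @Override
        public boolean equals(Object other) {
            return other instanceof ExampleKey
                    && value.equals(((ExampleKey) other).value);
        }

        @Override
        public String toString() {
            return "example:" + asString();
        }
    }
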
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/SizeQueue.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/SizeQueue.java
deleted file mode 100644 (file)
index 3069886..0000000
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.io.IOException;
-
-import org.eclipse.jgit.errors.MissingObjectException;
-import org.eclipse.jgit.lib.AsyncObjectSizeQueue;
-import org.eclipse.jgit.lib.ObjectId;
-
-final class SizeQueue<T extends ObjectId> extends QueueObjectLookup<T>
-               implements AsyncObjectSizeQueue<T> {
-       private ObjectWithInfo<T> currResult;
-
-       SizeQueue(DhtReader reader, Iterable<T> objectIds, boolean reportMissing) {
-               super(reader, reportMissing);
-               init(objectIds);
-       }
-
-       public boolean next() throws MissingObjectException, IOException {
-               currResult = nextObjectWithInfo();
-               return currResult != null;
-       }
-
-       public T getCurrent() {
-               return currResult.object;
-       }
-
-       public long getSize() {
-               return currResult.info.getSize();
-       }
-
-       public ObjectId getObjectId() {
-               return getCurrent();
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/StreamingCallback.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/StreamingCallback.java
deleted file mode 100644 (file)
index 9ec379f..0000000
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-/**
- * Extension of {@link AsyncCallback} supporting partial results.
- * <p>
- * Instead of buffering all results for {@link #onSuccess(Object)}, the storage
- * provider may choose to offer some results earlier by invoking the
- * {@link #onPartialResult(Object)} method declared in this interface.
- * <p>
- * If any results were delivered early to {@link #onPartialResult(Object)} then
- * {@link #onSuccess(Object)} is invoked with {@code null} when all results have
- * been supplied and no more remain to be delivered.
- * <p>
- * If an error occurs, {@link #onFailure(DhtException)} will be invoked,
- * potentially after one or more {@link #onPartialResult(Object)} notifications
- * were already made. In an error condition, {@link #onSuccess(Object)} will not
- * be invoked.
- *
- * @param <T>
- *            type of object returned from the operation on success.
- */
-public interface StreamingCallback<T> extends AsyncCallback<T> {
-       /**
-        * Receives partial results from the operation.
-        *
-        * @param result
-        *            the result value from the operation.
-        */
-       public void onPartialResult(T result);
-}
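
The interface above only declares the early-delivery hook; honouring it is left to each storage provider. A minimal sketch of the contract described in the Javadoc, assuming a provider that already has its results split into batches (AsyncCallback and StreamingCallback are both removed by this commit, so the snippet is illustrative only):

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    // Illustrative only: deliver results batch by batch when the caller passed
    // a StreamingCallback, otherwise buffer everything for a single onSuccess.
    static void deliver(List<List<String>> batches,
            AsyncCallback<Collection<String>> callback) {
        if (callback instanceof StreamingCallback<?>) {
            StreamingCallback<Collection<String>> streaming =
                    (StreamingCallback<Collection<String>>) callback;
            for (List<String> batch : batches)
                streaming.onPartialResult(batch); // results delivered early
            streaming.onSuccess(null); // everything was already delivered
        } else {
            List<String> all = new ArrayList<String>();
            for (List<String> batch : batches)
                all.addAll(batch);
            callback.onSuccess(all); // single buffered delivery
        }
    }
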
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Sync.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Sync.java
deleted file mode 100644 (file)
index 4833375..0000000
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-
-/**
- * Helper to implement a synchronous method in terms of an asynchronous one.
- * <p>
- * Implementors can use this type to wait for an asynchronous computation to
- * complete on a background thread by passing the Sync instance as though it
- * were the AsyncCallback:
- *
- * <pre>
- * Sync&lt;T&gt; sync = Sync.create();
- * async(..., sync);
- * return sync.get(timeout, TimeUnit.MILLISECONDS);
- * </pre>
- *
- * @param <T>
- *            type of value object.
- */
-public abstract class Sync<T> implements AsyncCallback<T> {
-       private static final Sync<?> NONE = new Sync<Object>() {
-               public void onSuccess(Object result) {
-                       // Discard
-               }
-
-               public void onFailure(DhtException error) {
-                       // Discard
-               }
-
-               @Override
-               public Object get(long timeout, TimeUnit unit) throws DhtException,
-                               InterruptedException, TimeoutException {
-                       return null;
-               }
-       };
-
-       /**
-        * Helper method to create a new sync object.
-        *
-        * @param <T>
-        *            type of value object.
-        * @return a new instance.
-        */
-       public static <T> Sync<T> create() {
-               return new Value<T>();
-       }
-
-       /**
-        * Singleton callback that ignores onSuccess, onFailure.
-        *
-        * @param <T>
-        *            type of value object.
-        * @return callback that discards all results.
-        */
-       @SuppressWarnings("unchecked")
-       public static <T> Sync<T> none() {
-               return (Sync<T>) NONE;
-       }
-
-       /**
-        * Wait for the asynchronous operation to complete.
-        * <p>
-        * To prevent application deadlock, waiting can only be performed with the
-        * supplied timeout.
-        *
-        * @param timeout
-        *            amount of time to wait before failing.
-        * @return the returned value.
-        * @throws DhtException
-        *             the asynchronous operation failed.
-        * @throws InterruptedException
-        *             the current thread was interrupted before the operation
-        *             completed.
-        * @throws TimeoutException
-        *             the timeout elapsed before the operation completed.
-        */
-       public T get(Timeout timeout) throws DhtException, InterruptedException,
-                       TimeoutException {
-               return get(timeout.getTime(), timeout.getUnit());
-       }
-
-       /**
-        * Wait for the asynchronous operation to complete.
-        * <p>
-        * To prevent application deadlock, waiting can only be performed with the
-        * supplied timeout.
-        *
-        * @param timeout
-        *            amount of time to wait before failing.
-        * @param unit
-        *            units of {@code timeout}. For example
-        *            {@link TimeUnit#MILLISECONDS}.
-        * @return the returned value.
-        * @throws DhtException
-        *             the asynchronous operation failed.
-        * @throws InterruptedException
-        *             the current thread was interrupted before the operation
-        *             completed.
-        * @throws TimeoutException
-        *             the timeout elapsed before the operation completed.
-        */
-       public abstract T get(long timeout, TimeUnit unit) throws DhtException,
-                       InterruptedException, TimeoutException;
-
-       private static class Value<T> extends Sync<T> {
-
-               private final CountDownLatch wait = new CountDownLatch(1);
-
-               private T data;
-
-               private DhtException error;
-
-               /**
-                * Wait for the asynchronous operation to complete.
-                * <p>
-                * To prevent application deadlock, waiting can only be performed with
-                * the supplied timeout.
-                *
-                * @param timeout
-                *            amount of time to wait before failing.
-                * @param unit
-                *            units of {@code timeout}. For example
-                *            {@link TimeUnit#MILLISECONDS}.
-                * @return the returned value.
-                * @throws DhtException
-                *             the asynchronous operation failed.
-                * @throws InterruptedException
-                *             the current thread was interrupted before the operation
-                *             completed.
-                * @throws TimeoutException
-                *             the timeout elapsed before the operation completed.
-                */
-               public T get(long timeout, TimeUnit unit) throws DhtException,
-                               InterruptedException, TimeoutException {
-                       if (wait.await(timeout, unit)) {
-                               if (error != null)
-                                       throw error;
-                               return data;
-                       }
-                       throw new TimeoutException();
-               }
-
-               public void onSuccess(T obj) {
-                       data = obj;
-                       wait.countDown();
-               }
-
-               public void onFailure(DhtException err) {
-                       error = err;
-                       wait.countDown();
-               }
-       }
-}
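
The class comment above already shows the basic pattern. A slightly fuller sketch, using the Database and ObjectIndexTable interfaces that appear later in this diff (all of these DHT types are removed by this commit, so the snippet is illustrative only):

    // Illustrative only: wrap an asynchronous object-index lookup in a
    // blocking call with a bounded wait, as the deleted DHT reader did.
    static Map<ObjectIndexKey, Collection<ObjectInfo>> lookup(Database db,
            Set<ObjectIndexKey> keys) throws DhtException,
            InterruptedException, TimeoutException {
        Sync<Map<ObjectIndexKey, Collection<ObjectInfo>>> sync = Sync.create();
        db.objectIndex().get(Context.READ_REPAIR, keys, sync);
        return sync.get(Timeout.seconds(30)); // never wait forever
    }
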
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Timeout.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Timeout.java
deleted file mode 100644 (file)
index 2e4f3a4..0000000
+++ /dev/null
@@ -1,242 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht;
-
-import java.text.MessageFormat;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.util.StringUtils;
-
-/** Length of time to wait for an operation before giving up. */
-public class Timeout {
-       /**
-        * Construct a new timeout, expressed in milliseconds.
-        *
-        * @param millis
-        *            number of milliseconds to wait.
-        * @return the timeout.
-        */
-       public static Timeout milliseconds(int millis) {
-               return new Timeout(millis, TimeUnit.MILLISECONDS);
-       }
-
-       /**
-        * Construct a new timeout, expressed in seconds.
-        *
-        * @param sec
-        *            number of seconds to wait.
-        * @return the timeout.
-        */
-       public static Timeout seconds(int sec) {
-               return new Timeout(sec, TimeUnit.SECONDS);
-       }
-
-       /**
-        * Construct a new timeout, expressed in (possibly fractional) seconds.
-        *
-        * @param sec
-        *            number of seconds to wait.
-        * @return the timeout.
-        */
-       public static Timeout seconds(double sec) {
-               return new Timeout((long) (sec * 1000), TimeUnit.MILLISECONDS);
-       }
-
-       /**
-        * Obtain a timeout from the configuration.
-        *
-        * @param cfg
-        *            configuration to read.
-        * @param section
-        *            section key to read.
-        * @param subsection
-        *            subsection to read, may be null.
-        * @param name
-        *            variable to read.
-        * @param defaultValue
-        *            default to return if no timeout is specified in the
-        *            configuration.
-        * @return the configured timeout.
-        */
-       public static Timeout getTimeout(Config cfg, String section,
-                       String subsection, String name, Timeout defaultValue) {
-               String valStr = cfg.getString(section, subsection, name);
-               if (valStr == null)
-                       return defaultValue;
-
-               valStr = valStr.trim();
-               if (valStr.length() == 0)
-                       return defaultValue;
-
-               Matcher m = matcher("^([1-9][0-9]*(?:\\.[0-9]*)?)\\s*(.*)$", valStr);
-               if (!m.matches())
-                       throw notTimeUnit(section, subsection, name, valStr);
-
-               String digits = m.group(1);
-               String unitName = m.group(2).trim();
-
-               long multiplier;
-               TimeUnit unit;
-               if ("".equals(unitName)) {
-                       multiplier = 1;
-                       unit = TimeUnit.MILLISECONDS;
-
-               } else if (anyOf(unitName, "ms", "millisecond", "milliseconds")) {
-                       multiplier = 1;
-                       unit = TimeUnit.MILLISECONDS;
-
-               } else if (anyOf(unitName, "s", "sec", "second", "seconds")) {
-                       multiplier = 1;
-                       unit = TimeUnit.SECONDS;
-
-               } else if (anyOf(unitName, "m", "min", "minute", "minutes")) {
-                       multiplier = 60;
-                       unit = TimeUnit.SECONDS;
-
-               } else if (anyOf(unitName, "h", "hr", "hour", "hours")) {
-                       multiplier = 3600;
-                       unit = TimeUnit.SECONDS;
-
-               } else
-                       throw notTimeUnit(section, subsection, name, valStr);
-
-               if (digits.indexOf('.') == -1) {
-                       try {
-                               return new Timeout(multiplier * Long.parseLong(digits), unit);
-                       } catch (NumberFormatException nfe) {
-                               throw notTimeUnit(section, subsection, name, valStr);
-                       }
-               } else {
-                       double inputTime;
-                       try {
-                               inputTime = multiplier * Double.parseDouble(digits);
-                       } catch (NumberFormatException nfe) {
-                               throw notTimeUnit(section, subsection, name, valStr);
-                       }
-
-                       if (unit == TimeUnit.MILLISECONDS) {
-                               TimeUnit newUnit = TimeUnit.NANOSECONDS;
-                               long t = (long) (inputTime * newUnit.convert(1, unit));
-                               return new Timeout(t, newUnit);
-
-                       } else if (unit == TimeUnit.SECONDS && multiplier == 1) {
-                               TimeUnit newUnit = TimeUnit.MILLISECONDS;
-                               long t = (long) (inputTime * newUnit.convert(1, unit));
-                               return new Timeout(t, newUnit);
-
-                       } else {
-                               return new Timeout((long) inputTime, unit);
-                       }
-               }
-       }
-
-       private static Matcher matcher(String pattern, String valStr) {
-               return Pattern.compile(pattern).matcher(valStr);
-       }
-
-       private static boolean anyOf(String a, String... cases) {
-               for (String b : cases) {
-                       if (StringUtils.equalsIgnoreCase(a, b))
-                               return true;
-               }
-               return false;
-       }
-
-       private static IllegalArgumentException notTimeUnit(String section,
-                       String subsection, String name, String valueString) {
-               String key = section
-                               + (subsection != null ? "." + subsection : "")
-                               + "." + name;
-               return new IllegalArgumentException(MessageFormat.format(
-                               DhtText.get().notTimeUnit, key, valueString));
-       }
-
-       private final long time;
-
-       private final TimeUnit unit;
-
-       /**
-        * Construct a new timeout.
-        *
-        * @param time
-        *            how long to wait.
-        * @param unit
-        *            the unit that {@code time} was expressed in.
-        */
-       public Timeout(long time, TimeUnit unit) {
-               this.time = time;
-               this.unit = unit;
-       }
-
-       /** @return how long to wait, expressed as {@link #getUnit()}s. */
-       public long getTime() {
-               return time;
-       }
-
-       /** @return the unit of measure for {@link #getTime()}. */
-       public TimeUnit getUnit() {
-               return unit;
-       }
-
-       @Override
-       public int hashCode() {
-               return (int) time;
-       }
-
-       @Override
-       public boolean equals(Object other) {
-               if (other instanceof Timeout)
-                       return getTime() == ((Timeout) other).getTime()
-                                       && getUnit().equals(((Timeout) other).getUnit());
-               return false;
-       }
-
-       @Override
-       public String toString() {
-               return getTime() + " " + getUnit();
-       }
-}
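
getTimeout() accepts a bare number of milliseconds as well as suffixed values such as "500 ms", "5 sec", "2 min" or "1 hour". A small sketch of reading one from a JGit Config (Config is public JGit API; the "dht" section and variable name are invented for the example, and Timeout itself is removed by this commit):

    import java.util.concurrent.TimeUnit;
    import org.eclipse.jgit.lib.Config;

    // Illustrative only: parse a timeout value from git configuration.
    Config cfg = new Config();
    cfg.setString("dht", null, "timeout", "5 sec");
    Timeout t = Timeout.getTimeout(cfg, "dht", null, "timeout",
            Timeout.milliseconds(500)); // default when the variable is unset
    // here t.getTime() == 5 and t.getUnit() == TimeUnit.SECONDS
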
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ChunkTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ChunkTable.java
deleted file mode 100644 (file)
index db0fded..0000000
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi;
-
-import java.util.Collection;
-import java.util.Map;
-import java.util.Set;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.storage.dht.AsyncCallback;
-import org.eclipse.jgit.storage.dht.ChunkKey;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.PackChunk;
-import org.eclipse.jgit.storage.dht.StreamingCallback;
-
-/**
- * Stores object data in compressed pack format.
- * <p>
- * Each chunk stores multiple objects, using the highly compressed and Git
- * native pack file format. Chunks are sized during insertion, but average
- * around 1 MB for historical chunks, and may be as small as a few KB for very
- * recent chunks that were written in small bursts.
- * <p>
- * Objects whose compressed form is too large to fit into a single chunk are
- * fragmented across multiple chunks, and the fragment information is used to
- * put them back together in the correct order. Since the fragmenting occurs
- * after data compression, random access to bytes of the large object is not
- * currently possible.
- * <p>
- * Chunk keys are very well distributed, by embedding a uniformly random number
- * at the start of the key, and also including a small time component. This
- * layout permits chunks to be evenly spread across a cluster of disks or
- * servers in a round-robin fashion (based on a hash of the leading bytes), but
- * also offers some chance for older chunks to be located near each other and
- * have that part of the storage system see less activity over time.
- */
-public interface ChunkTable {
-       /**
-        * Asynchronously load one or more chunks.
-        * <p>
-        * Callers are responsible for breaking up very large collections of chunk
-        * keys into smaller units, based on the reader's batch size option. Since
-        * chunks are typically 1 MB each, 10-20 keys is a reasonable batch size,
-        * but this depends on available JVM memory and the performance of this
-        * method obtaining chunks from the database.
-        *
-        * @param options
-        *            options to control reading.
-        * @param keys
-        *            the chunk keys to obtain.
-        * @param callback
-        *            receives the results when ready. If this is an instance of
-        *            {@link StreamingCallback}, implementors should try to deliver
-        *            results early.
-        */
-       public void get(Context options, Set<ChunkKey> keys,
-                       AsyncCallback<Collection<PackChunk.Members>> callback);
-
-       /**
-        * Asynchronously load one or more chunk meta fields.
-        * <p>
-        * Usually meta is loaded by {@link #get(Context, Set, AsyncCallback)}, but
-        * some uses may require looking up the fragment data without having the
-        * entire chunk.
-        *
-        * @param options
-        *            options to control reading.
-        * @param keys
-        *            the chunk keys to obtain.
-        * @param callback
-        *            receives the results when ready. If this is an instance of
-        *            {@link StreamingCallback}, implementors should try to deliver
-        *            results early.
-        */
-       public void getMeta(Context options, Set<ChunkKey> keys,
-                       AsyncCallback<Map<ChunkKey, ChunkMeta>> callback);
-
-       /**
-        * Put some (or all) of a single chunk.
-        * <p>
-        * The higher level storage layer typically stores chunks in pieces. It is
-        * common to first store the data, then much later store the fragments and
-        * index. Sometimes all of the members are ready at once, and can be put
-        * together as a single unit. This method handles both approaches to storing
-        * a chunk.
-        * <p>
-        * Implementors must use a partial writing approach, for example:
-        *
-        * <pre>
-        *   ColumnUpdateList list = ...;
-        *   if (chunk.getChunkData() != null)
-        *     list.addColumn(&quot;chunk_data&quot;, chunk.getChunkData());
-        *   if (chunk.getChunkIndex() != null)
-        *     list.addColumn(&quot;chunk_index&quot;, chunk.getChunkIndex());
-        *   if (chunk.getFragments() != null)
-        *     list.addColumn(&quot;fragments&quot;, chunk.getFragments());
-        *   createOrUpdateRow(chunk.getChunkKey(), list);
-        * </pre>
-        *
-        * @param chunk
-        *            description of the chunk to be stored.
-        * @param buffer
-        *            buffer to enqueue the put onto.
-        * @throws DhtException
-        *             if the buffer flushed and an enqueued operation failed.
-        */
-       public void put(PackChunk.Members chunk, WriteBuffer buffer)
-                       throws DhtException;
-
-       /**
-        * Completely remove a chunk and all of its data elements.
-        * <p>
-        * Chunk removal should occur as quickly as possible after the flush has
-        * completed, as the caller has already ensured the chunk is not in use.
-        *
-        * @param key
-        *            key of the chunk to remove.
-        * @param buffer
-        *            buffer to enqueue the remove onto.
-        * @throws DhtException
-        *             if the buffer flushed and an enqueued operation failed.
-        */
-       public void remove(ChunkKey key, WriteBuffer buffer) throws DhtException;
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/Context.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/Context.java
deleted file mode 100644 (file)
index b0e7ff4..0000000
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi;
-
-/**
- * Options used when accessing a {@link Database}.
- * <p>
- * <i>Warning:</i> This type may change from enumeration to class in the future.
- */
-public enum Context {
-       /** Perform a fast read, but may miss results. */
-       FAST_MISSING_OK,
-
-       /** Read from a local replica. */
-       LOCAL,
-
-       /** Repair the local replica if a read failed. */
-       READ_REPAIR;
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/Database.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/Database.java
deleted file mode 100644 (file)
index fbad5d8..0000000
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi;
-
-/**
- * A distributed database implementation.
- * <p>
- * A DHT provider must implement this interface to return table references for
- * each of the named tables. The database and the tables it returns are held as
- * singletons, and thus must be thread-safe. If the underlying implementation
- * needs to use individual "connections" for each operation, it is responsible
- * for setting up a connection pool, borrowing and returning resources within
- * each of the table APIs.
- * <p>
- * Most APIs on the tables are asynchronous and must perform their computation
- * in the background using a different thread than the caller. Implementations
- * that have only an underlying synchronous API should configure and use an
- * {@link java.util.concurrent.ExecutorService} to perform computation in the
- * background on a thread pool.
- * <p>
- * Tables returned by these methods should be singletons, as the higher level
- * DHT implementation usually invokes these methods each time it needs to use a
- * given table. The suggested implementation approach is:
- *
- * <pre>
- * class MyDatabase implements Database {
- *     private final RepositoryIndexTable rep = new MyRepositoryIndex();
- *
- *     private final RefTable ref = new MyRefTable();
- *
- *     public RepositoryIndexTable repositoryIndex() {
- *             return rep;
- *     }
- *
- *     public RefTable ref() {
- *             return ref;
- *     }
- * }
- * </pre>
- */
-public interface Database {
-       /** @return a handle to the table listing known repositories. */
-       public RepositoryIndexTable repositoryIndex();
-
-       /** @return a handle to the table storing repository metadata. */
-       public RepositoryTable repository();
-
-       /** @return a handle to the table listing references in a repository. */
-       public RefTable ref();
-
-       /** @return a handle to the table listing known objects. */
-       public ObjectIndexTable objectIndex();
-
-       /** @return a handle to the table listing pack data chunks. */
-       public ChunkTable chunk();
-
-       /**
-        * Create a new WriteBuffer for the current thread.
-        * <p>
-        * Unlike other methods on this interface, the returned buffer <b>must</b>
-        * be a new object on every invocation. Buffers do not need to be
-        * thread-safe.
-        *
-        * @return a new buffer to handle pending writes.
-        */
-       public WriteBuffer newWriteBuffer();
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ObjectIndexTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ObjectIndexTable.java
deleted file mode 100644 (file)
index 9245815..0000000
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi;
-
-import java.util.Collection;
-import java.util.Map;
-import java.util.Set;
-
-import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.storage.dht.AsyncCallback;
-import org.eclipse.jgit.storage.dht.ChunkKey;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.ObjectIndexKey;
-import org.eclipse.jgit.storage.dht.ObjectInfo;
-
-/**
- * Associates an {@link ObjectId} with the {@link ChunkKey} it is stored in.
- * <p>
- * This table provides a global index listing every single object within the
- * repository, and which chunks the object can be found in. Readers use this
- * table to find an object when they are forced to start from a bare SHA-1 that
- * was input by a user, or supplied over the network from a client.
- */
-public interface ObjectIndexTable {
-       /**
-        * Asynchronously locate one or more objects in the repository.
-        * <p>
-        * Callers are responsible for breaking up very large collections of objects
-        * into smaller units, based on the reader's batch size option. 1,000 to
-        * 10,000 is a reasonable range for the reader to batch on.
-        *
-        * @param options
-        *            options to control reading.
-        * @param objects
-        *            set of object names to locate the chunks of.
-        * @param callback
-        *            receives the results when ready.
-        */
-       public void get(Context options, Set<ObjectIndexKey> objects,
-                       AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> callback);
-
-       /**
-        * Record the fact that {@code objId} can be found by {@code info}.
-        * <p>
-        * If there is already data for {@code objId} in the table, this method
-        * should add the new chunk onto the existing data list.
-        * <p>
-        * This method should use batched asynchronous puts as much as possible.
-        * Initial imports of an existing repository may require millions of add
-        * operations to this table, one for each object being imported.
-        *
-        * @param objId
-        *            the unique ObjectId.
-        * @param info
-        *            a chunk that is known to store {@code objId}.
-        * @param buffer
-        *            buffer to enqueue the put onto.
-        * @throws DhtException
-        *             if the buffer flushed and an enqueued operation failed.
-        */
-       public void add(ObjectIndexKey objId, ObjectInfo info, WriteBuffer buffer)
-                       throws DhtException;
-
-       /**
-        * Remove a single chunk from an object.
-        * <p>
-        * If this is the last remaining chunk for the object, the object should
-        * also be removed from the table. Removal can be deferred, or can occur
-        * immediately. That is, {@code get()} may return the object with an empty
-        * collection, but to prevent unlimited disk usage the database should
-        * eventually remove the object.
-        *
-        * @param objId
-        *            the unique ObjectId.
-        * @param chunk
-        *            the chunk that needs to be removed from this object.
-        * @param buffer
-        *            buffer to enqueue the remove onto.
-        * @throws DhtException
-        *             if the buffer flushed and an enqueued operation failed.
-        */
-       public void remove(ObjectIndexKey objId, ChunkKey chunk, WriteBuffer buffer)
-                       throws DhtException;
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RefTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RefTable.java
deleted file mode 100644 (file)
index b46ca0b..0000000
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi;
-
-import java.util.Map;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.RefDataUtil;
-import org.eclipse.jgit.storage.dht.RefKey;
-import org.eclipse.jgit.storage.dht.RepositoryKey;
-
-/**
- * Tracks all branches and tags for a repository.
- * <p>
- * Each repository has one or more references, pointing to the most recent
- * revision on that branch, or to the tagged revision if it is a tag.
- */
-public interface RefTable {
-       /**
-        * Read all known references in the repository.
-        *
-        * @param options
-        *            options to control reading.
-        * @param repository
-        *            the repository to load the references from.
-        * @return map of all references. Empty map if there are no references.
-        * @throws DhtException
-        *             the database cannot be read.
-        * @throws TimeoutException
-        *             the operation to read the database timed out.
-        */
-       public Map<RefKey, RefData> getAll(Context options, RepositoryKey repository)
-                       throws DhtException, TimeoutException;
-
-       /**
-        * Compare a reference, and delete if it matches.
-        *
-        * @param refKey
-        *            reference to delete.
-        * @param oldData
-        *            the old data for the reference. The delete only occurs if the
-        *            value is still equal to {@code oldData}.
-        * @return true if the delete was successful; false if the current value
-        *         does not match {@code oldData}.
-        * @throws DhtException
-        *             the database cannot be updated.
-        * @throws TimeoutException
-        *             the operation to modify the database timed out.
-        */
-       public boolean compareAndRemove(RefKey refKey, RefData oldData)
-                       throws DhtException, TimeoutException;
-
-       /**
-        * Compare a reference, and put if it matches.
-        *
-        * @param refKey
-        *            reference to create or replace.
-        * @param oldData
-        *            the old data for the reference. The put only occurs if the
-        *            value is still equal to {@code oldData}. Use
-        *            {@link RefDataUtil#NONE} if the reference should not exist and
-        *            is being created.
-        * @param newData
-        *            new value to store.
-        * @return true if the put was successful; false if the current value does
-        *         not match {@code oldData}.
-        * @throws DhtException
-        *             the database cannot be updated.
-        * @throws TimeoutException
-        *             the operation to modify the database timed out.
-        */
-       public boolean compareAndPut(RefKey refKey, RefData oldData, RefData newData)
-                       throws DhtException, TimeoutException;
-}
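
Both compare-and-* methods above follow the usual compare-and-swap pattern: read the current value, attempt the swap, and retry on contention. A hedged sketch of a reference update built on that contract (all DHT types shown are removed by this commit):

    // Illustrative only: retry a reference update until the compare-and-put
    // succeeds or the retry budget is exhausted.
    static boolean updateRef(RefTable refs, RepositoryKey repo, RefKey key,
            RefData newData) throws DhtException, TimeoutException {
        for (int attempt = 0; attempt < 3; attempt++) {
            RefData old = refs.getAll(Context.LOCAL, repo).get(key);
            if (old == null)
                old = RefDataUtil.NONE; // reference does not exist yet
            if (refs.compareAndPut(key, old, newData))
                return true; // swap succeeded
            // another writer won the race; re-read and try again
        }
        return false;
    }
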
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryIndexTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryIndexTable.java
deleted file mode 100644 (file)
index 36afd13..0000000
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi;
-
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.RepositoryKey;
-import org.eclipse.jgit.storage.dht.RepositoryName;
-
-/**
- * Maps a repository name from a URL, to the internal {@link RepositoryKey}.
- * <p>
- * The internal identifier is used for all data storage, as it is part of the row
- * keys for each data row that makes up the repository. By using an internal
- * key, repositories can be efficiently renamed in O(1) time, without changing
- * existing data rows.
- */
-public interface RepositoryIndexTable {
-       /**
-        * Find a repository by name.
-        *
-        * @param name
-        *            name of the repository, from the URL.
-        * @return the internal key; null if not found.
-        * @throws DhtException
-        * @throws TimeoutException
-        */
-       public RepositoryKey get(RepositoryName name) throws DhtException,
-                       TimeoutException;
-
-       /**
-        * Atomically record the association of name to identifier.
-        * <p>
-        * This method must use some sort of transaction system to ensure the name
-        * either points at {@code key} when complete, or fails fast with an
-        * exception if the name is used by a different key. This may require
-        * running some sort of lock management service in parallel to the database.
-        *
-        * @param name
-        *            name of the repository.
-        * @param key
-        *            internal key used to find the repository's data.
-        * @throws DhtException
-        * @throws TimeoutException
-        */
-       public void putUnique(RepositoryName name, RepositoryKey key)
-                       throws DhtException, TimeoutException;
-
-       /**
-        * Remove the association of a name to an identifier.
-        * <p>
-        * This method must use some sort of transaction system to ensure the name
-        * is removed only if it currently references {@code key}. This may require
-        * running some sort of lock management service in parallel to the database.
-        *
-        * @param name
-        *            name of the repository.
-        * @param key
-        *            internal key defining the repository.
-        * @throws DhtException
-        * @throws TimeoutException
-        */
-       public void remove(RepositoryName name, RepositoryKey key)
-                       throws DhtException, TimeoutException;
-}
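
A short sketch of how the two lookup tables combine to open or create a repository by name, per the putUnique() contract above (all types here are removed by this commit; concurrent registration of the same name would surface as the exception putUnique() documents):

    // Illustrative only: resolve a repository name to its internal key,
    // allocating and registering a fresh key on first use.
    static RepositoryKey openOrCreate(Database db, RepositoryName name)
            throws DhtException, TimeoutException {
        RepositoryIndexTable index = db.repositoryIndex();
        RepositoryKey key = index.get(name);
        if (key == null) {
            key = db.repository().nextKey(); // allocate a new internal key
            index.putUnique(name, key);      // atomic name -> key binding
        }
        return key;
    }
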
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryTable.java
deleted file mode 100644 (file)
index 8f2dab8..0000000
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi;
-
-import java.util.Collection;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
-import org.eclipse.jgit.storage.dht.CachedPackKey;
-import org.eclipse.jgit.storage.dht.ChunkInfo;
-import org.eclipse.jgit.storage.dht.ChunkKey;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.RepositoryKey;
-
-/**
- * Tracks high-level information about all known repositories.
- */
-public interface RepositoryTable {
-       /**
-        * Generate a new unique RepositoryKey.
-        *
-        * @return a new unique key.
-        * @throws DhtException
-        *             keys cannot be generated at this time.
-        */
-       public RepositoryKey nextKey() throws DhtException;
-
-       /**
-        * Record the existence of a chunk.
-        *
-        * @param repo
-        *            repository owning the chunk.
-        * @param info
-        *            information about the chunk.
-        * @param buffer
-        *            buffer to enqueue the put onto.
-        * @throws DhtException
-        *             if the buffer flushed and an enqueued operation failed.
-        */
-       public void put(RepositoryKey repo, ChunkInfo info, WriteBuffer buffer)
-                       throws DhtException;
-
-       /**
-        * Remove the information about a chunk.
-        *
-        * @param repo
-        *            repository owning the chunk.
-        * @param chunk
-        *            the chunk that needs to be deleted.
-        * @param buffer
-        *            buffer to enqueue the remove onto.
-        * @throws DhtException
-        *             if the buffer flushed and an enqueued operation failed.
-        */
-       public void remove(RepositoryKey repo, ChunkKey chunk, WriteBuffer buffer)
-                       throws DhtException;
-
-       /**
-        * Get the cached packs, if any.
-        *
-        * @param repo
-        *            repository owning the packs.
-        * @return cached pack descriptions.
-        * @throws DhtException
-        * @throws TimeoutException
-        */
-       public Collection<CachedPackInfo> getCachedPacks(RepositoryKey repo)
-                       throws DhtException, TimeoutException;
-
-       /**
-        * Record the existence of a cached pack.
-        *
-        * @param repo
-        *            repository owning the pack.
-        * @param info
-        *            information about the pack.
-        * @param buffer
-        *            buffer to enqueue the put onto.
-        * @throws DhtException
-        *             if the buffer flushed and an enqueued operation failed.
-        */
-       public void put(RepositoryKey repo, CachedPackInfo info, WriteBuffer buffer)
-                       throws DhtException;
-
-       /**
-        * Remove the record of a cached pack.
-        *
-        * @param repo
-        *            repository owning the pack.
-        * @param key
-        *            key of the cached pack to remove.
-        * @param buffer
-        *            buffer to enqueue the remove onto.
-        * @throws DhtException
-        *             if the buffer flushed and an enqueued operation failed.
-        */
-       public void remove(RepositoryKey repo, CachedPackKey key, WriteBuffer buffer)
-                       throws DhtException;
-}
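Taken together, the RepositoryTable methods above were meant to be driven through a Database handle and a WriteBuffer (the next file in this diff). The following is only a rough usage sketch against the now-deleted API; the helper name is hypothetical, "db" stands for any spi.Database implementation, and imports from java.util, java.util.concurrent and the dht packages are assumed. No such code ever shipped with the project.

    static RepositoryKey recordFirstChunk(Database db, ChunkInfo firstChunk)
            throws DhtException, TimeoutException {
        RepositoryTable repos = db.repository();
        WriteBuffer buf = db.newWriteBuffer();

        RepositoryKey repo = repos.nextKey();   // allocate a unique repository key
        repos.put(repo, firstChunk, buf);       // enqueue the chunk record
        buf.flush();                            // block until the queued put lands

        // Cached pack descriptions can then be listed for the same repository
        // (expected to be empty for a brand-new repository).
        Collection<CachedPackInfo> packs = repos.getCachedPacks(repo);
        return repo;
    }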
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/WriteBuffer.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/WriteBuffer.java
deleted file mode 100644 (file)
index 5521ec2..0000000
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi;
-
-import org.eclipse.jgit.storage.dht.DhtException;
-
-/** Potentially buffers writes until full, or until flush. */
-public interface WriteBuffer {
-       /**
-        * Flush any pending writes, and wait for them to complete.
-        *
-        * @throws DhtException
-        *             one or more writes failed. As writes may occur in any order,
-        *             the exact state of the database is unspecified.
-        */
-       public void flush() throws DhtException;
-
-       /**
-        * Abort pending writes, and wait for acknowledgment.
-        * <p>
-        * Once a buffer has been aborted, it cannot be reused. Application code
-        * must discard the buffer instance and use a different buffer to issue
-        * subsequent operations.
-        * <p>
-        * If writes have not been started yet, they should be discarded and not
-        * submitted to the storage system.
-        * <p>
-        * If writes have already been started asynchronously in the background,
-        * this method may try to cancel them, but must wait for the operation to
-        * either complete or abort before returning. This allows callers to
-        * recover by scanning the storage system and correcting any partial
-        * writes.
-        *
-        * @throws DhtException
-        *             one or more already started writes failed.
-        */
-       public void abort() throws DhtException;
-}
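The flush()/abort() contract above implies one calling pattern for every spi table: enqueue writes against a buffer, flush on success, and abort (then discard the buffer) on failure. A hedged sketch of that pattern, reusing the RepositoryTable shown earlier in this diff; the helper name is hypothetical:

    static void writeOrAbort(Database db, RepositoryKey repo, ChunkInfo info)
            throws DhtException {
        WriteBuffer buf = db.newWriteBuffer();
        try {
            db.repository().put(repo, info, buf); // enqueued; may flush early if full
            buf.flush();                          // wait for every queued write
        } catch (DhtException err) {
            buf.abort();                          // cancel or wait out in-flight writes
            throw err;                            // an aborted buffer must not be reused
        }
    }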
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheBuffer.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheBuffer.java
deleted file mode 100644 (file)
index 4eb26bd..0000000
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import static java.util.Collections.singleton;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.Sync;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;
-import org.eclipse.jgit.storage.dht.spi.util.AbstractWriteBuffer;
-
-/** WriteBuffer implementation for a {@link CacheDatabase}. */
-public class CacheBuffer extends AbstractWriteBuffer {
-       private final WriteBuffer dbBuffer;
-
-       private final CacheService client;
-
-       private final Sync<Void> none;
-
-       private List<CacheService.Change> pending;
-
-       private List<CacheService.Change> afterFlush;
-
-       /**
-        * Initialize a new buffer.
-        *
-        * @param dbBuffer
-        *            the underlying database's own buffer.
-        * @param client
-        *            connection to the cache service.
-        * @param options
-        *            options controlling cache operations.
-        */
-       public CacheBuffer(WriteBuffer dbBuffer, CacheService client,
-                       CacheOptions options) {
-               super(null, options.getWriteBufferSize());
-               this.dbBuffer = dbBuffer;
-               this.client = client;
-               this.none = Sync.none();
-       }
-
-       /**
-        * Schedule removal of a key from the cache.
-        * <p>
-        * Unlike {@link #removeAfterFlush(CacheKey)}, these removals can be flushed
-        * when the cache buffer is full, potentially before any corresponding
-        * removal is written to the underlying database.
-        *
-        * @param key
-        *            key to remove.
-        * @throws DhtException
-        *             a prior flush failed.
-        */
-       public void remove(CacheKey key) throws DhtException {
-               modify(CacheService.Change.remove(key));
-       }
-
-       /**
-        * Schedule a removal only after the underlying database flushes.
-        * <p>
-        * Unlike {@link #remove(CacheKey)}, these removals are buffered until the
-        * application calls {@link #flush()} and aren't sent to the cache service
-        * until after the underlying database flush() operation is completed
-        * successfully.
-        *
-        * @param key
-        *            key to remove.
-        */
-       public void removeAfterFlush(CacheKey key) {
-               if (afterFlush == null)
-                       afterFlush = newList();
-               afterFlush.add(CacheService.Change.remove(key));
-       }
-
-       /**
-        * Schedule storing (or replacing) a key in the cache.
-        *
-        * @param key
-        *            key to store.
-        * @param value
-        *            new value to store.
-        * @throws DhtException
-        *             a prior flush failed.
-        */
-       public void put(CacheKey key, byte[] value) throws DhtException {
-               modify(CacheService.Change.put(key, value));
-       }
-
-       /**
-        * Schedule any cache change.
-        *
-        * @param op
-        *            the cache operation.
-        * @throws DhtException
-        *             a prior flush failed.
-        */
-       public void modify(CacheService.Change op) throws DhtException {
-               int sz = op.getKey().getBytes().length;
-               if (op.getData() != null)
-                       sz += op.getData().length;
-               if (add(sz)) {
-                       if (pending == null)
-                               pending = newList();
-                       pending.add(op);
-                       queued(sz);
-               } else {
-                       client.modify(singleton(op), wrap(none, sz));
-               }
-       }
-
-       /** @return the underlying database's own write buffer. */
-       public WriteBuffer getWriteBuffer() {
-               return dbBuffer;
-       }
-
-       @Override
-       protected void startQueuedOperations(int bytes) throws DhtException {
-               client.modify(pending, wrap(none, bytes));
-               pending = null;
-       }
-
-       public void flush() throws DhtException {
-               dbBuffer.flush();
-
-               if (afterFlush != null) {
-                       for (CacheService.Change op : afterFlush)
-                               modify(op);
-                       afterFlush = null;
-               }
-
-               super.flush();
-       }
-
-       @Override
-       public void abort() throws DhtException {
-               pending = null;
-               afterFlush = null;
-
-               dbBuffer.abort();
-               super.abort();
-       }
-
-       private static List<Change> newList() {
-               return new ArrayList<CacheService.Change>();
-       }
-}
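The difference between remove() and removeAfterFlush() above is purely one of ordering relative to the wrapped database buffer: a plain remove() may reach the cache service as soon as the buffer fills, while removeAfterFlush() is held back until the underlying database flush has succeeded (flush() above flushes dbBuffer first). An illustrative sketch only; the helper name is hypothetical and the CacheKey values are assumed to be built elsewhere:

    static void refreshCells(CacheDatabase cacheDb, CacheKey staleKey,
            CacheKey deferredKey, CacheKey hotKey, byte[] hotValue)
            throws DhtException {
        CacheBuffer buf = cacheDb.newWriteBuffer();
        buf.remove(staleKey);              // may be sent once the buffer is full
        buf.removeAfterFlush(deferredKey); // deferred until the DB buffer flushes
        buf.put(hotKey, hotValue);         // store or replace a cache cell
        buf.flush();                       // DB flush first, then deferred removals
    }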
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheChunkTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheChunkTable.java
deleted file mode 100644 (file)
index b7f94fd..0000000
+++ /dev/null
@@ -1,515 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import static java.util.Collections.singleton;
-import static java.util.Collections.singletonMap;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.storage.dht.AsyncCallback;
-import org.eclipse.jgit.storage.dht.ChunkKey;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.PackChunk;
-import org.eclipse.jgit.storage.dht.StreamingCallback;
-import org.eclipse.jgit.storage.dht.Sync;
-import org.eclipse.jgit.storage.dht.spi.ChunkTable;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;
-
-import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.CodedOutputStream;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.WireFormat;
-
-/** Cache wrapper around ChunkTable. */
-public class CacheChunkTable implements ChunkTable {
-       private final ChunkTable db;
-
-       private final ExecutorService executor;
-
-       private final CacheService client;
-
-       private final Sync<Void> none;
-
-       private final Namespace nsChunk = Namespace.CHUNK;
-
-       private final Namespace nsMeta = Namespace.CHUNK_META;
-
-       /**
-        * Initialize a new wrapper.
-        *
-        * @param dbTable
-        *            the underlying database's corresponding table.
-        * @param cacheDatabase
-        *            the cache database.
-        */
-       public CacheChunkTable(ChunkTable dbTable, CacheDatabase cacheDatabase) {
-               this.db = dbTable;
-               this.executor = cacheDatabase.getExecutorService();
-               this.client = cacheDatabase.getClient();
-               this.none = Sync.none();
-       }
-
-       public void get(Context options, Set<ChunkKey> keys,
-                       AsyncCallback<Collection<PackChunk.Members>> callback) {
-               List<CacheKey> toFind = new ArrayList<CacheKey>(keys.size());
-               for (ChunkKey k : keys)
-                       toFind.add(nsChunk.key(k));
-               client.get(toFind, new ChunkFromCache(options, keys, callback));
-       }
-
-       public void getMeta(Context options, Set<ChunkKey> keys,
-                       AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) {
-               List<CacheKey> toFind = new ArrayList<CacheKey>(keys.size());
-               for (ChunkKey k : keys)
-                       toFind.add(nsMeta.key(k));
-               client.get(toFind, new MetaFromCache(options, keys, callback));
-       }
-
-       public void put(PackChunk.Members chunk, WriteBuffer buffer)
-                       throws DhtException {
-               CacheBuffer buf = (CacheBuffer) buffer;
-               db.put(chunk, buf.getWriteBuffer());
-
-               // Only store fragmented meta. This is all callers should ask for.
-               if (chunk.hasMeta() && chunk.getMeta().getFragmentCount() != 0) {
-                       buf.put(nsMeta.key(chunk.getChunkKey()),
-                                       chunk.getMeta().toByteArray());
-               }
-
-               if (chunk.hasChunkData())
-                       buf.put(nsChunk.key(chunk.getChunkKey()), encode(chunk));
-               else
-                       buf.removeAfterFlush(nsChunk.key(chunk.getChunkKey()));
-       }
-
-       public void remove(ChunkKey key, WriteBuffer buffer) throws DhtException {
-               CacheBuffer buf = (CacheBuffer) buffer;
-               buf.remove(nsChunk.key(key));
-               buf.remove(nsMeta.key(key));
-               db.remove(key, buf.getWriteBuffer());
-       }
-
-       private static byte[] encode(PackChunk.Members members) {
-               // It's too slow to encode ByteBuffer through the standard code.
-               // Since the message is only 3 fields, do it by hand.
-               ByteBuffer data = members.getChunkDataAsByteBuffer();
-               ByteBuffer index = members.getChunkIndexAsByteBuffer();
-               ChunkMeta meta = members.getMeta();
-
-               int sz = 0;
-               if (data != null)
-                       sz += computeByteBufferSize(1, data);
-               if (index != null)
-                       sz += computeByteBufferSize(2, index);
-               if (meta != null)
-                       sz += CodedOutputStream.computeMessageSize(3, meta);
-
-               byte[] r = new byte[sz];
-               CodedOutputStream out = CodedOutputStream.newInstance(r);
-               try {
-                       if (data != null)
-                               writeByteBuffer(out, 1, data);
-                       if (index != null)
-                               writeByteBuffer(out, 2, index);
-                       if (meta != null)
-                               out.writeMessage(3, meta);
-               } catch (IOException err) {
-                       throw new RuntimeException("Cannot buffer chunk", err);
-               }
-               return r;
-       }
-
-       private static int computeByteBufferSize(int fieldNumber, ByteBuffer data) {
-               int n = data.remaining();
-               return CodedOutputStream.computeTagSize(fieldNumber)
-                               + CodedOutputStream.computeRawVarint32Size(n)
-                               + n;
-       }
-
-       private static void writeByteBuffer(CodedOutputStream out, int fieldNumber,
-                       ByteBuffer data) throws IOException {
-               byte[] d = data.array();
-               int p = data.arrayOffset() + data.position();
-               int n = data.remaining();
-               out.writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED);
-               out.writeRawVarint32(n);
-               out.writeRawBytes(d, p, n);
-       }
-
-       private static PackChunk.Members decode(ChunkKey key, byte[] raw) {
-               PackChunk.Members members = new PackChunk.Members();
-               members.setChunkKey(key);
-
-               // It's too slow to convert using the standard code, as copies
-               // are made. Instead find offsets in the stream and use that.
-               CodedInputStream in = CodedInputStream.newInstance(raw);
-               try {
-                       int tag = in.readTag();
-                       for (;;) {
-                               switch (WireFormat.getTagFieldNumber(tag)) {
-                               case 0:
-                                       return members;
-                               case 1: {
-                                       int cnt = in.readRawVarint32();
-                                       int ptr = in.getTotalBytesRead();
-                                       members.setChunkData(raw, ptr, cnt);
-                                       in.skipRawBytes(cnt);
-                                       tag = in.readTag();
-                                       if (WireFormat.getTagFieldNumber(tag) != 2)
-                                               continue;
-                               }
-                               //$FALL-THROUGH$
-                               case 2: {
-                                       int cnt = in.readRawVarint32();
-                                       int ptr = in.getTotalBytesRead();
-                                       members.setChunkIndex(raw, ptr, cnt);
-                                       in.skipRawBytes(cnt);
-                                       tag = in.readTag();
-                                       if (WireFormat.getTagFieldNumber(tag) != 3)
-                                               continue;
-                               }
-                               //$FALL-THROUGH$
-                               case 3: {
-                                       int cnt = in.readRawVarint32();
-                                       int oldLimit = in.pushLimit(cnt);
-                                       members.setMeta(ChunkMeta.parseFrom(in));
-                                       in.popLimit(oldLimit);
-                                       tag = in.readTag();
-                                       continue;
-                               }
-                               default:
-                                       in.skipField(tag);
-                               }
-                       }
-               } catch (IOException err) {
-                       throw new RuntimeException("Cannot decode chunk", err);
-               }
-       }
-
-       private class ChunkFromCache implements
-                       StreamingCallback<Map<CacheKey, byte[]>> {
-               private final Object lock = new Object();
-
-               private final Context options;
-
-               private final Set<ChunkKey> remaining;
-
-               private final AsyncCallback<Collection<PackChunk.Members>> normalCallback;
-
-               private final StreamingCallback<Collection<PackChunk.Members>> streamingCallback;
-
-               private final List<PackChunk.Members> all;
-
-               ChunkFromCache(Context options, Set<ChunkKey> keys,
-                               AsyncCallback<Collection<PackChunk.Members>> callback) {
-                       this.options = options;
-                       this.remaining = new HashSet<ChunkKey>(keys);
-                       this.normalCallback = callback;
-
-                       if (callback instanceof StreamingCallback<?>) {
-                               streamingCallback = (StreamingCallback<Collection<PackChunk.Members>>) callback;
-                               all = null;
-                       } else {
-                               streamingCallback = null;
-                               all = new ArrayList<PackChunk.Members>(keys.size());
-                       }
-               }
-
-               public void onPartialResult(Map<CacheKey, byte[]> result) {
-                       for (Map.Entry<CacheKey, byte[]> ent : result.entrySet()) {
-                               ChunkKey key = ChunkKey.fromBytes(ent.getKey().getBytes());
-                               PackChunk.Members members = decode(key, ent.getValue());
-
-                               if (streamingCallback != null) {
-                                       streamingCallback.onPartialResult(singleton(members));
-
-                                       synchronized (lock) {
-                                               remaining.remove(key);
-                                       }
-                               } else {
-                                       synchronized (lock) {
-                                               all.add(members);
-                                               remaining.remove(key);
-                                       }
-                               }
-                       }
-               }
-
-               public void onSuccess(Map<CacheKey, byte[]> result) {
-                       if (result != null && !result.isEmpty())
-                               onPartialResult(result);
-
-                       synchronized (lock) {
-                               if (remaining.isEmpty() || options == Context.FAST_MISSING_OK) {
-                                       normalCallback.onSuccess(all);
-                               } else {
-                                       db.get(options, remaining, new ChunkFromDatabase(all,
-                                                       normalCallback, streamingCallback));
-                               }
-                       }
-               }
-
-               public void onFailure(DhtException error) {
-                       // TODO(spearce) We may want to just drop to database here.
-                       normalCallback.onFailure(error);
-               }
-       }
-
-       private class ChunkFromDatabase implements
-                       StreamingCallback<Collection<PackChunk.Members>> {
-               private final Object lock = new Object();
-
-               private final List<PackChunk.Members> all;
-
-               private final AsyncCallback<Collection<PackChunk.Members>> normalCallback;
-
-               private final StreamingCallback<Collection<PackChunk.Members>> streamingCallback;
-
-               ChunkFromDatabase(
-                               List<PackChunk.Members> all,
-                               AsyncCallback<Collection<PackChunk.Members>> normalCallback,
-                               StreamingCallback<Collection<PackChunk.Members>> streamingCallback) {
-                       this.all = all;
-                       this.normalCallback = normalCallback;
-                       this.streamingCallback = streamingCallback;
-               }
-
-               public void onPartialResult(Collection<PackChunk.Members> result) {
-                       final List<PackChunk.Members> toPutIntoCache = copy(result);
-
-                       if (streamingCallback != null)
-                               streamingCallback.onPartialResult(result);
-                       else {
-                               synchronized (lock) {
-                                       all.addAll(result);
-                               }
-                       }
-
-                       // Encoding is rather expensive, so move the cache population
-                       // into a different background thread to prevent the current
-                       // database task from being starved of time.
-                       //
-                       executor.submit(new Runnable() {
-                               public void run() {
-                                       for (PackChunk.Members members : toPutIntoCache) {
-                                               ChunkKey key = members.getChunkKey();
-                                               Change op = Change.put(nsChunk.key(key), encode(members));
-                                               client.modify(singleton(op), none);
-                                       }
-                               }
-                       });
-               }
-
-               private <T> List<T> copy(Collection<T> result) {
-                       return new ArrayList<T>(result);
-               }
-
-               public void onSuccess(Collection<PackChunk.Members> result) {
-                       if (result != null && !result.isEmpty())
-                               onPartialResult(result);
-
-                       synchronized (lock) {
-                               normalCallback.onSuccess(all);
-                       }
-               }
-
-               public void onFailure(DhtException error) {
-                       normalCallback.onFailure(error);
-               }
-       }
-
-       private class MetaFromCache implements
-                       StreamingCallback<Map<CacheKey, byte[]>> {
-               private final Object lock = new Object();
-
-               private final Context options;
-
-               private final Set<ChunkKey> remaining;
-
-               private final AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback;
-
-               private final StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback;
-
-               private final Map<ChunkKey, ChunkMeta> all;
-
-               MetaFromCache(Context options, Set<ChunkKey> keys,
-                               AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) {
-                       this.options = options;
-                       this.remaining = new HashSet<ChunkKey>(keys);
-                       this.normalCallback = callback;
-
-                       if (callback instanceof StreamingCallback<?>) {
-                               streamingCallback = (StreamingCallback<Map<ChunkKey, ChunkMeta>>) callback;
-                               all = null;
-                       } else {
-                               streamingCallback = null;
-                               all = new HashMap<ChunkKey, ChunkMeta>();
-                       }
-               }
-
-               public void onPartialResult(Map<CacheKey, byte[]> result) {
-                       for (Map.Entry<CacheKey, byte[]> ent : result.entrySet()) {
-                               ChunkKey key = ChunkKey.fromBytes(ent.getKey().getBytes());
-                               ChunkMeta meta;
-                               try {
-                                       meta = ChunkMeta.parseFrom(ent.getValue());
-                               } catch (InvalidProtocolBufferException e) {
-                                       // Invalid meta message, remove the cell from cache.
-                                       client.modify(singleton(Change.remove(ent.getKey())),
-                                                       Sync.<Void> none());
-                                       continue;
-                               }
-
-                               if (streamingCallback != null) {
-                                       streamingCallback.onPartialResult(singletonMap(key, meta));
-
-                                       synchronized (lock) {
-                                               remaining.remove(key);
-                                       }
-                               } else {
-                                       synchronized (lock) {
-                                               all.put(key, meta);
-                                               remaining.remove(key);
-                                       }
-                               }
-                       }
-               }
-
-               public void onSuccess(Map<CacheKey, byte[]> result) {
-                       if (result != null && !result.isEmpty())
-                               onPartialResult(result);
-
-                       synchronized (lock) {
-                               if (remaining.isEmpty() || options == Context.FAST_MISSING_OK) {
-                                       normalCallback.onSuccess(all);
-                               } else {
-                                       db.getMeta(options, remaining, new MetaFromDatabase(all,
-                                                       normalCallback, streamingCallback));
-                               }
-                       }
-               }
-
-               public void onFailure(DhtException error) {
-                       // TODO(spearce) We may want to just drop to database here.
-                       normalCallback.onFailure(error);
-               }
-       }
-
-       private class MetaFromDatabase implements
-                       StreamingCallback<Map<ChunkKey, ChunkMeta>> {
-               private final Object lock = new Object();
-
-               private final Map<ChunkKey, ChunkMeta> all;
-
-               private final AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback;
-
-               private final StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback;
-
-               MetaFromDatabase(Map<ChunkKey, ChunkMeta> all,
-                               AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback,
-                               StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback) {
-                       this.all = all;
-                       this.normalCallback = normalCallback;
-                       this.streamingCallback = streamingCallback;
-               }
-
-               public void onPartialResult(Map<ChunkKey, ChunkMeta> result) {
-                       final Map<ChunkKey, ChunkMeta> toPutIntoCache = copy(result);
-
-                       if (streamingCallback != null)
-                               streamingCallback.onPartialResult(result);
-                       else {
-                               synchronized (lock) {
-                                       all.putAll(result);
-                               }
-                       }
-
-                       // Encoding is rather expensive, so move the cache population
-                       // into a different background thread to prevent the current
-                       // database task from being starved of time.
-                       //
-                       executor.submit(new Runnable() {
-                               public void run() {
-                                       for (Map.Entry<ChunkKey, ChunkMeta> ent
-                                                       : toPutIntoCache.entrySet()) {
-                                               ChunkKey key = ent.getKey();
-                                               Change op = Change.put(nsMeta.key(key),
-                                                               ent.getValue().toByteArray());
-                                               client.modify(singleton(op), none);
-                                       }
-                               }
-                       });
-               }
-
-               private <K, V> Map<K, V> copy(Map<K, V> result) {
-                       return new HashMap<K, V>(result);
-               }
-
-               public void onSuccess(Map<ChunkKey, ChunkMeta> result) {
-                       if (result != null && !result.isEmpty())
-                               onPartialResult(result);
-
-                       synchronized (lock) {
-                               normalCallback.onSuccess(all);
-                       }
-               }
-
-               public void onFailure(DhtException error) {
-                       normalCallback.onFailure(error);
-               }
-       }
-}
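Reads through CacheChunkTable are asynchronous: the cache service is consulted first, misses are retried against the wrapped ChunkTable, and whatever the database returns is re-encoded and pushed back into the cache from the executor. A rough caller sketch follows; it assumes AsyncCallback declares only the onSuccess/onFailure methods used in this diff, that Context.FAST_MISSING_OK tolerates absent chunks, and it uses a java.util.concurrent.CountDownLatch purely to signal completion:

    static void fetchChunks(ChunkTable chunks, Set<ChunkKey> keys,
            final CountDownLatch done) {
        chunks.get(Context.FAST_MISSING_OK, keys,
                new AsyncCallback<Collection<PackChunk.Members>>() {
            public void onSuccess(Collection<PackChunk.Members> result) {
                // result holds whatever was found in the cache and/or database
                done.countDown();
            }

            public void onFailure(DhtException error) {
                error.printStackTrace();
                done.countDown();
            }
        });
    }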
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheDatabase.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheDatabase.java
deleted file mode 100644 (file)
index da3ea5f..0000000
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import java.util.concurrent.ExecutorService;
-
-import org.eclipse.jgit.storage.dht.spi.ChunkTable;
-import org.eclipse.jgit.storage.dht.spi.Database;
-import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
-import org.eclipse.jgit.storage.dht.spi.RefTable;
-import org.eclipse.jgit.storage.dht.spi.RepositoryIndexTable;
-import org.eclipse.jgit.storage.dht.spi.RepositoryTable;
-
-/**
- * Uses a cache for fast lookups, but falls back to another Database.
- * <p>
- * On a read miss, this database falls back to reading the wrapped Database and
- * then puts the value it read into the cache for later access.
- */
-public class CacheDatabase implements Database {
-       private final Database database;
-
-       private final ExecutorService executorService;
-
-       private final CacheService client;
-
-       private final CacheOptions options;
-
-       private final CacheRepositoryIndexTable repositoryIndex;
-
-       private final CacheRepositoryTable repository;
-
-       private final CacheRefTable ref;
-
-       private final CacheObjectIndexTable objectIndex;
-
-       private final CacheChunkTable chunk;
-
-       /**
-        * Initialize a cache database.
-        *
-        * @param database
-        *            underlying storage database, used for read-misses and all
-        *            writes.
-        * @param executor
-        *            executor service to perform expensive cache updates in the
-        *            background.
-        * @param client
-        *            implementation of the cache service.
-        * @param options
-        *            configuration of the cache.
-        */
-       public CacheDatabase(Database database, ExecutorService executor,
-                       CacheService client, CacheOptions options) {
-               this.database = database;
-               this.executorService = executor;
-               this.client = client;
-               this.options = options;
-
-               repositoryIndex = new CacheRepositoryIndexTable(database
-                               .repositoryIndex(), this);
-
-               repository = new CacheRepositoryTable(database.repository(), this);
-               ref = new CacheRefTable(database.ref(), this);
-               objectIndex = new CacheObjectIndexTable(database.objectIndex(), this);
-               chunk = new CacheChunkTable(database.chunk(), this);
-       }
-
-       /** @return the underlying database the cache wraps. */
-       public Database getDatabase() {
-               return database;
-       }
-
-       /** @return executor pool for long operations. */
-       public ExecutorService getExecutorService() {
-               return executorService;
-       }
-
-       /** @return client connecting to the cache service. */
-       public CacheService getClient() {
-               return client;
-       }
-
-       /** @return connection options for the cache service. */
-       public CacheOptions getOptions() {
-               return options;
-       }
-
-       public RepositoryIndexTable repositoryIndex() {
-               return repositoryIndex;
-       }
-
-       public RepositoryTable repository() {
-               return repository;
-       }
-
-       public RefTable ref() {
-               return ref;
-       }
-
-       public ObjectIndexTable objectIndex() {
-               return objectIndex;
-       }
-
-       public ChunkTable chunk() {
-               return chunk;
-       }
-
-       public CacheBuffer newWriteBuffer() {
-               return new CacheBuffer(database.newWriteBuffer(), client, options);
-       }
-}
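CacheDatabase is therefore a read-through layer that can wrap any other Database: every table it exposes checks the cache service before touching the wrapped table, and cache re-population runs on the supplied executor so database callbacks are not delayed. Wiring it up looked roughly like the sketch below; the concrete Database, CacheService and CacheOptions instances are placeholders (no open source backend for them exists), the helper name is hypothetical, and the pool size is arbitrary:

    static Database wrapWithCache(Database storage, CacheService cacheClient,
            CacheOptions options) {
        // Pool used only for background cache re-population after read misses.
        ExecutorService pool = Executors.newFixedThreadPool(4);
        Database db = new CacheDatabase(storage, pool, cacheClient, options);
        // db.chunk(), db.objectIndex(), db.ref(), ... now consult the cache first.
        return db;
    }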
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheKey.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheKey.java
deleted file mode 100644 (file)
index 67c6c0f..0000000
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import java.util.Arrays;
-
-import org.eclipse.jgit.storage.dht.RowKey;
-import org.eclipse.jgit.util.RawParseUtils;
-
-/** Simple byte array based key for cache storage. */
-public class CacheKey {
-       private final Namespace ns;
-
-       private final byte[] key;
-
-       private volatile int hashCode;
-
-       /**
-        * Wrap a database key.
-        *
-        * @param ns
-        *            the namespace the key is contained within.
-        * @param key
-        *            the key to wrap.
-        */
-       public CacheKey(Namespace ns, RowKey key) {
-               this(ns, key.asBytes());
-       }
-
-       /**
-        * Wrap a byte array.
-        *
-        * @param ns
-        *            the namespace the key is contained within.
-        * @param key
-        *            the key to wrap.
-        */
-       public CacheKey(Namespace ns, byte[] key) {
-               this.ns = ns;
-               this.key = key;
-       }
-
-       /** @return namespace to segregate keys by. */
-       public Namespace getNamespace() {
-               return ns;
-       }
-
-       /** @return this key's bytes, within {@link #getNamespace()}. */
-       public byte[] getBytes() {
-               return key;
-       }
-
-       @Override
-       public int hashCode() {
-               if (hashCode == 0) {
-                       int h = 5381;
-                       for (int ptr = 0; ptr < key.length; ptr++)
-                               h = ((h << 5) + h) + (key[ptr] & 0xff);
-                       if (h == 0)
-                               h = 1;
-                       hashCode = h;
-               }
-               return hashCode;
-       }
-
-       @Override
-       public boolean equals(Object other) {
-               if (this == other)
-                       return true;
-               if (other instanceof CacheKey) {
-                       CacheKey m = (CacheKey) other;
-                       return ns.equals(m.ns) && Arrays.equals(key, m.key);
-               }
-               return false;
-       }
-
-       @Override
-       public String toString() {
-               return ns + ":" + RawParseUtils.decode(key);
-       }
-}
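CacheKey equality includes the Namespace as well as the key bytes, which is what keeps, for example, the chunk-data and chunk-meta cells for one ChunkKey from colliding in the cache; hashCode() is a lazily computed DJB2-style hash over the bytes. A brief illustration, assuming rowKey is any of the dht RowKey types and that the helper name is hypothetical:

    static boolean namespacesCollide(RowKey rowKey) {
        byte[] bytes = rowKey.asBytes();                       // serialized row key
        CacheKey data = new CacheKey(Namespace.CHUNK, bytes);
        CacheKey meta = new CacheKey(Namespace.CHUNK_META, bytes);
        // Same bytes, different namespaces: the two keys are never equal, so
        // chunk data and chunk meta occupy distinct cells in the cache service.
        return data.equals(meta);                              // always false
    }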
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheObjectIndexTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheObjectIndexTable.java
deleted file mode 100644 (file)
index 0cd3549..0000000
+++ /dev/null
@@ -1,324 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex;
-import org.eclipse.jgit.storage.dht.AsyncCallback;
-import org.eclipse.jgit.storage.dht.ChunkKey;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.ObjectIndexKey;
-import org.eclipse.jgit.storage.dht.ObjectInfo;
-import org.eclipse.jgit.storage.dht.StreamingCallback;
-import org.eclipse.jgit.storage.dht.Sync;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;
-
-import com.google.protobuf.InvalidProtocolBufferException;
-
-/** Cache wrapper around ObjectIndexTable. */
-public class CacheObjectIndexTable implements ObjectIndexTable {
-       private final ObjectIndexTable db;
-
-       private final ExecutorService executor;
-
-       private final CacheService client;
-
-       private final Namespace ns = Namespace.OBJECT_INDEX;
-
-       /**
-        * Initialize a new wrapper.
-        *
-        * @param dbTable
-        *            the underlying database's corresponding table.
-        * @param cacheDatabase
-        *            the cache database.
-        */
-       public CacheObjectIndexTable(ObjectIndexTable dbTable,
-                       CacheDatabase cacheDatabase) {
-               this.db = dbTable;
-               this.executor = cacheDatabase.getExecutorService();
-               this.client = cacheDatabase.getClient();
-       }
-
-       public void get(Context options, Set<ObjectIndexKey> objects,
-                       AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> callback) {
-               List<CacheKey> toFind = new ArrayList<CacheKey>(objects.size());
-               for (ObjectIndexKey k : objects)
-                       toFind.add(ns.key(k));
-               client.get(toFind, new LoaderFromCache(options, objects, callback));
-       }
-
-       public void add(ObjectIndexKey objId, ObjectInfo info, WriteBuffer buffer)
-                       throws DhtException {
-               // During addition, the cache is not populated. This prevents a
-               // race condition when the cache is cold. Readers need to scan
-               // the database and ensure the oldest ObjectInfo is loaded into
-               // the cache in order to allow PackChunk to break delta cycles.
-               //
-               // This does have a small performance penalty, as recently added
-               // objects are often read not long after they were written. But
-               // without good multi-system transaction support between the
-               // cache and the underlying storage we cannot do better.
-               //
-               db.add(objId, info, ((CacheBuffer) buffer).getWriteBuffer());
-       }
-
-       public void remove(ObjectIndexKey objId, ChunkKey chunk, WriteBuffer buffer)
-                       throws DhtException {
-               CacheBuffer buf = (CacheBuffer) buffer;
-               db.remove(objId, chunk, buf.getWriteBuffer());
-
-               // TODO This suffers from a race condition. The removal from the
-               // cache can occur before the database update takes place, and a
-               // concurrent reader might re-populate the cache with the stale data.
-               //
-               buf.remove(ns.key(objId));
-       }
-
-       private class LoaderFromCache implements
-                       StreamingCallback<Map<CacheKey, byte[]>> {
-               private final Object lock = new Object();
-
-               private final Context options;
-
-               private final Set<ObjectIndexKey> remaining;
-
-               private final AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> normalCallback;
-
-               private final StreamingCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> streamingCallback;
-
-               private final Map<ObjectIndexKey, Collection<ObjectInfo>> all;
-
-               LoaderFromCache(
-                               Context options,
-                               Set<ObjectIndexKey> objects,
-                               AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> callback) {
-                       this.options = options;
-                       this.remaining = new HashSet<ObjectIndexKey>(objects);
-                       this.normalCallback = callback;
-
-                       if (callback instanceof StreamingCallback<?>) {
-                               streamingCallback = (StreamingCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>>) callback;
-                               all = null;
-                       } else {
-                               streamingCallback = null;
-                               all = new HashMap<ObjectIndexKey, Collection<ObjectInfo>>();
-                       }
-               }
-
-               public void onPartialResult(Map<CacheKey, byte[]> result) {
-                       Map<ObjectIndexKey, Collection<ObjectInfo>> tmp;
-                       if (streamingCallback != null)
-                               tmp = new HashMap<ObjectIndexKey, Collection<ObjectInfo>>();
-                       else
-                               tmp = null;
-
-                       for (Map.Entry<CacheKey, byte[]> e : result.entrySet()) {
-                               ObjectIndexKey objKey;
-                               Collection<ObjectInfo> list;
-                               try {
-                                       list = decode(e.getValue());
-                               } catch (InvalidProtocolBufferException badCell) {
-                                       client.modify(
-                                                       Collections.singleton(Change.remove(e.getKey())),
-                                                       Sync.<Void> none());
-                                       continue;
-                               }
-                               objKey = ObjectIndexKey.fromBytes(e.getKey().getBytes());
-
-                               if (tmp != null)
-                                       tmp.put(objKey, list);
-                               else {
-                                       synchronized (lock) {
-                                               all.put(objKey, list);
-                                               remaining.remove(objKey);
-                                       }
-                               }
-                       }
-
-                       if (tmp != null) {
-                               streamingCallback.onPartialResult(tmp);
-                               synchronized (lock) {
-                                       remaining.removeAll(tmp.keySet());
-                               }
-                       }
-               }
-
-               private Collection<ObjectInfo> decode(byte[] value)
-                               throws InvalidProtocolBufferException {
-                       CachedObjectIndex cacheEntry = CachedObjectIndex.parseFrom(value);
-                       int sz = cacheEntry.getItemCount();
-                       ObjectInfo[] r = new ObjectInfo[sz];
-                       for (int i = 0; i < sz; i++) {
-                               CachedObjectIndex.Item item = cacheEntry.getItem(i);
-                               r[i] = new ObjectInfo(
-                                               ChunkKey.fromString(item.getChunkKey()),
-                                               item.getTime(),
-                                               item.getObjectInfo());
-                       }
-                       return Arrays.asList(r);
-               }
-
-               public void onSuccess(Map<CacheKey, byte[]> result) {
-                       if (result != null && !result.isEmpty())
-                               onPartialResult(result);
-
-                       synchronized (lock) {
-                               if (remaining.isEmpty() || options == Context.FAST_MISSING_OK) {
-                                       normalCallback.onSuccess(all);
-                               } else {
-                                       db.get(options, remaining, new LoaderFromDatabase(all,
-                                                       normalCallback, streamingCallback));
-                               }
-                       }
-               }
-
-               public void onFailure(DhtException error) {
-                       // TODO(spearce) We may want to just drop to database here.
-                       normalCallback.onFailure(error);
-               }
-       }
-
-       private class LoaderFromDatabase implements
-                       StreamingCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> {
-               private final Object lock = new Object();
-
-               private final Map<ObjectIndexKey, Collection<ObjectInfo>> all;
-
-               private final AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> normalCallback;
-
-               private final StreamingCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> streamingCallback;
-
-               LoaderFromDatabase(
-                               Map<ObjectIndexKey, Collection<ObjectInfo>> all,
-                               AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> normalCallback,
-                               StreamingCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> streamingCallback) {
-                       this.all = all;
-                       this.normalCallback = normalCallback;
-                       this.streamingCallback = streamingCallback;
-               }
-
-               public void onPartialResult(
-                               Map<ObjectIndexKey, Collection<ObjectInfo>> result) {
-                       final Map<ObjectIndexKey, Collection<ObjectInfo>> toPut = copy(result);
-
-                       if (streamingCallback != null)
-                               streamingCallback.onPartialResult(result);
-                       else {
-                               synchronized (lock) {
-                                       all.putAll(result);
-                               }
-                       }
-
-                       // Encoding is rather expensive, so move the cache population
-                       // into a different background thread to prevent the current
-                       // database task from being starved of time.
-                       //
-                       executor.submit(new Runnable() {
-                               public void run() {
-                                       List<Change> ops = new ArrayList<Change>(toPut.size());
-
-                                       for (Map.Entry<ObjectIndexKey, Collection<ObjectInfo>> e : all(toPut)) {
-                                               List<ObjectInfo> items = copy(e.getValue());
-                                               ObjectInfo.sort(items);
-                                               ops.add(Change.put(ns.key(e.getKey()), encode(items)));
-                                       }
-
-                                       client.modify(ops, Sync.<Void> none());
-                               }
-
-                               private byte[] encode(List<ObjectInfo> items) {
-                                       CachedObjectIndex.Builder b;
-                                       b = CachedObjectIndex.newBuilder();
-                                       for (ObjectInfo info : items) {
-                                               CachedObjectIndex.Item.Builder i = b.addItemBuilder();
-                                               i.setChunkKey(info.getChunkKey().asString());
-                                               i.setObjectInfo(info.getData());
-                                               if (0 < info.getTime())
-                                                       i.setTime(info.getTime());
-                                       }
-                                       return b.build().toByteArray();
-                               }
-                       });
-               }
-
-               private <K, V> Map<K, V> copy(Map<K, V> map) {
-                       return new HashMap<K, V>(map);
-               }
-
-               private <T> List<T> copy(Collection<T> result) {
-                       return new ArrayList<T>(result);
-               }
-
-               private <K, V> Set<Map.Entry<K, V>> all(final Map<K, V> toPut) {
-                       return toPut.entrySet();
-               }
-
-               public void onSuccess(Map<ObjectIndexKey, Collection<ObjectInfo>> result) {
-                       if (result != null && !result.isEmpty())
-                               onPartialResult(result);
-
-                       synchronized (lock) {
-                               normalCallback.onSuccess(all);
-                       }
-               }
-
-               public void onFailure(DhtException error) {
-                       normalCallback.onFailure(error);
-               }
-       }
-}
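
The two callback classes above implement a read-through cache with asynchronous write-back: whatever the memcache layer already holds is returned (or streamed) immediately, the remaining keys are fetched from the underlying ObjectIndexTable, and the freshly decoded entries are pushed back into the cache on a separate executor so the database callback is not held up by protobuf encoding. A minimal, self-contained sketch of that pattern in plain Java follows; the class and field names are illustrative, not part of JGit.

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Read-through cache sketch: answer from the cache where possible, fall back
// to the backing store for misses, repopulate the cache off the lookup path.
class SimpleReadThroughCache {
    private final ConcurrentMap<String, byte[]> cache = new ConcurrentHashMap<>();
    private final Map<String, byte[]> backingStore;
    private final ExecutorService executor = Executors.newSingleThreadExecutor();

    SimpleReadThroughCache(Map<String, byte[]> backingStore) {
        this.backingStore = backingStore;
    }

    Map<String, byte[]> get(Collection<String> keys) {
        Map<String, byte[]> out = new HashMap<>();
        List<String> missing = new ArrayList<>();
        for (String k : keys) {
            byte[] v = cache.get(k);
            if (v != null)
                out.put(k, v);
            else
                missing.add(k);
        }
        for (String k : missing) {
            byte[] v = backingStore.get(k);
            if (v != null) {
                out.put(k, v);
                // Populate the cache on another thread, as LoaderFromDatabase
                // did with its executor.submit() call above.
                executor.submit(() -> cache.put(k, v));
            }
        }
        return out;
    }
}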
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheOptions.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheOptions.java
deleted file mode 100644 (file)
index 9eef55c..0000000
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.storage.dht.Timeout;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-
-/** Options to configure the cache. */
-public class CacheOptions {
-       private Timeout timeout;
-
-       private int writeBufferSize;
-
-       /** Initialize default options. */
-       public CacheOptions() {
-               setTimeout(Timeout.milliseconds(500));
-               setWriteBufferSize(512 * 1024);
-       }
-
-       /** @return default timeout for all operations. */
-       public Timeout getTimeout() {
-               return timeout;
-       }
-
-       /**
-        * Set the default timeout to wait on long operations.
-        *
-        * @param maxWaitTime
-        *            new wait time.
-        * @return {@code this}
-        */
-       public CacheOptions setTimeout(Timeout maxWaitTime) {
-               if (maxWaitTime == null || maxWaitTime.getTime() < 0)
-                       throw new IllegalArgumentException();
-               timeout = maxWaitTime;
-               return this;
-       }
-
-       /** @return maximum number of outstanding bytes in a {@link WriteBuffer}. */
-       public int getWriteBufferSize() {
-               return writeBufferSize;
-       }
-
-       /**
-        * Set the maximum number of outstanding bytes in a {@link WriteBuffer}.
-        *
-        * @param sizeInBytes
-        *            maximum number of bytes.
-        * @return {@code this}
-        */
-       public CacheOptions setWriteBufferSize(int sizeInBytes) {
-               writeBufferSize = Math.max(1024, sizeInBytes);
-               return this;
-       }
-
-       /**
-        * Update properties by setting fields from the configuration.
-        * <p>
-        * If a property is not defined in the configuration, then it is left
-        * unmodified.
-        *
-        * @param rc
-        *            configuration to read properties from.
-        * @return {@code this}
-        */
-       public CacheOptions fromConfig(final Config rc) {
-               setTimeout(Timeout.getTimeout(rc, "cache", "dht", "timeout", getTimeout()));
-               setWriteBufferSize(rc.getInt("cache", "dht", "writeBufferSize", getWriteBufferSize()));
-               return this;
-       }
-}
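
CacheOptions pulls both of its settings from the "cache.dht" section of a JGit Config and keeps the defaults (500 ms timeout, 512 KiB write buffer) for anything left unset. A small usage sketch against the now-deleted class; it no longer compiles on current JGit and the values shown are only an example.

import org.eclipse.jgit.lib.Config;

// Sketch only: CacheOptions and Timeout were removed with the storage.dht package.
Config rc = new Config();
rc.setInt("cache", "dht", "writeBufferSize", 1024 * 1024); // raise the buffer to 1 MiB
CacheOptions opts = new CacheOptions().fromConfig(rc);
// "cache.dht.timeout" is not set here, so the 500 ms default stays in effect.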
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRefTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRefTable.java
deleted file mode 100644 (file)
index 2b6c8da..0000000
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import java.util.Map;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.RefKey;
-import org.eclipse.jgit.storage.dht.RepositoryKey;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.RefTable;
-
-/**
- * Cache wrapper around RefTable.
- * <p>
- * Currently this is a straight pass-through.
- */
-public class CacheRefTable implements RefTable {
-       private final RefTable db;
-
-       /**
-        * Initialize a new wrapper.
-        *
-        * @param dbTable
-        *            the underlying database's corresponding table.
-        * @param cacheDatabase
-        *            the cache database.
-        */
-       public CacheRefTable(RefTable dbTable, CacheDatabase cacheDatabase) {
-               this.db = dbTable;
-       }
-
-       public Map<RefKey, RefData> getAll(Context options, RepositoryKey repository)
-                       throws DhtException, TimeoutException {
-               return db.getAll(options, repository);
-       }
-
-       public boolean compareAndRemove(RefKey refKey, RefData oldData)
-                       throws DhtException, TimeoutException {
-               return db.compareAndRemove(refKey, oldData);
-       }
-
-       public boolean compareAndPut(RefKey refKey, RefData oldData, RefData newData)
-                       throws DhtException, TimeoutException {
-               return db.compareAndPut(refKey, oldData, newData);
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryIndexTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryIndexTable.java
deleted file mode 100644 (file)
index b50092c..0000000
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import static java.util.Collections.emptyMap;
-import static java.util.Collections.singleton;
-
-import java.util.Map;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.RepositoryKey;
-import org.eclipse.jgit.storage.dht.RepositoryName;
-import org.eclipse.jgit.storage.dht.Sync;
-import org.eclipse.jgit.storage.dht.spi.RepositoryIndexTable;
-import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;
-
-/** Cache wrapper around RepositoryIndexTable. */
-public class CacheRepositoryIndexTable implements RepositoryIndexTable {
-       private final RepositoryIndexTable db;
-
-       private final CacheService client;
-
-       private final CacheOptions options;
-
-       private final Namespace ns;
-
-       private final Sync<Void> none;
-
-       /**
-        * Initialize a new wrapper.
-        *
-        * @param dbTable
-        *            the underlying database's corresponding table.
-        * @param cacheDatabase
-        *            the cache database.
-        */
-       public CacheRepositoryIndexTable(RepositoryIndexTable dbTable,
-                       CacheDatabase cacheDatabase) {
-               this.db = dbTable;
-               this.client = cacheDatabase.getClient();
-               this.options = cacheDatabase.getOptions();
-               this.ns = Namespace.REPOSITORY_INDEX;
-               this.none = Sync.none();
-       }
-
-       public RepositoryKey get(RepositoryName name) throws DhtException,
-                       TimeoutException {
-               CacheKey memKey = ns.key(name);
-               Sync<Map<CacheKey, byte[]>> sync = Sync.create();
-               client.get(singleton(memKey), sync);
-
-               Map<CacheKey, byte[]> result;
-               try {
-                       result = sync.get(options.getTimeout());
-               } catch (InterruptedException e) {
-                       throw new TimeoutException();
-               } catch (TimeoutException timeout) {
-                       // Fall through and read the database directly.
-                       result = emptyMap();
-               }
-
-               byte[] data = result.get(memKey);
-               if (data != null) {
-                       if (data.length == 0)
-                               return null;
-                       return RepositoryKey.fromBytes(data);
-               }
-
-               RepositoryKey key = db.get(name);
-               data = key != null ? key.asBytes() : new byte[0];
-               client.modify(singleton(Change.put(memKey, data)), none);
-               return key;
-       }
-
-       public void putUnique(RepositoryName name, RepositoryKey key)
-                       throws DhtException, TimeoutException {
-               db.putUnique(name, key);
-
-               Sync<Void> sync = Sync.create();
-               CacheKey memKey = ns.key(name);
-               byte[] data = key.asBytes();
-               client.modify(singleton(Change.put(memKey, data)), sync);
-               try {
-                       sync.get(options.getTimeout());
-               } catch (InterruptedException e) {
-                       throw new TimeoutException();
-               }
-       }
-
-       public void remove(RepositoryName name, RepositoryKey key)
-                       throws DhtException, TimeoutException {
-               db.remove(name, key);
-
-               Sync<Void> sync = Sync.create();
-               CacheKey memKey = ns.key(name);
-               client.modify(singleton(Change.remove(memKey)), sync);
-               try {
-                       sync.get(options.getTimeout());
-               } catch (InterruptedException e) {
-                       throw new TimeoutException();
-               }
-       }
-}
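
get() above is a cache-aside lookup with negative caching: a zero-length value stored under the repository name means "looked up before, known to be absent", so repeated misses never reach the database, while putUnique() and remove() keep the cache in step with writes. The negative-caching convention in a self-contained sketch (plain Java, not the deleted API):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;

// A zero-length value records a confirmed miss so the backing store is asked
// about each absent name at most once.
class NegativeCache {
    private static final byte[] ABSENT = new byte[0];
    private final ConcurrentMap<String, byte[]> cache = new ConcurrentHashMap<>();

    byte[] lookup(String name, Function<String, byte[]> database) {
        byte[] v = cache.get(name);
        if (v != null)
            return v.length == 0 ? null : v; // cached hit or cached miss
        byte[] fromDb = database.apply(name);
        cache.put(name, fromDb != null ? fromDb : ABSENT);
        return fromDb;
    }
}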
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryTable.java
deleted file mode 100644 (file)
index a378e0a..0000000
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import static java.util.Collections.emptyMap;
-import static java.util.Collections.singleton;
-
-import java.util.Collection;
-import java.util.Map;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList;
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
-import org.eclipse.jgit.storage.dht.CachedPackKey;
-import org.eclipse.jgit.storage.dht.ChunkInfo;
-import org.eclipse.jgit.storage.dht.ChunkKey;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.RepositoryKey;
-import org.eclipse.jgit.storage.dht.Sync;
-import org.eclipse.jgit.storage.dht.spi.RepositoryTable;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;
-
-import com.google.protobuf.InvalidProtocolBufferException;
-
-/** Cache wrapper around RepositoryTable. */
-public class CacheRepositoryTable implements RepositoryTable {
-       private final RepositoryTable db;
-
-       private final CacheService client;
-
-       private final CacheOptions options;
-
-       private final Namespace nsCachedPack = Namespace.CACHED_PACK;
-
-       private final Sync<Void> none;
-
-       /**
-        * Initialize a new wrapper.
-        *
-        * @param dbTable
-        *            the underlying database's corresponding table.
-        * @param cacheDatabase
-        *            the cache database.
-        */
-       public CacheRepositoryTable(RepositoryTable dbTable,
-                       CacheDatabase cacheDatabase) {
-               this.db = dbTable;
-               this.client = cacheDatabase.getClient();
-               this.options = cacheDatabase.getOptions();
-               this.none = Sync.none();
-       }
-
-       public RepositoryKey nextKey() throws DhtException {
-               return db.nextKey();
-       }
-
-       public void put(RepositoryKey repo, ChunkInfo info, WriteBuffer buffer)
-                       throws DhtException {
-               CacheBuffer buf = (CacheBuffer) buffer;
-               db.put(repo, info, buf.getWriteBuffer());
-       }
-
-       public void remove(RepositoryKey repo, ChunkKey chunk, WriteBuffer buffer)
-                       throws DhtException {
-               CacheBuffer buf = (CacheBuffer) buffer;
-               db.remove(repo, chunk, buf.getWriteBuffer());
-       }
-
-       public Collection<CachedPackInfo> getCachedPacks(RepositoryKey repo)
-                       throws DhtException, TimeoutException {
-               CacheKey memKey = nsCachedPack.key(repo);
-               Sync<Map<CacheKey, byte[]>> sync = Sync.create();
-               client.get(singleton(memKey), sync);
-
-               Map<CacheKey, byte[]> result;
-               try {
-                       result = sync.get(options.getTimeout());
-               } catch (InterruptedException e) {
-                       throw new TimeoutException();
-               } catch (TimeoutException timeout) {
-                       // Fall through and read the database directly.
-                       result = emptyMap();
-               }
-
-               byte[] data = result.get(memKey);
-               if (data != null) {
-                       try {
-                               return CachedPackInfoList.parseFrom(data).getPackList();
-                       } catch (InvalidProtocolBufferException e) {
-                               // Invalidate the cache entry and fall through.
-                               client.modify(singleton(Change.remove(memKey)), none);
-                       }
-               }
-
-               Collection<CachedPackInfo> r = db.getCachedPacks(repo);
-               CachedPackInfoList.Builder list = CachedPackInfoList.newBuilder();
-               list.addAllPack(r);
-               client.modify(
-                               singleton(Change.put(memKey, list.build().toByteArray())),
-                               none);
-               return r;
-       }
-
-       public void put(RepositoryKey repo, CachedPackInfo info, WriteBuffer buffer)
-                       throws DhtException {
-               CacheBuffer buf = (CacheBuffer) buffer;
-               db.put(repo, info, buf.getWriteBuffer());
-               buf.removeAfterFlush(nsCachedPack.key(repo));
-       }
-
-       public void remove(RepositoryKey repo, CachedPackKey key, WriteBuffer buffer)
-                       throws DhtException {
-               CacheBuffer buf = (CacheBuffer) buffer;
-               db.remove(repo, key, buf.getWriteBuffer());
-               buf.removeAfterFlush(nsCachedPack.key(repo));
-       }
-}
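
getCachedPacks() caches the whole pack list for a repository as one serialized CachedPackInfoList value, and both write paths call removeAfterFlush() so the cached aggregate is dropped once the underlying write completes, forcing the next reader to rebuild it from the database. A toy sketch of that write-then-invalidate ordering, with plain maps standing in for the database and the cache service (names here are illustrative only):

import java.util.HashMap;
import java.util.Map;

// Write the authoritative copy first, then drop the cached aggregate so the
// next read rebuilds it from the database.
class PackListCache {
    private final Map<String, byte[]> database = new HashMap<>();
    private final Map<String, byte[]> cachedLists = new HashMap<>();

    void putPackInfo(String repo, String packName, byte[] info) {
        database.put(repo + "/" + packName, info);
        cachedLists.remove(repo); // invalidate the cached pack list for this repo
    }
}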
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheService.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheService.java
deleted file mode 100644 (file)
index 31616b5..0000000
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import java.util.Collection;
-import java.util.Map;
-
-import org.eclipse.jgit.storage.dht.AsyncCallback;
-import org.eclipse.jgit.storage.dht.StreamingCallback;
-
-/** Connects to the network-based memory cache server(s). */
-public interface CacheService {
-       /**
-        * Lookup one or more cache keys and return the results.
-        * <p>
-        * Callers are responsible for breaking up very large collections of chunk
-        * keys into smaller units, based on the reader's batch size option.
-        *
-        * @param keys
-        *            keys to locate.
-        * @param callback
-        *            receives the results when ready. If this is an instance of
-        *            {@link StreamingCallback}, implementors should try to deliver
-        *            results early.
-        */
-       void get(Collection<CacheKey> keys,
-                       AsyncCallback<Map<CacheKey, byte[]>> callback);
-
-       /**
-        * Modify one or more cache keys.
-        *
-        * @param changes
-        *            changes to apply to the cache.
-        * @param callback
-        *            receives notification when the changes have been applied.
-        */
-       void modify(Collection<Change> changes, AsyncCallback<Void> callback);
-
-       /** A change to the cache. */
-       public static class Change {
-               /** Operation the change describes. */
-               public static enum Type {
-                       /** Store (or replace) the key. */
-                       PUT,
-
-                       /** Only store the key if not already stored. */
-                       PUT_IF_ABSENT,
-
-                       /** Remove the associated key. */
-                       REMOVE;
-               }
-
-               /**
-                * Initialize a put operation.
-                *
-                * @param key
-                *            the key to store.
-                * @param data
-                *            the value to store.
-                * @return the operation.
-                */
-               public static Change put(CacheKey key, byte[] data) {
-                       return new Change(Type.PUT, key, data);
-               }
-
-               /**
-                * Initialize a put-if-absent operation.
-                *
-                * @param key
-                *            the key to store.
-                * @param data
-                *            the value to store.
-                * @return the operation.
-                */
-               public static Change putIfAbsent(CacheKey key, byte[] data) {
-                       return new Change(Type.PUT_IF_ABSENT, key, data);
-               }
-
-               /**
-                * Initialize a remove operation.
-                *
-                * @param key
-                *            the key to remove.
-                * @return the operation.
-                */
-               public static Change remove(CacheKey key) {
-                       return new Change(Type.REMOVE, key, null);
-               }
-
-               private final Type type;
-
-               private final CacheKey key;
-
-               private final byte[] data;
-
-               /**
-                * Initialize a new change.
-                *
-                * @param type
-                * @param key
-                * @param data
-                */
-               public Change(Type type, CacheKey key, byte[] data) {
-                       this.type = type;
-                       this.key = key;
-                       this.data = data;
-               }
-
-               /** @return type of change that will take place. */
-               public Type getType() {
-                       return type;
-               }
-
-               /** @return the key that will be modified. */
-               public CacheKey getKey() {
-                       return key;
-               }
-
-               /** @return new data value if this is a PUT or PUT_IF_ABSENT change; null for REMOVE. */
-               public byte[] getData() {
-                       return data;
-               }
-
-               @Override
-               public String toString() {
-                       return getType() + " " + getKey();
-               }
-       }
-}
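
A caller batches Change objects and hands them to modify(), passing a Sync when it does not need to wait for completion. A short sketch against these now-deleted types; cacheService, key, staleKey and data are hypothetical placeholders.

import java.util.Arrays;
import java.util.List;

// Sketch only: Change, CacheKey, Sync and CacheService were all deleted here.
List<Change> changes = Arrays.asList(
        Change.put(key, data),              // store or replace
        Change.putIfAbsent(key, data),      // store only if missing
        Change.remove(staleKey));           // drop a stale entry
cacheService.modify(changes, Sync.<Void> none()); // fire and forget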
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/Namespace.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/Namespace.java
deleted file mode 100644 (file)
index 76dc311..0000000
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.cache;
-
-import java.util.Arrays;
-
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.storage.dht.RowKey;
-import org.eclipse.jgit.storage.dht.spi.ChunkTable;
-import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
-import org.eclipse.jgit.storage.dht.spi.RepositoryIndexTable;
-import org.eclipse.jgit.util.RawParseUtils;
-
-/** Defines a space within the cache cluster. */
-public class Namespace {
-       /** Namespace used by the {@link ChunkTable}. */
-       public static final Namespace CHUNK = create("chunk");
-
-       /** Namespace used by the {@link ChunkTable} for meta field only. */
-       public static final Namespace CHUNK_META = create("chunkMeta");
-
-       /** Namespace used by the {@link ObjectIndexTable}. */
-       public static final Namespace OBJECT_INDEX = create("objectIndex");
-
-       /** Namespace used by the {@link RepositoryIndexTable}. */
-       public static final Namespace REPOSITORY_INDEX = create("repositoryIndex");
-
-       /** Namespace used by the cached pack information. */
-       public static final Namespace CACHED_PACK = create("cachedPack");
-
-       /**
-        * Create a namespace from a string name.
-        *
-        * @param name
-        *            the name to wrap.
-        * @return the namespace.
-        */
-       public static Namespace create(String name) {
-               return new Namespace(Constants.encode(name));
-       }
-
-       /**
-        * Create a namespace from a byte array.
-        *
-        * @param name
-        *            the name to wrap.
-        * @return the namespace.
-        */
-       public static Namespace create(byte[] name) {
-               return new Namespace(name);
-       }
-
-       private final byte[] name;
-
-       private volatile int hashCode;
-
-       private Namespace(byte[] name) {
-               this.name = name;
-       }
-
-       /** @return this namespace, encoded in UTF-8. */
-       public byte[] getBytes() {
-               return name;
-       }
-
-       /**
-        * Construct a CacheKey within this namespace.
-        *
-        * @param key
-        *            the key to include.
-        * @return key within this namespace.
-        */
-       public CacheKey key(byte[] key) {
-               return new CacheKey(this, key);
-       }
-
-       /**
-        * Construct a CacheKey within this namespace.
-        *
-        * @param key
-        *            the key to include.
-        * @return key within this namespace.
-        */
-       public CacheKey key(RowKey key) {
-               return new CacheKey(this, key);
-       }
-
-       @Override
-       public int hashCode() {
-               if (hashCode == 0) {
-                       int h = 5381;
-                       for (int ptr = 0; ptr < name.length; ptr++)
-                               h = ((h << 5) + h) + (name[ptr] & 0xff);
-                       if (h == 0)
-                               h = 1;
-                       hashCode = h;
-               }
-               return hashCode;
-       }
-
-       @Override
-       public boolean equals(Object other) {
-               if (other == this)
-                       return true;
-               if (other instanceof Namespace)
-                       return Arrays.equals(name, ((Namespace) other).name);
-               return false;
-       }
-
-       @Override
-       public String toString() {
-               return RawParseUtils.decode(name);
-       }
-}
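
Every table prefixes its rows with its own Namespace, so identical row bytes coming from different tables map to different entries in the shared cache cluster. A small sketch using the constants above (CacheKey was part of the same deleted package):

import org.eclipse.jgit.lib.Constants;

// Sketch only: Namespace and CacheKey were removed with this package.
byte[] row = Constants.encode("example-row");
CacheKey chunkKey = Namespace.CHUNK.key(row);
CacheKey indexKey = Namespace.OBJECT_INDEX.key(row);
// The two keys never collide, even though the row bytes are identical.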
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemChunkTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemChunkTable.java
deleted file mode 100644 (file)
index 277b2b8..0000000
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.memory;
-
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
-import org.eclipse.jgit.storage.dht.AsyncCallback;
-import org.eclipse.jgit.storage.dht.ChunkKey;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.DhtText;
-import org.eclipse.jgit.storage.dht.PackChunk;
-import org.eclipse.jgit.storage.dht.spi.ChunkTable;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;
-
-import com.google.protobuf.InvalidProtocolBufferException;
-
-final class MemChunkTable implements ChunkTable {
-       private final MemTable table = new MemTable();
-
-       private final ColumnMatcher colData = new ColumnMatcher("data");
-
-       private final ColumnMatcher colIndex = new ColumnMatcher("index");
-
-       private final ColumnMatcher colMeta = new ColumnMatcher("meta");
-
-       public void get(Context options, Set<ChunkKey> keys,
-                       AsyncCallback<Collection<PackChunk.Members>> callback) {
-               int cnt = keys.size();
-               List<PackChunk.Members> out = new ArrayList<PackChunk.Members>(cnt);
-
-               for (ChunkKey chunk : keys) {
-                       byte[] row = chunk.asBytes();
-                       MemTable.Cell cell;
-
-                       cell = table.get(row, colData.name());
-                       if (cell == null)
-                               continue;
-
-                       PackChunk.Members m = new PackChunk.Members();
-                       m.setChunkKey(chunk);
-                       m.setChunkData(cell.getValue());
-
-                       cell = table.get(row, colIndex.name());
-                       if (cell != null)
-                               m.setChunkIndex(cell.getValue());
-
-                       cell = table.get(row, colMeta.name());
-                       if (cell != null) {
-                               try {
-                                       m.setMeta(ChunkMeta.parseFrom(cell.getValue()));
-                               } catch (InvalidProtocolBufferException err) {
-                                       callback.onFailure(new DhtException(MessageFormat.format(
-                                                       DhtText.get().invalidChunkMeta, chunk), err));
-                                       return;
-                               }
-                       }
-
-                       out.add(m);
-               }
-
-               callback.onSuccess(out);
-       }
-
-       public void getMeta(Context options, Set<ChunkKey> keys,
-                       AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) {
-               Map<ChunkKey, ChunkMeta> out = new HashMap<ChunkKey, ChunkMeta>();
-
-               for (ChunkKey chunk : keys) {
-                       byte[] row = chunk.asBytes();
-                       MemTable.Cell cell = table.get(row, colMeta.name());
-                       if (cell != null) {
-                               try {
-                                       out.put(chunk, ChunkMeta.parseFrom(cell.getValue()));
-                               } catch (InvalidProtocolBufferException err) {
-                                       callback.onFailure(new DhtException(MessageFormat.format(
-                                                       DhtText.get().invalidChunkMeta, chunk), err));
-                                       return;
-                               }
-                       }
-               }
-
-               callback.onSuccess(out);
-       }
-
-       public void put(PackChunk.Members chunk, WriteBuffer buffer)
-                       throws DhtException {
-               byte[] row = chunk.getChunkKey().asBytes();
-
-               if (chunk.hasChunkData())
-                       table.put(row, colData.name(), chunk.getChunkData());
-
-               if (chunk.hasChunkIndex())
-                       table.put(row, colIndex.name(), chunk.getChunkIndex());
-
-               if (chunk.hasMeta())
-                       table.put(row, colMeta.name(), chunk.getMeta().toByteArray());
-       }
-
-       public void remove(ChunkKey key, WriteBuffer buffer) throws DhtException {
-               table.deleteRow(key.asBytes());
-       }
-}
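
MemChunkTable stores each chunk as one row with three named columns ("data", "index", "meta") in an in-memory MemTable. The row/column model it relies on, reduced to a few lines of plain Java (illustrative only, not the MemTable implementation):

import java.util.HashMap;
import java.util.Map;

// Each row holds a small map of named columns; deleting the row drops all of
// them at once, matching remove() above.
class TinyTable {
    private final Map<String, Map<String, byte[]>> rows = new HashMap<>();

    void put(String row, String col, byte[] value) {
        rows.computeIfAbsent(row, k -> new HashMap<>()).put(col, value);
    }

    byte[] get(String row, String col) {
        Map<String, byte[]> cols = rows.get(row);
        return cols != null ? cols.get(col) : null;
    }

    void deleteRow(String row) {
        rows.remove(row);
    }
}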
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemObjectIndexTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemObjectIndexTable.java
deleted file mode 100644 (file)
index e3bb7fd..0000000
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.memory;
-
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
-import org.eclipse.jgit.storage.dht.AsyncCallback;
-import org.eclipse.jgit.storage.dht.ChunkKey;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.DhtText;
-import org.eclipse.jgit.storage.dht.ObjectIndexKey;
-import org.eclipse.jgit.storage.dht.ObjectInfo;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;
-
-import com.google.protobuf.InvalidProtocolBufferException;
-
-final class MemObjectIndexTable implements ObjectIndexTable {
-       private final MemTable table = new MemTable();
-
-       private final ColumnMatcher colInfo = new ColumnMatcher("info:");
-
-       public void get(Context options, Set<ObjectIndexKey> objects,
-                       AsyncCallback<Map<ObjectIndexKey, Collection<ObjectInfo>>> callback) {
-               Map<ObjectIndexKey, Collection<ObjectInfo>> out = new HashMap<ObjectIndexKey, Collection<ObjectInfo>>();
-
-               for (ObjectIndexKey objId : objects) {
-                       for (MemTable.Cell cell : table.scanFamily(objId.asBytes(), colInfo)) {
-                               Collection<ObjectInfo> chunks = out.get(objId);
-                               ChunkKey chunkKey;
-                               if (chunks == null) {
-                                       chunks = new ArrayList<ObjectInfo>(4);
-                                       out.put(objId, chunks);
-                               }
-
-                               chunkKey = ChunkKey.fromBytes(colInfo.suffix(cell.getName()));
-                               try {
-                                       chunks.add(new ObjectInfo(
-                                                       chunkKey,
-                                                       cell.getTimestamp(),
-                                                       GitStore.ObjectInfo.parseFrom(cell.getValue())));
-                               } catch (InvalidProtocolBufferException badCell) {
-                                       callback.onFailure(new DhtException(MessageFormat.format(
-                                                       DhtText.get().invalidObjectInfo, objId, chunkKey),
-                                                       badCell));
-                                       return;
-                               }
-                       }
-               }
-
-               callback.onSuccess(out);
-       }
-
-       public void add(ObjectIndexKey objId, ObjectInfo info, WriteBuffer buffer)
-                       throws DhtException {
-               ChunkKey chunk = info.getChunkKey();
-               table.put(objId.asBytes(), colInfo.append(chunk.asBytes()),
-                               info.getData().toByteArray());
-       }
-
-       public void remove(ObjectIndexKey objId, ChunkKey chunk, WriteBuffer buffer)
-                       throws DhtException {
-               table.delete(objId.asBytes(), colInfo.append(chunk.asBytes()));
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRefTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRefTable.java
deleted file mode 100644 (file)
index 595e3fd..0000000
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.memory;
-
-import java.text.MessageFormat;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.DhtText;
-import org.eclipse.jgit.storage.dht.RefDataUtil;
-import org.eclipse.jgit.storage.dht.RefKey;
-import org.eclipse.jgit.storage.dht.RepositoryKey;
-import org.eclipse.jgit.storage.dht.spi.Context;
-import org.eclipse.jgit.storage.dht.spi.RefTable;
-import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;
-
-import com.google.protobuf.InvalidProtocolBufferException;
-
-final class MemRefTable implements RefTable {
-       private final MemTable table = new MemTable();
-
-       private final ColumnMatcher colRef = new ColumnMatcher("ref:");
-
-       public Map<RefKey, RefData> getAll(Context options, RepositoryKey repository)
-                       throws DhtException, TimeoutException {
-               Map<RefKey, RefData> out = new HashMap<RefKey, RefData>();
-               for (MemTable.Cell cell : table.scanFamily(repository.asBytes(), colRef)) {
-                       RefKey ref = RefKey.fromBytes(colRef.suffix(cell.getName()));
-                       try {
-                               out.put(ref, RefData.parseFrom(cell.getValue()));
-                       } catch (InvalidProtocolBufferException badCell) {
-                               throw new DhtException(MessageFormat.format(
-                                               DhtText.get().invalidRefData, ref), badCell);
-                       }
-               }
-               return out;
-       }
-
-       public boolean compareAndPut(RefKey refKey, RefData oldData, RefData newData)
-                       throws DhtException, TimeoutException {
-               RepositoryKey repo = refKey.getRepositoryKey();
-               return table.compareAndSet( //
-                               repo.asBytes(), //
-                               colRef.append(refKey.asBytes()), //
-                               oldData != RefDataUtil.NONE ? oldData.toByteArray() : null, //
-                               newData.toByteArray());
-       }
-
-       public boolean compareAndRemove(RefKey refKey, RefData oldData)
-                       throws DhtException, TimeoutException {
-               RepositoryKey repo = refKey.getRepositoryKey();
-               return table.compareAndSet( //
-                               repo.asBytes(), //
-                               colRef.append(refKey.asBytes()), //
-                               oldData != RefDataUtil.NONE ? oldData.toByteArray() : null, //
-                               null);
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryIndexTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryIndexTable.java
deleted file mode 100644 (file)
index 000ff77..0000000
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.memory;
-
-import java.text.MessageFormat;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.DhtText;
-import org.eclipse.jgit.storage.dht.RepositoryKey;
-import org.eclipse.jgit.storage.dht.RepositoryName;
-import org.eclipse.jgit.storage.dht.spi.RepositoryIndexTable;
-import org.eclipse.jgit.storage.dht.spi.memory.MemTable.Cell;
-import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;
-
-final class MemRepositoryIndexTable implements RepositoryIndexTable {
-       private final MemTable table = new MemTable();
-
-       private final ColumnMatcher colId = new ColumnMatcher("id");
-
-       public RepositoryKey get(RepositoryName name) throws DhtException,
-                       TimeoutException {
-               Cell cell = table.get(name.asBytes(), colId.name());
-               if (cell == null)
-                       return null;
-               return RepositoryKey.fromBytes(cell.getValue());
-       }
-
-       public void putUnique(RepositoryName name, RepositoryKey key)
-                       throws DhtException, TimeoutException {
-               boolean ok = table.compareAndSet( //
-                               name.asBytes(), //
-                               colId.name(), //
-                               null, //
-                               key.asBytes());
-               if (!ok)
-                       throw new DhtException(MessageFormat.format(
-                                       DhtText.get().repositoryAlreadyExists, name.asString()));
-       }
-
-       public void remove(RepositoryName name, RepositoryKey key)
-                       throws DhtException, TimeoutException {
-               boolean ok = table.compareAndSet(
-                               name.asBytes(),
-                               colId.name(),
-                               key.asBytes(),
-                               null);
-               if (!ok)
-                       throw new DhtException(MessageFormat.format(
-                                       DhtText.get().repositoryAlreadyExists, name.asString()));
-       }
-}
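The putUnique() method above gets its uniqueness guarantee from MemTable.compareAndSet with a null expected value: the write succeeds only if the cell does not exist yet. A minimal sketch of that pattern in isolation, using only the API shown in these deleted files (row, column and value strings are illustrative, not part of the real schema):

        MemTable table = new MemTable();
        byte[] row = Constants.encode("projects/example.git"); // hypothetical repository name
        byte[] col = Constants.encode("id");
        byte[] key = Constants.encode("r1");

        boolean created = table.compareAndSet(row, col, null, key); // true: cell did not exist
        boolean again = table.compareAndSet(row, col, null, key);   // false: name already taken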
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryTable.java
deleted file mode 100644 (file)
index d393934..0000000
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.memory;
-
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
-import org.eclipse.jgit.storage.dht.CachedPackKey;
-import org.eclipse.jgit.storage.dht.ChunkInfo;
-import org.eclipse.jgit.storage.dht.ChunkKey;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.DhtText;
-import org.eclipse.jgit.storage.dht.RepositoryKey;
-import org.eclipse.jgit.storage.dht.spi.RepositoryTable;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;
-
-import com.google.protobuf.InvalidProtocolBufferException;
-
-final class MemRepositoryTable implements RepositoryTable {
-       private final AtomicInteger nextId = new AtomicInteger();
-
-       private final MemTable table = new MemTable();
-
-       private final ColumnMatcher colChunkInfo = new ColumnMatcher("chunk-info:");
-
-       private final ColumnMatcher colCachedPack = new ColumnMatcher("cached-pack:");
-
-       public RepositoryKey nextKey() throws DhtException {
-               return RepositoryKey.create(nextId.incrementAndGet());
-       }
-
-       public void put(RepositoryKey repo, ChunkInfo info, WriteBuffer buffer)
-                       throws DhtException {
-               table.put(repo.asBytes(),
-                               colChunkInfo.append(info.getChunkKey().asBytes()),
-                               info.getData().toByteArray());
-       }
-
-       public void remove(RepositoryKey repo, ChunkKey chunk, WriteBuffer buffer)
-                       throws DhtException {
-               table.delete(repo.asBytes(), colChunkInfo.append(chunk.asBytes()));
-       }
-
-       public Collection<CachedPackInfo> getCachedPacks(RepositoryKey repo)
-                       throws DhtException, TimeoutException {
-               List<CachedPackInfo> out = new ArrayList<CachedPackInfo>(4);
-               for (MemTable.Cell cell : table.scanFamily(repo.asBytes(), colCachedPack)) {
-                       try {
-                               out.add(CachedPackInfo.parseFrom(cell.getValue()));
-                       } catch (InvalidProtocolBufferException e) {
-                               throw new DhtException(MessageFormat.format(
-                                               DhtText.get().invalidCachedPackInfo, repo,
-                                               CachedPackKey.fromBytes(cell.getName())), e);
-                       }
-               }
-               return out;
-       }
-
-       public void put(RepositoryKey repo, CachedPackInfo info, WriteBuffer buffer)
-                       throws DhtException {
-               CachedPackKey key = CachedPackKey.fromInfo(info);
-               table.put(repo.asBytes(),
-                               colCachedPack.append(key.asBytes()),
-                               info.toByteArray());
-       }
-
-       public void remove(RepositoryKey repo, CachedPackKey key, WriteBuffer buffer)
-                       throws DhtException {
-               table.delete(repo.asBytes(), colCachedPack.append(key.asBytes()));
-       }
-}
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemTable.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemTable.java
deleted file mode 100644 (file)
index ec28b34..0000000
+++ /dev/null
@@ -1,299 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.memory;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;
-import org.eclipse.jgit.util.RawParseUtils;
-import org.eclipse.jgit.util.SystemReader;
-
-/**
- * Tiny in-memory NoSQL style table.
- * <p>
- * This table is thread-safe, but not very efficient. It uses a single lock to
- * protect its internal data structure from concurrent access, and stores all
- * data as byte arrays. To reduce memory usage, the arrays passed by the caller
- * during put or compareAndSet are used as-is in the internal data structure,
- * and may be returned later. Callers should not modify byte arrays once they
- * are stored in the table, or when obtained from the table.
- */
-public class MemTable {
-       private final Map<Key, Map<Key, Cell>> map;
-
-       private final Object lock;
-
-       /** Initialize an empty table. */
-       public MemTable() {
-               map = new HashMap<Key, Map<Key, Cell>>();
-               lock = new Object();
-       }
-
-       /**
-        * Put a value into a cell.
-        *
-        * @param row
-        * @param col
-        * @param val
-        */
-       public void put(byte[] row, byte[] col, byte[] val) {
-               synchronized (lock) {
-                       Key rowKey = new Key(row);
-                       Map<Key, Cell> r = map.get(rowKey);
-                       if (r == null) {
-                               r = new HashMap<Key, Cell>(4);
-                               map.put(rowKey, r);
-                       }
-                       r.put(new Key(col), new Cell(row, col, val));
-               }
-       }
-
-       /**
-        * Delete an entire row.
-        *
-        * @param row
-        */
-       public void deleteRow(byte[] row) {
-               synchronized (lock) {
-                       map.remove(new Key(row));
-               }
-       }
-
-       /**
-        * Delete a cell.
-        *
-        * @param row
-        * @param col
-        */
-       public void delete(byte[] row, byte[] col) {
-               synchronized (lock) {
-                       Key rowKey = new Key(row);
-                       Map<Key, Cell> r = map.get(rowKey);
-                       if (r == null)
-                               return;
-
-                       r.remove(new Key(col));
-                       if (r.isEmpty())
-                               map.remove(rowKey);
-               }
-       }
-
-       /**
-        * Compare and put or delete a cell.
-        * <p>
-        * This method performs an atomic compare-and-swap operation on the named
-        * cell. If the cell does not yet exist, it will be created. If the cell
-        * exists, it will be replaced, and if {@code newVal} is null, the cell will
-        * be deleted.
-        *
-        * @param row
-        * @param col
-        * @param oldVal
-        *            if null, the cell must not exist; otherwise the cell's current
-        *            value must exactly equal this value for the update to occur.
-        * @param newVal
-        *            if null, the cell will be removed, otherwise the cell will be
-        *            created or updated to this value.
-        * @return true if successful, false if {@code oldVal} does not match.
-        */
-       public boolean compareAndSet(byte[] row, byte[] col, byte[] oldVal,
-                       byte[] newVal) {
-               synchronized (lock) {
-                       Key rowKey = new Key(row);
-                       Key colKey = new Key(col);
-
-                       Map<Key, Cell> r = map.get(rowKey);
-                       if (r == null) {
-                               r = new HashMap<Key, Cell>(4);
-                               map.put(rowKey, r);
-                       }
-
-                       Cell oldCell = r.get(colKey);
-                       if (!same(oldCell, oldVal)) {
-                               if (r.isEmpty())
-                                       map.remove(rowKey);
-                               return false;
-                       }
-
-                       if (newVal != null) {
-                               r.put(colKey, new Cell(row, col, newVal));
-                               return true;
-                       }
-
-                       r.remove(colKey);
-                       if (r.isEmpty())
-                               map.remove(rowKey);
-                       return true;
-               }
-       }
-
-       private static boolean same(Cell oldCell, byte[] expVal) {
-               if (oldCell == null)
-                       return expVal == null;
-
-               if (expVal == null)
-                       return false;
-
-               return Arrays.equals(oldCell.value, expVal);
-       }
-
-       /**
-        * Get a single cell, or null.
-        *
-        * @param row
-        * @param col
-        * @return the cell, or null.
-        */
-       public Cell get(byte[] row, byte[] col) {
-               synchronized (lock) {
-                       Map<Key, Cell> r = map.get(new Key(row));
-                       return r != null ? r.get(new Key(col)) : null;
-               }
-       }
-
-       /**
-        * Scan all cells in a row.
-        *
-        * @param row
-        *            the row to scan.
-        * @param family
-        *            if not null, the family to filter and return.
-        * @return iterator for the cells. Cells may appear in any order, including
-        *         random. Never null.
-        */
-       public Iterable<Cell> scanFamily(byte[] row, ColumnMatcher family) {
-               synchronized (lock) {
-                       Map<Key, Cell> r = map.get(new Key(row));
-                       if (r == null)
-                               return Collections.emptyList();
-
-                       if (family == null)
-                               return new ArrayList<Cell>(r.values());
-
-                       ArrayList<Cell> out = new ArrayList<Cell>(4);
-                       for (Cell cell : r.values()) {
-                               if (family.sameFamily(cell.getName()))
-                                       out.add(cell);
-                       }
-                       return out;
-               }
-       }
-
-       private static class Key {
-               final byte[] key;
-
-               Key(byte[] key) {
-                       this.key = key;
-               }
-
-               @Override
-               public int hashCode() {
-                       int hash = 5381;
-                       for (int ptr = 0; ptr < key.length; ptr++)
-                               hash = ((hash << 5) + hash) + (key[ptr] & 0xff);
-                       return hash;
-               }
-
-               @Override
-               public boolean equals(Object other) {
-                       if (this == other)
-                               return true;
-                       if (other instanceof Key)
-                               return Arrays.equals(key, ((Key) other).key);
-                       return false;
-               }
-
-               @Override
-               public String toString() {
-                       return RawParseUtils.decode(key);
-               }
-       }
-
-       /** A cell value in a column. */
-       public static class Cell {
-               final byte[] row;
-
-               final byte[] name;
-
-               final byte[] value;
-
-               final long timestamp;
-
-               Cell(byte[] row, byte[] name, byte[] value) {
-                       this.row = row;
-                       this.name = name;
-                       this.value = value;
-                       this.timestamp = SystemReader.getInstance().getCurrentTime();
-               }
-
-               /** @return key of the row holding the cell. */
-               public byte[] getRow() {
-                       return row;
-               }
-
-               /** @return name of the cell's column. */
-               public byte[] getName() {
-                       return name;
-               }
-
-               /** @return the cell's value. */
-               public byte[] getValue() {
-                       return value;
-               }
-
-               /** @return system clock time of last modification. */
-               public long getTimestamp() {
-                       return timestamp;
-               }
-
-               @Override
-               public String toString() {
-                       return RawParseUtils.decode(name);
-               }
-       }
-}
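Since the class comment above describes the table only in general terms, here is a hedged usage sketch against the API shown in this file. The row and column strings are made up; ColumnMatcher, Constants.encode and RawParseUtils are the JGit utilities already imported by these deleted sources:

        MemTable table = new MemTable();
        byte[] row = Constants.encode("row-1");
        ColumnMatcher refs = new ColumnMatcher("ref:");
        byte[] col = refs.append(Constants.encode("refs/heads/master"));

        // Store and read back a single cell.
        table.put(row, col, Constants.encode("v1"));
        MemTable.Cell cell = table.get(row, col);

        // Scan only the "ref:" column family of the row.
        for (MemTable.Cell c : table.scanFamily(row, refs))
                System.out.println(c + " = " + RawParseUtils.decode(c.getValue()));

        // Atomic update: succeeds only while the stored value still equals cell.getValue().
        boolean swapped = table.compareAndSet(row, col, cell.getValue(), Constants.encode("v2"));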
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemoryDatabase.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemoryDatabase.java
deleted file mode 100644 (file)
index 065055b..0000000
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.memory;
-
-import java.io.IOException;
-
-import org.eclipse.jgit.lib.Repository;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.DhtRepository;
-import org.eclipse.jgit.storage.dht.DhtRepositoryBuilder;
-import org.eclipse.jgit.storage.dht.spi.ChunkTable;
-import org.eclipse.jgit.storage.dht.spi.Database;
-import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
-import org.eclipse.jgit.storage.dht.spi.RefTable;
-import org.eclipse.jgit.storage.dht.spi.RepositoryIndexTable;
-import org.eclipse.jgit.storage.dht.spi.RepositoryTable;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-
-/**
- * Stores Git repositories in non-persistent JVM heap memory.
- * <p>
- * This database type is only suitable for unit testing and other toy
- * applications. All chunk data is held within the JVM heap as byte arrays,
- * which is not the most efficient representation available.
- */
-public class MemoryDatabase implements Database {
-       private final RepositoryIndexTable repositoryIndex;
-
-       private final RepositoryTable repository;
-
-       private final RefTable ref;
-
-       private final ObjectIndexTable objectIndex;
-
-       private final ChunkTable chunk;
-
-       /** Initialize an empty database. */
-       public MemoryDatabase() {
-               repositoryIndex = new MemRepositoryIndexTable();
-               repository = new MemRepositoryTable();
-               ref = new MemRefTable();
-               objectIndex = new MemObjectIndexTable();
-               chunk = new MemChunkTable();
-       }
-
-       /**
-        * Open a repository by name on this database.
-        *
-        * @param name
-        *            the name of the repository.
-        * @return the repository instance. If the repository does not yet exist,
-        *         the caller can use {@link Repository#create(boolean)} to create it.
-        * @throws IOException
-        */
-       public DhtRepository open(String name) throws IOException {
-               return (DhtRepository) new DhtRepositoryBuilder<DhtRepositoryBuilder, DhtRepository, MemoryDatabase>()
-                               .setDatabase(this) //
-                               .setRepositoryName(name) //
-                               .setMustExist(false) //
-                               .build();
-       }
-
-       public RepositoryIndexTable repositoryIndex() {
-               return repositoryIndex;
-       }
-
-       public RepositoryTable repository() {
-               return repository;
-       }
-
-       public RefTable ref() {
-               return ref;
-       }
-
-       public ObjectIndexTable objectIndex() {
-               return objectIndex;
-       }
-
-       public ChunkTable chunk() {
-               return chunk;
-       }
-
-       public WriteBuffer newWriteBuffer() {
-               return new WriteBuffer() {
-                       public void flush() throws DhtException {
-                               // Do nothing.
-                       }
-
-                       public void abort() throws DhtException {
-                               // Do nothing.
-                       }
-               };
-       }
-}
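As the class comment says, this backend only targeted unit tests and toy applications. A typical test presumably set it up along these lines; the repository name is arbitrary, and create(boolean) is the Repository method referenced in the open() Javadoc:

        MemoryDatabase db = new MemoryDatabase();
        DhtRepository repo = db.open("sandbox/test.git"); // throws IOException
        repo.create(true);                                // create the bare repository on first use
        // ... exercise the repository; everything lives on the JVM heap and vanishes on exit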
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/util/AbstractWriteBuffer.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/util/AbstractWriteBuffer.java
deleted file mode 100644 (file)
index ad55206..0000000
+++ /dev/null
@@ -1,408 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.util;
-
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.Callable;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
-import java.util.concurrent.Semaphore;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-
-import org.eclipse.jgit.storage.dht.AsyncCallback;
-import org.eclipse.jgit.storage.dht.DhtException;
-import org.eclipse.jgit.storage.dht.DhtTimeoutException;
-import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
-
-/**
- * Abstract buffer service built on top of an ExecutorService.
- * <p>
- * Writes are combined into batches to reduce RPC overhead when there
- * are many small writes occurring. Batches are sent asynchronously when they
- * reach 512 KiB worth of key/column/value data. The calling application is
- * throttled when the outstanding writes are equal to the buffer size, waiting
- * until the cluster has replied with success or failure.
- * <p>
- * This buffer implementation is not thread-safe; it assumes only one thread
- * will use the buffer instance. (It does, however, correctly synchronize with the
- * background tasks it spawns.)
- */
-public abstract class AbstractWriteBuffer implements WriteBuffer {
-       private final static int AUTO_FLUSH_SIZE = 512 * 1024;
-
-       private final ExecutorService executor;
-
-       private final int bufferSize;
-
-       private final List<Future<?>> running;
-
-       private final Object runningLock;
-
-       private final Semaphore spaceAvailable;
-
-       private int queuedCount;
-
-       private boolean flushing;
-
-       private Callable<?> finalTask;
-
-       /**
-        * Initialize a buffer with a backing executor service.
-        *
-        * @param executor
-        *            service to run mutation tasks on.
-        * @param bufferSize
-        *            maximum number of bytes to have pending at once.
-        */
-       protected AbstractWriteBuffer(ExecutorService executor, int bufferSize) {
-               this.executor = executor;
-               this.bufferSize = bufferSize;
-               this.running = new LinkedList<Future<?>>();
-               this.runningLock = new Object();
-               this.spaceAvailable = new Semaphore(bufferSize);
-       }
-
-       /**
-        * Notify the buffer that data is being added to it.
-        * <p>
-        * This method waits until the buffer has sufficient space for the requested
-        * data, thereby throttling the calling application code. It returns true if
-        * its recommendation is for the buffer subclass to copy the data onto its
-        * internal buffer and defer starting until later. It returns false if the
-        * recommendation is to start the operation immediately, due to the large
-        * size of the request.
-        * <p>
-        * Buffer implementors should keep in mind that the return value is offered
-        * as advice only; they may choose to implement different behavior.
-        *
-        * @param size
-        *            an estimated number of bytes that the buffer will be
-        *            responsible for until the operation completes. This should
-        *            include the row keys and column headers, in addition to the
-        *            data values.
-        * @return true to enqueue the operation; false to start it right away.
-        * @throws DhtException
-        *             the current thread was interrupted before space became
-        *             available in the buffer.
-        */
-       protected boolean add(int size) throws DhtException {
-               acquireSpace(size);
-               return size < AUTO_FLUSH_SIZE;
-       }
-
-       /**
-        * Notify the buffer that bytes were enqueued.
-        *
-        * @param size
-        *            the estimated number of bytes that were enqueued.
-        * @throws DhtException
-        *             a previously started operation completed and failed.
-        */
-       protected void queued(int size) throws DhtException {
-               queuedCount += size;
-
-               if (AUTO_FLUSH_SIZE < queuedCount) {
-                       startQueuedOperations(queuedCount);
-                       queuedCount = 0;
-               }
-       }
-
-       /**
-        * Start all queued operations.
-        * <p>
-        * This method is invoked by {@link #queued(int)} or by {@link #flush()}
-        * when there is a non-zero number of bytes already enqueued as a result of
-        * prior {@link #add(int)} and {@link #queued(int)} calls.
-        * <p>
-        * Implementors should use {@link #start(Callable, int)} to begin their
-        * mutation tasks in the background.
-        *
-        * @param bufferedByteCount
-        *            number of bytes that were already enqueued. This count should
-        *            be passed to {@link #start(Callable, int)}.
-        * @throws DhtException
-        *             a previously started operation completed and failed.
-        */
-       protected abstract void startQueuedOperations(int bufferedByteCount)
-                       throws DhtException;
-
-       public void flush() throws DhtException {
-               try {
-                       flushing = true;
-
-                       if (0 < queuedCount) {
-                               startQueuedOperations(queuedCount);
-                               queuedCount = 0;
-                       }
-
-                       // If a task was created above, try to use the current thread
-                       // instead of burning an executor thread for the final work.
-
-                       if (finalTask != null) {
-                               try {
-                                       waitFor(finalTask);
-                               } finally {
-                                       finalTask = null;
-                               }
-                       }
-
-                       synchronized (runningLock) {
-                               checkRunningTasks(true);
-                       }
-               } finally {
-                       flushing = false;
-               }
-       }
-
-       public void abort() throws DhtException {
-               synchronized (runningLock) {
-                       checkRunningTasks(true);
-               }
-       }
-
-       private void acquireSpace(int sz) throws DhtException {
-               try {
-                       final int permits = permitsForSize(sz);
-                       if (spaceAvailable.tryAcquire(permits))
-                               return;
-
-                       if (0 < queuedCount) {
-                               startQueuedOperations(queuedCount);
-                               queuedCount = 0;
-                       }
-
-                       spaceAvailable.acquire(permits);
-               } catch (InterruptedException e) {
-                       throw new DhtTimeoutException(e);
-               }
-       }
-
-       private int permitsForSize(int size) {
-               // Do not acquire more than the configured buffer size,
-               // even if the actual write size is larger. Trying to
-               // acquire more would never succeed.
-
-               if (size <= 0)
-                       size = 1;
-               return Math.min(size, bufferSize);
-       }
-
-       /**
-        * Start a mutation task.
-        *
-        * @param <T>
-        *            any type the task might return.
-        * @param task
-        *            the mutation task. The result of the task is discarded, so
-        *            callers should perform result validation within the task.
-        * @param size
-        *            number of bytes that are buffered within the task.
-        * @throws DhtException
-        *             a prior task has completed, and failed.
-        */
-       protected <T> void start(final Callable<T> task, int size)
-                       throws DhtException {
-               final int permits = permitsForSize(size);
-               final Callable<T> op = new Callable<T>() {
-                       public T call() throws Exception {
-                               try {
-                                       return task.call();
-                               } finally {
-                                       spaceAvailable.release(permits);
-                               }
-                       }
-               };
-
-               if (flushing && finalTask == null) {
-                       // If invoked by flush(), don't start on an executor.
-                       //
-                       finalTask = op;
-                       return;
-               }
-
-               synchronized (runningLock) {
-                       if (!flushing)
-                               checkRunningTasks(false);
-                       running.add(executor.submit(op));
-               }
-       }
-
-       /**
-        * Wrap a callback to update the buffer.
-        * <p>
-        * Flushing the buffer will wait for the returned callback to complete.
-        *
-        * @param <T>
-        *            any type the task might return.
-        * @param callback
-        *            callback invoked when the task has finished.
-        * @param size
-        *            number of bytes that are buffered within the task.
-        * @return wrapped callback that will update the buffer state when the
-        *         callback is invoked.
-        * @throws DhtException
-        *             a prior task has completed, and failed.
-        */
-       protected <T> AsyncCallback<T> wrap(final AsyncCallback<T> callback,
-                       int size) throws DhtException {
-               int permits = permitsForSize(size);
-               WrappedCallback<T> op = new WrappedCallback<T>(callback, permits);
-               synchronized (runningLock) {
-                       checkRunningTasks(false);
-                       running.add(op);
-               }
-               return op;
-       }
-
-       private void checkRunningTasks(boolean wait) throws DhtException {
-               if (running.isEmpty())
-                       return;
-
-               Iterator<Future<?>> itr = running.iterator();
-               while (itr.hasNext()) {
-                       Future<?> task = itr.next();
-                       if (task.isDone() || wait) {
-                               itr.remove();
-                               waitFor(task);
-                       }
-               }
-       }
-
-       private static void waitFor(Callable<?> task) throws DhtException {
-               try {
-                       task.call();
-               } catch (DhtException err) {
-                       throw err;
-               } catch (Exception err) {
-                       throw new DhtException(err);
-               }
-       }
-
-       private static void waitFor(Future<?> task) throws DhtException {
-               try {
-                       task.get();
-
-               } catch (InterruptedException e) {
-                       throw new DhtTimeoutException(e);
-
-               } catch (ExecutionException err) {
-
-                       Throwable t = err;
-                       while (t != null) {
-                               if (t instanceof DhtException)
-                                       throw (DhtException) t;
-                               t = t.getCause();
-                       }
-
-                       throw new DhtException(err);
-               }
-       }
-
-       private final class WrappedCallback<T> implements AsyncCallback<T>,
-                       Future<T> {
-               private final AsyncCallback<T> callback;
-
-               private final int permits;
-
-               private final CountDownLatch sync;
-
-               private volatile boolean done;
-
-               WrappedCallback(AsyncCallback<T> callback, int permits) {
-                       this.callback = callback;
-                       this.permits = permits;
-                       this.sync = new CountDownLatch(1);
-               }
-
-               public void onSuccess(T result) {
-                       try {
-                               callback.onSuccess(result);
-                       } finally {
-                               done();
-                       }
-               }
-
-               public void onFailure(DhtException error) {
-                       try {
-                               callback.onFailure(error);
-                       } finally {
-                               done();
-                       }
-               }
-
-               private void done() {
-                       spaceAvailable.release(permits);
-                       done = true;
-                       sync.countDown();
-               }
-
-               public boolean cancel(boolean mayInterrupt) {
-                       return false;
-               }
-
-               public T get() throws InterruptedException, ExecutionException {
-                       sync.await();
-                       return null;
-               }
-
-               public T get(long time, TimeUnit unit) throws InterruptedException,
-                               ExecutionException, TimeoutException {
-                       sync.await(time, unit);
-                       return null;
-               }
-
-               public boolean isCancelled() {
-                       return false;
-               }
-
-               public boolean isDone() {
-                       return done;
-               }
-       }
-}
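The contract spelled out in the Javadoc above (add() to reserve buffer space, queued() to account for buffered bytes, startQueuedOperations() to flush a batch via start()) is easiest to see in a skeletal subclass. The sketch below is not from the original sources; the batch representation and the task body are invented for illustration and assume the usual java.util / java.util.concurrent imports plus DhtException and AbstractWriteBuffer:

        /** Illustrative only: buffers encoded mutations and writes each batch in one task. */
        class SketchBuffer extends AbstractWriteBuffer {
                private final List<byte[]> batch = new ArrayList<byte[]>();

                SketchBuffer(ExecutorService executor, int bufferSize) {
                        super(executor, bufferSize);
                }

                /** Buffer one encoded mutation, flushing a batch when enough has queued up. */
                void write(byte[] mutation) throws DhtException {
                        if (add(mutation.length)) {
                                batch.add(mutation);     // small write: keep it buffered
                                queued(mutation.length); // may call startQueuedOperations()
                        } else {
                                // large write: start it immediately as its own task
                                start(taskFor(Collections.singletonList(mutation)), mutation.length);
                        }
                }

                @Override
                protected void startQueuedOperations(int bufferedByteCount) throws DhtException {
                        final List<byte[]> toSend = new ArrayList<byte[]>(batch);
                        batch.clear();
                        start(taskFor(toSend), bufferedByteCount);
                }

                private Callable<Void> taskFor(final List<byte[]> mutations) {
                        return new Callable<Void>() {
                                public Void call() {
                                        // a real implementation would send the mutations to its store here
                                        return null;
                                }
                        };
                }
        }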
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/util/ColumnMatcher.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/util/ColumnMatcher.java
deleted file mode 100644 (file)
index 17ef5dd..0000000
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.util;
-
-import java.util.Arrays;
-
-import org.eclipse.jgit.lib.Constants;
-import org.eclipse.jgit.storage.dht.RowKey;
-import org.eclipse.jgit.util.RawParseUtils;
-
-/** Utility to deal with columns named as byte arrays. */
-public class ColumnMatcher {
-       private final byte[] name;
-
-       /**
-        * Create a new column matcher for the given named string.
-        *
-        * @param nameStr
-        *            the column name, as a string.
-        */
-       public ColumnMatcher(String nameStr) {
-               name = Constants.encode(nameStr);
-       }
-
-       /** @return the column name, encoded in UTF-8. */
-       public byte[] name() {
-               return name;
-       }
-
-       /**
-        * Check if the column is an exact match.
-        *
-        * @param col
-        *            the column as read from the database.
-        * @return true only if {@code col} is exactly the same as this column.
-        */
-       public boolean sameName(byte[] col) {
-               return Arrays.equals(name, col);
-       }
-
-       /**
-        * Check if the column is a member of this family.
-        * <p>
-        * This method checks that {@link #name()} (the string supplied to the
-        * constructor) is a prefix of {@code col}.
-        *
-        * @param col
-        *            the column as read from the database.
-        * @return true if {@code col} is a member of this column family.
-        */
-       public boolean sameFamily(byte[] col) {
-               if (name.length < col.length) {
-                       for (int i = 0; i < name.length; i++) {
-                               if (name[i] != col[i]) {
-                                       return false;
-                               }
-                       }
-                       return true;
-               }
-               return false;
-       }
-
-       /**
-        * Extract the portion of the column name that comes after the family.
-        *
-        * @param col
-        *            the column as read from the database.
-        * @return everything after the family name.
-        */
-       public byte[] suffix(byte[] col) {
-               byte[] r = new byte[col.length - name.length];
-               System.arraycopy(col, name.length, r, 0, r.length);
-               return r;
-       }
-
-       /**
-        * Append a suffix onto this column name.
-        *
-        * @param suffix
-        *            name component to appear after the family name.
-        * @return the joined name, ready for storage in the database.
-        */
-       public byte[] append(RowKey suffix) {
-               return append(suffix.asBytes());
-       }
-
-       /**
-        * Append a suffix onto this column name.
-        *
-        * @param suffix
-        *            name component to appear after the family name.
-        * @return the joined name, ready for storage in the database.
-        */
-       public byte[] append(byte[] suffix) {
-               byte[] r = new byte[name.length + suffix.length];
-               System.arraycopy(name, 0, r, 0, name.length);
-               System.arraycopy(suffix, 0, r, name.length, suffix.length);
-               return r;
-       }
-
-       @Override
-       public String toString() {
-               return RawParseUtils.decode(name);
-       }
-}
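A short illustration of the prefix convention the matcher implements (prefix = column family, suffix = per-item key); the literal strings are examples only:

        ColumnMatcher chunkInfo = new ColumnMatcher("chunk-info:");

        byte[] suffix = Constants.encode("chunk-abc");  // illustrative key
        byte[] col = chunkInfo.append(suffix);          // "chunk-info:chunk-abc"

        chunkInfo.sameFamily(col);                      // true: starts with the prefix
        chunkInfo.sameName(col);                        // false: not an exact match
        byte[] back = chunkInfo.suffix(col);            // recovers "chunk-abc"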
diff --git a/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/util/ExecutorTools.java b/org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/util/ExecutorTools.java
deleted file mode 100644 (file)
index ed0b918..0000000
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (C) 2011, Google Inc.
- * and other copyright owners as documented in the project's IP log.
- *
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Distribution License v1.0 which
- * accompanies this distribution, is reproduced below, and is
- * available at http://www.eclipse.org/org/documents/edl-v10.php
- *
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or
- * without modification, are permitted provided that the following
- * conditions are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- *
- * - Redistributions in binary form must reproduce the above
- *   copyright notice, this list of conditions and the following
- *   disclaimer in the documentation and/or other materials provided
- *   with the distribution.
- *
- * - Neither the name of the Eclipse Foundation, Inc. nor the
- *   names of its contributors may be used to endorse or promote
- *   products derived from this software without specific prior
- *   written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
- * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
- * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-package org.eclipse.jgit.storage.dht.spi.util;
-
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.atomic.AtomicInteger;
-
-/** Optional executor support for implementors to build on top of. */
-public class ExecutorTools {
-       /**
-        * Get the default executor service for this JVM.
-        * <p>
-        * The default executor service is created the first time it is requested,
- * and is shared with all future requests. It uses a fixed-size thread pool
- * that is allocated 2 threads per CPU. Each thread is configured to be a
- * daemon thread, permitting the JVM to do a clean shutdown when the
- * application threads stop, even if work is still pending in the service.
-        *
-        * @return the default executor service.
-        */
-       public static ExecutorService getDefaultExecutorService() {
-               return DefaultExecutors.service;
-       }
-
-       private static class DefaultExecutors {
-               static final ExecutorService service;
-               static {
-                       int ncpu = Runtime.getRuntime().availableProcessors();
-                       ThreadFactory threadFactory = new ThreadFactory() {
-                               private final AtomicInteger cnt = new AtomicInteger();
-
-                               private final ThreadGroup group = new ThreadGroup("JGit-DHT");
-
-                               public Thread newThread(Runnable body) {
-                                       int id = cnt.incrementAndGet();
-                                       String name = "JGit-DHT-Worker-" + id;
-                                       ClassLoader myCL = getClass().getClassLoader();
-
-                                       Thread thread = new Thread(group, body, name);
-                                       thread.setDaemon(true);
-                                       thread.setContextClassLoader(myCL);
-                                       return thread;
-                               }
-                       };
-                       service = Executors.newFixedThreadPool(2 * ncpu, threadFactory);
-               }
-       }
-
-       private ExecutorTools() {
-               // Static helper class, do not make instances.
-       }
-}
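A hedged example of obtaining the shared pool described above; the submitted task is a placeholder:

        // Lazily created, shared pool: 2 daemon threads per available CPU.
        ExecutorService pool = ExecutorTools.getDefaultExecutorService();
        pool.submit(new Runnable() {
                public void run() {
                        // background mutation or prefetch work would run here
                }
        });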
diff --git a/pom.xml b/pom.xml
index d7eaf954506a31aa61a0267d55f73a7e9487e5a1..67184f02dd1486740c0c22aa3308faa166a20338 100644 (file)
--- a/pom.xml
+++ b/pom.xml
 
   <modules>
     <module>org.eclipse.jgit</module>
-       <module>org.eclipse.jgit.generated.storage.dht.proto</module>
-    <module>org.eclipse.jgit.storage.dht</module>
     <module>org.eclipse.jgit.ant</module>
     <module>org.eclipse.jgit.ui</module>
     <module>org.eclipse.jgit.http.server</module>
     <module>org.eclipse.jgit.test</module>
     <module>org.eclipse.jgit.ant.test</module>
     <module>org.eclipse.jgit.http.test</module>
-    <module>org.eclipse.jgit.storage.dht.test</module>
     <module>org.eclipse.jgit.pgm.test</module>
   </modules>