Browse Source

DHT: Replace TinyProtobuf with Google Protocol Buffers

The standard Google distribution of Protocol Buffers in Java is better
maintained than TinyProtobuf, and should be faster for most uses.  It
does use slightly more memory due to many of our key types being
stored as strings in protobuf messages, but this is probably worth the
small hit to memory in exchange for better-maintained code that is
easier to reuse in other applications.

Exposing all of our data members to the underlying implementation
makes it easier to develop reporting and data mining tools, or to
expand out a nested structure like RefData into a flat format in a SQL
database table.

Since the C++ `protoc` tool is necessary to convert the protobuf
script into Java code, the generated files are committed as part of
the source repository to make it easier for developers who do not have
this tool installed to still build the overall JGit package and make
use of it.  Reviewers will need to be careful to ensure that any edits
made to a *.proto file come in a commit that also updates the
generated code to match.

CQ: 5135
Change-Id: I53e11e82c186b9cf0d7b368e0276519e6a0b2893
Signed-off-by: Shawn O. Pearce <spearce@spearce.org>
Signed-off-by: Chris Aniszczyk <caniszczyk@gmail.com>
tags/v1.0.0.201106011211-rc3
Shawn O. Pearce 13 years ago
parent
commit
6ec6169215
58 changed files with 12217 additions and 2406 deletions
  1. 6
    0
      .eclipse_iplog
  2. 7
    0
      org.eclipse.jgit.generated.storage.dht.proto/.classpath
  3. 2
    0
      org.eclipse.jgit.generated.storage.dht.proto/.gitignore
  4. 28
    0
      org.eclipse.jgit.generated.storage.dht.proto/.project
  5. 3
    0
      org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.core.resources.prefs
  6. 3
    0
      org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.core.runtime.prefs
  7. 349
    0
      org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.jdt.core.prefs
  8. 62
    0
      org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.jdt.ui.prefs
  9. 11
    0
      org.eclipse.jgit.generated.storage.dht.proto/META-INF/MANIFEST.MF
  10. 5
    0
      org.eclipse.jgit.generated.storage.dht.proto/build.properties
  11. 11
    0
      org.eclipse.jgit.generated.storage.dht.proto/generate.sh
  12. 2
    0
      org.eclipse.jgit.generated.storage.dht.proto/plugin.properties
  13. 120
    0
      org.eclipse.jgit.generated.storage.dht.proto/pom.xml
  14. 86
    0
      org.eclipse.jgit.generated.storage.dht.proto/resources/org/eclipse/jgit/storage/dht/git_cache.proto
  15. 11
    2
      org.eclipse.jgit.generated.storage.dht.proto/resources/org/eclipse/jgit/storage/dht/git_store.proto
  16. 2546
    0
      org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitCache.java
  17. 7963
    0
      org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitStore.java
  18. 3
    1
      org.eclipse.jgit.storage.dht/META-INF/MANIFEST.MF
  19. 6
    0
      org.eclipse.jgit.storage.dht/pom.xml
  20. 4
    7
      org.eclipse.jgit.storage.dht/resources/org/eclipse/jgit/storage/dht/DhtText.properties
  21. 0
    212
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackInfo.java
  22. 11
    12
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackKey.java
  23. 80
    44
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkFormatter.java
  24. 18
    208
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkInfo.java
  25. 0
    12
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkKey.java
  26. 0
    391
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMeta.java
  27. 111
    0
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMetaUtil.java
  28. 54
    21
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtCachedPack.java
  29. 30
    14
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserter.java
  30. 1
    0
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserterOptions.java
  31. 1
    1
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectRepresentation.java
  32. 144
    82
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtPackParser.java
  33. 6
    6
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReader.java
  34. 25
    44
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefDatabase.java
  35. 9
    8
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefUpdate.java
  36. 3
    6
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtText.java
  37. 4
    2
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/LargeNonDeltaObject.java
  38. 36
    122
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectInfo.java
  39. 6
    8
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectWriter.java
  40. 12
    20
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/PackChunk.java
  41. 11
    3
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Prefetcher.java
  42. 2
    3
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/QueueObjectLookup.java
  43. 1
    1
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentChunks.java
  44. 0
    235
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefData.java
  45. 132
    0
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefDataUtil.java
  46. 0
    755
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/TinyProtobuf.java
  47. 3
    2
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ChunkTable.java
  48. 4
    3
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RefTable.java
  49. 1
    1
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryTable.java
  50. 135
    74
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheChunkTable.java
  51. 42
    54
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheObjectIndexTable.java
  52. 1
    1
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRefTable.java
  53. 14
    20
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryTable.java
  54. 28
    9
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemChunkTable.java
  55. 23
    10
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemObjectIndexTable.java
  56. 15
    6
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRefTable.java
  57. 18
    6
      org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryTable.java
  58. 8
    0
      pom.xml

+ 6
- 0
.eclipse_iplog View File

@@ -120,3 +120,9 @@
license = Apache License, 2.0
use = unmodified source & binary
state = approved

[CQ "5135"]
description = Protocol Buffers Version: 2.4.0a (ATO CQ4876)
license = New BSD license
use = unmodified source & binary
state = approved

+ 7
- 0
org.eclipse.jgit.generated.storage.dht.proto/.classpath View File

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="output" path="bin"/>
</classpath>

+ 2
- 0
org.eclipse.jgit.generated.storage.dht.proto/.gitignore View File

@@ -0,0 +1,2 @@
/bin
/target

+ 28
- 0
org.eclipse.jgit.generated.storage.dht.proto/.project View File

@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>org.eclipse.jgit.generated.storage.dht.proto</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.ManifestBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.SchemaBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.pde.PluginNature</nature>
</natures>
</projectDescription>

+ 3
- 0
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.core.resources.prefs View File

@@ -0,0 +1,3 @@
#Mon Aug 11 16:46:12 PDT 2008
eclipse.preferences.version=1
encoding/<project>=UTF-8

+ 3
- 0
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.core.runtime.prefs View File

@@ -0,0 +1,3 @@
#Mon Mar 24 18:55:50 EDT 2008
eclipse.preferences.version=1
line.separator=\n

+ 349
- 0
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.jdt.core.prefs View File

@@ -0,0 +1,349 @@
#Thu May 05 16:40:25 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
org.eclipse.jdt.core.compiler.codegen.unusedLocal=optimize out
org.eclipse.jdt.core.compiler.compliance=1.5
org.eclipse.jdt.core.compiler.debug.lineNumber=do not generate
org.eclipse.jdt.core.compiler.debug.localVariable=do not generate
org.eclipse.jdt.core.compiler.debug.sourceFile=do not generate
org.eclipse.jdt.core.compiler.doc.comment.support=disabled
org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=ignore
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
org.eclipse.jdt.core.compiler.problem.comparingIdentical=ignore
org.eclipse.jdt.core.compiler.problem.deadCode=ignore
org.eclipse.jdt.core.compiler.problem.deprecation=ignore
org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
org.eclipse.jdt.core.compiler.problem.discouragedReference=ignore
org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore
org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore
org.eclipse.jdt.core.compiler.problem.finalParameterBound=ignore
org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=ignore
org.eclipse.jdt.core.compiler.problem.forbiddenReference=ignore
org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=ignore
org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=ignore
org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=ignore
org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore
org.eclipse.jdt.core.compiler.problem.invalidJavadoc=error
org.eclipse.jdt.core.compiler.problem.invalidJavadocTags=enabled
org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsDeprecatedRef=enabled
org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsNotVisibleRef=enabled
org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsVisibility=private
org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore
org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=ignore
org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore
org.eclipse.jdt.core.compiler.problem.missingJavadocComments=error
org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsOverriding=disabled
org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsVisibility=protected
org.eclipse.jdt.core.compiler.problem.missingJavadocTagDescription=return_tag
org.eclipse.jdt.core.compiler.problem.missingJavadocTags=error
org.eclipse.jdt.core.compiler.problem.missingJavadocTagsOverriding=disabled
org.eclipse.jdt.core.compiler.problem.missingJavadocTagsVisibility=private
org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore
org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMethodImplementation=enabled
org.eclipse.jdt.core.compiler.problem.missingSerialVersion=ignore
org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore
org.eclipse.jdt.core.compiler.problem.noEffectAssignment=ignore
org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=ignore
org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
org.eclipse.jdt.core.compiler.problem.nullReference=ignore
org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=ignore
org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore
org.eclipse.jdt.core.compiler.problem.rawTypeReference=ignore
org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore
org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore
org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=ignore
org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled
org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore
org.eclipse.jdt.core.compiler.problem.typeParameterHiding=ignore
org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=ignore
org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore
org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=ignore
org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore
org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore
org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore
org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled
org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled
org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
org.eclipse.jdt.core.compiler.problem.unusedImport=ignore
org.eclipse.jdt.core.compiler.problem.unusedLabel=ignore
org.eclipse.jdt.core.compiler.problem.unusedLocal=ignore
org.eclipse.jdt.core.compiler.problem.unusedObjectAllocation=ignore
org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore
org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled
org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=ignore
org.eclipse.jdt.core.compiler.problem.unusedWarningToken=ignore
org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=ignore
org.eclipse.jdt.core.compiler.source=1.5
org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
org.eclipse.jdt.core.formatter.alignment_for_assignment=0
org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
org.eclipse.jdt.core.formatter.alignment_for_enum_constants=0
org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
org.eclipse.jdt.core.formatter.blank_lines_after_package=1
org.eclipse.jdt.core.formatter.blank_lines_before_field=1
org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
org.eclipse.jdt.core.formatter.blank_lines_before_method=1
org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
org.eclipse.jdt.core.formatter.blank_lines_before_package=0
org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
org.eclipse.jdt.core.formatter.comment.clear_blank_lines=false
org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
org.eclipse.jdt.core.formatter.comment.format_block_comments=true
org.eclipse.jdt.core.formatter.comment.format_comments=true
org.eclipse.jdt.core.formatter.comment.format_header=false
org.eclipse.jdt.core.formatter.comment.format_html=true
org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
org.eclipse.jdt.core.formatter.comment.format_line_comments=true
org.eclipse.jdt.core.formatter.comment.format_source_code=true
org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
org.eclipse.jdt.core.formatter.comment.line_length=80
org.eclipse.jdt.core.formatter.compact_else_if=true
org.eclipse.jdt.core.formatter.continuation_indentation=2
org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
org.eclipse.jdt.core.formatter.indent_empty_lines=false
org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false
org.eclipse.jdt.core.formatter.indentation.size=4
org.eclipse.jdt.core.formatter.insert_new_line_after_annotation=insert
org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert
org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
org.eclipse.jdt.core.formatter.lineSplit=80
org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
org.eclipse.jdt.core.formatter.tabulation.char=tab
org.eclipse.jdt.core.formatter.tabulation.size=4
org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true

+ 62
- 0
org.eclipse.jgit.generated.storage.dht.proto/.settings/org.eclipse.jdt.ui.prefs View File

@@ -0,0 +1,62 @@
#Thu Aug 26 12:30:58 CDT 2010
eclipse.preferences.version=1
editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
formatter_profile=_JGit Format
formatter_settings_version=11
org.eclipse.jdt.ui.ignorelowercasenames=true
org.eclipse.jdt.ui.importorder=java;javax;org;com;
org.eclipse.jdt.ui.ondemandthreshold=99
org.eclipse.jdt.ui.staticondemandthreshold=99
org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?><templates/>
sp_cleanup.add_default_serial_version_id=true
sp_cleanup.add_generated_serial_version_id=false
sp_cleanup.add_missing_annotations=false
sp_cleanup.add_missing_deprecated_annotations=true
sp_cleanup.add_missing_methods=false
sp_cleanup.add_missing_nls_tags=false
sp_cleanup.add_missing_override_annotations=true
sp_cleanup.add_missing_override_annotations_interface_methods=false
sp_cleanup.add_serial_version_id=false
sp_cleanup.always_use_blocks=true
sp_cleanup.always_use_parentheses_in_expressions=false
sp_cleanup.always_use_this_for_non_static_field_access=false
sp_cleanup.always_use_this_for_non_static_method_access=false
sp_cleanup.convert_to_enhanced_for_loop=false
sp_cleanup.correct_indentation=false
sp_cleanup.format_source_code=true
sp_cleanup.format_source_code_changes_only=true
sp_cleanup.make_local_variable_final=false
sp_cleanup.make_parameters_final=false
sp_cleanup.make_private_fields_final=true
sp_cleanup.make_type_abstract_if_missing_method=false
sp_cleanup.make_variable_declarations_final=false
sp_cleanup.never_use_blocks=false
sp_cleanup.never_use_parentheses_in_expressions=true
sp_cleanup.on_save_use_additional_actions=true
sp_cleanup.organize_imports=false
sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
sp_cleanup.qualify_static_member_accesses_with_declaring_class=false
sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
sp_cleanup.remove_private_constructors=true
sp_cleanup.remove_trailing_whitespaces=true
sp_cleanup.remove_trailing_whitespaces_all=true
sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
sp_cleanup.remove_unnecessary_casts=false
sp_cleanup.remove_unnecessary_nls_tags=false
sp_cleanup.remove_unused_imports=false
sp_cleanup.remove_unused_local_variables=false
sp_cleanup.remove_unused_private_fields=true
sp_cleanup.remove_unused_private_members=false
sp_cleanup.remove_unused_private_methods=true
sp_cleanup.remove_unused_private_types=true
sp_cleanup.sort_members=false
sp_cleanup.sort_members_all=false
sp_cleanup.use_blocks=false
sp_cleanup.use_blocks_only_for_return_and_throw=false
sp_cleanup.use_parentheses_in_expressions=false
sp_cleanup.use_this_for_non_static_field_access=false
sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=true
sp_cleanup.use_this_for_non_static_method_access=false
sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=true

+ 11
- 0
org.eclipse.jgit.generated.storage.dht.proto/META-INF/MANIFEST.MF View File

@@ -0,0 +1,11 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: %plugin_name
Bundle-SymbolicName: org.eclipse.jgit.generated.storage.dht.proto
Bundle-Version: 1.0.0.qualifier
Bundle-Localization: plugin
Bundle-Vendor: %provider_name
Bundle-ActivationPolicy: lazy
Bundle-RequiredExecutionEnvironment: J2SE-1.5
Export-Package: org.eclipse.jgit.generated.storage.dht.proto;version="1.0.0"
Import-Package: com.google.protobuf;version="[2.4.0,2.5.0)"

+ 5
- 0
org.eclipse.jgit.generated.storage.dht.proto/build.properties View File

@@ -0,0 +1,5 @@
source.. = src/
output.. = bin/
bin.includes = META-INF/,\
.,\
plugin.properties

+ 11
- 0
org.eclipse.jgit.generated.storage.dht.proto/generate.sh View File

@@ -0,0 +1,11 @@
#!/bin/sh
#
# Update generated Java code from protocol buffer descriptions.
#
# Requires the Google Protocol Buffers compiler (protoc) on $PATH.
# Run from the root of the org.eclipse.jgit.generated.storage.dht.proto
# project so the resources/ and src/ paths below resolve correctly.

set -e

for proto in resources/org/eclipse/jgit/storage/dht/*.proto
do
	# Progress messages go to stderr so stdout stays clean for tooling.
	echo >&2 "Generating from $proto"
	# -Iresources keeps proto import paths relative to the resource root;
	# generated Java sources are written under src/.
	# Quote "$proto" so paths containing spaces are passed as one argument.
	protoc -Iresources --java_out=src "$proto"
done

+ 2
- 0
org.eclipse.jgit.generated.storage.dht.proto/plugin.properties View File

@@ -0,0 +1,2 @@
plugin_name=JGit DHT Storage Protocol Buffer Messages (Incubation)
provider_name=Eclipse.org

+ 120
- 0
org.eclipse.jgit.generated.storage.dht.proto/pom.xml View File

@@ -0,0 +1,120 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (C) 2011, Google Inc.
and other copyright owners as documented in the project's IP log.

This program and the accompanying materials are made available
under the terms of the Eclipse Distribution License v1.0 which
accompanies this distribution, is reproduced below, and is
available at http://www.eclipse.org/org/documents/edl-v10.php

All rights reserved.

Redistribution and use in source and binary forms, with or
without modification, are permitted provided that the following
conditions are met:

- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.

- Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.

- Neither the name of the Eclipse Foundation, Inc. nor the
names of its contributors may be used to endorse or promote
products derived from this software without specific prior
written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-->

<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>org.eclipse.jgit</groupId>
<artifactId>org.eclipse.jgit-parent</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>

<artifactId>org.eclipse.jgit.generated.storage.dht.proto</artifactId>
<name>JGit - DHT Storage Protocol Buffer Messages</name>

<description>
Compiled protocol buffer messages for DHT storage
</description>

<properties>
<translate-qualifier/>
</properties>

<dependencies>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</dependency>
</dependencies>

<build>
<sourceDirectory>src/</sourceDirectory>

<resources>
<resource>
<directory>.</directory>
<includes>
<include>plugin.properties</include>
</includes>
</resource>
<resource>
<directory>resources/</directory>
</resource>
</resources>

<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<inherited>true</inherited>
<executions>
<execution>
<id>attach-sources</id>
<phase>process-classes</phase>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<archive>
<manifestFile>${source-bundle-manifest}</manifestFile>
</archive>
</configuration>
</execution>
</executions>
</plugin>

<plugin>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifestFile>${bundle-manifest}</manifestFile>
</archive>
</configuration>
</plugin>
</plugins>
</build>
</project>

+ 86
- 0
org.eclipse.jgit.generated.storage.dht.proto/resources/org/eclipse/jgit/storage/dht/git_cache.proto View File

@@ -0,0 +1,86 @@
// Copyright (C) 2011, Google Inc.
// and other copyright owners as documented in the project's IP log.
//
// This program and the accompanying materials are made available
// under the terms of the Eclipse Distribution License v1.0 which
// accompanies this distribution, is reproduced below, and is
// available at http://www.eclipse.org/org/documents/edl-v10.php
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or
// without modification, are permitted provided that the following
// conditions are met:
//
// - Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
//
// - Neither the name of the Eclipse Foundation, Inc. nor the
// names of its contributors may be used to endorse or promote
// products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
// CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
// NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
//
// WARNING: If you edit this file, run generate.sh
//
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

syntax = "proto2";

package org.eclipse.jgit.storage.dht;
option java_generate_equals_and_hash = true;
option java_package = "org.eclipse.jgit.generated.storage.dht.proto";

import "org/eclipse/jgit/storage/dht/git_store.proto";


// Caches ObjectIndexTable in a single message.
//
message CachedObjectIndex {
  // One row of the object index: the chunk that stores the object
  // and the object's location information within that chunk.
  message Item {
    // Row key of the chunk containing the object.
    required string chunk_key = 1;
    // Location/type data for the object (defined in git_store.proto).
    required ObjectInfo object_info = 2;
    // Timestamp associated with this cache entry; units are defined
    // by the writer -- TODO confirm against CacheChunkTable usage.
    optional fixed64 time = 3;
  }
  // All known index entries for the cached object.
  repeated Item item = 1;
}


// Caches CachedPackInfo in a single message.
//
message CachedPackInfoList {
  // Descriptions of the cached packs (CachedPackInfo is defined
  // in git_store.proto, imported above).
  repeated CachedPackInfo pack = 1;
}


// Caches ChunkTable in a single message.
//
// WARNING: Formatters for this message are also hand-coded
// inside of the CacheChunkTable class. If you make changes
// to this message, ensure that class is also updated.
//
message CachedChunk {
  // Raw chunk data, as stored by PackChunk.Members.setChunkData.
  required bytes data = 1;
  // Precomputed chunk index (see ChunkIndex.create); absent when
  // the chunk carries no object index.
  optional bytes index = 2;
  // Chunk metadata, e.g. its base chunks (defined in git_store.proto).
  optional ChunkMeta meta = 3;
}

org.eclipse.jgit.storage.dht/resources/org/eclipse/jgit/storage/dht/git_store.proto → org.eclipse.jgit.generated.storage.dht.proto/resources/org/eclipse/jgit/storage/dht/git_store.proto View File

@@ -39,8 +39,17 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package git_store;
option java_package = "org.eclipse.jgit.storage.dht.proto";
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
//
// WARNING: If you edit this file, run generate.sh
//
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

syntax = "proto2";

package org.eclipse.jgit.storage.dht;
option java_generate_equals_and_hash = true;
option java_package = "org.eclipse.jgit.generated.storage.dht.proto";


// Entry in RefTable describing the target of the reference.

+ 2546
- 0
org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitCache.java
File diff suppressed because it is too large
View File


+ 7963
- 0
org.eclipse.jgit.generated.storage.dht.proto/src/org/eclipse/jgit/generated/storage/dht/proto/GitStore.java
File diff suppressed because it is too large
View File


+ 3
- 1
org.eclipse.jgit.storage.dht/META-INF/MANIFEST.MF View File

@@ -12,7 +12,9 @@ Export-Package: org.eclipse.jgit.storage.dht;version="1.0.0",
org.eclipse.jgit.storage.dht.spi.memory;version="1.0.0"
Bundle-ActivationPolicy: lazy
Bundle-RequiredExecutionEnvironment: J2SE-1.5
Import-Package: org.eclipse.jgit.errors;version="[1.0.0,2.0.0)",
Import-Package: com.google.protobuf;version="[2.4.0,2.5.0)",
org.eclipse.jgit.errors;version="[1.0.0,2.0.0)",
org.eclipse.jgit.generated.storage.dht.proto;version="[1.0.0,2.0.0)",
org.eclipse.jgit.lib;version="[1.0.0,2.0.0)",
org.eclipse.jgit.nls;version="[1.0.0,2.0.0)",
org.eclipse.jgit.revwalk;version="[1.0.0,2.0.0)",

+ 6
- 0
org.eclipse.jgit.storage.dht/pom.xml View File

@@ -70,6 +70,12 @@
<artifactId>org.eclipse.jgit</artifactId>
<version>${project.version}</version>
</dependency>

<dependency>
<groupId>org.eclipse.jgit</groupId>
<artifactId>org.eclipse.jgit.generated.storage.dht.proto</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>

<build>

+ 4
- 7
org.eclipse.jgit.storage.dht/resources/org/eclipse/jgit/storage/dht/DhtText.properties View File

@@ -4,9 +4,12 @@ corruptCompressedObject=Corrupt deflate stream in {0} at {1}
cycleInDeltaChain=Cycle in delta chain {0} offset {1}
databaseRequired=Database is required
expectedObjectSizeDuringCopyAsIs=Object {0} has size of 0
invalidCachedPackInfo=Invalid CachedPackInfo on {0} {1}
invalidChunkKey=Invalid ChunkKey {0}
invalidChunkMeta=Invalid ChunkMeta on {0}
invalidObjectIndexKey=Invalid ObjectIndexKey {0}
invalidObjectInfo=Invalid ObjectInfo on {0}
invalidObjectInfo=Invalid ObjectInfo for {0} from {1}
invalidRefData=Invalid RefData on {0}
missingChunk=Missing {0}
missingLongOffsetBase=Missing base for offset -{1} in meta of {0}
nameRequired=Name or key is required
@@ -17,12 +20,6 @@ objectListCountingFrom=Counting objects in {0}
objectTypeUnknown=unknown
packParserInvalidPointer=Invalid pointer inside pack parser: {0}, chunk {1}, offset {2}.
packParserRollbackFailed=DhtPackParser rollback failed
protobufNegativeValuesNotSupported=Negative values are not supported
protobufNoArray=bytes field requires ByteBuffer.hasArray to be true
protobufNotBooleanValue=bool field {0} has invalid value {1}
protobufUnsupportedFieldType=Unsupported protobuf field type {0}
protobufWrongFieldLength=Field {0} should have length of {1}, found {2}
protobufWrongFieldType=Field {0} is of type {1}, expected {2}
recordingObjects=Recording objects
repositoryAlreadyExists=Repository {0} already exists
repositoryMustBeBare=Only bare repositories are supported

+ 0
- 212
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackInfo.java View File

@@ -1,212 +0,0 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package org.eclipse.jgit.storage.dht;

import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

import org.eclipse.jgit.lib.ObjectId;

/**
 * Summary information about a cached pack owned by a repository.
 * <p>
 * Instances round-trip through a hand-rolled TinyProtobuf encoding; the
 * field numbers used in {@link #fromBytes(TinyProtobuf.Decoder)} and
 * {@code asBytes(CachedPackInfo)} must stay in sync with each other.
 */
public class CachedPackInfo {
	/**
	 * Parse info from the storage system.
	 *
	 * @param raw
	 *            the raw encoding of the info.
	 * @return the info object.
	 */
	public static CachedPackInfo fromBytes(byte[] raw) {
		return fromBytes(TinyProtobuf.decode(raw));
	}

	/**
	 * Parse info from the storage system.
	 *
	 * @param d
	 *            decoder for the message buffer.
	 * @return the info object.
	 */
	public static CachedPackInfo fromBytes(TinyProtobuf.Decoder d) {
		CachedPackInfo info = new CachedPackInfo();
		// Field numbers match the writer in asBytes() below.
		PARSE: for (;;) {
			switch (d.next()) {
			case 0: // end of message
				break PARSE;
			case 1:
				info.name = d.stringObjectId();
				continue;
			case 2:
				info.version = d.stringObjectId();
				continue;
			case 3:
				info.objectsTotal = d.int64();
				continue;
			case 4:
				info.objectsDelta = d.int64();
				continue;
			case 5:
				info.bytesTotal = d.int64();
				continue;
			case 6: {
				// Nested message: list of tip ObjectIds (field 1 repeated).
				TinyProtobuf.Decoder m = d.message();
				for (;;) {
					switch (m.next()) {
					case 0: // end of nested message; resume outer loop
						continue PARSE;
					case 1:
						info.tips.add(m.stringObjectId());
						continue;
					default:
						m.skip();
						continue;
					}
				}
			}
			case 7: {
				// Nested message: ordered list of chunk keys (field 1 repeated).
				TinyProtobuf.Decoder m = d.message();
				for (;;) {
					switch (m.next()) {
					case 0: // end of nested message; resume outer loop
						continue PARSE;
					case 1:
						info.chunks.add(ChunkKey.fromBytes(m));
						continue;
					default:
						m.skip();
						continue;
					}
				}
			}
			default:
				// Unknown field; skip for forward compatibility.
				d.skip();
				continue;
			}
		}
		return info;
	}

	// Serialize info into the TinyProtobuf wire form read by fromBytes().
	private static byte[] asBytes(CachedPackInfo info) {
		// Pre-size nested encoders: ~2 bytes of tag/length overhead
		// per element plus the element payload.
		int tipSize = (2 + OBJECT_ID_STRING_LENGTH) * info.tips.size();
		TinyProtobuf.Encoder tipList = TinyProtobuf.encode(tipSize);
		for (ObjectId tip : info.tips)
			tipList.string(1, tip);

		int chunkSize = (2 + ChunkKey.KEYLEN) * info.chunks.size();
		TinyProtobuf.Encoder chunkList = TinyProtobuf.encode(chunkSize);
		for (ChunkKey key : info.chunks)
			chunkList.bytes(1, key.asBytes());

		TinyProtobuf.Encoder e = TinyProtobuf.encode(1024);
		e.string(1, info.name);
		e.string(2, info.version);
		e.int64(3, info.objectsTotal);
		// Zero-valued counters are omitted from the wire form; the
		// decoder's defaults cover them.
		e.int64IfNotZero(4, info.objectsDelta);
		e.int64IfNotZero(5, info.bytesTotal);
		e.message(6, tipList);
		e.message(7, chunkList);
		return e.asByteArray();
	}

	// Pack name; combined with version to form the row key (getRowKey).
	ObjectId name;

	// Pack version; second half of the row key.
	ObjectId version;

	// Tip ObjectIds carried as nested message field 6, kept sorted.
	SortedSet<ObjectId> tips = new TreeSet<ObjectId>();

	// Total number of objects stored in the cached pack.
	long objectsTotal;

	// Number of objects stored in delta format.
	long objectsDelta;

	// Total size of the cached pack in bytes.
	long bytesTotal;

	// Chunks that make up this pack, in pack order.
	List<ChunkKey> chunks = new ArrayList<ChunkKey>();

	/** @return name of the information object. */
	public CachedPackKey getRowKey() {
		return new CachedPackKey(name, version);
	}

	/** @return number of objects stored in the cached pack. */
	public long getObjectsTotal() {
		return objectsTotal;
	}

	/** @return number of objects stored in delta format. */
	public long getObjectsDelta() {
		return objectsDelta;
	}

	/** @return number of bytes in the cached pack. */
	public long getTotalBytes() {
		return bytesTotal;
	}

	/** @return list of all chunks that make up this pack, in order. */
	public List<ChunkKey> getChunkKeys() {
		return Collections.unmodifiableList(chunks);
	}

	/**
	 * Convert this information into a byte array for storage.
	 *
	 * @return the data, encoded as a byte array. This does not include the key,
	 *         callers must store that separately.
	 */
	public byte[] asBytes() {
		return asBytes(this);
	}

	@Override
	public String toString() {
		return getRowKey().toString();
	}
}

+ 11
- 12
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/CachedPackKey.java View File

@@ -47,6 +47,7 @@ import static org.eclipse.jgit.util.RawParseUtils.decode;

import java.text.MessageFormat;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.lib.ObjectId;

/** Unique identifier of a {@link CachedPackInfo} in the DHT. */
@@ -61,18 +62,6 @@ public final class CachedPackKey implements RowKey {
return fromBytes(key, 0, key.length);
}

/**
* @param d
* decoder to read key from current field from.
* @return the key
*/
public static CachedPackKey fromBytes(TinyProtobuf.Decoder d) {
int len = d.bytesLength();
int ptr = d.bytesOffset();
byte[] buf = d.bytesArray();
return fromBytes(buf, ptr, len);
}

/**
* @param key
* @param ptr
@@ -100,6 +89,16 @@ public final class CachedPackKey implements RowKey {
return new CachedPackKey(name, vers);
}

/**
* @param info
* @return the key
*/
public static CachedPackKey fromInfo(CachedPackInfo info) {
ObjectId name = ObjectId.fromString(info.getName());
ObjectId vers = ObjectId.fromString(info.getVersion());
return new CachedPackKey(name, vers);
}

private final ObjectId name;

private final ObjectId version;

+ 80
- 44
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkFormatter.java View File

@@ -52,10 +52,13 @@ import java.util.List;
import java.util.Map;
import java.util.zip.Deflater;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.storage.dht.ChunkMeta.BaseChunk;
import org.eclipse.jgit.storage.dht.spi.Database;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.transport.PackedObjectInfo;
@@ -75,8 +78,6 @@ class ChunkFormatter {

private final byte[] varIntBuf;

private final ChunkInfo info;

private final int maxObjects;

private Map<ChunkKey, BaseChunkInfo> baseChunks;
@@ -95,25 +96,35 @@ class ChunkFormatter {

private PackChunk.Members builder;

private GitStore.ChunkInfo.Source source;

private boolean fragment;

private int objectType;

private int objectsTotal, objectsWhole, objectsRefDelta, objectsOfsDelta;

private ChunkInfo chunkInfo;

ChunkFormatter(RepositoryKey repo, DhtInserterOptions options) {
this.repo = repo;
this.options = options;
this.varIntBuf = new byte[32];
this.info = new ChunkInfo();
this.chunkData = new byte[options.getChunkSize()];
this.maxObjects = options.getMaxObjectCount();
this.objectType = -1;
}

void setSource(ChunkInfo.Source src) {
info.source = src;
void setSource(GitStore.ChunkInfo.Source src) {
source = src;
}

void setObjectType(int type) {
info.objectType = type;
objectType = type;
}

void setFragment() {
info.fragment = true;
fragment = true;
}

ChunkKey getChunkKey() {
@@ -121,7 +132,7 @@ class ChunkFormatter {
}

ChunkInfo getChunkInfo() {
return info;
return chunkInfo;
}

ChunkMeta getChunkMeta() {
@@ -150,37 +161,58 @@ class ChunkFormatter {
ptr += 4;

md.update(chunkData, 0, ptr);
info.chunkKey = ChunkKey.create(repo, ObjectId.fromRaw(md.digest()));
info.chunkSize = chunkData.length;
ChunkKey key = ChunkKey.create(repo, ObjectId.fromRaw(md.digest()));

GitStore.ChunkInfo.Builder info = GitStore.ChunkInfo.newBuilder();
info.setSource(source);
info.setObjectType(GitStore.ChunkInfo.ObjectType.valueOf(objectType));
if (fragment)
info.setIsFragment(true);
info.setChunkSize(chunkData.length);

GitStore.ChunkInfo.ObjectCounts.Builder cnts = info.getObjectCountsBuilder();
cnts.setTotal(objectsTotal);
if (objectsWhole > 0)
cnts.setWhole(objectsWhole);
if (objectsRefDelta > 0)
cnts.setRefDelta(objectsRefDelta);
if (objectsOfsDelta > 0)
cnts.setOfsDelta(objectsOfsDelta);

builder = new PackChunk.Members();
builder.setChunkKey(info.chunkKey);
builder.setChunkKey(key);
builder.setChunkData(chunkData);

ChunkMeta meta = new ChunkMeta(info.chunkKey);
if (baseChunks != null) {
meta.baseChunks = new ArrayList<BaseChunk>(baseChunks.size());
List<BaseChunk> list = new ArrayList<BaseChunk>(baseChunks.size());
for (BaseChunkInfo b : baseChunks.values()) {
if (0 < b.useCount)
meta.baseChunks.add(new BaseChunk(b.relativeStart, b.key));
if (0 < b.useCount) {
BaseChunk.Builder c = BaseChunk.newBuilder();
c.setRelativeStart(b.relativeStart);
c.setChunkKey(b.key.asString());
list.add(c.build());
}
}
Collections.sort(meta.baseChunks, new Comparator<BaseChunk>() {
Collections.sort(list, new Comparator<BaseChunk>() {
public int compare(BaseChunk a, BaseChunk b) {
return Long.signum(a.relativeStart - b.relativeStart);
return Long.signum(a.getRelativeStart()
- b.getRelativeStart());
}
});
}
if (!meta.isEmpty()) {
ChunkMeta.Builder b = ChunkMeta.newBuilder();
b.addAllBaseChunk(list);
ChunkMeta meta = b.build();
builder.setMeta(meta);
info.metaSize = meta.asBytes().length;
info.setMetaSize(meta.getSerializedSize());
}

if (objectList != null && !objectList.isEmpty()) {
byte[] index = ChunkIndex.create(objectList);
builder.setChunkIndex(index);
info.indexSize = index.length;
info.setIndexSize(index.length);
}

chunkInfo = new ChunkInfo(key, info.build());
return getChunkKey();
}

@@ -198,7 +230,7 @@ class ChunkFormatter {
void safePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException {
WriteBuffer chunkBuf = db.newWriteBuffer();

db.repository().put(repo, info, chunkBuf);
db.repository().put(repo, getChunkInfo(), chunkBuf);
chunkBuf.flush();

db.chunk().put(builder, chunkBuf);
@@ -208,7 +240,7 @@ class ChunkFormatter {
}

void unsafePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException {
db.repository().put(repo, info, dbWriteBuffer);
db.repository().put(repo, getChunkInfo(), dbWriteBuffer);
db.chunk().put(builder, dbWriteBuffer);
linkObjects(db, dbWriteBuffer);
}
@@ -225,11 +257,11 @@ class ChunkFormatter {

boolean whole(Deflater def, int type, byte[] data, int off, final int size,
ObjectId objId) {
if (free() < 10 || maxObjects <= info.objectsTotal)
if (free() < 10 || maxObjects <= objectsTotal)
return false;

header(type, size);
info.objectsWhole++;
objectsWhole++;
currentObjectType = type;

int endOfHeader = ptr;
@@ -257,20 +289,20 @@ class ChunkFormatter {
final int packedSize = ptr - endOfHeader;
objectList.add(new StoredObject(objId, type, mark, packedSize, size));

if (info.objectType < 0)
info.objectType = type;
else if (info.objectType != type)
info.objectType = ChunkInfo.OBJ_MIXED;
if (objectType < 0)
objectType = type;
else if (objectType != type)
objectType = ChunkInfo.OBJ_MIXED;

return true;
}

boolean whole(int type, long inflatedSize) {
if (free() < 10 || maxObjects <= info.objectsTotal)
if (free() < 10 || maxObjects <= objectsTotal)
return false;

header(type, inflatedSize);
info.objectsWhole++;
objectsWhole++;
currentObjectType = type;
return true;
}
@@ -278,11 +310,11 @@ class ChunkFormatter {
boolean ofsDelta(long inflatedSize, long negativeOffset) {
final int ofsPtr = encodeVarInt(negativeOffset);
final int ofsLen = varIntBuf.length - ofsPtr;
if (free() < 10 + ofsLen || maxObjects <= info.objectsTotal)
if (free() < 10 + ofsLen || maxObjects <= objectsTotal)
return false;

header(Constants.OBJ_OFS_DELTA, inflatedSize);
info.objectsOfsDelta++;
objectsOfsDelta++;
currentObjectType = Constants.OBJ_OFS_DELTA;
currentObjectBase = null;

@@ -294,11 +326,11 @@ class ChunkFormatter {
}

boolean refDelta(long inflatedSize, AnyObjectId baseId) {
if (free() < 30 || maxObjects <= info.objectsTotal)
if (free() < 30 || maxObjects <= objectsTotal)
return false;

header(Constants.OBJ_REF_DELTA, inflatedSize);
info.objectsRefDelta++;
objectsRefDelta++;
currentObjectType = Constants.OBJ_REF_DELTA;

baseId.copyRawTo(chunkData, ptr);
@@ -345,7 +377,7 @@ class ChunkFormatter {
}

int getObjectCount() {
return info.objectsTotal;
return objectsTotal;
}

int position() {
@@ -374,32 +406,32 @@ class ChunkFormatter {
}

void adjustObjectCount(int delta, int type) {
info.objectsTotal += delta;
objectsTotal += delta;

switch (type) {
case Constants.OBJ_COMMIT:
case Constants.OBJ_TREE:
case Constants.OBJ_BLOB:
case Constants.OBJ_TAG:
info.objectsWhole += delta;
objectsWhole += delta;
break;

case Constants.OBJ_OFS_DELTA:
info.objectsOfsDelta += delta;
objectsOfsDelta += delta;
if (currentObjectBase != null && --currentObjectBase.useCount == 0)
baseChunks.remove(currentObjectBase.key);
currentObjectBase = null;
break;

case Constants.OBJ_REF_DELTA:
info.objectsRefDelta += delta;
objectsRefDelta += delta;
break;
}
}

private void header(int type, long inflatedSize) {
mark = ptr;
info.objectsTotal++;
objectsTotal++;

long nextLength = inflatedSize >>> 4;
chunkData[ptr++] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (type << 4) | (inflatedSize & 0x0F));
@@ -454,8 +486,12 @@ class ChunkFormatter {
}

ObjectInfo link(ChunkKey key) {
final int ptr = (int) getOffset();
return new ObjectInfo(key, -1, type, ptr, packed, inflated, null, false);
GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
b.setObjectType(ObjectType.valueOf(type));
b.setOffset((int) getOffset());
b.setPackedSize(packed);
b.setInflatedSize(inflated);
return new ObjectInfo(key, b.build());
}
}
}

+ 18
- 208
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkInfo.java View File

@@ -43,150 +43,32 @@

package org.eclipse.jgit.storage.dht;

import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore;

/**
* Summary information about a chunk owned by a repository.
*/
public class ChunkInfo {
/** Source the chunk (what code path created it). */
public static enum Source implements TinyProtobuf.Enum {
/** Came in over the network from an external source */
RECEIVE(1),
/** Created in this repository (e.g. a merge). */
INSERT(2),
/** Generated during a repack of this repository. */
REPACK(3);

private final int value;

Source(int val) {
this.value = val;
}

public int value() {
return value;
}
}

/** Mixed objects are stored in the chunk (instead of single type). */
public static final int OBJ_MIXED = 0;

private final ChunkKey chunkKey;

private final GitStore.ChunkInfo data;

/**
* Parse info from the storage system.
* Wrap a ChunkInfo message.
*
* @param chunkKey
* the chunk the link points to.
* @param raw
* the raw encoding of the info.
* @return the info object.
* @param key
* associated chunk key.
* @param data
* data.
*/
public static ChunkInfo fromBytes(ChunkKey chunkKey, byte[] raw) {
ChunkInfo info = new ChunkInfo();
info.chunkKey = chunkKey;

TinyProtobuf.Decoder d = TinyProtobuf.decode(raw);
PARSE: for (;;) {
switch (d.next()) {
case 0:
break PARSE;
case 1:
info.source = d.intEnum(Source.values());
continue;
case 2:
info.objectType = d.int32();
continue;
case 3:
info.fragment = d.bool();
continue;
case 4:
info.cachedPack = CachedPackKey.fromBytes(d);
continue;

case 5: {
TinyProtobuf.Decoder m = d.message();
for (;;) {
switch (m.next()) {
case 0:
continue PARSE;
case 1:
info.objectsTotal = m.int32();
continue;
case 2:
info.objectsWhole = m.int32();
continue;
case 3:
info.objectsOfsDelta = m.int32();
continue;
case 4:
info.objectsRefDelta = m.int32();
continue;
default:
m.skip();
continue;
}
}
}
case 6:
info.chunkSize = d.int32();
continue;
case 7:
info.indexSize = d.int32();
continue;
case 8:
info.metaSize = d.int32();
continue;
default:
d.skip();
continue;
}
}
return info;
}

private static byte[] asBytes(ChunkInfo info) {
TinyProtobuf.Encoder objects = TinyProtobuf.encode(48);
objects.int32IfNotZero(1, info.objectsTotal);
objects.int32IfNotZero(2, info.objectsWhole);
objects.int32IfNotZero(3, info.objectsOfsDelta);
objects.int32IfNotZero(4, info.objectsRefDelta);

TinyProtobuf.Encoder e = TinyProtobuf.encode(128);
e.intEnum(1, info.source);
e.int32IfNotNegative(2, info.objectType);
e.boolIfTrue(3, info.fragment);
e.string(4, info.cachedPack);
e.message(5, objects);
e.int32IfNotZero(6, info.chunkSize);
e.int32IfNotZero(7, info.indexSize);
e.int32IfNotZero(8, info.metaSize);
return e.asByteArray();
public ChunkInfo(ChunkKey key, GitStore.ChunkInfo data) {
this.chunkKey = key;
this.data = data;
}

ChunkKey chunkKey;

Source source;

int objectType = -1;

boolean fragment;

CachedPackKey cachedPack;

int objectsTotal;

int objectsWhole;

int objectsOfsDelta;

int objectsRefDelta;

int chunkSize;

int indexSize;

int metaSize;

/** @return the repository that contains the chunk. */
public RepositoryKey getRepositoryKey() {
return chunkKey.getRepositoryKey();
@@ -197,69 +79,9 @@ public class ChunkInfo {
return chunkKey;
}

/** @return source of this chunk. */
public Source getSource() {
return source;
}

/** @return type of object in the chunk, or {@link #OBJ_MIXED}. */
public int getObjectType() {
return objectType;
}

/** @return true if this chunk is part of a large fragmented object. */
public boolean isFragment() {
return fragment;
}

/** @return cached pack this is a member of, or null. */
public CachedPackKey getCachedPack() {
return cachedPack;
}

/** @return size of the chunk's compressed data, in bytes. */
public int getChunkSizeInBytes() {
return chunkSize;
}

/** @return size of the chunk's index data, in bytes. */
public int getIndexSizeInBytes() {
return indexSize;
}

/** @return size of the chunk's meta data, in bytes. */
public int getMetaSizeInBytes() {
return metaSize;
}

/** @return number of objects stored in the chunk. */
public int getObjectsTotal() {
return objectsTotal;
}

/** @return number of whole objects stored in the chunk. */
public int getObjectsWhole() {
return objectsWhole;
}

/** @return number of OFS_DELTA objects stored in the chunk. */
public int getObjectsOffsetDelta() {
return objectsOfsDelta;
}

/** @return number of REF_DELTA objects stored in the chunk. */
public int getObjectsReferenceDelta() {
return objectsRefDelta;
}

/**
* Convert this link into a byte array for storage.
*
* @return the link data, encoded as a byte array. This does not include the
* ChunkKey, callers must store that separately.
*/
public byte[] asBytes() {
return asBytes(this);
/** @return the underlying message containing all data. */
public GitStore.ChunkInfo getData() {
return data;
}

@Override
@@ -267,20 +89,8 @@ public class ChunkInfo {
StringBuilder b = new StringBuilder();
b.append("ChunkInfo:");
b.append(chunkKey);
b.append(" [");
if (getSource() != null)
b.append(" ").append(getSource());
if (isFragment())
b.append(" fragment");
if (getObjectType() != 0)
b.append(" ").append(Constants.typeString(getObjectType()));
if (0 < getObjectsTotal())
b.append(" objects=").append(getObjectsTotal());
if (0 < getChunkSizeInBytes())
b.append(" chunk=").append(getChunkSizeInBytes()).append("B");
if (0 < getIndexSizeInBytes())
b.append(" index=").append(getIndexSizeInBytes()).append("B");
b.append(" ]");
b.append("\n");
b.append(data);
return b.toString();
}
}

+ 0
- 12
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkKey.java View File

@@ -73,18 +73,6 @@ public final class ChunkKey implements RowKey {
return fromBytes(key, 0, key.length);
}

/**
* @param d
* decoder to read key from current field from.
* @return the key
*/
public static ChunkKey fromBytes(TinyProtobuf.Decoder d) {
int len = d.bytesLength();
int ptr = d.bytesOffset();
byte[] buf = d.bytesArray();
return fromBytes(buf, ptr, len);
}

/**
* @param key
* @param ptr

+ 0
- 391
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMeta.java View File

@@ -1,391 +0,0 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package org.eclipse.jgit.storage.dht;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/** Metadata stored inline with each PackChunk. */
public class ChunkMeta {
/**
* Convert from byte array.
*
* @param key
* the chunk key this meta object sits in.
* @param raw
* the raw byte array.
* @return the chunk meta.
*/
public static ChunkMeta fromBytes(ChunkKey key, byte[] raw) {
return fromBytes(key, TinyProtobuf.decode(raw));
}

/**
* Convert from byte array.
*
* @param key
* the chunk key this meta object sits in.
* @param d
* the message decoder.
* @return the chunk meta.
*/
public static ChunkMeta fromBytes(ChunkKey key, TinyProtobuf.Decoder d) {
List<BaseChunk> baseChunk = null;
List<ChunkKey> fragment = null;
PrefetchHint commit = null;
PrefetchHint tree = null;

PARSE: for (;;) {
switch (d.next()) {
case 0:
break PARSE;
case 1:
if (baseChunk == null)
baseChunk = new ArrayList<BaseChunk>(4);
baseChunk.add(BaseChunk.fromBytes(d.message()));
continue;
case 2:
if (fragment == null)
fragment = new ArrayList<ChunkKey>(4);
fragment.add(ChunkKey.fromBytes(d));
continue;
case 51:
commit = PrefetchHint.fromBytes(d.message());
continue;
case 52:
tree = PrefetchHint.fromBytes(d.message());
continue;
default:
d.skip();
continue;
}
}

return new ChunkMeta(key, baseChunk, fragment, commit, tree);
}

private final ChunkKey chunkKey;

List<BaseChunk> baseChunks;

List<ChunkKey> fragments;

PrefetchHint commitPrefetch;

PrefetchHint treePrefetch;

ChunkMeta(ChunkKey key) {
this(key, null, null, null, null);
}

ChunkMeta(ChunkKey chunkKey, List<BaseChunk> baseChunk,
List<ChunkKey> fragment, PrefetchHint commit, PrefetchHint tree) {
this.chunkKey = chunkKey;
this.baseChunks = baseChunk;
this.fragments = fragment;
this.commitPrefetch = commit;
this.treePrefetch = tree;
}

/** @return key of the chunk this meta information is for. */
public ChunkKey getChunkKey() {
return chunkKey;
}

BaseChunk getBaseChunk(long position) throws DhtException {
// Chunks are sorted by ascending relative_start order.
// Thus for a pack sequence of: A B C, we have:
//
// -- C relative_start = 10,000
// -- B relative_start = 20,000
// -- A relative_start = 30,000
//
// Indicating that chunk C starts 10,000 bytes before us,
// chunk B starts 20,000 bytes before us (and 10,000 before C),
// chunk A starts 30,000 bytes before us (and 10,000 before B),
//
// If position falls within:
//
// -- C (10k), then position is between 0..10,000
// -- B (20k), then position is between 10,000 .. 20,000
// -- A (30k), then position is between 20,000 .. 30,000

int high = baseChunks.size();
int low = 0;
while (low < high) {
final int mid = (low + high) >>> 1;
final BaseChunk base = baseChunks.get(mid);

if (position > base.relativeStart) {
low = mid + 1;

} else if (mid == 0 || position == base.relativeStart) {
return base;

} else if (baseChunks.get(mid - 1).relativeStart < position) {
return base;

} else {
high = mid;
}
}

throw new DhtException(MessageFormat.format(
DhtText.get().missingLongOffsetBase, chunkKey,
Long.valueOf(position)));
}

/** @return number of fragment chunks that make up the object. */
public int getFragmentCount() {
return fragments != null ? fragments.size() : 0;
}

/**
* Get the nth fragment key.
*
* @param nth
* @return the key.
*/
public ChunkKey getFragmentKey(int nth) {
return fragments.get(nth);
}

/**
* Find the key of the fragment that occurs after this chunk.
*
* @param currentKey
* the current chunk key.
* @return next chunk after this; null if there isn't one.
*/
public ChunkKey getNextFragment(ChunkKey currentKey) {
for (int i = 0; i < fragments.size() - 1; i++) {
if (fragments.get(i).equals(currentKey))
return fragments.get(i + 1);
}
return null;
}

/** @return chunks to visit. */
public PrefetchHint getCommitPrefetch() {
return commitPrefetch;
}

/** @return chunks to visit. */
public PrefetchHint getTreePrefetch() {
return treePrefetch;
}

/** @return true if there is no data in this object worth storing. */
boolean isEmpty() {
if (baseChunks != null && !baseChunks.isEmpty())
return false;
if (fragments != null && !fragments.isEmpty())
return false;
if (commitPrefetch != null && !commitPrefetch.isEmpty())
return false;
if (treePrefetch != null && !treePrefetch.isEmpty())
return false;
return true;
}

/** @return format as byte array for storage. */
public byte[] asBytes() {
TinyProtobuf.Encoder e = TinyProtobuf.encode(256);

if (baseChunks != null) {
for (BaseChunk base : baseChunks)
e.message(1, base.asBytes());
}

if (fragments != null) {
for (ChunkKey key : fragments)
e.bytes(2, key.asBytes());
}

if (commitPrefetch != null)
e.message(51, commitPrefetch.asBytes());
if (treePrefetch != null)
e.message(52, treePrefetch.asBytes());

return e.asByteArray();
}

/** Describes other chunks that contain the bases for this chunk's deltas. */
public static class BaseChunk {
final long relativeStart;

private final ChunkKey chunk;

BaseChunk(long relativeStart, ChunkKey chunk) {
this.relativeStart = relativeStart;
this.chunk = chunk;
}

/** @return bytes backward from current chunk to start of base chunk. */
public long getRelativeStart() {
return relativeStart;
}

/** @return unique key of this chunk. */
public ChunkKey getChunkKey() {
return chunk;
}

TinyProtobuf.Encoder asBytes() {
int max = 11 + 2 + ChunkKey.KEYLEN;
TinyProtobuf.Encoder e = TinyProtobuf.encode(max);
e.int64(1, relativeStart);
e.bytes(2, chunk.asBytes());
return e;
}

static BaseChunk fromBytes(TinyProtobuf.Decoder d) {
long relativeStart = -1;
ChunkKey chunk = null;

PARSE: for (;;) {
switch (d.next()) {
case 0:
break PARSE;
case 1:
relativeStart = d.int64();
continue;
case 2:
chunk = ChunkKey.fromBytes(d);
continue;
default:
d.skip();
continue;
}
}

return new BaseChunk(relativeStart, chunk);
}
}

/** Describes the prefetching for a particular object type. */
public static class PrefetchHint {
private final List<ChunkKey> edge;

private final List<ChunkKey> sequential;

PrefetchHint(List<ChunkKey> edge, List<ChunkKey> sequential) {
if (edge == null)
edge = Collections.emptyList();
else
edge = Collections.unmodifiableList(edge);

if (sequential == null)
sequential = Collections.emptyList();
else
sequential = Collections.unmodifiableList(sequential);

this.edge = edge;
this.sequential = sequential;
}

/** @return chunks on the edge of this chunk. */
public List<ChunkKey> getEdge() {
return edge;
}

/** @return chunks according to sequential ordering. */
public List<ChunkKey> getSequential() {
return sequential;
}

boolean isEmpty() {
return edge.isEmpty() && sequential.isEmpty();
}

TinyProtobuf.Encoder asBytes() {
int max = 0;

max += (2 + ChunkKey.KEYLEN) * edge.size();
max += (2 + ChunkKey.KEYLEN) * sequential.size();

TinyProtobuf.Encoder e = TinyProtobuf.encode(max);
for (ChunkKey key : edge)
e.bytes(1, key.asBytes());
for (ChunkKey key : sequential)
e.bytes(2, key.asBytes());
return e;
}

static PrefetchHint fromBytes(TinyProtobuf.Decoder d) {
ArrayList<ChunkKey> edge = null;
ArrayList<ChunkKey> sequential = null;

PARSE: for (;;) {
switch (d.next()) {
case 0:
break PARSE;
case 1:
if (edge == null)
edge = new ArrayList<ChunkKey>(16);
edge.add(ChunkKey.fromBytes(d));
continue;
case 2:
if (sequential == null)
sequential = new ArrayList<ChunkKey>(16);
sequential.add(ChunkKey.fromBytes(d));
continue;
default:
d.skip();
continue;
}
}

if (edge != null)
edge.trimToSize();

if (sequential != null)
sequential.trimToSize();

return new PrefetchHint(edge, sequential);
}
}
}

+ 111
- 0
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ChunkMetaUtil.java View File

@@ -0,0 +1,111 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package org.eclipse.jgit.storage.dht;

import java.text.MessageFormat;
import java.util.List;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk;

class ChunkMetaUtil {
static BaseChunk getBaseChunk(ChunkKey chunkKey, ChunkMeta meta,
long position) throws DhtException {
// Chunks are sorted by ascending relative_start order.
// Thus for a pack sequence of: A B C, we have:
//
// -- C relative_start = 10,000
// -- B relative_start = 20,000
// -- A relative_start = 30,000
//
// Indicating that chunk C starts 10,000 bytes before us,
// chunk B starts 20,000 bytes before us (and 10,000 before C),
// chunk A starts 30,000 bytes before us (and 10,000 before B),
//
// If position falls within:
//
// -- C (10k), then position is between 0..10,000
// -- B (20k), then position is between 10,000 .. 20,000
// -- A (30k), then position is between 20,000 .. 30,000

List<BaseChunk> baseChunks = meta.getBaseChunkList();
int high = baseChunks.size();
int low = 0;
while (low < high) {
final int mid = (low + high) >>> 1;
final BaseChunk base = baseChunks.get(mid);

if (position > base.getRelativeStart()) {
low = mid + 1;

} else if (mid == 0 || position == base.getRelativeStart()) {
return base;

} else if (baseChunks.get(mid - 1).getRelativeStart() < position) {
return base;

} else {
high = mid;
}
}

throw new DhtException(MessageFormat.format(
DhtText.get().missingLongOffsetBase, chunkKey,
Long.valueOf(position)));
}

static ChunkKey getNextFragment(ChunkMeta meta, ChunkKey chunkKey) {
int cnt = meta.getFragmentCount();
for (int i = 0; i < cnt - 1; i++) {
ChunkKey key = ChunkKey.fromString(meta.getFragment(i));
if (chunkKey.equals(key))
return ChunkKey.fromString(meta.getFragment(i + 1));
}
return null;
}

private ChunkMetaUtil() {
// Static utilities only, do not create instances.
}
}

+ 54
- 21
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtCachedPack.java View File

@@ -45,12 +45,16 @@ package org.eclipse.jgit.storage.dht;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.storage.pack.CachedPack;
import org.eclipse.jgit.storage.pack.ObjectToPack;
@@ -61,7 +65,11 @@ import org.eclipse.jgit.storage.pack.StoredObjectRepresentation;
public class DhtCachedPack extends CachedPack {
private final CachedPackInfo info;

private Set<ChunkKey> chunkKeySet;
private Set<ObjectId> tips;

private Set<ChunkKey> keySet;

private ChunkKey[] keyList;

DhtCachedPack(CachedPackInfo info) {
this.info = info;
@@ -69,7 +77,13 @@ public class DhtCachedPack extends CachedPack {

@Override
public Set<ObjectId> getTips() {
return Collections.unmodifiableSet(info.tips);
if (tips == null) {
tips = new HashSet<ObjectId>();
for (String idString : info.getTipList().getObjectNameList())
tips.add(ObjectId.fromString(idString));
tips = Collections.unmodifiableSet(tips);
}
return tips;
}

@Override
@@ -90,23 +104,37 @@ public class DhtCachedPack extends CachedPack {
@Override
public boolean hasObject(ObjectToPack obj, StoredObjectRepresentation rep) {
DhtObjectRepresentation objrep = (DhtObjectRepresentation) rep;
if (chunkKeySet == null)
chunkKeySet = new HashSet<ChunkKey>(info.chunks);
return chunkKeySet.contains(objrep.getChunkKey());
if (keySet == null)
init();
return keySet.contains(objrep.getChunkKey());
}

private void init() {
ChunkList chunkList = info.getChunkList();
int cnt = chunkList.getChunkKeyCount();
keySet = new HashSet<ChunkKey>();
keyList = new ChunkKey[cnt];
for (int i = 0; i < cnt; i++) {
ChunkKey key = ChunkKey.fromString(chunkList.getChunkKey(i));
keySet.add(key);
keyList[i] = key;
}
}

void copyAsIs(PackOutputStream out, boolean validate, DhtReader ctx)
throws IOException {
if (keyList == null)
init();
Prefetcher p = new Prefetcher(ctx, 0);
p.push(info.chunks);
copyPack(out, ctx, p, validate);
p.push(Arrays.asList(keyList));
copyPack(out, p, validate);
}

private void copyPack(PackOutputStream out, DhtReader ctx,
Prefetcher prefetcher, boolean validate) throws DhtException,
DhtMissingChunkException, IOException {
Map<ChunkKey, Long> startsAt = new HashMap<ChunkKey, Long>();
for (ChunkKey key : info.chunks) {
private void copyPack(PackOutputStream out, Prefetcher prefetcher,
boolean validate) throws DhtException, DhtMissingChunkException,
IOException {
Map<String, Long> startsAt = new HashMap<String, Long>();
for (ChunkKey key : keyList) {
PackChunk chunk = prefetcher.get(key);

// The prefetcher should always produce the chunk for us, if not
@@ -122,29 +150,34 @@ public class DhtCachedPack extends CachedPack {
// incorrectly created and would confuse the client.
//
long position = out.length();
if (chunk.getMeta() != null && chunk.getMeta().baseChunks != null) {
for (ChunkMeta.BaseChunk base : chunk.getMeta().baseChunks) {
ChunkMeta meta = chunk.getMeta();
if (meta != null && meta.getBaseChunkCount() != 0) {
for (ChunkMeta.BaseChunk base : meta.getBaseChunkList()) {
Long act = startsAt.get(base.getChunkKey());
long exp = position - base.getRelativeStart();

if (act == null) {
throw new DhtException(MessageFormat.format(DhtText
.get().wrongChunkPositionInCachedPack, info
.getRowKey(), base.getChunkKey(),
"[not written]", key, exp));
.get().wrongChunkPositionInCachedPack,
rowKey(), base.getChunkKey(),
"[not written]", key, Long.valueOf(exp)));
}

if (act.longValue() != exp) {
throw new DhtException(MessageFormat.format(DhtText
.get().wrongChunkPositionInCachedPack, info
.getRowKey(), base.getChunkKey(), //
act, key, exp));
.get().wrongChunkPositionInCachedPack,
rowKey(), base.getChunkKey(),
act, key, Long.valueOf(exp)));
}
}
}

startsAt.put(key, Long.valueOf(position));
startsAt.put(key.asString(), Long.valueOf(position));
chunk.copyEntireChunkAsIs(out, null, validate);
}
}

private String rowKey() {
return info.getName() + "." + info.getVersion();
}
}

+ 30
- 14
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserter.java View File

@@ -53,6 +53,9 @@ import java.util.Collections;
import java.util.LinkedList;
import java.util.zip.Deflater;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
@@ -174,8 +177,13 @@ class DhtInserter extends ObjectInserter {
ChunkKey key = chunk.end(chunkDigest);
chunk.setChunkIndex(Collections.singletonList(oe));
chunk.safePut(db, dbBuffer());
ObjectInfo info = new ObjectInfo(key, -1, type, position,
packedSize, inflatedSize, null, false);

GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
b.setObjectType(ObjectType.valueOf(type));
b.setOffset(position);
b.setPackedSize(packedSize);
b.setInflatedSize(inflatedSize);
ObjectInfo info = new ObjectInfo(key, b.build());
ObjectIndexKey objKey = ObjectIndexKey.create(repo, objId);
db.objectIndex().add(objKey, info, dbBuffer());
return objId;
@@ -188,12 +196,15 @@ class DhtInserter extends ObjectInserter {
chunk = null;

ChunkKey firstChunkKey = fragmentList.get(0);

ChunkMeta.Builder metaBuilder = ChunkMeta.newBuilder();
for (ChunkKey k : fragmentList)
metaBuilder.addFragment(k.asString());
ChunkMeta meta = metaBuilder.build();

for (ChunkKey key : fragmentList) {
PackChunk.Members builder = new PackChunk.Members();
builder.setChunkKey(key);

ChunkMeta meta = new ChunkMeta(key);
meta.fragments = fragmentList;
builder.setMeta(meta);

if (firstChunkKey.equals(key))
@@ -202,8 +213,12 @@ class DhtInserter extends ObjectInserter {
db.chunk().put(builder, dbBuffer());
}

ObjectInfo info = new ObjectInfo(firstChunkKey, -1, type, position,
packedSize, inflatedSize, null, true);
GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
b.setObjectType(ObjectType.valueOf(type));
b.setOffset(position);
b.setPackedSize(packedSize);
b.setInflatedSize(inflatedSize);
ObjectInfo info = new ObjectInfo(firstChunkKey, b.build());
ObjectIndexKey objKey = ObjectIndexKey.create(repo, objId);
db.objectIndex().add(objKey, info, dbBuffer());

@@ -234,12 +249,13 @@ class DhtInserter extends ObjectInserter {
// TODO Allow more than one chunk pending at a time, this would
// permit batching puts of the ChunkInfo records.

activeChunk.end(digest());
activeChunk.safePut(db, dbBuffer());
activeChunk = newChunk();

if (activeChunk.whole(deflater(), type, data, off, len, objId))
return objId;
if (!activeChunk.isEmpty()) {
activeChunk.end(digest());
activeChunk.safePut(db, dbBuffer());
activeChunk = newChunk();
if (activeChunk.whole(deflater(), type, data, off, len, objId))
return objId;
}

return insertStream(type, len, asStream(data, off, len));
}
@@ -295,7 +311,7 @@ class DhtInserter extends ObjectInserter {
ChunkFormatter fmt;

fmt = new ChunkFormatter(repo, options);
fmt.setSource(ChunkInfo.Source.INSERT);
fmt.setSource(GitStore.ChunkInfo.Source.INSERT);
return fmt;
}


+ 1
- 0
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtInserterOptions.java View File

@@ -49,6 +49,7 @@ import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
import java.security.SecureRandom;
import java.util.zip.Deflater;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.CoreConfig;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer;

+ 1
- 1
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtObjectRepresentation.java View File

@@ -76,7 +76,7 @@ final class DhtObjectRepresentation extends StoredObjectRepresentation {

@Override
public int getFormat() {
if (info.getDeltaBase() != null)
if (info.isDelta())
return PACK_DELTA;
return PACK_WHOLE;
}

+ 144
- 82
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtPackParser.java View File

@@ -67,10 +67,13 @@ import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeoutException;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.MutableObjectId;
@@ -86,6 +89,8 @@ import org.eclipse.jgit.transport.PackedObjectInfo;
import org.eclipse.jgit.treewalk.CanonicalTreeParser;
import org.eclipse.jgit.util.LongList;

import com.google.protobuf.ByteString;

/** Parses the pack stream into chunks, and indexes the chunks for lookup. */
public class DhtPackParser extends PackParser {
private final DhtObjDatabase objdb;
@@ -112,7 +117,7 @@ public class DhtPackParser extends PackParser {
private Edges[] openEdges;

/** Prior chunks that were written, keyed by object type code. */
private List<ChunkInfo>[] infoByOrder;
private List<ChunkKey>[] chunkByOrder;

/** Information on chunks already written out. */
private Map<ChunkKey, ChunkInfo> infoByKey;
@@ -199,7 +204,7 @@ public class DhtPackParser extends PackParser {
dbWriteBuffer = db.newWriteBuffer();
openChunks = new ChunkFormatter[5];
openEdges = new Edges[5];
infoByOrder = newListArray(5);
chunkByOrder = newListArray(5);
infoByKey = new HashMap<ChunkKey, ChunkInfo>();
dirtyMeta = new HashMap<ChunkKey, ChunkMeta>();
chunkMeta = new HashMap<ChunkKey, ChunkMeta>();
@@ -306,7 +311,7 @@ public class DhtPackParser extends PackParser {
if (!success)
rollback();

infoByOrder = null;
chunkByOrder = null;
objectListByName = null;
objectListByChunk = null;
linkIterators = null;
@@ -332,54 +337,74 @@ public class DhtPackParser extends PackParser {
}

private void putCachedPack() throws DhtException {
CachedPackInfo info = new CachedPackInfo();
CachedPackInfo.Builder info = CachedPackInfo.newBuilder();

for (DhtInfo obj : objectMap) {
if (!obj.isInPack())
return;

if (!obj.isReferenced())
info.tips.add(obj.copy());
info.getTipListBuilder().addObjectName(obj.name());
}

MessageDigest version = Constants.newMessageDigest();
addChunkList(info, version, infoByOrder[OBJ_TAG]);
addChunkList(info, version, infoByOrder[OBJ_COMMIT]);
addChunkList(info, version, infoByOrder[OBJ_TREE]);
addChunkList(info, version, infoByOrder[OBJ_BLOB]);
addChunkList(info, version, chunkByOrder[OBJ_TAG]);
addChunkList(info, version, chunkByOrder[OBJ_COMMIT]);
addChunkList(info, version, chunkByOrder[OBJ_TREE]);
addChunkList(info, version, chunkByOrder[OBJ_BLOB]);

info.name = computePackName();
info.version = ObjectId.fromRaw(version.digest());
info.setName(computePackName().name());
info.setVersion(ObjectId.fromRaw(version.digest()).name());

cachedPackKey = info.getRowKey();
for (List<ChunkInfo> list : infoByOrder) {
cachedPackKey = CachedPackKey.fromInfo(info.build());
for (List<ChunkKey> list : chunkByOrder) {
if (list == null)
continue;
for (ChunkInfo c : list) {
c.cachedPack = cachedPackKey;
if (c.isFragment())
db.repository().put(repo, info, dbWriteBuffer);
for (ChunkKey key : list) {
ChunkInfo oldInfo = infoByKey.get(key);
GitStore.ChunkInfo.Builder b =
GitStore.ChunkInfo.newBuilder(oldInfo.getData());
b.setCachedPackKey(cachedPackKey.asString());
ChunkInfo newInfo = new ChunkInfo(key, b.build());
infoByKey.put(key, newInfo);

// A fragment was already put, and has to be re-put.
// Non-fragments will put later and do not put now.
if (newInfo.getData().getIsFragment())
db.repository().put(repo, newInfo, dbWriteBuffer);
}
}

db.repository().put(repo, info, dbWriteBuffer);
db.repository().put(repo, info.build(), dbWriteBuffer);
}

private void addChunkList(CachedPackInfo info, MessageDigest version,
List<ChunkInfo> list) {
private void addChunkList(CachedPackInfo.Builder info,
MessageDigest version, List<ChunkKey> list) {
if (list == null)
return;

long bytesTotal = info.getBytesTotal();
long objectsTotal = info.getObjectsTotal();
long objectsDelta = info.getObjectsDelta();

byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
for (ChunkInfo c : list) {
int len = c.chunkSize - ChunkFormatter.TRAILER_SIZE;
info.bytesTotal += len;
info.objectsTotal += c.objectsTotal;
info.objectsDelta += c.objectsOfsDelta;
info.objectsDelta += c.objectsRefDelta;
info.chunks.add(c.getChunkKey());
c.getChunkKey().getChunkHash().copyRawTo(buf, 0);
for (ChunkKey key : list) {
ChunkInfo chunkInfo = infoByKey.get(key);
GitStore.ChunkInfo c = chunkInfo.getData();
int len = c.getChunkSize() - ChunkFormatter.TRAILER_SIZE;
bytesTotal += len;
objectsTotal += c.getObjectCounts().getTotal();
objectsDelta += c.getObjectCounts().getOfsDelta();
objectsDelta += c.getObjectCounts().getRefDelta();
info.getChunkListBuilder().addChunkKey(
chunkInfo.getChunkKey().asString());
chunkInfo.getChunkKey().getChunkHash().copyRawTo(buf, 0);
version.update(buf);
}

info.setBytesTotal(bytesTotal);
info.setObjectsTotal(objectsTotal);
info.setObjectsDelta(objectsDelta);
}

private ObjectId computePackName() {
@@ -420,10 +445,10 @@ public class DhtPackParser extends PackParser {
}
}

deleteChunks(infoByOrder[OBJ_COMMIT]);
deleteChunks(infoByOrder[OBJ_TREE]);
deleteChunks(infoByOrder[OBJ_BLOB]);
deleteChunks(infoByOrder[OBJ_TAG]);
deleteChunks(chunkByOrder[OBJ_COMMIT]);
deleteChunks(chunkByOrder[OBJ_TREE]);
deleteChunks(chunkByOrder[OBJ_BLOB]);
deleteChunks(chunkByOrder[OBJ_TAG]);

dbWriteBuffer.flush();
} catch (Throwable err) {
@@ -431,10 +456,9 @@ public class DhtPackParser extends PackParser {
}
}

private void deleteChunks(List<ChunkInfo> list) throws DhtException {
private void deleteChunks(List<ChunkKey> list) throws DhtException {
if (list != null) {
for (ChunkInfo info : list) {
ChunkKey key = info.getChunkKey();
for (ChunkKey key : list) {
db.chunk().remove(key, dbWriteBuffer);
db.repository().remove(repo, key, dbWriteBuffer);
}
@@ -605,60 +629,77 @@ public class DhtPackParser extends PackParser {

private void putChunkIndex(List<DhtInfo> objectList, ChunkKey key, int type)
throws DhtException {
ChunkInfo info = infoByKey.get(key);
info.objectsTotal = objectList.size();
info.objectType = type;
ChunkInfo oldInfo = infoByKey.get(key);
GitStore.ChunkInfo.Builder info
= GitStore.ChunkInfo.newBuilder(oldInfo.getData());

PackChunk.Members builder = new PackChunk.Members();
builder.setChunkKey(key);

byte[] index = ChunkIndex.create(objectList);
info.indexSize = index.length;
info.setIndexSize(index.length);
builder.setChunkIndex(index);

ChunkMeta meta = dirtyMeta.remove(key);
if (meta == null)
meta = chunkMeta.get(key);
if (meta == null)
meta = new ChunkMeta(key);

switch (type) {
case OBJ_COMMIT: {
Edges edges = chunkEdges.get(key);
if (edges != null) {
List<ChunkKey> e = edges.commitEdges;
List<ChunkKey> s = sequentialHint(key, OBJ_COMMIT);
meta.commitPrefetch = new ChunkMeta.PrefetchHint(e, s);
List<ChunkKey> e = edges != null ? edges.commitEdges : null;
List<ChunkKey> s = sequentialHint(key, OBJ_COMMIT);
if (e == null)
e = Collections.emptyList();
if (s == null)
s = Collections.emptyList();
if (!e.isEmpty() || !s.isEmpty()) {
ChunkMeta.Builder m = edit(meta);
ChunkMeta.PrefetchHint.Builder h = m.getCommitPrefetchBuilder();
for (ChunkKey k : e)
h.addEdge(k.asString());
for (ChunkKey k : s)
h.addSequential(k.asString());
meta = m.build();
}
break;
}
case OBJ_TREE: {
List<ChunkKey> s = sequentialHint(key, OBJ_TREE);
meta.treePrefetch = new ChunkMeta.PrefetchHint(null, s);
if (s == null)
s = Collections.emptyList();
if (!s.isEmpty()) {
ChunkMeta.Builder m = edit(meta);
ChunkMeta.PrefetchHint.Builder h = m.getTreePrefetchBuilder();
for (ChunkKey k : s)
h.addSequential(k.asString());
meta = m.build();
}
break;
}
}

if (meta.isEmpty()) {
info.metaSize = 0;
} else {
info.metaSize = meta.asBytes().length;
if (meta != null) {
info.setMetaSize(meta.getSerializedSize());
builder.setMeta(meta);
}

db.repository().put(repo, info, dbWriteBuffer);
ChunkInfo newInfo = new ChunkInfo(key, info.build());
infoByKey.put(key, newInfo);
db.repository().put(repo, newInfo, dbWriteBuffer);
db.chunk().put(builder, dbWriteBuffer);
}

private static ChunkMeta.Builder edit(ChunkMeta meta) {
if (meta != null)
return ChunkMeta.newBuilder(meta);
return ChunkMeta.newBuilder();
}

private List<ChunkKey> sequentialHint(ChunkKey key, int typeCode) {
List<ChunkInfo> infoList = infoByOrder[typeCode];
if (infoList == null)
List<ChunkKey> all = chunkByOrder[typeCode];
if (all == null)
return null;

List<ChunkKey> all = new ArrayList<ChunkKey>(infoList.size());
for (ChunkInfo info : infoList)
all.add(info.getChunkKey());

int idx = all.indexOf(key);
if (0 <= idx) {
int max = options.getPrefetchDepth();
@@ -669,10 +710,10 @@ public class DhtPackParser extends PackParser {
}

private void putDirtyMeta() throws DhtException {
for (ChunkMeta meta : dirtyMeta.values()) {
for (Map.Entry<ChunkKey, ChunkMeta> meta : dirtyMeta.entrySet()) {
PackChunk.Members builder = new PackChunk.Members();
builder.setChunkKey(meta.getChunkKey());
builder.setMeta(meta);
builder.setChunkKey(meta.getKey());
builder.setMeta(meta.getValue());
db.chunk().put(builder, dbWriteBuffer);
}
}
@@ -892,15 +933,15 @@ public class DhtPackParser extends PackParser {

private boolean longOfsDelta(ChunkFormatter w, long infSize, long basePtr) {
final int type = typeOf(basePtr);
final List<ChunkInfo> infoList = infoByOrder[type];
final List<ChunkKey> infoList = chunkByOrder[type];
final int baseIdx = chunkIdx(basePtr);
final ChunkInfo baseInfo = infoList.get(baseIdx);
final ChunkInfo baseInfo = infoByKey.get(infoList.get(baseIdx));

// Go backwards to the start of the base's chunk.
long relativeChunkStart = 0;
for (int i = infoList.size() - 1; baseIdx <= i; i--) {
ChunkInfo info = infoList.get(i);
int packSize = info.chunkSize - ChunkFormatter.TRAILER_SIZE;
GitStore.ChunkInfo info = infoByKey.get(infoList.get(i)).getData();
int packSize = info.getChunkSize() - ChunkFormatter.TRAILER_SIZE;
relativeChunkStart += packSize;
}

@@ -940,14 +981,24 @@ public class DhtPackParser extends PackParser {
if (lastKey != null)
currFragments.add(lastKey);

ChunkMeta.Builder protoBuilder = ChunkMeta.newBuilder();
for (ChunkKey key : currFragments)
protoBuilder.addFragment(key.asString());
ChunkMeta protoMeta = protoBuilder.build();

for (ChunkKey key : currFragments) {
ChunkMeta meta = chunkMeta.get(key);
if (meta == null) {
meta = new ChunkMeta(key);
ChunkMeta oldMeta = chunkMeta.get(key);
if (oldMeta != null) {
ChunkMeta.Builder newMeta = ChunkMeta.newBuilder(oldMeta);
newMeta.clearFragment();
newMeta.mergeFrom(protoMeta);
ChunkMeta meta = newMeta.build();
dirtyMeta.put(key, meta);
chunkMeta.put(key, meta);
} else {
dirtyMeta.put(key, protoMeta);
chunkMeta.put(key, protoMeta);
}
meta.fragments = currFragments;
dirtyMeta.put(key, meta);
}
currFragments = null;
}
@@ -1093,7 +1144,7 @@ public class DhtPackParser extends PackParser {
if (meta == null)
return 0;

ChunkKey next = meta.getNextFragment(dbChunk.getChunkKey());
ChunkKey next = ChunkMetaUtil.getNextFragment(meta, dbChunk.getChunkKey());
if (next == null)
return 0;

@@ -1200,7 +1251,7 @@ public class DhtPackParser extends PackParser {
ChunkFormatter w = openChunks[typeCode];
if (w == null) {
w = new ChunkFormatter(repo, options);
w.setSource(ChunkInfo.Source.RECEIVE);
w.setSource(GitStore.ChunkInfo.Source.RECEIVE);
w.setObjectType(typeCode);
openChunks[typeCode] = w;
}
@@ -1221,9 +1272,9 @@ public class DhtPackParser extends PackParser {
ChunkKey key = w.end(chunkKeyDigest);
ChunkInfo info = w.getChunkInfo();

if (infoByOrder[typeCode] == null)
infoByOrder[typeCode] = new ArrayList<ChunkInfo>();
infoByOrder[typeCode].add(info);
if (chunkByOrder[typeCode] == null)
chunkByOrder[typeCode] = new ArrayList<ChunkKey>();
chunkByOrder[typeCode].add(key);
infoByKey.put(key, info);

if (w.getChunkMeta() != null)
@@ -1260,7 +1311,7 @@ public class DhtPackParser extends PackParser {
}

private long makeObjectPointer(ChunkFormatter w, int typeCode) {
List<ChunkInfo> list = infoByOrder[typeCode];
List<ChunkKey> list = chunkByOrder[typeCode];
int idx = list == null ? 0 : list.size();
int ptr = w.position();
return (((long) typeCode) << 61) | (((long) idx) << 32) | ptr;
@@ -1279,14 +1330,14 @@ public class DhtPackParser extends PackParser {
}

private boolean isInCurrentChunk(long objectPtr) {
List<ChunkInfo> list = infoByOrder[typeOf(objectPtr)];
List<ChunkKey> list = chunkByOrder[typeOf(objectPtr)];
if (list == null)
return chunkIdx(objectPtr) == 0;
return chunkIdx(objectPtr) == list.size();
}

private ChunkKey chunkOf(long objectPtr) throws DhtException {
List<ChunkInfo> list = infoByOrder[typeOf(objectPtr)];
List<ChunkKey> list = chunkByOrder[typeOf(objectPtr)];
int idx = chunkIdx(objectPtr);
if (list == null || list.size() <= idx) {
throw new DhtException(MessageFormat.format(
@@ -1295,7 +1346,7 @@ public class DhtPackParser extends PackParser {
Integer.valueOf(idx), //
Integer.valueOf(offsetOf(objectPtr))));
}
return list.get(idx).getChunkKey();
return list.get(idx);
}

private static DhtException panicCannotInsert() {
@@ -1349,8 +1400,19 @@ public class DhtPackParser extends PackParser {
}

ObjectInfo info(ChunkKey chunkKey) {
return new ObjectInfo(chunkKey, -1, getType(), offsetOf(chunkPtr),
packedSize, inflatedSize, base, isFragmented());
GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
b.setObjectType(GitStore.ObjectInfo.ObjectType.valueOf(getType()));
b.setOffset(offsetOf(chunkPtr));
b.setPackedSize(packedSize);
b.setInflatedSize(inflatedSize);
if (base != null) {
byte[] t = new byte[Constants.OBJECT_ID_LENGTH];
base.copyRawTo(t, 0);
b.setDeltaBase(ByteString.copyFrom(t));
}
if (isFragmented())
b.setIsFragmented(true);
return new ObjectInfo(chunkKey, b.build());
}
}


+ 6
- 6
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtReader.java View File

@@ -63,6 +63,7 @@ import java.util.zip.Inflater;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.lib.AbbreviatedObjectId;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.AsyncObjectLoaderQueue;
@@ -76,7 +77,6 @@ import org.eclipse.jgit.revwalk.ObjectWalk;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.dht.RefData.IdWithChunk;
import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.Database;
import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
@@ -186,7 +186,7 @@ public class DhtReader extends ObjectReader implements ObjectReuseAsIs {

@Override
public boolean has(AnyObjectId objId, int typeHint) throws IOException {
if (objId instanceof RefData.IdWithChunk)
if (objId instanceof RefDataUtil.IdWithChunk)
return true;

if (recentChunks.has(repo, objId))
@@ -283,8 +283,8 @@ public class DhtReader extends ObjectReader implements ObjectReuseAsIs {
}

ChunkKey key;
if (objId instanceof RefData.IdWithChunk)
key = ((RefData.IdWithChunk) objId).getChunkKey();
if (objId instanceof RefDataUtil.IdWithChunk)
key = ((RefDataUtil.IdWithChunk) objId).getChunkKey();
else
key = repository.getRefDatabase().findChunk(objId);

@@ -331,8 +331,8 @@ public class DhtReader extends ObjectReader implements ObjectReuseAsIs {
}

ChunkKey findChunk(AnyObjectId objId) throws DhtException {
if (objId instanceof IdWithChunk)
return ((IdWithChunk) objId).getChunkKey();
if (objId instanceof RefDataUtil.IdWithChunk)
return ((RefDataUtil.IdWithChunk) objId).getChunkKey();

ChunkKey key = repository.getRefDatabase().findChunk(objId);
if (key != null)

+ 25
- 44
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefDatabase.java View File

@@ -55,6 +55,7 @@ import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;

import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdRef.PeeledNonTag;
@@ -68,6 +69,7 @@ import org.eclipse.jgit.lib.SymbolicRef;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevTag;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.dht.RefDataUtil.IdWithChunk;
import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.Database;
import org.eclipse.jgit.util.RefList;
@@ -94,7 +96,7 @@ public class DhtRefDatabase extends RefDatabase {
ChunkKey findChunk(AnyObjectId id) {
RefCache c = cache.get();
if (c != null) {
RefData.IdWithChunk i = c.hints.get(id);
IdWithChunk i = c.hints.get(id);
if (i != null)
return i.getChunkKey();
}
@@ -190,8 +192,8 @@ public class DhtRefDatabase extends RefDatabase {
try {
RepositoryKey repo = repository.getRepositoryKey();
RefKey key = RefKey.create(repo, newLeaf.getName());
RefData oldData = RefData.fromRef(oldLeaf);
RefData newData = RefData.fromRef(newLeaf);
RefData oldData = RefDataUtil.fromRef(oldLeaf);
RefData newData = RefDataUtil.fromRef(newLeaf);
db.ref().compareAndPut(key, oldData, newData);
} catch (TimeoutException e) {
// Ignore a timeout here, we were only trying to update
@@ -214,13 +216,12 @@ public class DhtRefDatabase extends RefDatabase {

ChunkKey key = ctx.findChunk(oId);
if (key != null)
oId = new RefData.IdWithChunk(oId, key);
oId = new IdWithChunk(oId, key);

if (obj instanceof RevTag) {
ObjectId pId = rw.peel(obj);
key = ctx.findChunk(pId);
pId = key != null ? new RefData.IdWithChunk(pId, key) : pId
.copy();
pId = key != null ? new IdWithChunk(pId, key) : pId.copy();
return new PeeledTag(leaf.getStorage(), name, oId, pId);
} else {
return new PeeledNonTag(leaf.getStorage(), name, oId);
@@ -353,7 +354,7 @@ public class DhtRefDatabase extends RefDatabase {
private RefCache read() throws DhtException, TimeoutException {
RefList.Builder<Ref> id = new RefList.Builder<Ref>();
RefList.Builder<Ref> sym = new RefList.Builder<Ref>();
ObjectIdSubclassMap<RefData.IdWithChunk> hints = new ObjectIdSubclassMap<RefData.IdWithChunk>();
ObjectIdSubclassMap<IdWithChunk> hints = new ObjectIdSubclassMap<IdWithChunk>();

for (Map.Entry<RefKey, RefData> e : scan()) {
Ref ref = fromData(e.getKey().getName(), e.getValue());
@@ -362,12 +363,12 @@ public class DhtRefDatabase extends RefDatabase {
sym.add(ref);
id.add(ref);

if (ref.getObjectId() instanceof RefData.IdWithChunk
if (ref.getObjectId() instanceof IdWithChunk
&& !hints.contains(ref.getObjectId()))
hints.add((RefData.IdWithChunk) ref.getObjectId());
if (ref.getPeeledObjectId() instanceof RefData.IdWithChunk
hints.add((IdWithChunk) ref.getObjectId());
if (ref.getPeeledObjectId() instanceof IdWithChunk
&& !hints.contains(ref.getPeeledObjectId()))
hints.add((RefData.IdWithChunk) ref.getPeeledObjectId());
hints.add((IdWithChunk) ref.getPeeledObjectId());
}

id.sort();
@@ -377,40 +378,20 @@ public class DhtRefDatabase extends RefDatabase {
}

private static Ref fromData(String name, RefData data) {
ObjectId oId = null;
boolean peeled = false;
ObjectId pId = null;

TinyProtobuf.Decoder d = data.decode();
DECODE: for (;;) {
switch (d.next()) {
case 0:
break DECODE;

case RefData.TAG_SYMREF: {
String symref = d.string();
Ref leaf = new Unpeeled(NEW, symref, null);
return new SymbolicRef(name, leaf);
}

case RefData.TAG_TARGET:
oId = RefData.IdWithChunk.decode(d.message());
continue;
case RefData.TAG_IS_PEELED:
peeled = d.bool();
continue;
case RefData.TAG_PEELED:
pId = RefData.IdWithChunk.decode(d.message());
continue;
default:
d.skip();
continue;
}
if (data.hasSymref()) {
Ref leaf = new Unpeeled(NEW, data.getSymref(), null);
return new SymbolicRef(name, leaf);
}

if (peeled && pId != null)
if (!data.hasTarget())
return new Unpeeled(LOOSE, name, null);

ObjectId oId = IdWithChunk.create(data.getTarget());
if (data.getIsPeeled() && data.hasPeeled()) {
ObjectId pId = IdWithChunk.create(data.getPeeled());
return new PeeledTag(LOOSE, name, oId, pId);
if (peeled)
}
if (data.getIsPeeled())
return new PeeledNonTag(LOOSE, name, oId);
return new Unpeeled(LOOSE, name, oId);
}
@@ -427,10 +408,10 @@ public class DhtRefDatabase extends RefDatabase {

final RefList<Ref> sym;

final ObjectIdSubclassMap<RefData.IdWithChunk> hints;
final ObjectIdSubclassMap<IdWithChunk> hints;

RefCache(RefList<Ref> ids, RefList<Ref> sym,
ObjectIdSubclassMap<RefData.IdWithChunk> hints) {
ObjectIdSubclassMap<IdWithChunk> hints) {
this.ids = ids;
this.sym = sym;
this.hints = hints;

+ 9
- 8
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtRefUpdate.java View File

@@ -47,6 +47,7 @@ import java.io.IOException;
import java.util.concurrent.TimeoutException;

import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
@@ -106,7 +107,7 @@ class DhtRefUpdate extends RefUpdate {
dstRef = dstRef.getLeaf();

refKey = RefKey.create(repo, dstRef.getName());
oldData = RefData.fromRef(dstRef);
oldData = RefDataUtil.fromRef(dstRef);

if (dstRef.isSymbolic())
setOldObjectId(null);
@@ -157,7 +158,7 @@ class DhtRefUpdate extends RefUpdate {
@Override
protected Result doLink(String target) throws IOException {
try {
newData = RefData.symbolic(target);
newData = RefDataUtil.symbolic(target);
boolean r = db.ref().compareAndPut(refKey, oldData, newData);
if (r) {
getRefDatabase().stored(dstRef.getName(), newData);
@@ -181,19 +182,19 @@ class DhtRefUpdate extends RefUpdate {

ChunkKey key = ctx.findChunk(newId);
if (key != null)
newId = new RefData.IdWithChunk(newId, key);
newId = new RefDataUtil.IdWithChunk(newId, key);

if (obj instanceof RevTag) {
ObjectId pId = rw.peel(obj);
key = ctx.findChunk(pId);
pId = key != null ? new RefData.IdWithChunk(pId, key) : pId;
return RefData.peeled(newId, pId);
pId = key != null ? new RefDataUtil.IdWithChunk(pId, key) : pId;
return RefDataUtil.peeled(newId, pId);
} else if (obj != null)
return RefData.peeled(newId, null);
return RefDataUtil.peeled(newId, null);
else
return RefData.id(newId);
return RefDataUtil.id(newId);
} catch (MissingObjectException e) {
return RefData.id(newId);
return RefDataUtil.id(newId);
}
}
}

+ 3
- 6
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/DhtText.java View File

@@ -59,9 +59,12 @@ public class DhtText extends TranslationBundle {
/***/ public String cycleInDeltaChain;
/***/ public String databaseRequired;
/***/ public String expectedObjectSizeDuringCopyAsIs;
/***/ public String invalidCachedPackInfo;
/***/ public String invalidChunkKey;
/***/ public String invalidChunkMeta;
/***/ public String invalidObjectIndexKey;
/***/ public String invalidObjectInfo;
/***/ public String invalidRefData;
/***/ public String missingChunk;
/***/ public String missingLongOffsetBase;
/***/ public String nameRequired;
@@ -72,12 +75,6 @@ public class DhtText extends TranslationBundle {
/***/ public String objectTypeUnknown;
/***/ public String packParserInvalidPointer;
/***/ public String packParserRollbackFailed;
/***/ public String protobufNegativeValuesNotSupported;
/***/ public String protobufNoArray;
/***/ public String protobufNotBooleanValue;
/***/ public String protobufUnsupportedFieldType;
/***/ public String protobufWrongFieldLength;
/***/ public String protobufWrongFieldType;
/***/ public String recordingObjects;
/***/ public String repositoryAlreadyExists;
/***/ public String repositoryMustBeBare;

+ 4
- 2
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/LargeNonDeltaObject.java View File

@@ -50,6 +50,7 @@ import java.util.zip.InflaterInputStream;

import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectStream;

@@ -102,7 +103,7 @@ class LargeNonDeltaObject extends ObjectLoader {
if (pc != null)
firstChunk = null;
else
pc = ctx.getChunk(meta.getFragmentKey(0));
pc = ctx.getChunk(ChunkKey.fromString(meta.getFragment(0)));

InputStream in = new ChunkInputStream(meta, ctx, pos, pc);
in = new BufferedInputStream(new InflaterInputStream(in), 8192);
@@ -138,7 +139,8 @@ class LargeNonDeltaObject extends ObjectLoader {
if (fragment == meta.getFragmentCount())
return -1;

pc = ctx.getChunk(meta.getFragmentKey(++fragment));
pc = ctx.getChunk(ChunkKey.fromString(
meta.getFragment(++fragment)));
ptr = 0;
n = pc.read(ptr, dstbuf, dstptr, dstlen);
if (n == 0)

+ 36
- 122
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectInfo.java View File

@@ -43,13 +43,12 @@

package org.eclipse.jgit.storage.dht;

import java.text.MessageFormat;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;

import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
import org.eclipse.jgit.lib.ObjectId;

/** Connects an object to the chunk it is stored in. */
@@ -71,106 +70,40 @@ public class ObjectInfo {
Collections.sort(toSort, BY_TIME);
}

private final ChunkKey chunk;

private final long time;

private final GitStore.ObjectInfo data;

/**
* Parse an ObjectInfo from the storage system.
* Wrap an ObjectInfo from the storage system.
*
* @param chunkKey
* the chunk the object points to.
* @param data
* the data of the ObjectInfo.
* @param time
* timestamp of the ObjectInfo. If the implementation does not
* store timestamp data, supply a negative value.
* @return the object's information.
*/
public static ObjectInfo fromBytes(ChunkKey chunkKey, byte[] data, long time) {
return fromBytes(chunkKey, TinyProtobuf.decode(data), time);
public ObjectInfo(ChunkKey chunkKey, GitStore.ObjectInfo data) {
this.chunk = chunkKey;
this.time = 0;
this.data = data;
}

/**
* Parse an ObjectInfo from the storage system.
* Wrap an ObjectInfo from the storage system.
*
* @param chunkKey
* the chunk the object points to.
* @param d
* the data of the ObjectInfo.
* @param time
* timestamp of the ObjectInfo. If the implementation does not
* store timestamp data, supply a negative value.
* @return the object's information.
* timestamp of the ObjectInfo.
* @param data
* the data of the ObjectInfo.
*/
public static ObjectInfo fromBytes(ChunkKey chunkKey,
TinyProtobuf.Decoder d, long time) {
int typeCode = -1;
int offset = -1;
long packedSize = -1;
long inflatedSize = -1;
ObjectId deltaBase = null;
boolean fragmented = false;

PARSE: for (;;) {
switch (d.next()) {
case 0:
break PARSE;
case 1:
typeCode = d.int32();
continue;
case 2:
offset = d.int32();
continue;
case 3:
packedSize = d.int64();
continue;
case 4:
inflatedSize = d.int64();
continue;
case 5:
deltaBase = d.bytesObjectId();
continue;
case 6:
fragmented = d.bool();
continue;
default:
d.skip();
continue;
}
}

if (typeCode < 0 || offset < 0 || packedSize < 0 || inflatedSize < 0)
throw new IllegalArgumentException(MessageFormat.format(
DhtText.get().invalidObjectInfo, chunkKey));

return new ObjectInfo(chunkKey, time, typeCode, offset, //
packedSize, inflatedSize, deltaBase, fragmented);
}

private final ChunkKey chunk;

private final long time;

private final int typeCode;

private final int offset;

private final long packedSize;

private final long inflatedSize;

private final ObjectId deltaBase;

private final boolean fragmented;

ObjectInfo(ChunkKey chunk, long time, int typeCode, int offset,
long packedSize, long inflatedSize, ObjectId base,
boolean fragmented) {
this.chunk = chunk;
public ObjectInfo(ChunkKey chunkKey, long time, GitStore.ObjectInfo data) {
this.chunk = chunkKey;
this.time = time < 0 ? 0 : time;
this.typeCode = typeCode;
this.offset = offset;
this.packedSize = packedSize;
this.inflatedSize = inflatedSize;
this.deltaBase = base;
this.fragmented = fragmented;
this.data = data;
}

/** @return the chunk this link points to. */
@@ -183,54 +116,43 @@ public class ObjectInfo {
return time;
}

/** @return GitStore.ObjectInfo to embed in the database. */
public GitStore.ObjectInfo getData() {
return data;
}

/** @return type of the object, in OBJ_* constants. */
public int getType() {
return typeCode;
return data.getObjectType().getNumber();
}

/** @return size of the object when fully inflated. */
public long getSize() {
return inflatedSize;
return data.getInflatedSize();
}

/** @return true if the object storage uses delta compression. */
public boolean isDelta() {
return getDeltaBase() != null;
return data.hasDeltaBase();
}

/** @return true if the object has been fragmented across chunks. */
public boolean isFragmented() {
return fragmented;
return data.getIsFragmented();
}

int getOffset() {
return offset;
return data.getOffset();
}

long getPackedSize() {
return packedSize;
return data.getPackedSize();
}

ObjectId getDeltaBase() {
return deltaBase;
}

/**
* Convert this ObjectInfo into a byte array for storage.
*
* @return the ObjectInfo data, encoded as a byte array. This does not
* include the ChunkKey, callers must store that separately.
*/
public byte[] asBytes() {
TinyProtobuf.Encoder e = TinyProtobuf.encode(256);
e.int32(1, typeCode);
e.int32(2, offset);
e.int64(3, packedSize);
e.int64(4, inflatedSize);
e.bytes(5, deltaBase);
if (fragmented)
e.bool(6, fragmented);
return e.asByteArray();
if (data.hasDeltaBase())
return ObjectId.fromRaw(data.getDeltaBase().toByteArray(), 0);
return null;
}

@Override
@@ -238,18 +160,10 @@ public class ObjectInfo {
StringBuilder b = new StringBuilder();
b.append("ObjectInfo:");
b.append(chunk);
b.append(" [");
if (0 < time)
b.append(" time=").append(new Date(time));
b.append(" type=").append(Constants.typeString(typeCode));
b.append(" offset=").append(offset);
b.append(" packedSize=").append(packedSize);
b.append(" inflatedSize=").append(inflatedSize);
if (deltaBase != null)
b.append(" deltaBase=").append(deltaBase.name());
if (fragmented)
b.append(" fragmented");
b.append(" ]");
b.append(" @ ").append(new Date(time));
b.append("\n");
b.append(data.toString());
return b.toString();
}
}

+ 6
- 8
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/ObjectWriter.java View File

@@ -43,7 +43,6 @@

package org.eclipse.jgit.storage.dht;

import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
@@ -55,6 +54,7 @@ import java.util.Set;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicReference;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.util.BlockList;

@@ -136,7 +136,7 @@ final class ObjectWriter {
ChunkMeta meta = allMeta.remove(key);
if (meta != null) {
for (int i = 1; i < meta.getFragmentCount(); i++)
keys.add(meta.getFragmentKey(i));
keys.add(ChunkKey.fromString(meta.getFragment(i)));
}
}
order = keys;
@@ -221,7 +221,7 @@ final class ObjectWriter {
throw metaError.get();
}

private class MetaLoader implements AsyncCallback<Collection<ChunkMeta>> {
private class MetaLoader implements AsyncCallback<Map<ChunkKey, ChunkMeta>> {
private final Context context;

private final Set<ChunkKey> keys;
@@ -231,13 +231,11 @@ final class ObjectWriter {
this.keys = keys;
}

public void onSuccess(Collection<ChunkMeta> result) {
public void onSuccess(Map<ChunkKey, ChunkMeta> result) {
try {
synchronized (allMeta) {
for (ChunkMeta meta : result) {
allMeta.put(meta.getChunkKey(), meta);
keys.remove(meta.getChunkKey());
}
allMeta.putAll(result);
keys.removeAll(result.keySet());
}
if (context == Context.FAST_MISSING_OK && !keys.isEmpty()) {
synchronized (metaMissing) {

+ 12
- 20
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/PackChunk.java View File

@@ -63,6 +63,7 @@ import java.util.zip.Inflater;
import org.eclipse.jgit.errors.CorruptObjectException;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
@@ -251,8 +252,6 @@ public final class PackChunk {

private volatile Boolean valid;

private volatile ChunkKey nextFragment;

PackChunk(ChunkKey key, byte[] dataBuf, int dataPtr, int dataLen,
ChunkIndex index, ChunkMeta meta) {
this.key = key;
@@ -400,9 +399,12 @@ public final class PackChunk {
base = base - pos;

ChunkMeta.BaseChunk baseChunk;
baseChunk = pc.meta.getBaseChunk(base);
baseChunkKey = baseChunk.getChunkKey();
basePosInChunk = (int) (baseChunk.relativeStart - base);
baseChunk = ChunkMetaUtil.getBaseChunk(
pc.key,
pc.meta,
base);
baseChunkKey = ChunkKey.fromString(baseChunk.getChunkKey());
basePosInChunk = (int) (baseChunk.getRelativeStart() - base);
}

delta = new Delta(delta, //
@@ -559,7 +561,8 @@ public final class PackChunk {
if (inf.needsInput()) {
if (meta.getFragmentCount() <= nextChunk)
break;
pc = reader.getChunk(meta.getFragmentKey(nextChunk++));
pc = reader.getChunk(ChunkKey.fromString(
meta.getFragment(nextChunk++)));
if (meta.getFragmentCount() == nextChunk)
bs = pc.dataLen; // Include trailer on last chunk.
else
@@ -575,7 +578,7 @@ public final class PackChunk {
if (dstoff != sz) {
throw new DataFormatException(MessageFormat.format(
DhtText.get().shortCompressedObject,
meta.getChunkKey(),
ChunkKey.fromString(meta.getFragment(0)),
Integer.valueOf(pos)));
}
return dstbuf;
@@ -683,7 +686,8 @@ public final class PackChunk {
if (isFragment()) {
int cnt = meta.getFragmentCount();
for (int fragId = 1; fragId < cnt; fragId++) {
PackChunk pc = ctx.getChunk(meta.getFragmentKey(fragId));
PackChunk pc = ctx.getChunk(ChunkKey.fromString(
meta.getFragment(fragId)));
pc.copyEntireChunkAsIs(out, obj, validate);
}
}
@@ -728,18 +732,6 @@ public final class PackChunk {
return sz;
}

ChunkKey getNextFragment() {
if (meta == null)
return null;

ChunkKey next = nextFragment;
if (next == null) {
next = meta.getNextFragment(getChunkKey());
nextFragment = next;
}
return next;
}

private static class Delta {
/** Child that applies onto this object. */
final Delta next;

+ 11
- 3
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/Prefetcher.java View File

@@ -59,6 +59,7 @@ import java.util.Set;
import java.util.concurrent.TimeoutException;

import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevTree;
@@ -218,14 +219,21 @@ class Prefetcher implements StreamingCallback<Collection<PackChunk.Members>> {

if (hint != null) {
synchronized (this) {
if (followEdgeHints && !hint.getEdge().isEmpty())
push(hint.getEdge());
if (followEdgeHints && 0 < hint.getEdgeCount())
push(hint.getEdgeList());
else
push(hint.getSequential());
push(hint.getSequentialList());
}
}
}

private void push(List<String> list) {
List<ChunkKey> keys = new ArrayList<ChunkKey>(list.size());
for (String keyString : list)
keys.add(ChunkKey.fromString(keyString));
push(keys);
}

void push(Iterable<ChunkKey> list) {
synchronized (this) {
for (ChunkKey key : list) {

+ 2
- 3
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/QueueObjectLookup.java View File

@@ -55,7 +55,6 @@ import java.util.Map;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.AsyncOperation;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.storage.dht.RefData.IdWithChunk;
import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.Database;

@@ -119,8 +118,8 @@ class QueueObjectLookup<T extends ObjectId> implements AsyncOperation {
RecentInfoCache infoCache = reader.getRecentInfoCache();
List<T> missing = null;
for (T obj : objects) {
if (needChunkOnly && obj instanceof IdWithChunk) {
push(obj, ((IdWithChunk) obj).getChunkKey());
if (needChunkOnly && obj instanceof RefDataUtil.IdWithChunk) {
push(obj, ((RefDataUtil.IdWithChunk) obj).getChunkKey());
continue;
}


+ 1
- 1
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RecentChunks.java View File

@@ -48,7 +48,7 @@ import java.io.IOException;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.storage.dht.DhtReader.ChunkAndOffset;
import org.eclipse.jgit.storage.dht.RefData.IdWithChunk;
import org.eclipse.jgit.storage.dht.RefDataUtil.IdWithChunk;

final class RecentChunks {
private final DhtReader reader;

+ 0
- 235
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefData.java View File

@@ -1,235 +0,0 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package org.eclipse.jgit.storage.dht;

import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH;
import static org.eclipse.jgit.storage.dht.TinyProtobuf.encode;

import java.util.Arrays;

import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.storage.dht.TinyProtobuf.Encoder;

/**
* Describes the current state of a Git reference.
* <p>
* The reference state contains not just the SHA-1 object name that a reference
* points to, but the state also caches its peeled value if its a tag, and the
* {@link ChunkKey} the object was observed in when the reference was last
* updated. This cached data reduces latency when initially starting to work
* with a repository.
*/
public class RefData {
/** Magic constant meaning does not exist. */
public static final RefData NONE = new RefData(new byte[0]);

static final int TAG_SYMREF = 1;

static final int TAG_TARGET = 2;

static final int TAG_IS_PEELED = 3;

static final int TAG_PEELED = 4;

	/**
	 * Wrap raw bytes previously produced by {@link #asBytes()}.
	 *
	 * @param data
	 *            TinyProtobuf encoding of the reference state.
	 * @return the content
	 */
	public static RefData fromBytes(byte[] data) {
		return new RefData(data);
	}

	/**
	 * Encode a symbolic reference.
	 *
	 * @param target
	 *            name of the reference this symref points to.
	 * @return encoded state carrying only {@code TAG_SYMREF}.
	 */
	static RefData symbolic(String target) {
		// Size estimate: tag byte plus length varint; the encoder grows
		// automatically if the estimate turns out to be short.
		Encoder e = encode(2 + target.length());
		e.string(TAG_SYMREF, target);
		return new RefData(e.asByteArray());
	}

	/**
	 * Encode a direct reference to a single object.
	 *
	 * @param id
	 *            the target object; if this is an {@link IdWithChunk} the
	 *            chunk hint is stored alongside the SHA-1.
	 * @return encoded state carrying only {@code TAG_TARGET}.
	 */
	static RefData id(AnyObjectId id) {
		Encoder e = encode(4 + OBJECT_ID_STRING_LENGTH + ChunkKey.KEYLEN);
		e.message(TAG_TARGET, IdWithChunk.encode(id));
		return new RefData(e.asByteArray());
	}

	/**
	 * Encode the current state of an existing reference.
	 *
	 * @param ref
	 *            the reference to encode.
	 * @return encoded state; {@link #NONE} if the ref has no object id.
	 */
	static RefData fromRef(Ref ref) {
		if (ref.isSymbolic())
			return symbolic(ref.getTarget().getName());

		if (ref.getObjectId() == null)
			return RefData.NONE;

		// Worst case size: target and peeled ids, each with a chunk hint.
		int max = 8 + 2 * OBJECT_ID_STRING_LENGTH + 2 * ChunkKey.KEYLEN;
		Encoder e = encode(max);
		e.message(TAG_TARGET, IdWithChunk.encode(ref.getObjectId()));
		if (ref.isPeeled()) {
			e.bool(TAG_IS_PEELED, true);
			// A peeled ref may still lack a peeled id (e.g. the ref
			// points at a non-tag); then only the flag is recorded.
			if (ref.getPeeledObjectId() != null)
				e.message(TAG_PEELED,
						IdWithChunk.encode(ref.getPeeledObjectId()));
		}
		return new RefData(e.asByteArray());
	}

	/**
	 * Encode a target whose peeled value has already been resolved.
	 *
	 * @param targetId
	 *            object the reference points at.
	 * @param peeledId
	 *            peeled value; may be null, then only the flag is stored.
	 * @return encoded state with {@code TAG_IS_PEELED} always set.
	 */
	static RefData peeled(ObjectId targetId, ObjectId peeledId) {
		int max = 8 + 2 * OBJECT_ID_STRING_LENGTH + 2 * ChunkKey.KEYLEN;
		Encoder e = encode(max);
		e.message(TAG_TARGET, IdWithChunk.encode(targetId));
		e.bool(TAG_IS_PEELED, true);
		if (peeledId != null)
			e.message(TAG_PEELED, IdWithChunk.encode(peeledId));
		return new RefData(e.asByteArray());
	}

	// Raw TinyProtobuf encoding of this reference's state.
	private final byte[] data;

	RefData(byte[] data) {
		this.data = data;
	}

	// Callers walk the returned decoder with next()/skip().
	TinyProtobuf.Decoder decode() {
		return TinyProtobuf.decode(data);
	}

	/** @return the contents, encoded as a byte array for storage. */
	public byte[] asBytes() {
		return data;
	}

	@Override
	public int hashCode() {
		// DJB2-style hash (hash * 33 + byte) over the encoded bytes.
		int hash = 5381;
		for (int ptr = 0; ptr < data.length; ptr++)
			hash = ((hash << 5) + hash) + (data[ptr] & 0xff);
		return hash;
	}

	@Override
	public boolean equals(Object other) {
		// Byte-for-byte equality on the encoded form; presumably relied
		// on by the compare-and-swap put in RefTable — TODO confirm.
		if (other instanceof RefData)
			return Arrays.equals(data, ((RefData) other).data);
		return false;
	}

@Override
public String toString() {
StringBuilder b = new StringBuilder();
TinyProtobuf.Decoder d = decode();
for (;;) {
switch (d.next()) {
case 0:
return b.toString().substring(1);
case TAG_SYMREF:
b.append("\nsymref: ").append(d.string());
continue;
case TAG_TARGET:
b.append("\ntarget: ").append(IdWithChunk.decode(d.message()));
continue;
case TAG_IS_PEELED:
b.append("\nis_peeled: ").append(d.bool());
continue;
case TAG_PEELED:
b.append("\npeeled: ").append(IdWithChunk.decode(d.message()));
continue;
default:
d.skip();
continue;
}
}
}

	/**
	 * An ObjectId that also remembers the {@link ChunkKey} it was seen in.
	 * <p>
	 * The chunk hint lets a reader fetch the chunk containing the object
	 * directly, instead of consulting an index first.
	 */
	static class IdWithChunk extends ObjectId {
		/**
		 * Decode an id and optional chunk hint from a nested message.
		 *
		 * @param d
		 *            decoder positioned on the nested message.
		 * @return an {@link IdWithChunk} when field 2 (chunk key) was
		 *         present, otherwise the plain id from field 1; null if
		 *         field 1 was absent.
		 */
		static ObjectId decode(TinyProtobuf.Decoder d) {
			ObjectId id = null;
			ChunkKey key = null;
			DECODE: for (;;) {
				switch (d.next()) {
				case 0: // end of message
					break DECODE;
				case 1: // hex SHA-1 of the object
					id = d.stringObjectId();
					continue;
				case 2: // chunk the object was observed in
					key = ChunkKey.fromBytes(d);
					continue;
				default: // unknown field; ignore for compatibility
					d.skip();
				}
			}
			return key != null ? new IdWithChunk(id, key) : id;
		}

		/**
		 * Encode an id, including the chunk hint when one is available.
		 *
		 * @param id
		 *            the id to encode.
		 * @return encoder holding field 1 (hex SHA-1) and, when {@code id}
		 *         is an {@link IdWithChunk}, field 2 (chunk key).
		 */
		static TinyProtobuf.Encoder encode(AnyObjectId id) {
			if (id instanceof IdWithChunk) {
				int max = 4 + OBJECT_ID_STRING_LENGTH + ChunkKey.KEYLEN;
				TinyProtobuf.Encoder e = TinyProtobuf.encode(max);
				e.string(1, id);
				e.string(2, ((IdWithChunk) id).chunkKey);
				return e;
			} else {
				int max = 2 + OBJECT_ID_STRING_LENGTH;
				TinyProtobuf.Encoder e = TinyProtobuf.encode(max);
				e.string(1, id);
				return e;
			}
		}

		// Chunk the object was observed in when the ref was written.
		private final ChunkKey chunkKey;

		IdWithChunk(AnyObjectId id, ChunkKey key) {
			super(id);
			this.chunkKey = key;
		}

		ChunkKey getChunkKey() {
			return chunkKey;
		}

		@Override
		public String toString() {
			return name() + "->" + chunkKey;
		}
	}
}

+ 132
- 0
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/RefDataUtil.java View File

@@ -0,0 +1,132 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package org.eclipse.jgit.storage.dht;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;

/** Tools to work with {@link RefData}. */
/** Tools to work with {@link RefData}. */
public class RefDataUtil {
	/** Magic constant meaning does not exist. */
	public static final RefData NONE = RefData.newBuilder().build();

	/** @return state describing a symbolic reference to {@code target}. */
	static RefData symbolic(String target) {
		return RefData.newBuilder().setSymref(target).build();
	}

	/** @return state pointing directly at the object {@code id}. */
	static RefData id(AnyObjectId id) {
		return RefData.newBuilder().setTarget(toRefData(id)).build();
	}

	/**
	 * Capture the current state of an existing reference.
	 *
	 * @param ref
	 *            the reference to encode.
	 * @return encoded state; {@link #NONE} when the ref has no object id.
	 */
	static RefData fromRef(Ref ref) {
		if (ref.isSymbolic())
			return symbolic(ref.getTarget().getName());

		if (ref.getObjectId() == null)
			return NONE;

		RefData.Builder builder = RefData.newBuilder()
				.setTarget(toRefData(ref.getObjectId()));
		if (ref.isPeeled()) {
			builder.setIsPeeled(true);
			// A peeled ref may still have no peeled object id; then only
			// the flag is recorded.
			if (ref.getPeeledObjectId() != null)
				builder.setPeeled(toRefData(ref.getPeeledObjectId()));
		}
		return builder.build();
	}

	/**
	 * Encode a target whose peeled value is already known.
	 *
	 * @param targetId
	 *            object the reference points at.
	 * @param peeledId
	 *            peeled value; may be null, then only the flag is set.
	 * @return encoded state with the peeled flag always set.
	 */
	static RefData peeled(ObjectId targetId, ObjectId peeledId) {
		RefData.Builder builder = RefData.newBuilder()
				.setTarget(toRefData(targetId))
				.setIsPeeled(true);
		if (peeledId != null)
			builder.setPeeled(toRefData(peeledId));
		return builder.build();
	}

	// Convert an id to its protobuf form, carrying the chunk hint when
	// the id is an IdWithChunk.
	private static RefData.Id toRefData(AnyObjectId id) {
		RefData.Id.Builder idBuilder = RefData.Id.newBuilder();
		idBuilder.setObjectName(id.name());
		if (id instanceof IdWithChunk) {
			ChunkKey hint = ((IdWithChunk) id).getChunkKey();
			idBuilder.setChunkKey(hint.asString());
		}
		return idBuilder.build();
	}

	/** An ObjectId that also remembers the chunk it was observed in. */
	static class IdWithChunk extends ObjectId {
		/**
		 * Materialize an id from its protobuf form.
		 *
		 * @param src
		 *            encoded id message.
		 * @return an {@link IdWithChunk} when a chunk key was recorded,
		 *         otherwise a plain ObjectId.
		 */
		static ObjectId create(RefData.Id src) {
			ObjectId name = ObjectId.fromString(src.getObjectName());
			if (!src.hasChunkKey())
				return name;
			return new IdWithChunk(name,
					ChunkKey.fromString(src.getChunkKey()));
		}

		// Chunk the object was observed in when the ref was written.
		private final ChunkKey chunkKey;

		IdWithChunk(AnyObjectId id, ChunkKey key) {
			super(id);
			this.chunkKey = key;
		}

		ChunkKey getChunkKey() {
			return chunkKey;
		}

		@Override
		public String toString() {
			return name() + "->" + chunkKey;
		}
	}

	private RefDataUtil() {
		// Utility class, do not create instances.
	}
}

+ 0
- 755
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/TinyProtobuf.java View File

@@ -1,755 +0,0 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package org.eclipse.jgit.storage.dht;

import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH;

import java.nio.ByteBuffer;
import java.text.MessageFormat;

import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.util.RawParseUtils;

/**
* A tiny implementation of a subset of the Google Protocol Buffers format.
* <p>
* For more information on the network format, see the canonical documentation
* at <a href="http://code.google.com/p/protobuf/">Google Protocol Buffers</a>.
*/
public class TinyProtobuf {
private static final int WIRE_VARINT = 0;

private static final int WIRE_FIXED_64 = 1;

private static final int WIRE_LEN_DELIM = 2;

private static final int WIRE_FIXED_32 = 5;

/**
* Create a new encoder.
*
* @param estimatedSize
* estimated size of the message. If the size is accurate,
* copying of the result can be avoided during
* {@link Encoder#asByteArray()}. If the size is too small, the
* buffer will grow dynamically.
* @return a new encoder.
*/
public static Encoder encode(int estimatedSize) {
return new Encoder(new byte[estimatedSize]);
}

/**
* Create an encoder that estimates size.
*
* @return a new encoder.
*/
public static Encoder size() {
return new Encoder(null);
}

/**
* Decode a buffer.
*
* @param buf
* the buffer to read.
* @return a new decoder.
*/
public static Decoder decode(byte[] buf) {
return decode(buf, 0, buf.length);
}

/**
* Decode a buffer.
*
* @param buf
* the buffer to read.
* @param off
* offset to begin reading from {@code buf}.
* @param len
* total number of bytes to read from {@code buf}.
* @return a new decoder.
*/
public static Decoder decode(byte[] buf, int off, int len) {
return new Decoder(buf, off, len);
}

/** An enumerated value that encodes/decodes as int32. */
public static interface Enum {
/** @return the wire value. */
public int value();
}

/** Decode fields from a binary protocol buffer. */
public static class Decoder {
private final byte[] buf;

private final int end;

private int ptr;

private int field;

private int type;

private int length;

private Decoder(byte[] buf, int off, int len) {
this.buf = buf;
this.ptr = off;
this.end = off + len;
}

/** @return get the field tag number, 0 on end of buffer. */
public int next() {
if (ptr == end)
return 0;

int fieldAndType = varint32();
field = fieldAndType >>> 3;
type = fieldAndType & 7;
return field;
}

/** Skip the current field's value. */
public void skip() {
switch (type) {
case WIRE_VARINT:
varint64();
break;
case WIRE_FIXED_64:
ptr += 8;
break;
case WIRE_LEN_DELIM:
ptr += varint32();
break;
case WIRE_FIXED_32:
ptr += 4;
break;
default:
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufUnsupportedFieldType, Integer
.valueOf(type)));
}
}

/** @return decode the current field as an int32. */
public int int32() {
checkFieldType(WIRE_VARINT);
return varint32();
}

/** @return decode the current field as an int64. */
public long int64() {
checkFieldType(WIRE_VARINT);
return varint64();
}

/**
* @param <T>
* the type of enumeration.
* @param all
* all of the supported values.
* @return decode the current field as an enumerated value.
*/
public <T extends Enum> T intEnum(T[] all) {
checkFieldType(WIRE_VARINT);
int value = varint32();
for (T t : all) {
if (t.value() == value)
return t;
}
throw new IllegalStateException(MessageFormat.format(
DhtText.get().protobufWrongFieldType, Integer
.valueOf(field), Integer.valueOf(type), all[0]
.getClass().getSimpleName()));
}

/** @return decode the current field as a bool. */
public boolean bool() {
checkFieldType(WIRE_VARINT);
int val = varint32();
switch (val) {
case 0:
return false;
case 1:
return true;
default:
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufNotBooleanValue, Integer.valueOf(field),
Integer.valueOf(val)));
}
}

		/**
		 * @return decode a fixed 64 bit value, assembled from 8 bytes
		 *         stored least-significant byte first (little-endian).
		 */
		public long fixed64() {
			checkFieldType(WIRE_FIXED_64);
			long val = buf[ptr + 0] & 0xff;
			val |= ((long) (buf[ptr + 1] & 0xff)) << (1 * 8);
			val |= ((long) (buf[ptr + 2] & 0xff)) << (2 * 8);
			val |= ((long) (buf[ptr + 3] & 0xff)) << (3 * 8);
			val |= ((long) (buf[ptr + 4] & 0xff)) << (4 * 8);
			val |= ((long) (buf[ptr + 5] & 0xff)) << (5 * 8);
			val |= ((long) (buf[ptr + 6] & 0xff)) << (6 * 8);
			val |= ((long) (buf[ptr + 7] & 0xff)) << (7 * 8);
			ptr += 8;
			return val;
		}

/** @return decode the current field as a string. */
public String string() {
checkFieldType(WIRE_LEN_DELIM);
int len = varint32();
String s = RawParseUtils.decode(buf, ptr, ptr + len);
ptr += len;
return s;
}

/** @return decode the current hex string to an ObjectId. */
public ObjectId stringObjectId() {
checkFieldType(WIRE_LEN_DELIM);
int len = varint32();
if (len != OBJECT_ID_STRING_LENGTH)
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufWrongFieldLength,
Integer.valueOf(field), Integer
.valueOf(OBJECT_ID_STRING_LENGTH), Integer
.valueOf(len)));

ObjectId id = ObjectId.fromString(buf, ptr);
ptr += OBJECT_ID_STRING_LENGTH;
return id;
}

/** @return decode a string from 8 hex digits. */
public int stringHex32() {
checkFieldType(WIRE_LEN_DELIM);
int len = varint32();
if (len != 8)
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufWrongFieldLength,
Integer.valueOf(field), Integer.valueOf(8), Integer
.valueOf(len)));
int val = KeyUtils.parse32(buf, ptr);
ptr += 8;
return val;
}

/** @return decode the current field as an array of bytes. */
public byte[] bytes() {
checkFieldType(WIRE_LEN_DELIM);
byte[] r = new byte[varint32()];
System.arraycopy(buf, ptr, r, 0, r.length);
ptr += r.length;
return r;
}

/** @return backing array of the current field. */
public byte[] bytesArray() {
return buf;
}

/** @return length of field, call before {@link #bytesOffset}. */
public int bytesLength() {
checkFieldType(WIRE_LEN_DELIM);
length = varint32();
return length;
}

/** @return starting offset of the field, after {@link #bytesLength()}. */
public int bytesOffset() {
int start = ptr;
ptr += length;
return start;
}

/** @return decode the current raw bytes to an ObjectId. */
public ObjectId bytesObjectId() {
checkFieldType(WIRE_LEN_DELIM);
int len = varint32();
if (len != OBJECT_ID_LENGTH)
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufWrongFieldLength,
Integer.valueOf(field), Integer
.valueOf(OBJECT_ID_LENGTH), Integer
.valueOf(len)));

ObjectId id = ObjectId.fromRaw(buf, ptr);
ptr += OBJECT_ID_LENGTH;
return id;
}

/** @return decode the current field as a nested message. */
public Decoder message() {
checkFieldType(WIRE_LEN_DELIM);
int len = varint32();
Decoder msg = decode(buf, ptr, len);
ptr += len;
return msg;
}

private int varint32() {
long v = varint64();
if (Integer.MAX_VALUE < v)
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufWrongFieldType, Integer.valueOf(field),
"int64", "int32"));
return (int) v;
}

private long varint64() {
int c = buf[ptr++];
long r = c & 0x7f;
int shift = 7;
while ((c & 0x80) != 0) {
c = buf[ptr++];
r |= ((long) (c & 0x7f)) << shift;
shift += 7;
}
return r;
}

private void checkFieldType(int expected) {
if (type != expected)
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufWrongFieldType, Integer.valueOf(field),
Integer.valueOf(type), Integer.valueOf(expected)));
}
}

/** Encode values into a binary protocol buffer. */
public static class Encoder {
private byte[] buf;

private int ptr;

private Encoder(byte[] buf) {
this.buf = buf;
}

/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store. Must be >= 0.
*/
public void int32(int field, int value) {
int64(field, value);
}

/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store; omitted if 0.
*/
public void int32IfNotZero(int field, int value) {
int64IfNotZero(field, value);
}

/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store; omitted if negative.
*/
public void int32IfNotNegative(int field, int value) {
int64IfNotNegative(field, value);
}

/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store. Must be >= 0.
*/
public void int64(int field, long value) {
if (value < 0)
throw new IllegalArgumentException(
DhtText.get().protobufNegativeValuesNotSupported);

field(field, WIRE_VARINT);
varint(value);
}

/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store; omitted if 0.
*/
public void int64IfNotZero(int field, long value) {
if (0 != value)
int64(field, value);
}

/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store; omitted if negative.
*/
public void int64IfNotNegative(int field, long value) {
if (0 <= value)
int64(field, value);
}

/**
* Encode an enumerated value.
*
* @param <T>
* type of the enumerated values.
* @param field
* field tag number.
* @param value
* value to store; if null the field is omitted.
*/
public <T extends Enum> void intEnum(int field, T value) {
if (value != null) {
field(field, WIRE_VARINT);
varint(value.value());
}
}

/**
* Encode a boolean value.
*
* @param field
* field tag number.
* @param value
* the value to store.
*/
public void bool(int field, boolean value) {
field(field, WIRE_VARINT);
varint(value ? 1 : 0);
}

/**
* Encode a boolean value, only if true.
*
* @param field
* field tag number.
* @param value
* the value to store.
*/
public void boolIfTrue(int field, boolean value) {
if (value)
bool(field, value);
}

/**
* Encode a fixed 64 value.
*
* @param field
* field tag number.
* @param value
* the value to store.
*/
public void fixed64(int field, long value) {
field(field, WIRE_FIXED_64);
if (buf != null) {
ensureSpace(8);

buf[ptr + 0] = (byte) value;
value >>>= 8;

buf[ptr + 1] = (byte) value;
value >>>= 8;

buf[ptr + 3] = (byte) value;
value >>>= 8;

buf[ptr + 3] = (byte) value;
value >>>= 8;

buf[ptr + 4] = (byte) value;
value >>>= 8;

buf[ptr + 5] = (byte) value;
value >>>= 8;

buf[ptr + 6] = (byte) value;
value >>>= 8;

buf[ptr + 7] = (byte) value;
}
ptr += 8;
}

/**
* Encode a length delimited bytes field.
*
* @param field
* field tag number.
* @param value
* the value to store; if null the field is omitted.
*/
public void bytes(int field, byte[] value) {
if (value != null)
bytes(field, value, 0, value.length);
}

/**
* Encode a length delimited bytes field.
*
* @param field
* field tag number.
* @param value
* the value to store; if null the field is omitted.
*/
public void bytes(int field, ByteBuffer value) {
if (value != null) {
if (!value.hasArray())
throw new IllegalArgumentException(DhtText.get().protobufNoArray);
byte[] valBuf = value.array();
int valPtr = value.arrayOffset() + value.position();
int valLen = value.limit() - value.position();
bytes(field, valBuf, valPtr, valLen);
}
}

/**
* Encode a length delimited bytes field.
*
* @param field
* field tag number.
* @param value
* the value to store; if null the field is omitted.
* @param off
* position to copy from.
* @param len
* number of bytes to copy.
*/
public void bytes(int field, byte[] value, int off, int len) {
if (value != null) {
field(field, WIRE_LEN_DELIM);
varint(len);
copy(value, off, len);
}
}

/**
* Encode an ObjectId as a bytes (in raw binary format).
*
* @param field
* field tag number.
* @param value
* the value to store, as a raw binary; if null the field is
* omitted.
*/
public void bytes(int field, AnyObjectId value) {
if (value != null) {
field(field, WIRE_LEN_DELIM);
varint(OBJECT_ID_LENGTH);
if (buf != null) {
ensureSpace(OBJECT_ID_LENGTH);
value.copyRawTo(buf, ptr);
}
ptr += OBJECT_ID_LENGTH;
}
}

/**
* Encode an ObjectId as a string (in hex format).
*
* @param field
* field tag number.
* @param value
* the value to store, as a hex string; if null the field is
* omitted.
*/
public void string(int field, AnyObjectId value) {
if (value != null) {
field(field, WIRE_LEN_DELIM);
varint(OBJECT_ID_STRING_LENGTH);
if (buf != null) {
ensureSpace(OBJECT_ID_STRING_LENGTH);
value.copyTo(buf, ptr);
}
ptr += OBJECT_ID_STRING_LENGTH;
}
}

/**
* Encode a plain Java string.
*
* @param field
* field tag number.
* @param value
* the value to store; if null the field is omitted.
*/
public void string(int field, String value) {
if (value != null)
bytes(field, Constants.encode(value));
}

/**
* Encode a row key as a string.
*
* @param field
* field tag number.
* @param key
* the row key to store as a string; if null the field is
* omitted.
*/
public void string(int field, RowKey key) {
if (key != null)
bytes(field, key.asBytes());
}

/**
* Encode an integer as an 8 byte hex string.
*
* @param field
* field tag number.
* @param value
* value to encode.
*/
public void stringHex32(int field, int value) {
field(field, WIRE_LEN_DELIM);
varint(8);
if (buf != null) {
ensureSpace(8);
KeyUtils.format32(buf, ptr, value);
}
ptr += 8;
}

/**
* Encode a nested message.
*
* @param field
* field tag number.
* @param msg
* message to store; if null or empty the field is omitted.
*/
public void message(int field, Encoder msg) {
if (msg != null && msg.ptr > 0)
bytes(field, msg.buf, 0, msg.ptr);
}

private void field(int field, int type) {
varint((field << 3) | type);
}

private void varint(long value) {
if (buf != null) {
if (buf.length - ptr < 10)
ensureSpace(varintSize(value));

do {
byte b = (byte) (value & 0x7f);
value >>>= 7;
if (value != 0)
b |= 0x80;
buf[ptr++] = b;
} while (value != 0);
} else {
ptr += varintSize(value);
}
}

private static int varintSize(long value) {
value >>>= 7;
int need = 1;
for (; value != 0; value >>>= 7)
need++;
return need;
}

private void copy(byte[] src, int off, int cnt) {
if (buf != null) {
ensureSpace(cnt);
System.arraycopy(src, off, buf, ptr, cnt);
}
ptr += cnt;
}

private void ensureSpace(int need) {
if (buf.length - ptr < need) {
byte[] n = new byte[Math.max(ptr + need, buf.length * 2)];
System.arraycopy(buf, 0, n, 0, ptr);
buf = n;
}
}

/** @return size of the protocol buffer message, in bytes. */
public int size() {
return ptr;
}

/** @return the current buffer, as a byte array. */
public byte[] asByteArray() {
if (ptr == buf.length)
return buf;
byte[] r = new byte[ptr];
System.arraycopy(buf, 0, r, 0, ptr);
return r;
}

/** @return the current buffer. */
public ByteBuffer asByteBuffer() {
return ByteBuffer.wrap(buf, 0, ptr);
}
}

private TinyProtobuf() {
// Don't make instances.
}
}

+ 3
- 2
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/ChunkTable.java View File

@@ -44,11 +44,12 @@
package org.eclipse.jgit.storage.dht.spi;

import java.util.Collection;
import java.util.Map;
import java.util.Set;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.storage.dht.AsyncCallback;
import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.ChunkMeta;
import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.PackChunk;
import org.eclipse.jgit.storage.dht.StreamingCallback;
@@ -113,7 +114,7 @@ public interface ChunkTable {
* results early.
*/
public void getMeta(Context options, Set<ChunkKey> keys,
AsyncCallback<Collection<ChunkMeta>> callback);
AsyncCallback<Map<ChunkKey, ChunkMeta>> callback);

/**
* Put some (or all) of a single chunk.

+ 4
- 3
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RefTable.java View File

@@ -46,8 +46,9 @@ package org.eclipse.jgit.storage.dht.spi;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.RefData;
import org.eclipse.jgit.storage.dht.RefDataUtil;
import org.eclipse.jgit.storage.dht.RefKey;
import org.eclipse.jgit.storage.dht.RepositoryKey;

@@ -100,8 +101,8 @@ public interface RefTable {
* @param oldData
* the old data for the reference. The put only occurs if the
* value is still equal to {@code oldData}. Use
* {@link RefData#NONE} if the reference should not exist and is
* being created.
* {@link RefDataUtil#NONE} if the reference should not exist and
* is being created.
* @param newData
* new value to store.
* @return true if the put was successful; false if the current value does

+ 1
- 1
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/RepositoryTable.java View File

@@ -46,7 +46,7 @@ package org.eclipse.jgit.storage.dht.spi;
import java.util.Collection;
import java.util.concurrent.TimeoutException;

import org.eclipse.jgit.storage.dht.CachedPackInfo;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.storage.dht.CachedPackKey;
import org.eclipse.jgit.storage.dht.ChunkInfo;
import org.eclipse.jgit.storage.dht.ChunkKey;

+ 135
- 74
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheChunkTable.java View File

@@ -44,29 +44,36 @@
package org.eclipse.jgit.storage.dht.spi.cache;

import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.storage.dht.AsyncCallback;
import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.ChunkMeta;
import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.PackChunk;
import org.eclipse.jgit.storage.dht.StreamingCallback;
import org.eclipse.jgit.storage.dht.Sync;
import org.eclipse.jgit.storage.dht.TinyProtobuf;
import org.eclipse.jgit.storage.dht.spi.ChunkTable;
import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.WireFormat;

/** Cache wrapper around ChunkTable. */
public class CacheChunkTable implements ChunkTable {
private final ChunkTable db;
@@ -105,7 +112,7 @@ public class CacheChunkTable implements ChunkTable {
}

public void getMeta(Context options, Set<ChunkKey> keys,
AsyncCallback<Collection<ChunkMeta>> callback) {
AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) {
List<CacheKey> toFind = new ArrayList<CacheKey>(keys.size());
for (ChunkKey k : keys)
toFind.add(nsMeta.key(k));
@@ -118,8 +125,10 @@ public class CacheChunkTable implements ChunkTable {
db.put(chunk, buf.getWriteBuffer());

// Only store fragmented meta. This is all callers should ask for.
if (chunk.hasMeta() && chunk.getMeta().getFragmentCount() != 0)
buf.put(nsMeta.key(chunk.getChunkKey()), chunk.getMeta().asBytes());
if (chunk.hasMeta() && chunk.getMeta().getFragmentCount() != 0) {
buf.put(nsMeta.key(chunk.getChunkKey()),
chunk.getMeta().toByteArray());
}

if (chunk.hasChunkData())
buf.put(nsChunk.key(chunk.getChunkKey()), encode(chunk));
@@ -135,57 +144,99 @@ public class CacheChunkTable implements ChunkTable {
}

private static byte[] encode(PackChunk.Members members) {
final byte[] meta;
if (members.hasMeta())
meta = members.getMeta().asBytes();
else
meta = null;

ByteBuffer chunkData = members.getChunkDataAsByteBuffer();
ByteBuffer chunkIndex = members.getChunkIndexAsByteBuffer();

TinyProtobuf.Encoder sizer = TinyProtobuf.size();
TinyProtobuf.Encoder e = sizer;
do {
e.bytes(1, chunkData);
e.bytes(2, chunkIndex);
e.bytes(3, meta);
if (e == sizer)
e = TinyProtobuf.encode(e.size());
else
return e.asByteArray();
} while (true);
// It's too slow to encode ByteBuffer through the standard code.
// Since the message is only 3 fields, do it by hand.
ByteBuffer data = members.getChunkDataAsByteBuffer();
ByteBuffer index = members.getChunkIndexAsByteBuffer();
ChunkMeta meta = members.getMeta();

int sz = 0;
if (data != null)
sz += computeByteBufferSize(1, data);
if (index != null)
sz += computeByteBufferSize(2, index);
if (meta != null)
sz += CodedOutputStream.computeMessageSize(3, meta);

byte[] r = new byte[sz];
CodedOutputStream out = CodedOutputStream.newInstance(r);
try {
if (data != null)
writeByteBuffer(out, 1, data);
if (index != null)
writeByteBuffer(out, 2, index);
if (meta != null)
out.writeMessage(3, meta);
} catch (IOException err) {
throw new RuntimeException("Cannot buffer chunk", err);
}
return r;
}

private static int computeByteBufferSize(int fieldNumber, ByteBuffer data) {
int n = data.remaining();
return CodedOutputStream.computeTagSize(fieldNumber)
+ CodedOutputStream.computeRawVarint32Size(n)
+ n;
}

private static void writeByteBuffer(CodedOutputStream out, int fieldNumber,
ByteBuffer data) throws IOException {
byte[] d = data.array();
int p = data.arrayOffset() + data.position();
int n = data.remaining();
out.writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED);
out.writeRawVarint32(n);
out.writeRawBytes(d, p, n);
}

private static PackChunk.Members decode(ChunkKey key, byte[] raw) {
PackChunk.Members members = new PackChunk.Members();
members.setChunkKey(key);

TinyProtobuf.Decoder d = TinyProtobuf.decode(raw);
for (;;) {
switch (d.next()) {
case 0:
return members;
case 1: {
int cnt = d.bytesLength();
int ptr = d.bytesOffset();
byte[] buf = d.bytesArray();
members.setChunkData(buf, ptr, cnt);
continue;
}
case 2: {
int cnt = d.bytesLength();
int ptr = d.bytesOffset();
byte[] buf = d.bytesArray();
members.setChunkIndex(buf, ptr, cnt);
continue;
}
case 3:
members.setMeta(ChunkMeta.fromBytes(key, d.message()));
continue;
default:
d.skip();
// It's too slow to convert using the standard code, as copies
// are made. Instead find offsets in the stream and use that.
CodedInputStream in = CodedInputStream.newInstance(raw);
try {
int tag = in.readTag();
for (;;) {
switch (WireFormat.getTagFieldNumber(tag)) {
case 0:
return members;
case 1: {
int cnt = in.readRawVarint32();
int ptr = in.getTotalBytesRead();
members.setChunkData(raw, ptr, cnt);
in.skipRawBytes(cnt);
tag = in.readTag();
if (WireFormat.getTagFieldNumber(tag) != 2)
continue;
}
//$FALL-THROUGH$
case 2: {
int cnt = in.readRawVarint32();
int ptr = in.getTotalBytesRead();
members.setChunkIndex(raw, ptr, cnt);
in.skipRawBytes(cnt);
tag = in.readTag();
if (WireFormat.getTagFieldNumber(tag) != 3)
continue;
}
//$FALL-THROUGH$
case 3: {
int cnt = in.readRawVarint32();
int oldLimit = in.pushLimit(cnt);
members.setMeta(ChunkMeta.parseFrom(in));
in.popLimit(oldLimit);
tag = in.readTag();
continue;
}
default:
in.skipField(tag);
}
}
} catch (IOException err) {
throw new RuntimeException("Cannot decode chunk", err);
}
}

@@ -329,41 +380,49 @@ public class CacheChunkTable implements ChunkTable {

private final Set<ChunkKey> remaining;

private final AsyncCallback<Collection<ChunkMeta>> normalCallback;
private final AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback;

private final StreamingCallback<Collection<ChunkMeta>> streamingCallback;
private final StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback;

private final List<ChunkMeta> all;
private final Map<ChunkKey, ChunkMeta> all;

MetaFromCache(Context options, Set<ChunkKey> keys,
AsyncCallback<Collection<ChunkMeta>> callback) {
AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) {
this.options = options;
this.remaining = new HashSet<ChunkKey>(keys);
this.normalCallback = callback;

if (callback instanceof StreamingCallback<?>) {
streamingCallback = (StreamingCallback<Collection<ChunkMeta>>) callback;
streamingCallback = (StreamingCallback<Map<ChunkKey, ChunkMeta>>) callback;
all = null;
} else {
streamingCallback = null;
all = new ArrayList<ChunkMeta>(keys.size());
all = new HashMap<ChunkKey, ChunkMeta>();
}
}

public void onPartialResult(Map<CacheKey, byte[]> result) {
for (Map.Entry<CacheKey, byte[]> ent : result.entrySet()) {
ChunkKey key = ChunkKey.fromBytes(ent.getKey().getBytes());
ChunkMeta meta = ChunkMeta.fromBytes(key, ent.getValue());
ChunkMeta meta;
try {
meta = ChunkMeta.parseFrom(ent.getValue());
} catch (InvalidProtocolBufferException e) {
// Invalid meta message, remove the cell from cache.
client.modify(singleton(Change.remove(ent.getKey())),
Sync.<Void> none());
continue;
}

if (streamingCallback != null) {
streamingCallback.onPartialResult(singleton(meta));
streamingCallback.onPartialResult(singletonMap(key, meta));

synchronized (lock) {
remaining.remove(key);
}
} else {
synchronized (lock) {
all.add(meta);
all.put(key, meta);
remaining.remove(key);
}
}
@@ -391,31 +450,31 @@ public class CacheChunkTable implements ChunkTable {
}

private class MetaFromDatabase implements
StreamingCallback<Collection<ChunkMeta>> {
StreamingCallback<Map<ChunkKey, ChunkMeta>> {
private final Object lock = new Object();

private final List<ChunkMeta> all;
private final Map<ChunkKey, ChunkMeta> all;

private final AsyncCallback<Collection<ChunkMeta>> normalCallback;
private final AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback;

private final StreamingCallback<Collection<ChunkMeta>> streamingCallback;
private final StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback;

MetaFromDatabase(List<ChunkMeta> all,
AsyncCallback<Collection<ChunkMeta>> normalCallback,
StreamingCallback<Collection<ChunkMeta>> streamingCallback) {
MetaFromDatabase(Map<ChunkKey, ChunkMeta> all,
AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback,
StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback) {
this.all = all;
this.normalCallback = normalCallback;
this.streamingCallback = streamingCallback;
}

public void onPartialResult(Collection<ChunkMeta> result) {
final List<ChunkMeta> toPutIntoCache = copy(result);
public void onPartialResult(Map<ChunkKey, ChunkMeta> result) {
final Map<ChunkKey, ChunkMeta> toPutIntoCache = copy(result);

if (streamingCallback != null)
streamingCallback.onPartialResult(result);
else {
synchronized (lock) {
all.addAll(result);
all.putAll(result);
}
}

@@ -425,20 +484,22 @@ public class CacheChunkTable implements ChunkTable {
//
executor.submit(new Runnable() {
public void run() {
for (ChunkMeta meta : toPutIntoCache) {
ChunkKey key = meta.getChunkKey();
Change op = Change.put(nsMeta.key(key), meta.asBytes());
for (Map.Entry<ChunkKey, ChunkMeta> ent
: toPutIntoCache.entrySet()) {
ChunkKey key = ent.getKey();
Change op = Change.put(nsMeta.key(key),
ent.getValue().toByteArray());
client.modify(singleton(op), none);
}
}
});
}

private <T> List<T> copy(Collection<T> result) {
return new ArrayList<T>(result);
private <K, V> Map<K, V> copy(Map<K, V> result) {
return new HashMap<K, V>(result);
}

public void onSuccess(Collection<ChunkMeta> result) {
public void onSuccess(Map<ChunkKey, ChunkMeta> result) {
if (result != null && !result.isEmpty())
onPartialResult(result);


+ 42
- 54
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheObjectIndexTable.java View File

@@ -44,7 +44,9 @@
package org.eclipse.jgit.storage.dht.spi.cache;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -52,6 +54,7 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;

import org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex;
import org.eclipse.jgit.storage.dht.AsyncCallback;
import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.DhtException;
@@ -59,12 +62,13 @@ import org.eclipse.jgit.storage.dht.ObjectIndexKey;
import org.eclipse.jgit.storage.dht.ObjectInfo;
import org.eclipse.jgit.storage.dht.StreamingCallback;
import org.eclipse.jgit.storage.dht.Sync;
import org.eclipse.jgit.storage.dht.TinyProtobuf;
import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;

import com.google.protobuf.InvalidProtocolBufferException;

/** Cache wrapper around ObjectIndexTable. */
public class CacheObjectIndexTable implements ObjectIndexTable {
private final ObjectIndexTable db;
@@ -125,58 +129,6 @@ public class CacheObjectIndexTable implements ObjectIndexTable {
buf.remove(ns.key(objId));
}

private static byte[] encode(Collection<ObjectInfo> list) {
TinyProtobuf.Encoder e = TinyProtobuf.encode(128);
for (ObjectInfo info : list) {
TinyProtobuf.Encoder m = TinyProtobuf.encode(128);
m.bytes(1, info.getChunkKey().asBytes());
m.bytes(2, info.asBytes());
m.fixed64(3, info.getTime());
e.message(1, m);
}
return e.asByteArray();
}

private static ObjectInfo decodeItem(TinyProtobuf.Decoder m) {
ChunkKey key = null;
TinyProtobuf.Decoder data = null;
long time = -1;

for (;;) {
switch (m.next()) {
case 0:
return ObjectInfo.fromBytes(key, data, time);
case 1:
key = ChunkKey.fromBytes(m);
continue;
case 2:
data = m.message();
continue;
case 3:
time = m.fixed64();
continue;
default:
m.skip();
}
}
}

private static Collection<ObjectInfo> decode(byte[] raw) {
List<ObjectInfo> res = new ArrayList<ObjectInfo>(1);
TinyProtobuf.Decoder d = TinyProtobuf.decode(raw);
for (;;) {
switch (d.next()) {
case 0:
return res;
case 1:
res.add(decodeItem(d.message()));
continue;
default:
d.skip();
}
}
}

private class LoaderFromCache implements
StreamingCallback<Map<CacheKey, byte[]>> {
private final Object lock = new Object();
@@ -217,7 +169,15 @@ public class CacheObjectIndexTable implements ObjectIndexTable {

for (Map.Entry<CacheKey, byte[]> e : result.entrySet()) {
ObjectIndexKey objKey;
Collection<ObjectInfo> list = decode(e.getValue());
Collection<ObjectInfo> list;
try {
list = decode(e.getValue());
} catch (InvalidProtocolBufferException badCell) {
client.modify(
Collections.singleton(Change.remove(e.getKey())),
Sync.<Void> none());
continue;
}
objKey = ObjectIndexKey.fromBytes(e.getKey().getBytes());

if (tmp != null)
@@ -238,6 +198,21 @@ public class CacheObjectIndexTable implements ObjectIndexTable {
}
}

private Collection<ObjectInfo> decode(byte[] value)
throws InvalidProtocolBufferException {
CachedObjectIndex cacheEntry = CachedObjectIndex.parseFrom(value);
int sz = cacheEntry.getItemCount();
ObjectInfo[] r = new ObjectInfo[sz];
for (int i = 0; i < sz; i++) {
CachedObjectIndex.Item item = cacheEntry.getItem(i);
r[i] = new ObjectInfo(
ChunkKey.fromString(item.getChunkKey()),
item.getTime(),
item.getObjectInfo());
}
return Arrays.asList(r);
}

public void onSuccess(Map<CacheKey, byte[]> result) {
if (result != null && !result.isEmpty())
onPartialResult(result);
@@ -305,6 +280,19 @@ public class CacheObjectIndexTable implements ObjectIndexTable {

client.modify(ops, Sync.<Void> none());
}

private byte[] encode(List<ObjectInfo> items) {
CachedObjectIndex.Builder b;
b = CachedObjectIndex.newBuilder();
for (ObjectInfo info : items) {
CachedObjectIndex.Item.Builder i = b.addItemBuilder();
i.setChunkKey(info.getChunkKey().asString());
i.setObjectInfo(info.getData());
if (0 < info.getTime())
i.setTime(info.getTime());
}
return b.build().toByteArray();
}
});
}


+ 1
- 1
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRefTable.java View File

@@ -46,8 +46,8 @@ package org.eclipse.jgit.storage.dht.spi.cache;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.RefData;
import org.eclipse.jgit.storage.dht.RefKey;
import org.eclipse.jgit.storage.dht.RepositoryKey;
import org.eclipse.jgit.storage.dht.spi.Context;

+ 14
- 20
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/cache/CacheRepositoryTable.java View File

@@ -46,24 +46,24 @@ package org.eclipse.jgit.storage.dht.spi.cache;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singleton;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.eclipse.jgit.storage.dht.CachedPackInfo;
import org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.storage.dht.CachedPackKey;
import org.eclipse.jgit.storage.dht.ChunkInfo;
import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.RepositoryKey;
import org.eclipse.jgit.storage.dht.Sync;
import org.eclipse.jgit.storage.dht.TinyProtobuf;
import org.eclipse.jgit.storage.dht.spi.RepositoryTable;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;

import com.google.protobuf.InvalidProtocolBufferException;

/** Cache wrapper around RepositoryTable. */
public class CacheRepositoryTable implements RepositoryTable {
private final RepositoryTable db;
@@ -126,26 +126,20 @@ public class CacheRepositoryTable implements RepositoryTable {

byte[] data = result.get(memKey);
if (data != null) {
List<CachedPackInfo> r = new ArrayList<CachedPackInfo>();
TinyProtobuf.Decoder d = TinyProtobuf.decode(data);
for (;;) {
switch (d.next()) {
case 0:
return r;
case 1:
r.add(CachedPackInfo.fromBytes(d.message()));
continue;
default:
d.skip();
}
try {
return CachedPackInfoList.parseFrom(data).getPackList();
} catch (InvalidProtocolBufferException e) {
// Invalidate the cache entry and fall through.
client.modify(singleton(Change.remove(memKey)), none);
}
}

Collection<CachedPackInfo> r = db.getCachedPacks(repo);
TinyProtobuf.Encoder e = TinyProtobuf.encode(1024);
for (CachedPackInfo info : r)
e.bytes(1, info.asBytes());
client.modify(singleton(Change.put(memKey, e.asByteArray())), none);
CachedPackInfoList.Builder list = CachedPackInfoList.newBuilder();
list.addAllPack(r);
client.modify(
singleton(Change.put(memKey, list.build().toByteArray())),
none);
return r;
}


+ 28
- 9
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemChunkTable.java View File

@@ -43,21 +43,27 @@

package org.eclipse.jgit.storage.dht.spi.memory;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.storage.dht.AsyncCallback;
import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.ChunkMeta;
import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.DhtText;
import org.eclipse.jgit.storage.dht.PackChunk;
import org.eclipse.jgit.storage.dht.spi.ChunkTable;
import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;

import com.google.protobuf.InvalidProtocolBufferException;

final class MemChunkTable implements ChunkTable {
private final MemTable table = new MemTable();

@@ -89,8 +95,15 @@ final class MemChunkTable implements ChunkTable {
m.setChunkIndex(cell.getValue());

cell = table.get(row, colMeta.name());
if (cell != null)
m.setMeta(ChunkMeta.fromBytes(chunk, cell.getValue()));
if (cell != null) {
try {
m.setMeta(ChunkMeta.parseFrom(cell.getValue()));
} catch (InvalidProtocolBufferException err) {
callback.onFailure(new DhtException(MessageFormat.format(
DhtText.get().invalidChunkMeta, chunk), err));
return;
}
}

out.add(m);
}
@@ -99,15 +112,21 @@ final class MemChunkTable implements ChunkTable {
}

public void getMeta(Context options, Set<ChunkKey> keys,
AsyncCallback<Collection<ChunkMeta>> callback) {
int cnt = keys.size();
List<ChunkMeta> out = new ArrayList<ChunkMeta>(cnt);
AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) {
Map<ChunkKey, ChunkMeta> out = new HashMap<ChunkKey, ChunkMeta>();

for (ChunkKey chunk : keys) {
byte[] row = chunk.asBytes();
MemTable.Cell cell = table.get(row, colMeta.name());
if (cell != null)
out.add(ChunkMeta.fromBytes(chunk, cell.getValue()));
if (cell != null) {
try {
out.put(chunk, ChunkMeta.parseFrom(cell.getValue()));
} catch (InvalidProtocolBufferException err) {
callback.onFailure(new DhtException(MessageFormat.format(
DhtText.get().invalidChunkMeta, chunk), err));
return;
}
}
}

callback.onSuccess(out);
@@ -124,7 +143,7 @@ final class MemChunkTable implements ChunkTable {
table.put(row, colIndex.name(), chunk.getChunkIndex());

if (chunk.hasMeta())
table.put(row, colMeta.name(), chunk.getMeta().asBytes());
table.put(row, colMeta.name(), chunk.getMeta().toByteArray());
}

public void remove(ChunkKey key, WriteBuffer buffer) throws DhtException {

+ 23
- 10
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemObjectIndexTable.java View File

@@ -43,15 +43,18 @@

package org.eclipse.jgit.storage.dht.spi.memory;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
import org.eclipse.jgit.storage.dht.AsyncCallback;
import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.DhtText;
import org.eclipse.jgit.storage.dht.ObjectIndexKey;
import org.eclipse.jgit.storage.dht.ObjectInfo;
import org.eclipse.jgit.storage.dht.spi.Context;
@@ -59,6 +62,8 @@ import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;

import com.google.protobuf.InvalidProtocolBufferException;

final class MemObjectIndexTable implements ObjectIndexTable {
private final MemTable table = new MemTable();

@@ -70,17 +75,25 @@ final class MemObjectIndexTable implements ObjectIndexTable {

for (ObjectIndexKey objId : objects) {
for (MemTable.Cell cell : table.scanFamily(objId.asBytes(), colInfo)) {
Collection<ObjectInfo> info = out.get(objId);
if (info == null) {
info = new ArrayList<ObjectInfo>(4);
out.put(objId, info);
Collection<ObjectInfo> chunks = out.get(objId);
ChunkKey chunkKey;
if (chunks == null) {
chunks = new ArrayList<ObjectInfo>(4);
out.put(objId, chunks);
}

ChunkKey chunk = ChunkKey.fromBytes(
colInfo.suffix(cell.getName()));
byte[] value = cell.getValue();
long time = cell.getTimestamp();
info.add(ObjectInfo.fromBytes(chunk, value, time));
chunkKey = ChunkKey.fromBytes(colInfo.suffix(cell.getName()));
try {
chunks.add(new ObjectInfo(
chunkKey,
cell.getTimestamp(),
GitStore.ObjectInfo.parseFrom(cell.getValue())));
} catch (InvalidProtocolBufferException badCell) {
callback.onFailure(new DhtException(MessageFormat.format(
DhtText.get().invalidObjectInfo, objId, chunkKey),
badCell));
return;
}
}
}

@@ -91,7 +104,7 @@ final class MemObjectIndexTable implements ObjectIndexTable {
throws DhtException {
ChunkKey chunk = info.getChunkKey();
table.put(objId.asBytes(), colInfo.append(chunk.asBytes()),
info.asBytes());
info.getData().toByteArray());
}

public void remove(ObjectIndexKey objId, ChunkKey chunk, WriteBuffer buffer)

+ 15
- 6
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRefTable.java View File

@@ -43,18 +43,23 @@

package org.eclipse.jgit.storage.dht.spi.memory;

import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeoutException;

import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.RefData;
import org.eclipse.jgit.storage.dht.DhtText;
import org.eclipse.jgit.storage.dht.RefDataUtil;
import org.eclipse.jgit.storage.dht.RefKey;
import org.eclipse.jgit.storage.dht.RepositoryKey;
import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.RefTable;
import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;

import com.google.protobuf.InvalidProtocolBufferException;

final class MemRefTable implements RefTable {
private final MemTable table = new MemTable();

@@ -65,8 +70,12 @@ final class MemRefTable implements RefTable {
Map<RefKey, RefData> out = new HashMap<RefKey, RefData>();
for (MemTable.Cell cell : table.scanFamily(repository.asBytes(), colRef)) {
RefKey ref = RefKey.fromBytes(colRef.suffix(cell.getName()));
RefData val = RefData.fromBytes(cell.getValue());
out.put(ref, val);
try {
out.put(ref, RefData.parseFrom(cell.getValue()));
} catch (InvalidProtocolBufferException badCell) {
throw new DhtException(MessageFormat.format(
DhtText.get().invalidRefData, ref), badCell);
}
}
return out;
}
@@ -77,8 +86,8 @@ final class MemRefTable implements RefTable {
return table.compareAndSet( //
repo.asBytes(), //
colRef.append(refKey.asBytes()), //
oldData != RefData.NONE ? oldData.asBytes() : null, //
newData.asBytes());
oldData != RefDataUtil.NONE ? oldData.toByteArray() : null, //
newData.toByteArray());
}

public boolean compareAndRemove(RefKey refKey, RefData oldData)
@@ -87,7 +96,7 @@ final class MemRefTable implements RefTable {
return table.compareAndSet( //
repo.asBytes(), //
colRef.append(refKey.asBytes()), //
oldData != RefData.NONE ? oldData.asBytes() : null, //
oldData != RefDataUtil.NONE ? oldData.toByteArray() : null, //
null);
}
}

+ 18
- 6
org.eclipse.jgit.storage.dht/src/org/eclipse/jgit/storage/dht/spi/memory/MemRepositoryTable.java View File

@@ -43,22 +43,26 @@

package org.eclipse.jgit.storage.dht.spi.memory;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;

import org.eclipse.jgit.storage.dht.CachedPackInfo;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.storage.dht.CachedPackKey;
import org.eclipse.jgit.storage.dht.ChunkInfo;
import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.DhtText;
import org.eclipse.jgit.storage.dht.RepositoryKey;
import org.eclipse.jgit.storage.dht.spi.RepositoryTable;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;

import com.google.protobuf.InvalidProtocolBufferException;

final class MemRepositoryTable implements RepositoryTable {
private final AtomicInteger nextId = new AtomicInteger();

@@ -76,7 +80,7 @@ final class MemRepositoryTable implements RepositoryTable {
throws DhtException {
table.put(repo.asBytes(),
colChunkInfo.append(info.getChunkKey().asBytes()),
info.asBytes());
info.getData().toByteArray());
}

public void remove(RepositoryKey repo, ChunkKey chunk, WriteBuffer buffer)
@@ -87,16 +91,24 @@ final class MemRepositoryTable implements RepositoryTable {
public Collection<CachedPackInfo> getCachedPacks(RepositoryKey repo)
throws DhtException, TimeoutException {
List<CachedPackInfo> out = new ArrayList<CachedPackInfo>(4);
for (MemTable.Cell cell : table.scanFamily(repo.asBytes(), colCachedPack))
out.add(CachedPackInfo.fromBytes(cell.getValue()));
for (MemTable.Cell cell : table.scanFamily(repo.asBytes(), colCachedPack)) {
try {
out.add(CachedPackInfo.parseFrom(cell.getValue()));
} catch (InvalidProtocolBufferException e) {
throw new DhtException(MessageFormat.format(
DhtText.get().invalidCachedPackInfo, repo,
CachedPackKey.fromBytes(cell.getName())), e);
}
}
return out;
}

public void put(RepositoryKey repo, CachedPackInfo info, WriteBuffer buffer)
throws DhtException {
CachedPackKey key = CachedPackKey.fromInfo(info);
table.put(repo.asBytes(),
colCachedPack.append(info.getRowKey().asBytes()),
info.asBytes());
colCachedPack.append(key.asBytes()),
info.toByteArray());
}

public void remove(RepositoryKey repo, CachedPackKey key, WriteBuffer buffer)

+ 8
- 0
pom.xml View File

@@ -135,6 +135,7 @@
<args4j-version>2.0.12</args4j-version>
<servlet-api-version>2.5</servlet-api-version>
<jetty-version>7.1.6.v20100715</jetty-version>
<protobuf-version>2.4.0a</protobuf-version>
</properties>

<build>
@@ -348,6 +349,12 @@
<artifactId>jetty-servlet</artifactId>
<version>${jetty-version}</version>
</dependency>

<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>${protobuf-version}</version>
</dependency>
</dependencies>
</dependencyManagement>

@@ -392,6 +399,7 @@

<modules>
<module>org.eclipse.jgit</module>
<module>org.eclipse.jgit.generated.storage.dht.proto</module>
<module>org.eclipse.jgit.storage.dht</module>
<module>org.eclipse.jgit.ant</module>
<module>org.eclipse.jgit.ui</module>

Loading…
Cancel
Save