Diffstat (limited to 'org.eclipse.jgit')
-rw-r--r--  org.eclipse.jgit/.settings/.api_filters | 27
-rw-r--r--  org.eclipse.jgit/.settings/org.eclipse.jdt.ui.prefs | 13
-rw-r--r--  org.eclipse.jgit/.settings/org.eclipse.pde.api.tools.prefs | 12
-rw-r--r--  org.eclipse.jgit/META-INF/MANIFEST.MF | 89
-rw-r--r--  org.eclipse.jgit/META-INF/SOURCE-MANIFEST.MF | 4
-rw-r--r--  org.eclipse.jgit/pom.xml | 6
-rw-r--r--  org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties | 50
-rw-r--r--  org.eclipse.jgit/resources/org/eclipse/jgit/internal/storage/dfs/DfsText.properties | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java | 29
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/CleanCommand.java | 5
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/CloneCommand.java | 15
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/DescribeCommand.java | 88
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/FetchCommand.java | 33
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java | 15
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/PullCommand.java | 47
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java | 1
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java | 31
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/StashApplyCommand.java | 21
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/StashCreateCommand.java | 12
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/StashDropCommand.java | 15
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleSyncCommand.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java | 43
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/TransportCommand.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/errors/PatchApplyException.java | 4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/api/errors/PatchFormatException.java | 4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attribute.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attributes.java | 24
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesHandler.java | 14
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesRule.java | 6
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffConfig.java | 7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffEntry.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/diff/MyersDiff.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java | 96
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/errors/CorruptPackIndexException.java | 94
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/errors/TooLargeObjectInPackException.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/errors/TranslationBundleLoadingException.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/events/ListenerList.java | 13
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedEvent.java | 129
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedListener.java (renamed from org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackKey.java) | 31
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java | 1
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/hooks/GitHook.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/ignore/FastIgnoreRule.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/IMatcher.java | 12
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/NameMatcher.java | 78
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/PathMatcher.java | 121
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/Strings.java | 16
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/WildMatcher.java | 3
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java | 42
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckError.java | 152
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckPackParser.java | 336
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/package-info.java | 4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/BlockBasedFile.java | 210
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DeltaBaseCache.java | 8
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlock.java | 22
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java | 139
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsFsck.java | 172
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java | 186
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java | 29
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java | 137
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java | 256
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackDescription.java | 124
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java | 334
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java | 9
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java | 19
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftable.java | 178
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsStreamKey.java | 125
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsText.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java | 26
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableStack.java | 108
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/CheckoutEntryImpl.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java | 42
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java | 10
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java | 27
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInputStream.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java | 708
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java | 531
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java | 416
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryRename.java | 4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryUpdate.java | 13
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogEntryImpl.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogWriter.java | 137
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SimpleDataOutput.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCache.java | 42
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WriteConfig.java | 7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/io/BlockSource.java | 185
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaIndexScanner.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/ObjectToPack.java | 1
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java | 3
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java | 589
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockSizeTooSmallException.java | 62
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java | 605
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/EmptyLogCursor.java | 76
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/LogCursor.java | 72
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/MergedReftable.java | 376
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/RefCursor.java | 75
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/Reftable.java | 262
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java | 281
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConfig.java | 216
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConstants.java | 85
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableOutputStream.java | 247
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableReader.java | 683
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableWriter.java | 813
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java | 217
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/BitmapObject.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/BlobObjectChecker.java | 92
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/CheckoutEntry.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/Config.java | 294
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java | 13
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java | 16
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java | 7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/DefaultTypedConfigGetter.java | 299
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/FileMode.java | 6
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java | 36
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java | 22
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectInserter.java | 7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/RefUpdate.java | 126
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogEntry.java | 35
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogReader.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java | 8
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryBuilder.java | 4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/SubmoduleConfig.java | 4
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/TypedConfigGetter.java | 179
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/lib/UserConfig.java | 7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeFormatterPass.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/merge/ResolveMerger.java | 100
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/nls/TranslationBundle.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/revwalk/filter/SkipRevFilter.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/storage/pack/PackConfig.java | 30
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/submodule/SubmoduleWalk.java | 63
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java | 10
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/BaseReceivePack.java | 11
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/Daemon.java | 131
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/DaemonService.java | 7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/FetchProcess.java | 51
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/HttpConfig.java | 412
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/JschConfigSessionFactory.java | 168
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java | 3
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/NetRC.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/OpenSshConfig.java | 831
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java | 236
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/PackedObjectInfo.java | 23
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/PushConfig.java | 95
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceiveCommand.java | 189
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteConfig.java | 57
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteSession.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/SignedPushConfig.java | 7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/SshSessionFactory.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/SshTransport.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java | 28
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportGitSsh.java | 36
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java | 534
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java | 82
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/http/HttpConnection.java | 24
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/transport/resolver/FileResolver.java | 6
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeOptions.java | 8
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/InterIndexDiffFilter.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java | 35
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/FS_POSIX.java | 2
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java | 15
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/LongMap.java (renamed from org.eclipse.jgit/src/org/eclipse/jgit/transport/LongMap.java) | 42
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/NB.java | 41
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java | 29
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/RefList.java | 5
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/RelativeDateFormatter.java | 7
-rw-r--r--  org.eclipse.jgit/src/org/eclipse/jgit/util/io/EolStreamTypeUtil.java | 16
172 files changed, 13236 insertions, 1931 deletions
diff --git a/org.eclipse.jgit/.settings/.api_filters b/org.eclipse.jgit/.settings/.api_filters
index 7c175e3905..e9511890fd 100644
--- a/org.eclipse.jgit/.settings/.api_filters
+++ b/org.eclipse.jgit/.settings/.api_filters
@@ -3,8 +3,8 @@
<resource path="META-INF/MANIFEST.MF">
<filter id="924844039">
<message_arguments>
- <message_argument value="4.8.1"/>
- <message_argument value="4.8.0"/>
+ <message_argument value="4.9.4"/>
+ <message_argument value="4.9.0"/>
</message_arguments>
</filter>
</resource>
@@ -18,39 +18,48 @@
<filter id="1141899266">
<message_arguments>
<message_argument value="4.5"/>
- <message_argument value="4.8"/>
+ <message_argument value="4.9"/>
<message_argument value="CONFIG_KEY_SUPPORTSATOMICFILECREATION"/>
</message_arguments>
</filter>
</resource>
<resource path="src/org/eclipse/jgit/lib/Constants.java" type="org.eclipse.jgit.lib.Constants">
- <filter comment="LOCK_SUFFIX was backported to 4.7.3" id="1141899266">
+ <filter id="1141899266">
<message_arguments>
<message_argument value="4.7"/>
- <message_argument value="4.8"/>
+ <message_argument value="4.9"/>
<message_argument value="LOCK_SUFFIX"/>
</message_arguments>
</filter>
</resource>
+ <resource path="src/org/eclipse/jgit/merge/ResolveMerger.java" type="org.eclipse.jgit.merge.ResolveMerger">
+ <filter id="1141899266">
+ <message_arguments>
+ <message_argument value="3.5"/>
+ <message_argument value="4.9"/>
+ <message_argument value="processEntry(CanonicalTreeParser, CanonicalTreeParser, CanonicalTreeParser, DirCacheBuildIterator, WorkingTreeIterator, boolean)"/>
+ </message_arguments>
+ </filter>
+ </resource>
<resource path="src/org/eclipse/jgit/util/FS.java" type="org.eclipse.jgit.util.FS">
<filter id="1141899266">
<message_arguments>
<message_argument value="4.5"/>
- <message_argument value="4.8"/>
+ <message_argument value="4.9"/>
<message_argument value="createNewFile(File)"/>
</message_arguments>
</filter>
<filter id="1141899266">
<message_arguments>
<message_argument value="4.5"/>
- <message_argument value="4.8"/>
+ <message_argument value="4.9"/>
<message_argument value="supportsAtomicCreateNewFile()"/>
</message_arguments>
</filter>
<filter id="1141899266">
<message_arguments>
<message_argument value="4.7"/>
- <message_argument value="4.8"/>
+ <message_argument value="4.9"/>
<message_argument value="createNewFileAtomic(File)"/>
</message_arguments>
</filter>
@@ -59,7 +68,7 @@
<filter id="1141899266">
<message_arguments>
<message_argument value="4.7"/>
- <message_argument value="4.8"/>
+ <message_argument value="4.9"/>
<message_argument value="LockToken"/>
</message_arguments>
</filter>
diff --git a/org.eclipse.jgit/.settings/org.eclipse.jdt.ui.prefs b/org.eclipse.jgit/.settings/org.eclipse.jdt.ui.prefs
index c336cce6ed..fef3713825 100644
--- a/org.eclipse.jgit/.settings/org.eclipse.jdt.ui.prefs
+++ b/org.eclipse.jgit/.settings/org.eclipse.jdt.ui.prefs
@@ -9,21 +9,23 @@ org.eclipse.jdt.ui.staticondemandthreshold=99
org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8"?><templates/>
sp_cleanup.add_default_serial_version_id=true
sp_cleanup.add_generated_serial_version_id=false
-sp_cleanup.add_missing_annotations=false
+sp_cleanup.add_missing_annotations=true
sp_cleanup.add_missing_deprecated_annotations=true
sp_cleanup.add_missing_methods=false
sp_cleanup.add_missing_nls_tags=false
sp_cleanup.add_missing_override_annotations=true
-sp_cleanup.add_missing_override_annotations_interface_methods=false
+sp_cleanup.add_missing_override_annotations_interface_methods=true
sp_cleanup.add_serial_version_id=false
sp_cleanup.always_use_blocks=true
sp_cleanup.always_use_parentheses_in_expressions=false
sp_cleanup.always_use_this_for_non_static_field_access=false
sp_cleanup.always_use_this_for_non_static_method_access=false
+sp_cleanup.convert_functional_interfaces=false
sp_cleanup.convert_to_enhanced_for_loop=false
sp_cleanup.correct_indentation=false
sp_cleanup.format_source_code=true
sp_cleanup.format_source_code_changes_only=true
+sp_cleanup.insert_inferred_type_arguments=false
sp_cleanup.make_local_variable_final=false
sp_cleanup.make_parameters_final=false
sp_cleanup.make_private_fields_final=true
@@ -39,11 +41,12 @@ sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=
sp_cleanup.qualify_static_member_accesses_with_declaring_class=false
sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_type_arguments=true
sp_cleanup.remove_trailing_whitespaces=true
sp_cleanup.remove_trailing_whitespaces_all=true
sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
-sp_cleanup.remove_unnecessary_casts=false
-sp_cleanup.remove_unnecessary_nls_tags=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
sp_cleanup.remove_unused_imports=false
sp_cleanup.remove_unused_local_variables=false
sp_cleanup.remove_unused_private_fields=true
@@ -52,8 +55,10 @@ sp_cleanup.remove_unused_private_methods=true
sp_cleanup.remove_unused_private_types=true
sp_cleanup.sort_members=false
sp_cleanup.sort_members_all=false
+sp_cleanup.use_anonymous_class_creation=false
sp_cleanup.use_blocks=false
sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_lambda=false
sp_cleanup.use_parentheses_in_expressions=false
sp_cleanup.use_this_for_non_static_field_access=false
sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=true
diff --git a/org.eclipse.jgit/.settings/org.eclipse.pde.api.tools.prefs b/org.eclipse.jgit/.settings/org.eclipse.pde.api.tools.prefs
index cd148d9049..c0030ded71 100644
--- a/org.eclipse.jgit/.settings/org.eclipse.pde.api.tools.prefs
+++ b/org.eclipse.jgit/.settings/org.eclipse.pde.api.tools.prefs
@@ -1,4 +1,4 @@
-#Tue Oct 18 00:52:01 CEST 2011
+ANNOTATION_ELEMENT_TYPE_ADDED_FIELD=Error
ANNOTATION_ELEMENT_TYPE_ADDED_METHOD_WITHOUT_DEFAULT_VALUE=Error
ANNOTATION_ELEMENT_TYPE_CHANGED_TYPE_CONVERSION=Error
ANNOTATION_ELEMENT_TYPE_REMOVED_FIELD=Error
@@ -8,6 +8,10 @@ API_COMPONENT_ELEMENT_TYPE_REMOVED_API_TYPE=Error
API_COMPONENT_ELEMENT_TYPE_REMOVED_REEXPORTED_API_TYPE=Error
API_COMPONENT_ELEMENT_TYPE_REMOVED_REEXPORTED_TYPE=Error
API_COMPONENT_ELEMENT_TYPE_REMOVED_TYPE=Error
+API_USE_SCAN_FIELD_SEVERITY=Error
+API_USE_SCAN_METHOD_SEVERITY=Error
+API_USE_SCAN_TYPE_SEVERITY=Error
+CLASS_ELEMENT_TYPE_ADDED_FIELD=Error
CLASS_ELEMENT_TYPE_ADDED_METHOD=Error
CLASS_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
CLASS_ELEMENT_TYPE_ADDED_TYPE_PARAMETER=Error
@@ -47,6 +51,7 @@ ILLEGAL_IMPLEMENT=Warning
ILLEGAL_INSTANTIATE=Warning
ILLEGAL_OVERRIDE=Warning
ILLEGAL_REFERENCE=Warning
+INTERFACE_ELEMENT_TYPE_ADDED_DEFAULT_METHOD=Error
INTERFACE_ELEMENT_TYPE_ADDED_FIELD=Error
INTERFACE_ELEMENT_TYPE_ADDED_METHOD=Error
INTERFACE_ELEMENT_TYPE_ADDED_RESTRICTIONS=Error
@@ -58,6 +63,7 @@ INTERFACE_ELEMENT_TYPE_REMOVED_FIELD=Error
INTERFACE_ELEMENT_TYPE_REMOVED_METHOD=Error
INTERFACE_ELEMENT_TYPE_REMOVED_TYPE_MEMBER=Error
INTERFACE_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
+INVALID_ANNOTATION=Ignore
INVALID_JAVADOC_TAG=Ignore
INVALID_REFERENCE_IN_SYSTEM_LIBRARIES=Error
LEAK_EXTEND=Warning
@@ -75,6 +81,7 @@ METHOD_ELEMENT_TYPE_CHANGED_STATIC_TO_NON_STATIC=Error
METHOD_ELEMENT_TYPE_CHANGED_VARARGS_TO_ARRAY=Error
METHOD_ELEMENT_TYPE_REMOVED_ANNOTATION_DEFAULT_VALUE=Error
METHOD_ELEMENT_TYPE_REMOVED_TYPE_PARAMETER=Error
+MISSING_EE_DESCRIPTIONS=Warning
TYPE_PARAMETER_ELEMENT_TYPE_ADDED_CLASS_BOUND=Error
TYPE_PARAMETER_ELEMENT_TYPE_ADDED_INTERFACE_BOUND=Error
TYPE_PARAMETER_ELEMENT_TYPE_CHANGED_CLASS_BOUND=Error
@@ -83,10 +90,13 @@ TYPE_PARAMETER_ELEMENT_TYPE_REMOVED_CLASS_BOUND=Error
TYPE_PARAMETER_ELEMENT_TYPE_REMOVED_INTERFACE_BOUND=Error
UNUSED_PROBLEM_FILTERS=Warning
automatically_removed_unused_problem_filters=false
+changed_execution_env=Error
eclipse.preferences.version=1
incompatible_api_component_version=Error
incompatible_api_component_version_include_major_without_breaking_change=Disabled
incompatible_api_component_version_include_minor_without_api_change=Disabled
+incompatible_api_component_version_report_major_without_breaking_change=Warning
+incompatible_api_component_version_report_minor_without_api_change=Ignore
invalid_since_tag_version=Error
malformed_since_tag=Error
missing_since_tag=Error
diff --git a/org.eclipse.jgit/META-INF/MANIFEST.MF b/org.eclipse.jgit/META-INF/MANIFEST.MF
index 9b6a9f182a..2a82ec5ad6 100644
--- a/org.eclipse.jgit/META-INF/MANIFEST.MF
+++ b/org.eclipse.jgit/META-INF/MANIFEST.MF
@@ -2,12 +2,12 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: %plugin_name
Bundle-SymbolicName: org.eclipse.jgit
-Bundle-Version: 4.8.1.qualifier
+Bundle-Version: 4.9.5.qualifier
Bundle-Localization: plugin
Bundle-Vendor: %provider_name
Bundle-ActivationPolicy: lazy
-Export-Package: org.eclipse.jgit.annotations;version="4.8.1",
- org.eclipse.jgit.api;version="4.8.1";
+Export-Package: org.eclipse.jgit.annotations;version="4.9.5",
+ org.eclipse.jgit.api;version="4.9.5";
uses:="org.eclipse.jgit.revwalk,
org.eclipse.jgit.treewalk.filter,
org.eclipse.jgit.diff,
@@ -21,51 +21,52 @@ Export-Package: org.eclipse.jgit.annotations;version="4.8.1",
org.eclipse.jgit.submodule,
org.eclipse.jgit.transport,
org.eclipse.jgit.merge",
- org.eclipse.jgit.api.errors;version="4.8.1";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.errors",
- org.eclipse.jgit.attributes;version="4.8.1",
- org.eclipse.jgit.blame;version="4.8.1";
+ org.eclipse.jgit.api.errors;version="4.9.5";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.errors",
+ org.eclipse.jgit.attributes;version="4.9.5",
+ org.eclipse.jgit.blame;version="4.9.5";
uses:="org.eclipse.jgit.lib,
org.eclipse.jgit.revwalk,
org.eclipse.jgit.treewalk.filter,
org.eclipse.jgit.diff",
- org.eclipse.jgit.diff;version="4.8.1";
+ org.eclipse.jgit.diff;version="4.9.5";
uses:="org.eclipse.jgit.patch,
org.eclipse.jgit.lib,
org.eclipse.jgit.treewalk,
org.eclipse.jgit.revwalk,
org.eclipse.jgit.treewalk.filter,
org.eclipse.jgit.util",
- org.eclipse.jgit.dircache;version="4.8.1";
+ org.eclipse.jgit.dircache;version="4.9.5";
uses:="org.eclipse.jgit.lib,
org.eclipse.jgit.treewalk,
org.eclipse.jgit.util,
org.eclipse.jgit.events,
org.eclipse.jgit.attributes",
- org.eclipse.jgit.errors;version="4.8.1";
+ org.eclipse.jgit.errors;version="4.9.5";
uses:="org.eclipse.jgit.lib,
org.eclipse.jgit.internal.storage.pack,
org.eclipse.jgit.transport,
org.eclipse.jgit.dircache",
- org.eclipse.jgit.events;version="4.8.1";uses:="org.eclipse.jgit.lib",
- org.eclipse.jgit.fnmatch;version="4.8.1",
- org.eclipse.jgit.gitrepo;version="4.8.1";
+ org.eclipse.jgit.events;version="4.9.5";uses:="org.eclipse.jgit.lib",
+ org.eclipse.jgit.fnmatch;version="4.9.5",
+ org.eclipse.jgit.gitrepo;version="4.9.5";
uses:="org.eclipse.jgit.api,
org.eclipse.jgit.lib,
org.eclipse.jgit.revwalk,
org.xml.sax.helpers,
org.xml.sax",
- org.eclipse.jgit.gitrepo.internal;version="4.8.1";x-internal:=true,
- org.eclipse.jgit.hooks;version="4.8.1";uses:="org.eclipse.jgit.lib",
- org.eclipse.jgit.ignore;version="4.8.1",
- org.eclipse.jgit.ignore.internal;version="4.8.1";x-friends:="org.eclipse.jgit.test",
- org.eclipse.jgit.internal;version="4.8.1";x-friends:="org.eclipse.jgit.test,org.eclipse.jgit.http.test",
- org.eclipse.jgit.internal.ketch;version="4.8.1";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
- org.eclipse.jgit.internal.storage.dfs;version="4.8.1";
+ org.eclipse.jgit.gitrepo.internal;version="4.9.5";x-internal:=true,
+ org.eclipse.jgit.hooks;version="4.9.5";uses:="org.eclipse.jgit.lib",
+ org.eclipse.jgit.ignore;version="4.9.5",
+ org.eclipse.jgit.ignore.internal;version="4.9.5";x-friends:="org.eclipse.jgit.test",
+ org.eclipse.jgit.internal;version="4.9.5";x-friends:="org.eclipse.jgit.test,org.eclipse.jgit.http.test",
+ org.eclipse.jgit.internal.fsck;version="4.9.5";x-friends:="org.eclipse.jgit.test",
+ org.eclipse.jgit.internal.ketch;version="4.9.5";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
+ org.eclipse.jgit.internal.storage.dfs;version="4.9.5";
x-friends:="org.eclipse.jgit.test,
org.eclipse.jgit.http.server,
org.eclipse.jgit.http.test,
org.eclipse.jgit.lfs.test",
- org.eclipse.jgit.internal.storage.file;version="4.8.1";
+ org.eclipse.jgit.internal.storage.file;version="4.9.5";
x-friends:="org.eclipse.jgit.test,
org.eclipse.jgit.junit,
org.eclipse.jgit.junit.http,
@@ -73,9 +74,11 @@ Export-Package: org.eclipse.jgit.annotations;version="4.8.1",
org.eclipse.jgit.lfs,
org.eclipse.jgit.pgm,
org.eclipse.jgit.pgm.test",
- org.eclipse.jgit.internal.storage.pack;version="4.8.1";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
- org.eclipse.jgit.internal.storage.reftree;version="4.8.1";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
- org.eclipse.jgit.lib;version="4.8.1";
+ org.eclipse.jgit.internal.storage.io;version="4.9.5";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
+ org.eclipse.jgit.internal.storage.pack;version="4.9.5";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
+ org.eclipse.jgit.internal.storage.reftable;version="4.9.5";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
+ org.eclipse.jgit.internal.storage.reftree;version="4.9.5";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
+ org.eclipse.jgit.lib;version="4.9.5";
uses:="org.eclipse.jgit.revwalk,
org.eclipse.jgit.treewalk.filter,
org.eclipse.jgit.util,
@@ -85,33 +88,33 @@ Export-Package: org.eclipse.jgit.annotations;version="4.8.1",
org.eclipse.jgit.treewalk,
org.eclipse.jgit.transport,
org.eclipse.jgit.submodule",
- org.eclipse.jgit.lib.internal;version="4.8.1";x-internal:=true,
- org.eclipse.jgit.merge;version="4.8.1";
+ org.eclipse.jgit.lib.internal;version="4.9.5";x-internal:=true,
+ org.eclipse.jgit.merge;version="4.9.5";
uses:="org.eclipse.jgit.lib,
org.eclipse.jgit.treewalk,
org.eclipse.jgit.revwalk,
org.eclipse.jgit.diff,
org.eclipse.jgit.dircache,
org.eclipse.jgit.api",
- org.eclipse.jgit.nls;version="4.8.1",
- org.eclipse.jgit.notes;version="4.8.1";
+ org.eclipse.jgit.nls;version="4.9.5",
+ org.eclipse.jgit.notes;version="4.9.5";
uses:="org.eclipse.jgit.lib,
org.eclipse.jgit.treewalk,
org.eclipse.jgit.revwalk,
org.eclipse.jgit.merge",
- org.eclipse.jgit.patch;version="4.8.1";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.diff",
- org.eclipse.jgit.revplot;version="4.8.1";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.revwalk",
- org.eclipse.jgit.revwalk;version="4.8.1";
+ org.eclipse.jgit.patch;version="4.9.5";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.diff",
+ org.eclipse.jgit.revplot;version="4.9.5";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.revwalk",
+ org.eclipse.jgit.revwalk;version="4.9.5";
uses:="org.eclipse.jgit.lib,
org.eclipse.jgit.treewalk,
org.eclipse.jgit.treewalk.filter,
org.eclipse.jgit.diff,
org.eclipse.jgit.revwalk.filter",
- org.eclipse.jgit.revwalk.filter;version="4.8.1";uses:="org.eclipse.jgit.revwalk,org.eclipse.jgit.lib,org.eclipse.jgit.util",
- org.eclipse.jgit.storage.file;version="4.8.1";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.util",
- org.eclipse.jgit.storage.pack;version="4.8.1";uses:="org.eclipse.jgit.lib",
- org.eclipse.jgit.submodule;version="4.8.1";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.treewalk.filter,org.eclipse.jgit.treewalk",
- org.eclipse.jgit.transport;version="4.8.1";
+ org.eclipse.jgit.revwalk.filter;version="4.9.5";uses:="org.eclipse.jgit.revwalk,org.eclipse.jgit.lib,org.eclipse.jgit.util",
+ org.eclipse.jgit.storage.file;version="4.9.5";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.util",
+ org.eclipse.jgit.storage.pack;version="4.9.5";uses:="org.eclipse.jgit.lib",
+ org.eclipse.jgit.submodule;version="4.9.5";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.treewalk.filter,org.eclipse.jgit.treewalk",
+ org.eclipse.jgit.transport;version="4.9.5";
uses:="org.eclipse.jgit.transport.resolver,
org.eclipse.jgit.revwalk,
org.eclipse.jgit.internal.storage.pack,
@@ -123,24 +126,24 @@ Export-Package: org.eclipse.jgit.annotations;version="4.8.1",
org.eclipse.jgit.transport.http,
org.eclipse.jgit.errors,
org.eclipse.jgit.storage.pack",
- org.eclipse.jgit.transport.http;version="4.8.1";uses:="javax.net.ssl",
- org.eclipse.jgit.transport.resolver;version="4.8.1";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.transport",
- org.eclipse.jgit.treewalk;version="4.8.1";
+ org.eclipse.jgit.transport.http;version="4.9.5";uses:="javax.net.ssl",
+ org.eclipse.jgit.transport.resolver;version="4.9.5";uses:="org.eclipse.jgit.lib,org.eclipse.jgit.transport",
+ org.eclipse.jgit.treewalk;version="4.9.5";
uses:="org.eclipse.jgit.lib,
org.eclipse.jgit.revwalk,
org.eclipse.jgit.attributes,
org.eclipse.jgit.treewalk.filter,
org.eclipse.jgit.util,
org.eclipse.jgit.dircache",
- org.eclipse.jgit.treewalk.filter;version="4.8.1";uses:="org.eclipse.jgit.treewalk",
- org.eclipse.jgit.util;version="4.8.1";
+ org.eclipse.jgit.treewalk.filter;version="4.9.5";uses:="org.eclipse.jgit.treewalk",
+ org.eclipse.jgit.util;version="4.9.5";
uses:="org.eclipse.jgit.lib,
org.eclipse.jgit.transport.http,
org.eclipse.jgit.storage.file,
org.ietf.jgss",
- org.eclipse.jgit.util.io;version="4.8.1",
- org.eclipse.jgit.util.sha1;version="4.8.1",
- org.eclipse.jgit.util.time;version="4.8.1"
+ org.eclipse.jgit.util.io;version="4.9.5",
+ org.eclipse.jgit.util.sha1;version="4.9.5",
+ org.eclipse.jgit.util.time;version="4.9.5"
Bundle-RequiredExecutionEnvironment: JavaSE-1.8
Import-Package: com.googlecode.javaewah;version="[1.1.6,2.0.0)",
com.jcraft.jsch;version="[0.1.37,0.2.0)",
diff --git a/org.eclipse.jgit/META-INF/SOURCE-MANIFEST.MF b/org.eclipse.jgit/META-INF/SOURCE-MANIFEST.MF
index 63e6bbccbe..bc1f02a13b 100644
--- a/org.eclipse.jgit/META-INF/SOURCE-MANIFEST.MF
+++ b/org.eclipse.jgit/META-INF/SOURCE-MANIFEST.MF
@@ -3,5 +3,5 @@ Bundle-ManifestVersion: 2
Bundle-Name: org.eclipse.jgit - Sources
Bundle-SymbolicName: org.eclipse.jgit.source
Bundle-Vendor: Eclipse.org - JGit
-Bundle-Version: 4.8.1.qualifier
-Eclipse-SourceBundle: org.eclipse.jgit;version="4.8.1.qualifier";roots="."
+Bundle-Version: 4.9.5.qualifier
+Eclipse-SourceBundle: org.eclipse.jgit;version="4.9.5.qualifier";roots="."
diff --git a/org.eclipse.jgit/pom.xml b/org.eclipse.jgit/pom.xml
index 673f3ffc0d..cbd7afea2f 100644
--- a/org.eclipse.jgit/pom.xml
+++ b/org.eclipse.jgit/pom.xml
@@ -53,7 +53,7 @@
<parent>
<groupId>org.eclipse.jgit</groupId>
<artifactId>org.eclipse.jgit-parent</artifactId>
- <version>4.8.1-SNAPSHOT</version>
+ <version>4.9.5-SNAPSHOT</version>
</parent>
<artifactId>org.eclipse.jgit</artifactId>
@@ -206,8 +206,8 @@
<pluginManagement>
<plugins>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>findbugs-maven-plugin</artifactId>
+ <groupId>com.github.hazendaz.spotbugs</groupId>
+ <artifactId>spotbugs-maven-plugin</artifactId>
<configuration>
<excludeFilterFile>findBugs/FindBugsExcludeFilter.xml</excludeFilterFile>
</configuration>
diff --git a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
index c9aaa39945..f717cdcb98 100644
--- a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
+++ b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
@@ -63,7 +63,7 @@ cannotCreateDirectory=Cannot create directory {0}
cannotCreateHEAD=cannot create HEAD
cannotCreateIndexfile=Cannot create an index file with name {0}
cannotCreateTempDir=Cannot create a temp dir
-cannotDeleteCheckedOutBranch=Branch {0} is checked out and can not be deleted
+cannotDeleteCheckedOutBranch=Branch {0} is checked out and cannot be deleted
cannotDeleteFile=Cannot delete file: {0}
cannotDeleteObjectsPath=Cannot delete {0}/{1}: {2}
cannotDeleteStaleTrackingRef=Cannot delete stale tracking ref {0}
@@ -81,7 +81,7 @@ cannotListRefs=cannot list refs
cannotLock=Cannot lock {0}. Ensure that no other process has an open file handle on the lock file {0}.lock, then you may delete the lock file and retry.
cannotLockPackIn=Cannot lock pack in {0}
cannotMatchOnEmptyString=Cannot match on empty string.
-cannotMkdirObjectPath=Cannot mkdir {0}/{1}: {2}
+cannotMkdirObjectPath=Cannot create directory {0}/{1}: {2}
cannotMoveIndexTo=Cannot move index to {0}
cannotMovePackTo=Cannot move pack to {0}
cannotOpenService=cannot open {0}
@@ -89,6 +89,7 @@ cannotParseDate=The date specification "{0}" could not be parsed with the follow
cannotParseGitURIish=Cannot parse Git URI-ish
cannotPullOnARepoWithState=Cannot pull into a repository with state: {0}
cannotRead=Cannot read {0}
+cannotReadBackDelta=Cannot read delta type {0}
cannotReadBlob=Cannot read blob {0}
cannotReadCommit=Cannot read commit {0}
cannotReadFile=Cannot read file {0}
@@ -121,6 +122,7 @@ cloneNonEmptyDirectory=Destination path "{0}" already exists and is not an empty
closed=closed
closeLockTokenFailed=Closing LockToken ''{0}'' failed
collisionOn=Collision on {0}
+commandClosedStderrButDidntExit=Command {0} closed stderr stream but didn''t exit within timeout {1} seconds
commandRejectedByHook=Rejected by "{0}" hook.\n{1}
commandWasCalledInTheWrongState=Command {0} was called in the wrong state
commitAlreadyExists=exists {0}
@@ -211,12 +213,14 @@ corruptPack=Pack file {0} is corrupt, removing it from pack list
createBranchFailedUnknownReason=Create branch failed for unknown reason
createBranchUnexpectedResult=Create branch returned unexpected result {0}
createNewFileFailed=Could not create new file {0}
+createRequiresZeroOldId=Create requires old ID to be zero
credentialPassword=Password
credentialUsername=Username
daemonAlreadyRunning=Daemon already running
daysAgo={0} days ago
deleteBranchUnexpectedResult=Delete branch returned unexpected result {0}
deleteFileFailed=Could not delete file {0}
+deleteRequiresZeroNewId=Delete requires new ID to be zero
deleteTagUnexpectedResult=Delete tag returned unexpected result {0}
deletingNotSupported=Deleting {0} not supported.
destinationIsNotAWildcard=Destination is not a wildcard.
@@ -245,6 +249,7 @@ encryptionError=Encryption error: {0}
encryptionOnlyPBE=Encryption error: only password-based encryption (PBE) algorithms are supported.
endOfFileInEscape=End of file in escape
entryNotFoundByPath=Entry not found by path: {0}
+enumValueNotSupported0=Invalid value: {0}
enumValueNotSupported2=Invalid value: {0}.{1}={2}
enumValueNotSupported3=Invalid value: {0}.{1}.{2}={3}
enumValuesNotAvailable=Enumerated values of type {0} not available
@@ -260,6 +265,7 @@ exceptionCaughtDuringExecutionOfHook=Exception caught during execution of "{0}"
exceptionCaughtDuringExecutionOfAddCommand=Exception caught during execution of add command
exceptionCaughtDuringExecutionOfArchiveCommand=Exception caught during execution of archive command
exceptionCaughtDuringExecutionOfCherryPickCommand=Exception caught during execution of cherry-pick command. {0}
+exceptionCaughtDuringExecutionOfCommand=Exception caught during execution of command ''{0}'' in ''{1}'', return code ''{2}'', error message ''{3}''
exceptionCaughtDuringExecutionOfCommitCommand=Exception caught during execution of commit command
exceptionCaughtDuringExecutionOfFetchCommand=Exception caught during execution of fetch command
exceptionCaughtDuringExecutionOfLsRemoteCommand=Exception caught during execution of ls-remote command
@@ -270,7 +276,6 @@ exceptionCaughtDuringExecutionOfResetCommand=Exception caught during execution o
exceptionCaughtDuringExecutionOfRevertCommand=Exception caught during execution of revert command. {0}
exceptionCaughtDuringExecutionOfRmCommand=Exception caught during execution of rm command
exceptionCaughtDuringExecutionOfTagCommand=Exception caught during execution of tag command
-exceptionCaughtDuringExcecutionOfCommand=Exception caught during execution of command ''{0}'' in ''{1}'', return code ''{2}'', error message ''{3}''
exceptionHookExecutionInterrupted=Execution of "{0}" hook interrupted.
exceptionOccurredDuringAddingOfOptionToALogCommand=Exception occurred during adding of {0} as option to a Log command
exceptionOccurredDuringReadingOfGIT_DIR=Exception occurred during reading of $GIT_DIR/{0}. {1}
@@ -308,6 +313,8 @@ gcTooManyUnpruned=Too many loose, unpruneable objects after garbage collection.
gitmodulesNotFound=.gitmodules not found in tree.
headRequiredToStash=HEAD required to stash local changes
hoursAgo={0} hours ago
+httpConfigCannotNormalizeURL=Cannot normalize URL path {0}: too many .. segments
+httpConfigInvalidURL=Cannot parse URL from subsection http.{0} in git config; ignored.
hugeIndexesAreNotSupportedByJgitYet=Huge indexes are not supported by jgit, yet
hunkBelongsToAnotherFile=Hunk belongs to another file
hunkDisconnectedFromFile=Hunk disconnected from file
@@ -366,12 +373,17 @@ invalidPathContainsSeparator=Invalid path (contains separator ''{0}''): {1}
invalidPathPeriodAtEndWindows=Invalid path (period at end is ignored by Windows): {0}
invalidPathSpaceAtEndWindows=Invalid path (space at end is ignored by Windows): {0}
invalidPathReservedOnWindows=Invalid path (''{0}'' is reserved on Windows): {1}
+invalidRedirectLocation=Invalid redirect location {0} -> {1}
invalidReflogRevision=Invalid reflog revision: {0}
invalidRefName=Invalid ref name: {0}
+invalidReftableBlock=Invalid reftable block
+invalidReftableCRC=Invalid reftable CRC-32
+invalidReftableFile=Invalid reftable file
invalidRemote=Invalid remote: {0}
invalidRepositoryStateNoHead=Invalid repository --- cannot read HEAD
invalidShallowObject=invalid shallow object {0}, expected commit
invalidStageForPath=Invalid stage {0} for path {1}
+invalidSystemProperty=Invalid system property ''{0}'': ''{1}''; using default value {2}
invalidTagOption=Invalid tag option: {0}
invalidTimeout=Invalid timeout: {0}
invalidTimeUnitValue2=Invalid time unit value: {0}.{1}={2}
@@ -410,8 +422,11 @@ mergeRecursiveReturnedNoCommit=Merge returned no commit:\n Depth {0}\n Head one
mergeRecursiveTooManyMergeBasesFor = "More than {0} merge bases for:\n a {1}\n b {2} found:\n count {3}"
messageAndTaggerNotAllowedInUnannotatedTags = Unannotated tags cannot have a message or tagger
minutesAgo={0} minutes ago
+mismatchOffset=mismatch offset for object {0}
+mismatchCRC=mismatch CRC for object {0}
missingAccesskey=Missing accesskey.
missingConfigurationForKey=No value for key {0} found in configuration
+missingCRC=missing CRC for object {0}
missingDeltaBase=delta base
missingForwardImageInGITBinaryPatch=Missing forward-image in GIT binary patch
missingObject=Missing {0} {1}
@@ -429,6 +444,7 @@ need2Arguments=Need 2 arguments
needPackOut=need packOut
needsAtLeastOneEntry=Needs at least one entry
needsWorkdir=Needs workdir
+newIdMustNotBeNull=New ID must not be null
newlineInQuotesNotAllowed=Newline in quotes not allowed
noApplyInDelete=No apply in delete
noClosingBracket=No closing {0} found for {1} at index {2}.
@@ -462,6 +478,7 @@ objectNotFound=Object {0} not found.
objectNotFoundIn=Object {0} not found in {1}.
obtainingCommitsForCherryPick=Obtaining commits that need to be cherry-picked
offsetWrittenDeltaBaseForObjectNotFoundInAPack=Offset-written delta base for object not found in a pack
+oldIdMustNotBeNull=Expected old ID must not be null
onlyAlreadyUpToDateAndFastForwardMergesAreAvailable=only already-up-to-date and fast forward merges are available
onlyOneFetchSupported=Only one fetch supported
onlyOneOperationCallPerConnectionIsSupported=Only one operation call per connection is supported.
@@ -469,6 +486,7 @@ openFilesMustBeAtLeast1=Open files must be >= 1
openingConnection=Opening connection
operationCanceled=Operation {0} was canceled
outputHasAlreadyBeenStarted=Output has already been started.
+overflowedReftableBlock=Overflowed reftable block
packChecksumMismatch=Pack checksum mismatch detected for pack file {0}
packCorruptedWhileWritingToFilesystem=Pack corrupted while writing to filesystem
packDoesNotMatchIndex=Pack {0} does not match index
@@ -496,6 +514,7 @@ patchFormatException=Format error: {0}
pathIsNotInWorkingDir=Path is not in working dir
pathNotConfigured=Submodule path is not configured
peeledLineBeforeRef=Peeled line before ref.
+peeledRefIsRequired=Peeled ref is required.
peerDidNotSupplyACompleteObjectGraph=peer did not supply a complete object graph
personIdentEmailNonNull=E-mail address of PersonIdent must not be null.
personIdentNameNonNull=Name of PersonIdent must not be null.
@@ -524,10 +543,15 @@ receivePackObjectTooLarge2=Object too large ({0} bytes), rejecting the pack. Max
receivePackInvalidLimit=Illegal limit parameter value {0}
receivePackTooLarge=Pack exceeds the limit of {0} bytes, rejecting the pack
receivingObjects=Receiving objects
+redirectBlocked=Redirection blocked: redirect {0} -> {1} not allowed
+redirectHttp=URI ''{0}'': following HTTP redirect #{1} {2} -> {3}
+redirectLimitExceeded=Redirected more than {0} times; aborted at {1} -> {2}
+redirectLocationMissing=Invalid redirect: no redirect location for {0}
+redirectsOff=Cannot redirect because http.followRedirects is false (HTTP status {0})
refAlreadyExists=already exists
refAlreadyExists1=Ref {0} already exists
reflogEntryNotFound=Entry {0} not found in reflog for ''{1}''
-refNotResolved=Ref {0} can not be resolved
+refNotResolved=Ref {0} cannot be resolved
refUpdateReturnCodeWas=RefUpdate return code was: {0}
remoteConfigHasNoURIAssociated=Remote config "{0}" has no URIs associated
remoteDoesNotHaveSpec=Remote does not have {0} available for fetch.
@@ -571,7 +595,7 @@ secondsAgo={0} seconds ago
selectingCommits=Selecting commits
sequenceTooLargeForDiffAlgorithm=Sequence too large for difference algorithm.
serviceNotEnabledNoName=Service not enabled
-serviceNotPermitted={0} not permitted
+serviceNotPermitted={1} not permitted on ''{0}''
sha1CollisionDetected1=SHA-1 collision detected on {0}
shallowCommitsAlreadyInitialized=Shallow commits have already been initialized
shallowPacksRequireDepthWalk=Shallow packs require a DepthWalk
@@ -589,6 +613,15 @@ sourceIsNotAWildcard=Source is not a wildcard.
sourceRefDoesntResolveToAnyObject=Source ref {0} doesn''t resolve to any object.
sourceRefNotSpecifiedForRefspec=Source ref not specified for refspec: {0}
squashCommitNotUpdatingHEAD=Squash commit -- not updating HEAD
+sshUserNameError=Jsch error: failed to set SSH user name correctly to ''{0}''; using ''{1}'' picked up from SSH config file.
+sslFailureExceptionMessage=Secure connection to {0} could not be stablished because of SSL problems
+sslFailureInfo=A secure connection to {0}\ncould not be established because the server''s certificate could not be validated.
+sslFailureCause=SSL reported: {0}
+sslFailureTrustExplanation=Do you want to skip SSL verification for this server?
+sslTrustAlways=Always skip SSL verification for this server from now on
+sslTrustForRepo=Skip SSL verification for git operations for repository {0}
+sslTrustNow=Skip SSL verification for this single git operation
+sslVerifyCannotSave=Could not save setting for http.sslVerify
staleRevFlagsOn=Stale RevFlags on {0}
startingReadStageWithoutWrittenRequestDataPendingIsNotSupported=Starting read stage without written request data pending is not supported
stashApplyConflict=Applying stashed changes resulted in a conflict
@@ -600,6 +633,7 @@ stashCommitIncorrectNumberOfParents=Stashed commit ''{0}'' does have {1} parent
stashDropDeleteRefFailed=Deleting stash reference failed with result: {0}
stashDropFailed=Dropping stashed commit failed
stashDropMissingReflog=Stash reflog does not contain entry ''{0}''
+stashDropNotSupported=Dropping stash not supported on this ref backend
stashFailed=Stashing local changes did not successfully complete
stashResolveFailed=Reference ''{0}'' does not resolve to stashed commit
statelessRPCRequiresOptionToBeEnabled=stateless RPC requires {0} to be enabled
@@ -616,6 +650,7 @@ tagAlreadyExists=tag ''{0}'' already exists
tagNameInvalid=tag name {0} is invalid
tagOnRepoWithoutHEADCurrentlyNotSupported=Tag on repository without HEAD currently not supported
theFactoryMustNotBeNull=The factory must not be null
+threadInterruptedWhileRunning="Current thread interrupted while running {0}"
timeIsUncertain=Time is uncertain
timerAlreadyTerminated=Timer already terminated
tooManyCommands=Too many commands
@@ -653,10 +688,12 @@ unableToRemovePath=Unable to remove path ''{0}''
unableToStore=Unable to store {0}.
unableToWrite=Unable to write {0}
unauthorized=Unauthorized
+underflowedReftableBlock=Underflowed reftable block
unencodeableFile=Unencodable file: {0}
unexpectedCompareResult=Unexpected metadata comparison result: {0}
unexpectedEndOfConfigFile=Unexpected end of config file
unexpectedEndOfInput=Unexpected end of input
+unexpectedEofInPack=Unexpected EOF in partially created pack
unexpectedHunkTrailer=Unexpected hunk trailer
unexpectedOddResult=odd: {0} + {1} - {2}
unexpectedRefReport={0}: unexpected ref report: {1}
@@ -667,6 +704,7 @@ unknownDIRCVersion=Unknown DIRC version {0}
unknownHost=unknown host
unknownIndexVersionOrCorruptIndex=Unknown index version (or corrupt index): {0}
unknownObject=unknown object
+unknownObjectInIndex=unknown object {0} found in index but not in pack file
unknownObjectType=Unknown object type {0}.
unknownObjectType2=unknown
unknownRepositoryFormat=Unknown repository format
@@ -689,7 +727,9 @@ unsupportedMark=Mark not supported
unsupportedOperationNotAddAtEnd=Not add-at-end: {0}
unsupportedPackIndexVersion=Unsupported pack index version {0}
unsupportedPackVersion=Unsupported pack version {0}.
+unsupportedReftableVersion=Unsupported reftable version {0}.
unsupportedRepositoryDescription=Repository description not supported
+updateRequiresOldIdAndNewId=Update requires both old ID and new ID to be nonzero
updatingHeadFailed=Updating HEAD failed
updatingReferences=Updating references
updatingRefFailed=Updating the ref {0} to {1} failed. ReturnCode from RefUpdate.update() was {2}
diff --git a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/storage/dfs/DfsText.properties b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/storage/dfs/DfsText.properties
index 4bbc4cc71a..2c4bd06a33 100644
--- a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/storage/dfs/DfsText.properties
+++ b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/storage/dfs/DfsText.properties
@@ -1,6 +1,4 @@
cannotReadIndex=Cannot read index {0}
-cannotReadBackDelta=Cannot read delta type {0}
shortReadOfBlock=Short read of block at {0} in pack {1}; expected {2} bytes, received only {3}
shortReadOfIndex=Short read of index {0}
-unexpectedEofInPack=Unexpected EOF in partially created pack
willNotStoreEmptyPack=Cannot store empty pack
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java
index 21d62837e9..6b20da3ede 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java
@@ -47,8 +47,10 @@ import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.EnumSet;
+import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
+import java.util.Set;
import org.eclipse.jgit.api.CheckoutResult.Status;
import org.eclipse.jgit.api.errors.CheckoutConflictException;
@@ -66,6 +68,7 @@ import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.AmbiguousObjectException;
import org.eclipse.jgit.errors.UnmergedPathException;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
@@ -175,6 +178,8 @@ public class CheckoutCommand extends GitCommand<Ref> {
private boolean checkoutAllPaths;
+ private Set<String> actuallyModifiedPaths;
+
/**
* @param repo
*/
@@ -410,7 +415,8 @@ public class CheckoutCommand extends GitCommand<Ref> {
}
/**
- * Checkout paths into index and working directory
+ * Checkout paths into index and working directory, firing a
+ * {@link WorkingTreeModifiedEvent} if the working tree was modified.
*
* @return this instance
* @throws IOException
@@ -418,6 +424,7 @@ public class CheckoutCommand extends GitCommand<Ref> {
*/
protected CheckoutCommand checkoutPaths() throws IOException,
RefNotFoundException {
+ actuallyModifiedPaths = new HashSet<>();
DirCache dc = repo.lockDirCache();
try (RevWalk revWalk = new RevWalk(repo);
TreeWalk treeWalk = new TreeWalk(repo,
@@ -432,7 +439,16 @@ public class CheckoutCommand extends GitCommand<Ref> {
checkoutPathsFromCommit(treeWalk, dc, commit);
}
} finally {
- dc.unlock();
+ try {
+ dc.unlock();
+ } finally {
+ WorkingTreeModifiedEvent event = new WorkingTreeModifiedEvent(
+ actuallyModifiedPaths, null);
+ actuallyModifiedPaths = null;
+ if (!event.isEmpty()) {
+ repo.fireEvent(event);
+ }
+ }
}
return this;
}
@@ -461,9 +477,11 @@ public class CheckoutCommand extends GitCommand<Ref> {
int stage = ent.getStage();
if (stage > DirCacheEntry.STAGE_0) {
if (checkoutStage != null) {
- if (stage == checkoutStage.number)
+ if (stage == checkoutStage.number) {
checkoutPath(ent, r, new CheckoutMetadata(
eolStreamType, filterCommand));
+ actuallyModifiedPaths.add(path);
+ }
} else {
UnmergedPathException e = new UnmergedPathException(
ent);
@@ -472,6 +490,7 @@ public class CheckoutCommand extends GitCommand<Ref> {
} else {
checkoutPath(ent, r, new CheckoutMetadata(eolStreamType,
filterCommand));
+ actuallyModifiedPaths.add(path);
}
}
});
@@ -492,13 +511,15 @@ public class CheckoutCommand extends GitCommand<Ref> {
final EolStreamType eolStreamType = treeWalk.getEolStreamType();
final String filterCommand = treeWalk
.getFilterCommand(Constants.ATTR_FILTER_TYPE_SMUDGE);
- editor.add(new PathEdit(treeWalk.getPathString()) {
+ final String path = treeWalk.getPathString();
+ editor.add(new PathEdit(path) {
@Override
public void apply(DirCacheEntry ent) {
ent.setObjectId(blobId);
ent.setFileMode(mode);
checkoutPath(ent, r,
new CheckoutMetadata(eolStreamType, filterCommand));
+ actuallyModifiedPaths.add(path);
}
});
}
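
Editor's note (not part of the diff): the CheckoutCommand hunks above collect every path that was actually written and, once the index lock is released, publish them as a WorkingTreeModifiedEvent. A minimal sketch of how a client could observe these notifications follows. Repository#getListenerList() and the generic ListenerList#addListener(Class, listener) are existing JGit APIs; the callback name onWorkingTreeModified and the getModified()/getDeleted() accessors are assumed from the constructor usage shown in this diff.

    // Illustrative sketch only, under the assumptions stated above.
    import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
    import org.eclipse.jgit.events.WorkingTreeModifiedListener;
    import org.eclipse.jgit.lib.Repository;

    class WorkingTreeLogger {
        static void register(Repository repo) {
            // Generic listener registration; the typed event is dispatched by
            // repo.fireEvent(...) as seen in checkoutPaths() above.
            repo.getListenerList().addListener(WorkingTreeModifiedListener.class,
                    new WorkingTreeModifiedListener() {
                        @Override
                        public void onWorkingTreeModified(
                                WorkingTreeModifiedEvent event) {
                            // Assumed accessors: paths checked out vs. deleted.
                            System.out.println("modified: " + event.getModified());
                            System.out.println("deleted: " + event.getDeleted());
                        }
                    });
        }
    }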
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CleanCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CleanCommand.java
index c58efb1478..e41a03b81a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CleanCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CleanCommand.java
@@ -54,6 +54,7 @@ import java.util.TreeSet;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.JGitInternalException;
import org.eclipse.jgit.errors.NoWorkTreeException;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FileUtils;
@@ -135,6 +136,10 @@ public class CleanCommand extends GitCommand<Set<String>> {
}
} catch (IOException e) {
throw new JGitInternalException(e.getMessage(), e);
+ } finally {
+ if (!files.isEmpty()) {
+ repo.fireEvent(new WorkingTreeModifiedEvent(null, files));
+ }
}
return files;
}
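
Editor's note (not part of the diff): the CleanCommand change above broadcasts the removed paths as the "deleted" half of a WorkingTreeModifiedEvent, even when an IOException interrupts the clean. A minimal caller-side sketch using the existing CleanCommand API (the Git handle is assumed to come from the caller):

    // Illustrative sketch only.
    import java.util.Set;
    import org.eclipse.jgit.api.Git;
    import org.eclipse.jgit.api.errors.GitAPIException;

    class CleanExample {
        static Set<String> cleanWorkingTree(Git git) throws GitAPIException {
            // Returns the deleted paths; after this change the same set is also
            // delivered to working-tree listeners via WorkingTreeModifiedEvent.
            return git.clean()
                    .setCleanDirectories(true) // also remove untracked directories
                    .call();
        }
    }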
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CloneCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CloneCommand.java
index d450c64679..bde8e63d1d 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CloneCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CloneCommand.java
@@ -50,6 +50,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
+import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidRemoteException;
import org.eclipse.jgit.api.errors.JGitInternalException;
@@ -157,6 +158,16 @@ public class CloneCommand extends TransportCommand<CloneCommand, Git> {
}
/**
+ * Get the git directory. This is primarily used for tests.
+ *
+ * @return the git directory
+ */
+ @Nullable
+ File getDirectory() {
+ return directory;
+ }
+
+ /**
* Executes the {@code Clone} command.
*
* The Git instance returned by this command needs to be closed by the
@@ -232,9 +243,9 @@ public class CloneCommand extends TransportCommand<CloneCommand, Git> {
return false;
}
- private void verifyDirectories(URIish u) {
+ void verifyDirectories(URIish u) {
if (directory == null && gitDir == null) {
- directory = new File(u.getHumanishName(), Constants.DOT_GIT);
+ directory = new File(u.getHumanishName() + (bare ? Constants.DOT_GIT_EXT : "")); //$NON-NLS-1$
}
directoryExistsInitially = directory != null && directory.exists();
gitDirExistsInitially = gitDir != null && gitDir.exists();
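
Editor's note (not part of the diff): with the verifyDirectories() change above, a clone that sets neither directory nor gitDir derives its target from the URI's "humanish" name and appends ".git" only for bare clones. A minimal sketch under that reading of the diff (the URI is a placeholder):

    // Illustrative sketch only; placeholder URI.
    import org.eclipse.jgit.api.Git;
    import org.eclipse.jgit.api.errors.GitAPIException;

    class BareCloneExample {
        static void cloneBare() throws GitAPIException {
            // With no setDirectory(), a bare clone now lands in ./project.git
            // rather than ./project/.git (the old default shown above).
            try (Git git = Git.cloneRepository()
                    .setURI("https://example.com/project.git")
                    .setBare(true)
                    .call()) {
                System.out.println(git.getRepository().getDirectory());
            }
        }
    }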
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
index 274ece6dca..e29fc05463 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
@@ -482,7 +482,7 @@ public class CommitCommand extends GitCommand<RevCommit> {
JGitText.get().entryNotFoundByPath, only.get(i)));
// there must be at least one change
- if (emptyCommit)
+ if (emptyCommit && !allowEmpty.booleanValue())
// Would like to throw a EmptyCommitException. But this would break the API
// TODO(ch): Change this in the next release
throw new JGitInternalException(JGitText.get().emptyCommit);
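
Editor's note (not part of the diff): the one-line CommitCommand change above makes the empty-commit check honour allowEmpty, so a path-restricted commit with no changes is rejected only when empty commits were not explicitly allowed. A minimal sketch using the existing setOnly/setAllowEmpty API (path and message are placeholders):

    // Illustrative sketch only; placeholder path and message.
    import org.eclipse.jgit.api.Git;
    import org.eclipse.jgit.api.errors.GitAPIException;
    import org.eclipse.jgit.revwalk.RevCommit;

    class AllowEmptyCommitExample {
        static RevCommit commitEvenIfUnchanged(Git git) throws GitAPIException {
            // Before this change the check threw even when allowEmpty was set.
            return git.commit()
                    .setOnly("README.md")
                    .setAllowEmpty(true)
                    .setMessage("no-op commit")
                    .call();
        }
    }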
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/DescribeCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/DescribeCommand.java
index 389c511e1c..68b1bd9e29 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/DescribeCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/DescribeCommand.java
@@ -47,17 +47,22 @@ import static org.eclipse.jgit.lib.Constants.R_TAGS;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.JGitInternalException;
import org.eclipse.jgit.api.errors.RefNotFoundException;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
+import org.eclipse.jgit.errors.InvalidPatternException;
import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.ignore.internal.IMatcher;
+import org.eclipse.jgit.ignore.internal.PathMatcher;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
@@ -94,6 +99,11 @@ public class DescribeCommand extends GitCommand<String> {
private boolean longDesc;
/**
+ * Pattern matchers to be applied to tags under consideration
+ */
+ private List<IMatcher> matchers = new ArrayList<>();
+
+ /**
*
* @param repo
*/
@@ -170,6 +180,55 @@ public class DescribeCommand extends GitCommand<String> {
}
/**
+ * Sets one or more {@code glob(7)} patterns that tags must match to be considered.
+ * If multiple patterns are provided, tags need only match one of them.
+ *
+ * @param patterns the {@code glob(7)} pattern or patterns
+ * @return {@code this}
+ * @throws InvalidPatternException if the pattern passed in was invalid.
+ *
+ * @see <a
+ * href="https://www.kernel.org/pub/software/scm/git/docs/git-describe.html"
+ * >Git documentation about describe</a>
+ * @since 4.9
+ */
+ public DescribeCommand setMatch(String... patterns) throws InvalidPatternException {
+ for (String p : patterns) {
+ matchers.add(PathMatcher.createPathMatcher(p, null, false));
+ }
+ return this;
+ }
+
+ private Optional<Ref> getBestMatch(List<Ref> tags) {
+ if (tags == null || tags.size() == 0) {
+ return Optional.empty();
+ } else if (matchers.size() == 0) {
+ // No matchers, simply return the first tag entry
+ return Optional.of(tags.get(0));
+ } else {
+ // Find the first tag that matches one of the matchers; precedence according to matcher definition order
+ for (IMatcher matcher : matchers) {
+ Optional<Ref> match = tags.stream()
+ .filter(tag -> matcher.matches(tag.getName(), false,
+ false))
+ .findFirst();
+ if (match.isPresent()) {
+ return match;
+ }
+ }
+ return Optional.empty();
+ }
+ }
+
+ private ObjectId getObjectIdFromRef(Ref r) {
+ ObjectId key = repo.peel(r).getPeeledObjectId();
+ if (key == null) {
+ key = r.getObjectId();
+ }
+ return key;
+ }
+
+ /**
* Describes the specified commit. Target defaults to HEAD if no commit was
* set explicitly.
*
@@ -189,14 +248,9 @@ public class DescribeCommand extends GitCommand<String> {
if (target == null)
setTarget(Constants.HEAD);
- Map<ObjectId, Ref> tags = new HashMap<>();
-
- for (Ref r : repo.getRefDatabase().getRefs(R_TAGS).values()) {
- ObjectId key = repo.peel(r).getPeeledObjectId();
- if (key == null)
- key = r.getObjectId();
- tags.put(key, r);
- }
+ Collection<Ref> tagList = repo.getRefDatabase().getRefs(R_TAGS).values();
+ Map<ObjectId, List<Ref>> tags = tagList.stream()
+ .collect(Collectors.groupingBy(this::getObjectIdFromRef));
// combined flags of all the candidate instances
final RevFlagSet allFlags = new RevFlagSet();
@@ -242,11 +296,11 @@ public class DescribeCommand extends GitCommand<String> {
}
List<Candidate> candidates = new ArrayList<>(); // all the candidates we find
- // is the target already pointing to a tag? if so, we are done!
- Ref lucky = tags.get(target);
- if (lucky != null) {
- return longDesc ? longDescription(lucky, 0, target) : lucky
- .getName().substring(R_TAGS.length());
+ // is the target already pointing to a suitable tag? if so, we are done!
+ Optional<Ref> bestMatch = getBestMatch(tags.get(target));
+ if (bestMatch.isPresent()) {
+ return longDesc ? longDescription(bestMatch.get(), 0, target) :
+ bestMatch.get().getName().substring(R_TAGS.length());
}
w.markStart(target);
@@ -258,9 +312,9 @@ public class DescribeCommand extends GitCommand<String> {
// if a tag already dominates this commit,
// then there's no point in picking a tag on this commit
// since the one that dominates it is always more preferable
- Ref t = tags.get(c);
- if (t != null) {
- Candidate cd = new Candidate(c, t);
+ bestMatch = getBestMatch(tags.get(c));
+ if (bestMatch.isPresent()) {
+ Candidate cd = new Candidate(c, bestMatch.get());
candidates.add(cd);
cd.depth = seen;
}
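
A minimal usage sketch of the new setMatch() API added above, assuming an existing repository at a hypothetical path whose tags follow a "v*" naming scheme:

    import java.io.File;
    import org.eclipse.jgit.api.Git;

    public class DescribeMatchExample {
        public static void main(String[] args) throws Exception {
            // Sketch only: repository path and tag pattern are hypothetical.
            try (Git git = Git.open(new File("/tmp/repo"))) {
                String description = git.describe()
                        .setTarget("HEAD")
                        .setMatch("v*")   // consider only tags matching the glob
                        .setLong(true)
                        .call();
                System.out.println(description); // null if no matching tag is found
            }
        }
    }
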
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/FetchCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/FetchCommand.java
index 785c20c8af..5270283edd 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/FetchCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/FetchCommand.java
@@ -42,12 +42,16 @@
*/
package org.eclipse.jgit.api;
+import static java.util.stream.Collectors.toList;
+
import java.io.IOException;
import java.net.URISyntaxException;
import java.text.MessageFormat;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
+import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidConfigurationException;
import org.eclipse.jgit.api.errors.InvalidRemoteException;
@@ -191,6 +195,7 @@ public class FetchCommand extends TransportCommand<FetchCommand, FetchResult> {
.setThin(thin).setRefSpecs(refSpecs)
.setDryRun(dryRun)
.setRecurseSubmodules(recurseMode);
+ configure(f);
if (callback != null) {
callback.fetchingSubmodule(walk.getPath());
}
@@ -258,11 +263,19 @@ public class FetchCommand extends TransportCommand<FetchCommand, FetchResult> {
* Set the mode to be used for recursing into submodules.
*
* @param recurse
+ * corresponds to the
+ * --recurse-submodules/--no-recurse-submodules options. If
+ * {@code null} use the value of the
+ * {@code submodule.name.fetchRecurseSubmodules} option
+ * configured per submodule. If not specified there, use the
+ * value of the {@code fetch.recurseSubmodules} option configured
+ * in git config. If not configured in either, "on-demand" is the
+ * built-in default.
* @return {@code this}
* @since 4.7
*/
public FetchCommand setRecurseSubmodules(
- FetchRecurseSubmodulesMode recurse) {
+ @Nullable FetchRecurseSubmodulesMode recurse) {
checkCallable();
submoduleRecurseMode = recurse;
return this;
@@ -382,13 +395,21 @@ public class FetchCommand extends TransportCommand<FetchCommand, FetchResult> {
*
* @param specs
* @return {@code this}
+ * @since 4.9
+ */
+ public FetchCommand setRefSpecs(String... specs) {
+ return setRefSpecs(
+ Arrays.stream(specs).map(RefSpec::new).collect(toList()));
+ }
+
+ /**
+ * The ref specs to be used in the fetch operation
+ *
+ * @param specs
+ * @return {@code this}
*/
public FetchCommand setRefSpecs(RefSpec... specs) {
- checkCallable();
- this.refSpecs.clear();
- for (RefSpec spec : specs)
- refSpecs.add(spec);
- return this;
+ return setRefSpecs(Arrays.asList(specs));
}
/**
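
The new String-based setRefSpecs() overload above accepts ref spec strings directly. A minimal sketch, with a hypothetical repository path and the conventional "origin" remote:

    import java.io.File;
    import org.eclipse.jgit.api.Git;
    import org.eclipse.jgit.transport.FetchResult;

    public class FetchRefSpecExample {
        public static void main(String[] args) throws Exception {
            // Sketch only: repository path and remote name are hypothetical.
            try (Git git = Git.open(new File("/tmp/repo"))) {
                FetchResult result = git.fetch()
                        .setRemote("origin")
                        .setRefSpecs("+refs/heads/*:refs/remotes/origin/*")
                        .call();
                System.out.println(result.getMessages());
            }
        }
    }
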
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java
index b5d9e8a6b6..75460fbd14 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java
@@ -53,6 +53,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
+import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.api.MergeResult.MergeStatus;
import org.eclipse.jgit.api.errors.CheckoutConflictException;
import org.eclipse.jgit.api.errors.ConcurrentRefUpdateException;
@@ -63,6 +64,7 @@ import org.eclipse.jgit.api.errors.NoHeadException;
import org.eclipse.jgit.api.errors.NoMessageException;
import org.eclipse.jgit.api.errors.WrongRepositoryStateException;
import org.eclipse.jgit.dircache.DirCacheCheckout;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Config.ConfigEnum;
@@ -354,6 +356,10 @@ public class MergeCommand extends GitCommand<MergeResult> {
.getMergeResults();
failingPaths = resolveMerger.getFailingPaths();
unmergedPaths = resolveMerger.getUnmergedPaths();
+ if (!resolveMerger.getModifiedFiles().isEmpty()) {
+ repo.fireEvent(new WorkingTreeModifiedEvent(
+ resolveMerger.getModifiedFiles(), null));
+ }
} else
noProblems = merger.merge(headCommit, srcCommit);
refLogMessage.append(": Merge made by "); //$NON-NLS-1$
@@ -554,12 +560,15 @@ public class MergeCommand extends GitCommand<MergeResult> {
* Sets the fast forward mode.
*
* @param fastForwardMode
- * corresponds to the --ff/--no-ff/--ff-only options. --ff is the
- * default option.
+ * corresponds to the --ff/--no-ff/--ff-only options. If
+ * {@code null} use the value of the {@code merge.ff} option
+ * configured in git config. If this option is not configured
+ * --ff is the built-in default.
* @return {@code this}
* @since 2.2
*/
- public MergeCommand setFastForward(FastForwardMode fastForwardMode) {
+ public MergeCommand setFastForward(
+ @Nullable FastForwardMode fastForwardMode) {
checkCallable();
this.fastForwardMode = fastForwardMode;
return this;
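
To illustrate the nullable fast-forward mode: passing an explicit mode overrides merge.ff, while passing null defers to the configuration. A minimal sketch with a hypothetical repository and branch name (the branch is assumed to exist):

    import java.io.File;
    import org.eclipse.jgit.api.Git;
    import org.eclipse.jgit.api.MergeCommand.FastForwardMode;
    import org.eclipse.jgit.api.MergeResult;
    import org.eclipse.jgit.lib.Ref;

    public class MergeNoFfExample {
        public static void main(String[] args) throws Exception {
            // Sketch only: repository path and branch name are hypothetical.
            try (Git git = Git.open(new File("/tmp/repo"))) {
                Ref side = git.getRepository().exactRef("refs/heads/side");
                MergeResult result = git.merge()
                        .include(side)
                        .setFastForward(FastForwardMode.NO_FF) // null would use merge.ff
                        .call();
                System.out.println(result.getMergeStatus());
            }
        }
    }
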
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/PullCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/PullCommand.java
index 9c5ae432e2..aa97996dfc 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/PullCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/PullCommand.java
@@ -47,6 +47,9 @@ package org.eclipse.jgit.api;
import java.io.IOException;
import java.text.MessageFormat;
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.api.MergeCommand.FastForwardMode;
+import org.eclipse.jgit.api.MergeCommand.FastForwardMode.Merge;
import org.eclipse.jgit.api.RebaseCommand.Operation;
import org.eclipse.jgit.api.errors.CanceledException;
import org.eclipse.jgit.api.errors.DetachedHeadException;
@@ -96,6 +99,8 @@ public class PullCommand extends TransportCommand<PullCommand, PullResult> {
private TagOpt tagOption;
+ private FastForwardMode fastForwardMode;
+
private FetchRecurseSubmodulesMode submoduleRecurseMode = null;
/**
@@ -347,10 +352,9 @@ public class PullCommand extends TransportCommand<PullCommand, PullResult> {
result = new PullResult(fetchRes, remote, rebaseRes);
} else {
MergeCommand merge = new MergeCommand(repo);
- merge.include(upstreamName, commitToMerge);
- merge.setStrategy(strategy);
- merge.setProgressMonitor(monitor);
- MergeResult mergeRes = merge.call();
+ MergeResult mergeRes = merge.include(upstreamName, commitToMerge)
+ .setStrategy(strategy).setProgressMonitor(monitor)
+ .setFastForward(getFastForwardMode()).call();
monitor.update(1);
result = new PullResult(fetchRes, remote, mergeRes);
}
@@ -433,14 +437,36 @@ public class PullCommand extends TransportCommand<PullCommand, PullResult> {
}
/**
+ * Sets the fast forward mode. It is used if pull is configured to do a
+ * merge as opposed to a rebase. If non-{@code null}, it takes precedence
+ * over the fast-forward mode configured in git config.
+ *
+ * @param fastForwardMode
+ * corresponds to the --ff/--no-ff/--ff-only options. If
+ * {@code null} use the value of {@code pull.ff} configured in
+ * git config. If {@code pull.ff} is not configured fall back to
+ * the value of {@code merge.ff}. If {@code merge.ff} is not
+ * configured --ff is the built-in default.
+ * @return {@code this}
+ * @since 4.9
+ */
+ public PullCommand setFastForward(
+ @Nullable FastForwardMode fastForwardMode) {
+ checkCallable();
+ this.fastForwardMode = fastForwardMode;
+ return this;
+ }
+
+ /**
* Set the mode to be used for recursing into submodules.
*
* @param recurse
* @return {@code this}
* @since 4.7
+ * @see FetchCommand#setRecurseSubmodules(FetchRecurseSubmodulesMode)
*/
public PullCommand setRecurseSubmodules(
- FetchRecurseSubmodulesMode recurse) {
+ @Nullable FetchRecurseSubmodulesMode recurse) {
this.submoduleRecurseMode = recurse;
return this;
}
@@ -470,4 +496,15 @@ public class PullCommand extends TransportCommand<PullCommand, PullResult> {
}
return mode;
}
+
+ private FastForwardMode getFastForwardMode() {
+ if (fastForwardMode != null) {
+ return fastForwardMode;
+ }
+ Config config = repo.getConfig();
+ Merge ffMode = config.getEnum(Merge.values(),
+ ConfigConstants.CONFIG_PULL_SECTION, null,
+ ConfigConstants.CONFIG_KEY_FF, null);
+ return ffMode != null ? FastForwardMode.valueOf(ffMode) : null;
+ }
}
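
A minimal sketch of the new PullCommand#setFastForward() added above, assuming a hypothetical repository configured with an "origin" remote:

    import java.io.File;
    import org.eclipse.jgit.api.Git;
    import org.eclipse.jgit.api.MergeCommand.FastForwardMode;
    import org.eclipse.jgit.api.PullResult;

    public class PullFfOnlyExample {
        public static void main(String[] args) throws Exception {
            // Sketch only: repository path and remote name are hypothetical.
            try (Git git = Git.open(new File("/tmp/repo"))) {
                PullResult result = git.pull()
                        .setRemote("origin")
                        .setFastForward(FastForwardMode.FF_ONLY) // overrides pull.ff/merge.ff
                        .call();
                System.out.println(result.isSuccessful());
            }
        }
    }
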
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java
index 850ff49695..955c50b376 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/RebaseCommand.java
@@ -425,6 +425,7 @@ public class RebaseCommand extends GitCommand<RebaseResult> {
refUpdate.setNewObjectId(commitId);
refUpdate.setRefLogIdent(refLogIdent);
refUpdate.setRefLogMessage(refLogMessage, false);
+ refUpdate.setForceRefLog(true);
if (currentRef != null)
refUpdate.setExpectedOldObjectId(currentRef.getObjectId());
else
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java
index 04caa0f159..394bea5b62 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java
@@ -109,4 +109,4 @@ public class ReflogCommand extends GitCommand<Collection<ReflogEntry>> {
}
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java
index 9e2cf31100..48c23f59c2 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java
@@ -44,8 +44,10 @@ package org.eclipse.jgit.api;
import java.io.File;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
+import java.util.List;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.JGitInternalException;
@@ -53,6 +55,7 @@ import org.eclipse.jgit.api.errors.NoFilepatternException;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheBuildIterator;
import org.eclipse.jgit.dircache.DirCacheBuilder;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.FileMode;
@@ -145,6 +148,7 @@ public class RmCommand extends GitCommand<DirCache> {
checkCallable();
DirCache dc = null;
+ List<String> actuallyDeletedFiles = new ArrayList<>();
try (final TreeWalk tw = new TreeWalk(repo)) {
dc = repo.lockDirCache();
DirCacheBuilder builder = dc.builder();
@@ -157,11 +161,14 @@ public class RmCommand extends GitCommand<DirCache> {
if (!cached) {
final FileMode mode = tw.getFileMode(0);
if (mode.getObjectType() == Constants.OBJ_BLOB) {
+ String relativePath = tw.getPathString();
final File path = new File(repo.getWorkTree(),
- tw.getPathString());
+ relativePath);
// Deleting a blob is simply a matter of removing
// the file or symlink named by the tree entry.
- delete(path);
+ if (delete(path)) {
+ actuallyDeletedFiles.add(relativePath);
+ }
}
}
}
@@ -171,16 +178,28 @@ public class RmCommand extends GitCommand<DirCache> {
throw new JGitInternalException(
JGitText.get().exceptionCaughtDuringExecutionOfRmCommand, e);
} finally {
- if (dc != null)
- dc.unlock();
+ try {
+ if (dc != null) {
+ dc.unlock();
+ }
+ } finally {
+ if (!actuallyDeletedFiles.isEmpty()) {
+ repo.fireEvent(new WorkingTreeModifiedEvent(null,
+ actuallyDeletedFiles));
+ }
+ }
}
return dc;
}
- private void delete(File p) {
- while (p != null && !p.equals(repo.getWorkTree()) && p.delete())
+ private boolean delete(File p) {
+ boolean deleted = false;
+ while (p != null && !p.equals(repo.getWorkTree()) && p.delete()) {
+ deleted = true;
p = p.getParentFile();
+ }
+ return deleted;
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashApplyCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashApplyCommand.java
index 10ec2a6a5a..b56fb2519b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashApplyCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashApplyCommand.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012, GitHub Inc.
+ * Copyright (C) 2012, 2017 GitHub Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -44,6 +44,9 @@ package org.eclipse.jgit.api;
import java.io.IOException;
import java.text.MessageFormat;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidRefNameException;
@@ -58,6 +61,7 @@ import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.CheckoutConflictException;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
@@ -198,7 +202,13 @@ public class StashApplyCommand extends GitCommand<ObjectId> {
"stash" }); //$NON-NLS-1$
merger.setBase(stashHeadCommit);
merger.setWorkingTreeIterator(new FileTreeIterator(repo));
- if (merger.merge(headCommit, stashCommit)) {
+ boolean mergeSucceeded = merger.merge(headCommit, stashCommit);
+ List<String> modifiedByMerge = merger.getModifiedFiles();
+ if (!modifiedByMerge.isEmpty()) {
+ repo.fireEvent(
+ new WorkingTreeModifiedEvent(modifiedByMerge, null));
+ }
+ if (mergeSucceeded) {
DirCache dc = repo.lockDirCache();
DirCacheCheckout dco = new DirCacheCheckout(repo, headTree,
dc, merger.getResultTreeId());
@@ -329,6 +339,7 @@ public class StashApplyCommand extends GitCommand<ObjectId> {
private void resetUntracked(RevTree tree) throws CheckoutConflictException,
IOException {
+ Set<String> actuallyModifiedPaths = new HashSet<>();
// TODO maybe NameConflictTreeWalk ?
try (TreeWalk walk = new TreeWalk(repo)) {
walk.addTree(tree);
@@ -361,6 +372,12 @@ public class StashApplyCommand extends GitCommand<ObjectId> {
checkoutPath(entry, reader,
new CheckoutMetadata(eolStreamType, null));
+ actuallyModifiedPaths.add(entry.getPathString());
+ }
+ } finally {
+ if (!actuallyModifiedPaths.isEmpty()) {
+ repo.fireEvent(new WorkingTreeModifiedEvent(
+ actuallyModifiedPaths, null));
}
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashCreateCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashCreateCommand.java
index 681f8e65ae..77a7fffb70 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashCreateCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashCreateCommand.java
@@ -62,6 +62,7 @@ import org.eclipse.jgit.dircache.DirCacheEditor.PathEdit;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.UnmergedPathException;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.CommitBuilder;
import org.eclipse.jgit.lib.Constants;
@@ -211,6 +212,7 @@ public class StashCreateCommand extends GitCommand<RevCommit> {
refUpdate.setNewObjectId(commitId);
refUpdate.setRefLogIdent(refLogIdent);
refUpdate.setRefLogMessage(refLogMessage, false);
+ refUpdate.setForceRefLog(true);
if (currentRef != null)
refUpdate.setExpectedOldObjectId(currentRef.getObjectId());
else
@@ -240,6 +242,7 @@ public class StashCreateCommand extends GitCommand<RevCommit> {
public RevCommit call() throws GitAPIException {
checkCallable();
+ List<String> deletedFiles = new ArrayList<>();
Ref head = getHead();
try (ObjectReader reader = repo.newObjectReader()) {
RevCommit headCommit = parseCommit(reader, head.getObjectId());
@@ -377,9 +380,11 @@ public class StashCreateCommand extends GitCommand<RevCommit> {
// Remove untracked files
if (includeUntracked) {
for (DirCacheEntry entry : untracked) {
+ String repoRelativePath = entry.getPathString();
File file = new File(repo.getWorkTree(),
- entry.getPathString());
+ repoRelativePath);
FileUtils.delete(file);
+ deletedFiles.add(repoRelativePath);
}
}
@@ -394,6 +399,11 @@ public class StashCreateCommand extends GitCommand<RevCommit> {
return parseCommit(reader, commitId);
} catch (IOException e) {
throw new JGitInternalException(JGitText.get().stashFailed, e);
+ } finally {
+ if (!deletedFiles.isEmpty()) {
+ repo.fireEvent(
+ new WorkingTreeModifiedEvent(null, deletedFiles));
+ }
}
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashDropCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashDropCommand.java
index e215bdf7d3..85e7b3d298 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashDropCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashDropCommand.java
@@ -56,6 +56,7 @@ import org.eclipse.jgit.api.errors.JGitInternalException;
import org.eclipse.jgit.api.errors.RefNotFoundException;
import org.eclipse.jgit.errors.LockFailedException;
import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.file.RefDirectory;
import org.eclipse.jgit.internal.storage.file.ReflogWriter;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
@@ -68,6 +69,9 @@ import org.eclipse.jgit.util.FileUtils;
/**
* Command class to delete a stashed commit reference
+ * <p>
+ * Currently only supported on a traditional file repository using
+ * one-file-per-ref reflogs.
*
* @see <a href="http://www.kernel.org/pub/software/scm/git/docs/git-stash.html"
* >Git documentation about Stash</a>
@@ -84,6 +88,10 @@ public class StashDropCommand extends GitCommand<ObjectId> {
*/
public StashDropCommand(Repository repo) {
super(repo);
+ if (!(repo.getRefDatabase() instanceof RefDirectory)) {
+ throw new UnsupportedOperationException(
+ JGitText.get().stashDropNotSupported);
+ }
}
/**
@@ -205,10 +213,11 @@ public class StashDropCommand extends GitCommand<ObjectId> {
return null;
}
- ReflogWriter writer = new ReflogWriter(repo, true);
+ RefDirectory refdb = (RefDirectory) repo.getRefDatabase();
+ ReflogWriter writer = new ReflogWriter(refdb, true);
String stashLockRef = ReflogWriter.refLockFor(R_STASH);
- File stashLockFile = writer.logFor(stashLockRef);
- File stashFile = writer.logFor(R_STASH);
+ File stashLockFile = refdb.logFor(stashLockRef);
+ File stashFile = refdb.logFor(R_STASH);
if (stashLockFile.exists())
throw new JGitInternalException(JGitText.get().stashDropFailed,
new LockFailedException(stashFile));
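
Since the constructor now rejects repositories whose ref database is not a RefDirectory, callers that might run against other backends (e.g. DFS) may want to guard the call. A minimal, hypothetical sketch (repository path is made up):

    import java.io.File;
    import org.eclipse.jgit.api.Git;

    public class StashDropExample {
        public static void main(String[] args) throws Exception {
            // Sketch only: repository path is hypothetical.
            try (Git git = Git.open(new File("/tmp/repo"))) {
                try {
                    git.stashDrop().setStashRef(0).call();
                } catch (UnsupportedOperationException e) {
                    // thrown for repositories without one-file-per-ref reflogs
                    System.err.println(e.getMessage());
                }
            }
        }
    }
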
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleSyncCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleSyncCommand.java
index f97dce9833..b5c0b1564f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleSyncCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleSyncCommand.java
@@ -162,4 +162,4 @@ public class SubmoduleSyncCommand extends GitCommand<Map<String, String>> {
throw new JGitInternalException(e.getMessage(), e);
}
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java
index 4d3dff02cd..4faaac2dbc 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java
@@ -91,6 +91,10 @@ public class SubmoduleUpdateCommand extends
private CloneCommand.Callback callback;
+ private FetchCommand.Callback fetchCallback;
+
+ private boolean fetch = false;
+
/**
* @param repo
*/
@@ -114,6 +118,19 @@ public class SubmoduleUpdateCommand extends
}
/**
+ * Whether to fetch the submodules before we update them. By default, this
+ * is set to <code>false</code>.
+ *
+ * @param fetch
+ * @return this command
+ * @since 4.9
+ */
+ public SubmoduleUpdateCommand setFetch(final boolean fetch) {
+ this.fetch = fetch;
+ return this;
+ }
+
+ /**
* Add repository-relative submodule path to initialize
*
* @param path
@@ -161,7 +178,7 @@ public class SubmoduleUpdateCommand extends
continue;
Repository submoduleRepo = generator.getRepository();
- // Clone repository is not present
+ // Clone repository if not present
if (submoduleRepo == null) {
if (callback != null) {
callback.cloningSubmodule(generator.getPath());
@@ -175,6 +192,16 @@ public class SubmoduleUpdateCommand extends
if (monitor != null)
clone.setProgressMonitor(monitor);
submoduleRepo = clone.call().getRepository();
+ } else if (this.fetch) {
+ if (fetchCallback != null) {
+ fetchCallback.fetchingSubmodule(generator.getPath());
+ }
+ FetchCommand fetchCommand = Git.wrap(submoduleRepo).fetch();
+ if (monitor != null) {
+ fetchCommand.setProgressMonitor(monitor);
+ }
+ configure(fetchCommand);
+ fetchCommand.call();
}
try (RevWalk walk = new RevWalk(submoduleRepo)) {
@@ -247,4 +274,18 @@ public class SubmoduleUpdateCommand extends
this.callback = callback;
return this;
}
+
+ /**
+ * Set status callback for submodule fetch operation.
+ *
+ * @param callback
+ * the callback
+ * @return {@code this}
+ * @since 4.9
+ */
+ public SubmoduleUpdateCommand setFetchCallback(
+ FetchCommand.Callback callback) {
+ this.fetchCallback = callback;
+ return this;
+ }
}
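
A minimal sketch of the new fetch-before-update option, assuming a hypothetical parent repository with initialized submodules:

    import java.io.File;
    import java.util.Collection;
    import org.eclipse.jgit.api.Git;

    public class SubmoduleUpdateFetchExample {
        public static void main(String[] args) throws Exception {
            // Sketch only: repository path is hypothetical.
            try (Git git = Git.open(new File("/tmp/superproject"))) {
                Collection<String> updated = git.submoduleUpdate()
                        .setFetch(true) // fetch already-cloned submodules first
                        .call();
                System.out.println(updated);
            }
        }
    }
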
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/TransportCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/TransportCommand.java
index 3d2e46b26e..1541df5d39 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/TransportCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/TransportCommand.java
@@ -95,7 +95,7 @@ public abstract class TransportCommand<C extends GitCommand, T> extends
/**
* @param timeout
- * the timeout used for the transport step
+ * the timeout (in seconds) used for the transport step
* @return {@code this}
*/
public C setTimeout(int timeout) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/PatchApplyException.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/PatchApplyException.java
index 389c776736..4329860cd3 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/PatchApplyException.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/PatchApplyException.java
@@ -44,9 +44,9 @@ package org.eclipse.jgit.api.errors;
/**
* Exception thrown when applying a patch fails
- *
+ *
* @since 2.0
- *
+ *
*/
public class PatchApplyException extends GitAPIException {
private static final long serialVersionUID = 1L;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/PatchFormatException.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/PatchFormatException.java
index caff942a46..02ab423366 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/PatchFormatException.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/errors/PatchFormatException.java
@@ -50,9 +50,9 @@ import org.eclipse.jgit.patch.FormatError;
/**
* Exception thrown when applying a patch fails due to an invalid format
- *
+ *
* @since 2.0
- *
+ *
*/
public class PatchFormatException extends GitAPIException {
private static final long serialVersionUID = 1L;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attribute.java b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attribute.java
index 905ad76929..c256b738b5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attribute.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attribute.java
@@ -193,4 +193,4 @@ public final class Attribute {
return key + "=" + value; //$NON-NLS-1$
}
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attributes.java b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attributes.java
index 0810e31682..d3826b3d9c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attributes.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attributes.java
@@ -1,5 +1,6 @@
/*
- * Copyright (C) 2015, Ivan Motsch <ivan.motsch@bsiag.com>
+ * Copyright (C) 2015, Ivan Motsch <ivan.motsch@bsiag.com>,
+ * Copyright (C) 2017, Obeo (mathieu.cartaud@obeo.fr)
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
@@ -48,6 +49,7 @@ import java.util.LinkedHashMap;
import java.util.Map;
import org.eclipse.jgit.attributes.Attribute.State;
+import org.eclipse.jgit.lib.Constants;
/**
* Represents a set of attributes for a path
@@ -170,6 +172,26 @@ public final class Attributes {
return a != null ? a.getValue() : null;
}
+ /**
+ * Tests whether the given attributes imply that the related entry is to be
+ * handled as a binary file (i.e. the entry has a -merge or a merge=binary
+ * attribute) or whether it can be content merged.
+ *
+ * @return <code>true</code> if the entry can be content merged,
+ * <code>false</code> otherwise
+ * @since 4.9
+ */
+ public boolean canBeContentMerged() {
+ if (isUnset(Constants.ATTR_MERGE)) {
+ return false;
+ } else if (isCustom(Constants.ATTR_MERGE)
+ && getValue(Constants.ATTR_MERGE)
+ .equals(Constants.ATTR_BUILTIN_BINARY_MERGER)) {
+ return false;
+ }
+ return true;
+ }
+
@Override
public String toString() {
StringBuilder buf = new StringBuilder();
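
To illustrate the rule implemented above: an unset merge attribute or merge=binary marks the entry as not content-mergeable, anything else leaves it mergeable. A small sketch using directly constructed attributes (in real use they would normally come from a TreeWalk):

    import org.eclipse.jgit.attributes.Attribute;
    import org.eclipse.jgit.attributes.Attribute.State;
    import org.eclipse.jgit.attributes.Attributes;

    public class CanBeContentMergedExample {
        public static void main(String[] args) {
            Attributes binary = new Attributes(new Attribute("merge", State.UNSET));
            Attributes custom = new Attributes(new Attribute("merge", "binary"));
            Attributes plain = new Attributes();
            System.out.println(binary.canBeContentMerged()); // false (-merge)
            System.out.println(custom.canBeContentMerged()); // false (merge=binary)
            System.out.println(plain.canBeContentMerged());  // true
        }
    }
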
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesHandler.java b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesHandler.java
index 3bf4179e7d..8d928e3749 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesHandler.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesHandler.java
@@ -144,7 +144,8 @@ public class AttributesHandler {
mergeInfoAttributes(entryPath, isDirectory, attributes);
// Gets the attributes located on the current entry path
- mergePerDirectoryEntryAttributes(entryPath, isDirectory,
+ mergePerDirectoryEntryAttributes(entryPath, entryPath.lastIndexOf('/'),
+ isDirectory,
treeWalk.getTree(WorkingTreeIterator.class),
treeWalk.getTree(DirCacheIterator.class),
treeWalk.getTree(CanonicalTreeParser.class),
@@ -206,6 +207,8 @@ public class AttributesHandler {
* the path to test. The path must be relative to this attribute
* node's own repository path, and in repository path format
* (uses '/' and not '\').
+ * @param nameRoot
+ * index of the '/' preceding the current level, or -1 if none
* @param isDirectory
* true if the target item is a directory.
* @param workingTreeIterator
@@ -217,7 +220,7 @@ public class AttributesHandler {
* @throws IOException
*/
private void mergePerDirectoryEntryAttributes(String entryPath,
- boolean isDirectory,
+ int nameRoot, boolean isDirectory,
@Nullable WorkingTreeIterator workingTreeIterator,
@Nullable DirCacheIterator dirCacheIterator,
@Nullable CanonicalTreeParser otherTree, Attributes result)
@@ -228,9 +231,12 @@ public class AttributesHandler {
AttributesNode attributesNode = attributesNode(
treeWalk, workingTreeIterator, dirCacheIterator, otherTree);
if (attributesNode != null) {
- mergeAttributes(attributesNode, entryPath, isDirectory, result);
+ mergeAttributes(attributesNode,
+ entryPath.substring(nameRoot + 1), isDirectory,
+ result);
}
- mergePerDirectoryEntryAttributes(entryPath, isDirectory,
+ mergePerDirectoryEntryAttributes(entryPath,
+ entryPath.lastIndexOf('/', nameRoot - 1), isDirectory,
parentOf(workingTreeIterator), parentOf(dirCacheIterator),
parentOf(otherTree), result);
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesRule.java b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesRule.java
index c9c69db868..3cf5de8be5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesRule.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesRule.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2010, Red Hat Inc.
+ * Copyright (C) 2010, 2017 Red Hat Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -210,7 +210,7 @@ public class AttributesRule {
return false;
if (relativeTarget.length() == 0)
return false;
- boolean match = matcher.matches(relativeTarget, isDirectory);
+ boolean match = matcher.matches(relativeTarget, isDirectory, true);
return match;
}
@@ -225,4 +225,4 @@ public class AttributesRule {
return sb.toString();
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffConfig.java
index 324b99eb58..ee709495e5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffConfig.java
@@ -54,12 +54,7 @@ import org.eclipse.jgit.util.StringUtils;
/** Keeps track of diff related configuration options. */
public class DiffConfig {
/** Key for {@link Config#get(SectionParser)}. */
- public static final Config.SectionParser<DiffConfig> KEY = new SectionParser<DiffConfig>() {
- @Override
- public DiffConfig parse(final Config cfg) {
- return new DiffConfig(cfg);
- }
- };
+ public static final Config.SectionParser<DiffConfig> KEY = DiffConfig::new;
/** Permissible values for {@code diff.renames}. */
public static enum RenameDetectionType {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffEntry.java b/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffEntry.java
index e1dfcff6bb..5eb1942629 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffEntry.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffEntry.java
@@ -525,4 +525,4 @@ public class DiffEntry {
buf.append("]");
return buf.toString();
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/diff/MyersDiff.java b/org.eclipse.jgit/src/org/eclipse/jgit/diff/MyersDiff.java
index e1bda116bf..a3860de04c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/diff/MyersDiff.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/diff/MyersDiff.java
@@ -119,7 +119,7 @@ public class MyersDiff<S extends Sequence> {
public <S extends Sequence> void diffNonCommon(EditList edits,
HashedSequenceComparator<S> cmp, HashedSequence<S> a,
HashedSequence<S> b, Edit region) {
- new MyersDiff<S>(edits, cmp, a, b, region);
+ new MyersDiff<>(edits, cmp, a, b, region);
}
};
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
index aed76ac66b..a6ab9c8921 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
@@ -50,6 +50,7 @@ import java.nio.file.StandardCopyOption;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -61,6 +62,7 @@ import org.eclipse.jgit.errors.CorruptObjectException;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.IndexWriteException;
import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.AutoCRLF;
@@ -85,6 +87,7 @@ import org.eclipse.jgit.treewalk.filter.PathFilter;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FS.ExecutionResult;
import org.eclipse.jgit.util.FileUtils;
+import org.eclipse.jgit.util.IntList;
import org.eclipse.jgit.util.RawParseUtils;
import org.eclipse.jgit.util.SystemReader;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
@@ -151,6 +154,8 @@ public class DirCacheCheckout {
private boolean emptyDirCache;
+ private boolean performingCheckout;
+
/**
* @return a list of updated paths and smudgeFilterCommands
*/
@@ -432,10 +437,11 @@ public class DirCacheCheckout {
}
/**
- * Execute this checkout
+ * Execute this checkout. A {@link WorkingTreeModifiedEvent} is fired if the
+ * working tree was modified, even if the checkout fails.
*
* @return <code>false</code> if this method could not delete all the files
- * which should be deleted (e.g. because of of the files was
+ * which should be deleted (e.g. because one of the files was
* locked). In this case {@link #getToBeDeleted()} lists the files
* which should be tried to be deleted outside of this method.
* Although <code>false</code> is returned the checkout was
@@ -448,7 +454,17 @@ public class DirCacheCheckout {
try {
return doCheckout();
} finally {
- dc.unlock();
+ try {
+ dc.unlock();
+ } finally {
+ if (performingCheckout) {
+ WorkingTreeModifiedEvent event = new WorkingTreeModifiedEvent(
+ getUpdated().keySet(), getRemoved());
+ if (!event.isEmpty()) {
+ repo.fireEvent(event);
+ }
+ }
+ }
}
}
@@ -472,11 +488,13 @@ public class DirCacheCheckout {
// update our index
builder.finish();
+ performingCheckout = true;
File file = null;
String last = null;
// when deleting files process them in the opposite order as they have
// been reported. This ensures the files are deleted before we delete
// their parent folders
+ IntList nonDeleted = new IntList();
for (int i = removed.size() - 1; i >= 0; i--) {
String r = removed.get(i);
file = new File(repo.getWorkTree(), r);
@@ -486,25 +504,47 @@ public class DirCacheCheckout {
// a submodule, in which case we shall not attempt
// to delete it. A submodule is not empty, so it
// is safe to check this after a failed delete.
- if (!repo.getFS().isDirectory(file))
+ if (!repo.getFS().isDirectory(file)) {
+ nonDeleted.add(i);
toBeDeleted.add(r);
+ }
} else {
if (last != null && !isSamePrefix(r, last))
removeEmptyParents(new File(repo.getWorkTree(), last));
last = r;
}
}
- if (file != null)
+ if (file != null) {
removeEmptyParents(file);
-
- for (Map.Entry<String, CheckoutMetadata> e : updated.entrySet()) {
- String path = e.getKey();
- CheckoutMetadata meta = e.getValue();
- DirCacheEntry entry = dc.getEntry(path);
- if (!FileMode.GITLINK.equals(entry.getRawMode()))
- checkoutEntry(repo, entry, objectReader, false, meta);
}
-
+ removed = filterOut(removed, nonDeleted);
+ nonDeleted = null;
+ Iterator<Map.Entry<String, CheckoutMetadata>> toUpdate = updated
+ .entrySet().iterator();
+ Map.Entry<String, CheckoutMetadata> e = null;
+ try {
+ while (toUpdate.hasNext()) {
+ e = toUpdate.next();
+ String path = e.getKey();
+ CheckoutMetadata meta = e.getValue();
+ DirCacheEntry entry = dc.getEntry(path);
+ if (!FileMode.GITLINK.equals(entry.getRawMode())) {
+ checkoutEntry(repo, entry, objectReader, false, meta);
+ }
+ e = null;
+ }
+ } catch (Exception ex) {
+ // We didn't actually modify the current entry nor any that
+ // might follow.
+ if (e != null) {
+ toUpdate.remove();
+ }
+ while (toUpdate.hasNext()) {
+ e = toUpdate.next();
+ toUpdate.remove();
+ }
+ throw ex;
+ }
// commit the index builder - a new index is persisted
if (!builder.commit())
throw new IndexWriteException();
@@ -512,6 +552,36 @@ public class DirCacheCheckout {
return toBeDeleted.size() == 0;
}
+ private static ArrayList<String> filterOut(ArrayList<String> strings,
+ IntList indicesToRemove) {
+ int n = indicesToRemove.size();
+ if (n == strings.size()) {
+ return new ArrayList<>(0);
+ }
+ switch (n) {
+ case 0:
+ return strings;
+ case 1:
+ strings.remove(indicesToRemove.get(0));
+ return strings;
+ default:
+ int length = strings.size();
+ ArrayList<String> result = new ArrayList<>(length - n);
+ // Process indicesToRemove from the back; we know that it
+ // contains indices in descending order.
+ int j = n - 1;
+ int idx = indicesToRemove.get(j);
+ for (int i = 0; i < length; i++) {
+ if (i == idx) {
+ idx = (--j >= 0) ? indicesToRemove.get(j) : -1;
+ } else {
+ result.add(strings.get(i));
+ }
+ }
+ return result;
+ }
+ }
+
private static boolean isSamePrefix(String a, String b) {
int as = a.lastIndexOf('/');
int bs = b.lastIndexOf('/');
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/errors/CorruptPackIndexException.java b/org.eclipse.jgit/src/org/eclipse/jgit/errors/CorruptPackIndexException.java
new file mode 100644
index 0000000000..65d83b3513
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/errors/CorruptPackIndexException.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.errors;
+
+import org.eclipse.jgit.annotations.Nullable;
+
+/**
+ * Exception thrown when a corrupt pack index file is encountered.
+ *
+ * @since 4.9
+ */
+public class CorruptPackIndexException extends Exception {
+ private static final long serialVersionUID = 1L;
+
+ /** The error type of a corrupt index file. */
+ public enum ErrorType {
+ /** Offset does not match index in pack file. */
+ MISMATCH_OFFSET,
+ /** CRC does not match CRC of the object data in pack file. */
+ MISMATCH_CRC,
+ /** CRC is not present in index file. */
+ MISSING_CRC,
+ /** Object in pack is not present in index file. */
+ MISSING_OBJ,
+ /** Object in index file is not present in pack file. */
+ UNKNOWN_OBJ,
+ }
+
+ private ErrorType errorType;
+
+ /**
+ * Report a specific error condition discovered in an index file.
+ *
+ * @param message
+ * the error message.
+ * @param errorType
+ * the error type of corruption.
+ */
+ public CorruptPackIndexException(String message, ErrorType errorType) {
+ super(message);
+ this.errorType = errorType;
+ }
+
+ /**
+ * Describes the specific reason why the index file is corrupt.
+ *
+ * @return the error condition, or null.
+ */
+ @Nullable
+ public ErrorType getErrorType() {
+ return errorType;
+ }
+}
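
Nothing in this section throws the new exception; a purely hypothetical handler that inspects the error type might look like this:

    import org.eclipse.jgit.errors.CorruptPackIndexException;
    import org.eclipse.jgit.errors.CorruptPackIndexException.ErrorType;

    public class CorruptIndexHandlingExample {
        public static void main(String[] args) {
            // Hypothetical: construct an instance just to show the accessor.
            CorruptPackIndexException e = new CorruptPackIndexException(
                    "CRC mismatch for some object", ErrorType.MISMATCH_CRC);
            if (e.getErrorType() == ErrorType.MISMATCH_CRC) {
                System.err.println("pack index CRC error: " + e.getMessage());
            }
        }
    }
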
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/errors/TooLargeObjectInPackException.java b/org.eclipse.jgit/src/org/eclipse/jgit/errors/TooLargeObjectInPackException.java
index b5b1af542e..ece76ed287 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/errors/TooLargeObjectInPackException.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/errors/TooLargeObjectInPackException.java
@@ -92,4 +92,4 @@ public class TooLargeObjectInPackException extends TransportException {
public TooLargeObjectInPackException(URIish uri, String s) {
super(uri.setPass(null) + ": " + s); //$NON-NLS-1$
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/errors/TranslationBundleLoadingException.java b/org.eclipse.jgit/src/org/eclipse/jgit/errors/TranslationBundleLoadingException.java
index 4f297b982b..6cb332d483 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/errors/TranslationBundleLoadingException.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/errors/TranslationBundleLoadingException.java
@@ -69,4 +69,4 @@ public class TranslationBundleLoadingException extends TranslationBundleExceptio
+ bundleClass.getName() + ", " + locale.toString() + "]", //$NON-NLS-1$ //$NON-NLS-2$
bundleClass, locale, cause);
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/events/ListenerList.java b/org.eclipse.jgit/src/org/eclipse/jgit/events/ListenerList.java
index 12ef533add..cea03db2ec 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/events/ListenerList.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/events/ListenerList.java
@@ -53,6 +53,19 @@ public class ListenerList {
private final ConcurrentMap<Class<? extends RepositoryListener>, CopyOnWriteArrayList<ListenerHandle>> lists = new ConcurrentHashMap<>();
/**
+ * Register a {@link WorkingTreeModifiedListener}.
+ *
+ * @param listener
+ * the listener implementation.
+ * @return handle to later remove the listener.
+ * @since 4.9
+ */
+ public ListenerHandle addWorkingTreeModifiedListener(
+ WorkingTreeModifiedListener listener) {
+ return addListener(WorkingTreeModifiedListener.class, listener);
+ }
+
+ /**
* Register an IndexChangedListener.
*
* @param listener
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedEvent.java b/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedEvent.java
new file mode 100644
index 0000000000..6517823db2
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedEvent.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.events;
+
+import java.util.Collection;
+import java.util.Collections;
+
+import org.eclipse.jgit.annotations.NonNull;
+
+/**
+ * A {@link RepositoryEvent} describing changes to the working tree. It is fired
+ * whenever a {@link org.eclipse.jgit.dircache.DirCacheCheckout} modifies
+ * (adds/deletes/updates) files in the working tree.
+ *
+ * @since 4.9
+ */
+public class WorkingTreeModifiedEvent
+ extends RepositoryEvent<WorkingTreeModifiedListener> {
+
+ private Collection<String> modified;
+
+ private Collection<String> deleted;
+
+ /**
+ * Creates a new {@link WorkingTreeModifiedEvent} with the given
+ * collections.
+ *
+ * @param modified
+ * repository-relative paths that were added or updated
+ * @param deleted
+ * repository-relative paths that were deleted
+ */
+ public WorkingTreeModifiedEvent(Collection<String> modified,
+ Collection<String> deleted) {
+ this.modified = modified;
+ this.deleted = deleted;
+ }
+
+ /**
+ * Determines whether there are any changes recorded in this event.
+ *
+ * @return {@code true} if no files were modified or deleted, {@code false}
+ * otherwise
+ */
+ public boolean isEmpty() {
+ return (modified == null || modified.isEmpty())
+ && (deleted == null || deleted.isEmpty());
+ }
+
+ /**
+ * Retrieves the {@link Collection} of repository-relative paths of files
+ * that were modified (added or updated).
+ *
+ * @return the collection of modified repository-relative paths
+ */
+ public @NonNull Collection<String> getModified() {
+ Collection<String> result = modified;
+ if (result == null) {
+ result = Collections.emptyList();
+ modified = result;
+ }
+ return result;
+ }
+
+ /**
+ * Retrieves the {@link Collection} of repository-relative paths of files
+ * that were deleted.
+ *
+ * @return the collection of deleted repository-relative paths
+ */
+ public @NonNull Collection<String> getDeleted() {
+ Collection<String> result = deleted;
+ if (result == null) {
+ result = Collections.emptyList();
+ deleted = result;
+ }
+ return result;
+ }
+
+ @Override
+ public Class<WorkingTreeModifiedListener> getListenerType() {
+ return WorkingTreeModifiedListener.class;
+ }
+
+ @Override
+ public void dispatch(WorkingTreeModifiedListener listener) {
+ listener.onWorkingTreeModified(this);
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackKey.java b/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedListener.java
index 98a2a943e3..402a900226 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackKey.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedListener.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2011, Google Inc.
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -41,20 +41,21 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-package org.eclipse.jgit.internal.storage.dfs;
+package org.eclipse.jgit.events;
-import java.util.concurrent.atomic.AtomicLong;
-
-final class DfsPackKey {
- final int hash;
-
- final AtomicLong cachedSize;
+/**
+ * Receives {@link WorkingTreeModifiedEvent}s, which are fired whenever a
+ * {@link org.eclipse.jgit.dircache.DirCacheCheckout} modifies
+ * (adds/deletes/updates) files in the working tree.
+ *
+ * @since 4.9
+ */
+public interface WorkingTreeModifiedListener extends RepositoryListener {
- DfsPackKey() {
- // Multiply by 31 here so we can more directly combine with another
- // value without doing the multiply there.
- //
- hash = System.identityHashCode(this) * 31;
- cachedSize = new AtomicLong();
- }
+ /**
+ * Respond to working tree modifications.
+ *
+ * @param event
+ * the {@link WorkingTreeModifiedEvent} describing the working tree changes
+ */
+ void onWorkingTreeModified(WorkingTreeModifiedEvent event);
}
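
Putting the new event, the listener interface and the ListenerList registration together, a minimal sketch that logs working-tree changes caused by a checkout (repository path and branch name are hypothetical):

    import java.io.File;
    import org.eclipse.jgit.api.Git;
    import org.eclipse.jgit.events.WorkingTreeModifiedEvent;

    public class WorkingTreeListenerExample {
        public static void main(String[] args) throws Exception {
            try (Git git = Git.open(new File("/tmp/repo"))) {
                git.getRepository().getListenerList().addWorkingTreeModifiedListener(
                        (WorkingTreeModifiedEvent event) -> {
                            System.out.println("modified: " + event.getModified());
                            System.out.println("deleted:  " + event.getDeleted());
                        });
                // Any DirCacheCheckout-based operation now reports its changes.
                git.checkout().setName("master").call();
            }
        }
    }
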
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java
index 1de8a0be2e..219babdf95 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/gitrepo/RepoCommand.java
@@ -49,7 +49,6 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.lang.UnsupportedOperationException;
import java.net.URI;
import java.text.MessageFormat;
import java.util.ArrayList;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/hooks/GitHook.java b/org.eclipse.jgit/src/org/eclipse/jgit/hooks/GitHook.java
index 62a674924a..b684dd6232 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/hooks/GitHook.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/hooks/GitHook.java
@@ -167,4 +167,4 @@ abstract class GitHook<T> implements Callable<T> {
}
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/FastIgnoreRule.java b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/FastIgnoreRule.java
index ef67d49419..7298a082c7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/FastIgnoreRule.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/FastIgnoreRule.java
@@ -155,7 +155,7 @@ public class FastIgnoreRule {
return false;
if (path.length() == 0)
return false;
- boolean match = matcher.matches(path, directory);
+ boolean match = matcher.matches(path, directory, false);
return match;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/IMatcher.java b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/IMatcher.java
index 61f7b83400..5b184cb19f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/IMatcher.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/IMatcher.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2014, Andrey Loskutov <loskutov@gmx.de>
+ * Copyright (C) 2014, 2017 Andrey Loskutov <loskutov@gmx.de>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -52,7 +52,8 @@ public interface IMatcher {
*/
public static final IMatcher NO_MATCH = new IMatcher() {
@Override
- public boolean matches(String path, boolean assumeDirectory) {
+ public boolean matches(String path, boolean assumeDirectory,
+ boolean pathMatch) {
return false;
}
@@ -71,9 +72,14 @@ public interface IMatcher {
* @param assumeDirectory
* true to assume this path as directory (even if it doesn't end
* with a slash)
+ * @param pathMatch
+ * {@code true} if the match is for the full path: prefix-only
+ * matches are not allowed, and {@link NameMatcher}s must match
+ * only the last component (if they can -- they may not, if they
+ * are anchored at the beginning)
* @return true if this matcher pattern matches given string
*/
- boolean matches(String path, boolean assumeDirectory);
+ boolean matches(String path, boolean assumeDirectory, boolean pathMatch);
/**
* Matches only part of given string
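
The three-argument matches() is exercised by the DescribeCommand change earlier in this patch. A minimal sketch of matching a tag ref against a glob using the internal PathMatcher (pattern and ref name are made up):

    import org.eclipse.jgit.ignore.internal.IMatcher;
    import org.eclipse.jgit.ignore.internal.PathMatcher;

    public class TagGlobMatchExample {
        public static void main(String[] args) throws Exception {
            IMatcher matcher = PathMatcher.createPathMatcher("v*", null, false);
            // pathMatch=false: any path segment may match the single-segment glob
            boolean hit = matcher.matches("refs/tags/v1.0", false, false);
            System.out.println(hit); // expected: true
        }
    }
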
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/NameMatcher.java b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/NameMatcher.java
index 00651237db..9667837a41 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/NameMatcher.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/NameMatcher.java
@@ -64,26 +64,59 @@ public class NameMatcher extends AbstractMatcher {
pattern = Strings.deleteBackslash(pattern);
}
beginning = pattern.length() == 0 ? false : pattern.charAt(0) == slash;
- if (!beginning)
+ if (!beginning) {
this.subPattern = pattern;
- else
+ } else {
this.subPattern = pattern.substring(1);
+ }
}
@Override
- public boolean matches(String path, boolean assumeDirectory) {
- int end = 0;
- int firstChar = 0;
- do {
- firstChar = getFirstNotSlash(path, end);
- end = getFirstSlash(path, firstChar);
- boolean match = matches(path, firstChar, end, assumeDirectory);
- if (match)
+ public boolean matches(String path, boolean assumeDirectory,
+ boolean pathMatch) {
+ // A NameMatcher's pattern does not contain a slash.
+ int start = 0;
+ int stop = path.length();
+ if (stop > 0 && path.charAt(0) == slash) {
+ start++;
+ }
+ if (pathMatch) {
+ // Can match only after the last slash
+ int lastSlash = path.lastIndexOf(slash, stop - 1);
+ if (lastSlash == stop - 1) {
+ // Skip trailing slash
+ lastSlash = path.lastIndexOf(slash, lastSlash - 1);
+ stop--;
+ }
+ boolean match;
+ if (lastSlash < start) {
+ match = matches(path, start, stop, assumeDirectory);
+ } else {
+ // Can't match if the path contains a slash and the pattern is
+ // anchored at the beginning
+ match = !beginning
+ && matches(path, lastSlash + 1, stop, assumeDirectory);
+ }
+ if (match && dirOnly) {
+ match = assumeDirectory;
+ }
+ return match;
+ }
+ while (start < stop) {
+ int end = path.indexOf(slash, start);
+ if (end < 0) {
+ end = stop;
+ }
+ if (end > start && matches(path, start, end, assumeDirectory)) {
// make sure the directory matches: either if we are done with
// segment and there is next one, or if the directory is assumed
- return !dirOnly ? true : (end > 0 && end != path.length())
- || assumeDirectory;
- } while (!beginning && end != path.length());
+ return !dirOnly || assumeDirectory || end < stop;
+ }
+ if (beginning) {
+ break;
+ }
+ start = end + 1;
+ }
return false;
}
@@ -92,25 +125,18 @@ public class NameMatcher extends AbstractMatcher {
boolean assumeDirectory) {
// faster local access, same as in string.indexOf()
String s = subPattern;
- if (s.length() != (endExcl - startIncl))
+ int length = s.length();
+ if (length != (endExcl - startIncl)) {
return false;
- for (int i = 0; i < s.length(); i++) {
+ }
+ for (int i = 0; i < length; i++) {
char c1 = s.charAt(i);
char c2 = segment.charAt(i + startIncl);
- if (c1 != c2)
+ if (c1 != c2) {
return false;
+ }
}
return true;
}
- private int getFirstNotSlash(String s, int start) {
- int slashIdx = s.indexOf(slash, start);
- return slashIdx == start ? start + 1 : start;
- }
-
- private int getFirstSlash(String s, int start) {
- int slashIdx = s.indexOf(slash, start);
- return slashIdx == -1 ? s.length() : slashIdx;
- }
-
}
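Continuing the previous sketch (same imports and the same createPathMatcher factory assumption), the two special cases handled above are a pattern anchored at the beginning and a directory-only pattern. The results are expectations derived from the code, not recorded output.

    // Anchored pattern "/b": may only match the first segment.
    IMatcher anchored = PathMatcher.createPathMatcher("/b", Character.valueOf('/'), false);
    anchored.matches("b/c", false, false);  // expected: true  (first segment is "b")
    anchored.matches("a/b", false, false);  // expected: false (anchored, no later segment is tried)

    // Directory-only pattern ("b/" in .gitignore terms): dirOnly == true.
    IMatcher dirOnly = PathMatcher.createPathMatcher("b", Character.valueOf('/'), true);
    dirOnly.matches("a/b", true, true);     // expected: true  (path is assumed to be a directory)
    dirOnly.matches("a/b", false, true);    // expected: false (directory expectation not met)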
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/PathMatcher.java b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/PathMatcher.java
index 65224eab91..9b3a2aac78 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/PathMatcher.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/PathMatcher.java
@@ -52,7 +52,6 @@ import java.util.ArrayList;
import java.util.List;
import org.eclipse.jgit.errors.InvalidPatternException;
-import org.eclipse.jgit.ignore.FastIgnoreRule;
import org.eclipse.jgit.ignore.internal.Strings.PatternState;
/**
@@ -68,9 +67,10 @@ public class PathMatcher extends AbstractMatcher {
private final char slash;
- private boolean beginning;
+ private final boolean beginning;
- PathMatcher(String pattern, Character pathSeparator, boolean dirOnly)
+ private PathMatcher(String pattern, Character pathSeparator,
+ boolean dirOnly)
throws InvalidPatternException {
super(pattern, dirOnly);
slash = getPathSeparator(pathSeparator);
@@ -87,7 +87,7 @@ public class PathMatcher extends AbstractMatcher {
&& count(path, slash, true) > 0;
}
- static private List<IMatcher> createMatchers(List<String> segments,
+ private static List<IMatcher> createMatchers(List<String> segments,
Character pathSeparator, boolean dirOnly)
throws InvalidPatternException {
List<IMatcher> matchers = new ArrayList<>(segments.size());
@@ -171,10 +171,12 @@ public class PathMatcher extends AbstractMatcher {
}
@Override
- public boolean matches(String path, boolean assumeDirectory) {
- if (matchers == null)
- return simpleMatch(path, assumeDirectory);
- return iterate(path, 0, path.length(), assumeDirectory);
+ public boolean matches(String path, boolean assumeDirectory,
+ boolean pathMatch) {
+ if (matchers == null) {
+ return simpleMatch(path, assumeDirectory, pathMatch);
+ }
+ return iterate(path, 0, path.length(), assumeDirectory, pathMatch);
}
/*
@@ -182,31 +184,31 @@ public class PathMatcher extends AbstractMatcher {
* wildcards or single segments (meaning: this is a multi-segment path which
* must be at the beginning of another string)
*/
- private boolean simpleMatch(String path, boolean assumeDirectory) {
+ private boolean simpleMatch(String path, boolean assumeDirectory,
+ boolean pathMatch) {
boolean hasSlash = path.indexOf(slash) == 0;
- if (beginning && !hasSlash)
+ if (beginning && !hasSlash) {
path = slash + path;
-
- if (!beginning && hasSlash)
+ }
+ if (!beginning && hasSlash) {
path = path.substring(1);
-
- if (path.equals(pattern))
- // Exact match
- if (dirOnly && !assumeDirectory)
- // Directory expectations not met
- return false;
- else
- // Directory expectations met
- return true;
-
+ }
+ if (path.equals(pattern)) {
+ // Exact match: must meet directory expectations
+ return !dirOnly || assumeDirectory;
+ }
/*
* Add slashes for startsWith check. This avoids matching e.g.
* "/src/new" to /src/newfile" but allows "/src/new" to match
* "/src/new/newfile", as is the git standard
*/
- if (path.startsWith(pattern + FastIgnoreRule.PATH_SEPARATOR))
+ String prefix = pattern + slash;
+ if (pathMatch) {
+ return path.equals(prefix) && (!dirOnly || assumeDirectory);
+ }
+ if (path.startsWith(prefix)) {
return true;
-
+ }
return false;
}
@@ -217,61 +219,100 @@ public class PathMatcher extends AbstractMatcher {
"Path matcher works only on entire paths"); //$NON-NLS-1$
}
- boolean iterate(final String path, final int startIncl, final int endExcl,
- boolean assumeDirectory) {
+ private boolean iterate(final String path, final int startIncl,
+ final int endExcl, boolean assumeDirectory, boolean pathMatch) {
int matcher = 0;
int right = startIncl;
boolean match = false;
int lastWildmatch = -1;
+ // ** matches may get extended if a later match fails. When that
+ // happens, we must extend the ** by exactly one segment.
+ // wildmatchBacktrackPos records the end of the segment after a **
+ // match, so that we can reset correctly.
+ int wildmatchBacktrackPos = -1;
while (true) {
int left = right;
right = path.indexOf(slash, right);
if (right == -1) {
- if (left < endExcl)
+ if (left < endExcl) {
match = matches(matcher, path, left, endExcl,
assumeDirectory);
+ } else {
+ // a/** should not match a/ or a
+ match = match && matchers.get(matcher) != WILD;
+ }
if (match) {
- if (matcher == matchers.size() - 2
- && matchers.get(matcher + 1) == WILD)
- // ** can match *nothing*: a/b/** match also a/b
- return true;
if (matcher < matchers.size() - 1
&& matchers.get(matcher) == WILD) {
// ** can match *nothing*: a/**/b match also a/b
matcher++;
match = matches(matcher, path, left, endExcl,
assumeDirectory);
- } else if (dirOnly && !assumeDirectory)
+ } else if (dirOnly && !assumeDirectory) {
// Directory expectations not met
return false;
+ }
}
return match && matcher + 1 == matchers.size();
}
- if (right - left > 0)
+ if (wildmatchBacktrackPos < 0) {
+ wildmatchBacktrackPos = right;
+ }
+ if (right - left > 0) {
match = matches(matcher, path, left, right, assumeDirectory);
- else {
+ } else {
// path starts with slash???
right++;
continue;
}
if (match) {
- if (matchers.get(matcher) == WILD) {
+ boolean wasWild = matchers.get(matcher) == WILD;
+ if (wasWild) {
lastWildmatch = matcher;
+ wildmatchBacktrackPos = -1;
// ** can match *nothing*: a/**/b match also a/b
right = left - 1;
}
matcher++;
- if (matcher == matchers.size())
- return true;
- } else if (lastWildmatch != -1)
+ if (matcher == matchers.size()) {
+ // We had a prefix match here.
+ if (!pathMatch) {
+ return true;
+ } else {
+ if (right == endExcl - 1) {
+ // Extra slash at the end: actually a full match.
+ // Must meet directory expectations
+ return !dirOnly || assumeDirectory;
+ }
+ // Prefix matches only if pattern ended with /**
+ if (wasWild) {
+ return true;
+ }
+ if (lastWildmatch >= 0) {
+ // Consider pattern **/x and input x/x.
+ // We've matched the prefix x/ so far: we
+ // must try to extend the **!
+ matcher = lastWildmatch + 1;
+ right = wildmatchBacktrackPos;
+ wildmatchBacktrackPos = -1;
+ } else {
+ return false;
+ }
+ }
+ }
+ } else if (lastWildmatch != -1) {
matcher = lastWildmatch + 1;
- else
+ right = wildmatchBacktrackPos;
+ wildmatchBacktrackPos = -1;
+ } else {
return false;
+ }
right++;
}
}
- boolean matches(int matcherIdx, String path, int startIncl, int endExcl,
+ private boolean matches(int matcherIdx, String path, int startIncl,
+ int endExcl,
boolean assumeDirectory) {
IMatcher matcher = matchers.get(matcherIdx);
return matcher.matches(path, startIncl, endExcl, assumeDirectory);
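The wildmatch backtracking above is easiest to see with the very case mentioned in the comment, the pattern **/x against the input x/x. A short sketch, continuing the same factory assumption; the expected values are derived from the new iterate() logic rather than captured output.

    IMatcher doubleStar = PathMatcher.createPathMatcher("**/x", Character.valueOf('/'), false);
    // With pathMatch == true the first "x" is initially consumed as the tail of the
    // pattern, leaving the trailing "/x" unmatched; the ** is then extended by one
    // segment and matching is retried, so the full path still matches.
    doubleStar.matches("x/x", false, true); // expected: true
    doubleStar.matches("x/y", false, true); // expected: false (no final segment "x")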
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/Strings.java b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/Strings.java
index da482fa50a..800cdb9952 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/Strings.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/Strings.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2014, Andrey Loskutov <loskutov@gmx.de>
+ * Copyright (C) 2014, 2017 Andrey Loskutov <loskutov@gmx.de>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -123,12 +123,15 @@ public class Strings {
static int count(String s, char c, boolean ignoreFirstLast) {
int start = 0;
int count = 0;
- while (true) {
+ int length = s.length();
+ while (start < length) {
start = s.indexOf(c, start);
- if (start == -1)
+ if (start == -1) {
break;
- if (!ignoreFirstLast || (start != 0 && start != s.length()))
+ }
+ if (!ignoreFirstLast || (start != 0 && start != length - 1)) {
count++;
+ }
start++;
}
return count;
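A quick worked example of the corrected bounds check (count is package-private, so the call below only compiles from within org.eclipse.jgit.ignore.internal; the commentary is a hand trace, not program output):

    // s = "/a/b/", c = '/', ignoreFirstLast = true; slashes at indices 0, 2 and 4.
    // Index 0 is skipped as the first character, index 4 (== length - 1) as the last,
    // so only the slash at index 2 is counted. The old test compared against length,
    // a value indexOf can never return, so the trailing slash used to be counted too.
    int separators = Strings.count("/a/b/", '/', true); // expected: 1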
@@ -360,7 +363,10 @@ public class Strings {
case '[':
if (in_brackets > 0) {
- sb.append('\\').append('[');
+ if (!seenEscape) {
+ sb.append('\\');
+ }
+ sb.append('[');
ignoreLastBracket = true;
} else {
if (!seenEscape) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/WildMatcher.java b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/WildMatcher.java
index 93ea13c726..363b3cee84 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/WildMatcher.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/WildMatcher.java
@@ -62,7 +62,8 @@ public final class WildMatcher extends AbstractMatcher {
}
@Override
- public final boolean matches(String path, boolean assumeDirectory) {
+ public final boolean matches(String path, boolean assumeDirectory,
+ boolean pathMatch) {
return true;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
index a8dfc2d2a2..ec19cdc2c2 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
@@ -148,6 +148,7 @@ public class JGitText extends TranslationBundle {
/***/ public String cannotParseGitURIish;
/***/ public String cannotPullOnARepoWithState;
/***/ public String cannotRead;
+ /***/ public String cannotReadBackDelta;
/***/ public String cannotReadBlob;
/***/ public String cannotReadCommit;
/***/ public String cannotReadFile;
@@ -180,6 +181,7 @@ public class JGitText extends TranslationBundle {
/***/ public String closeLockTokenFailed;
/***/ public String closed;
/***/ public String collisionOn;
+ /***/ public String commandClosedStderrButDidntExit;
/***/ public String commandRejectedByHook;
/***/ public String commandWasCalledInTheWrongState;
/***/ public String commitAlreadyExists;
@@ -270,12 +272,14 @@ public class JGitText extends TranslationBundle {
/***/ public String createBranchFailedUnknownReason;
/***/ public String createBranchUnexpectedResult;
/***/ public String createNewFileFailed;
+ /***/ public String createRequiresZeroOldId;
/***/ public String credentialPassword;
/***/ public String credentialUsername;
/***/ public String daemonAlreadyRunning;
/***/ public String daysAgo;
/***/ public String deleteBranchUnexpectedResult;
/***/ public String deleteFileFailed;
+ /***/ public String deleteRequiresZeroNewId;
/***/ public String deleteTagUnexpectedResult;
/***/ public String deletingNotSupported;
/***/ public String destinationIsNotAWildcard;
@@ -304,6 +308,7 @@ public class JGitText extends TranslationBundle {
/***/ public String encryptionOnlyPBE;
/***/ public String endOfFileInEscape;
/***/ public String entryNotFoundByPath;
+ /***/ public String enumValueNotSupported0;
/***/ public String enumValueNotSupported2;
/***/ public String enumValueNotSupported3;
/***/ public String enumValuesNotAvailable;
@@ -319,6 +324,7 @@ public class JGitText extends TranslationBundle {
/***/ public String exceptionCaughtDuringExecutionOfAddCommand;
/***/ public String exceptionCaughtDuringExecutionOfArchiveCommand;
/***/ public String exceptionCaughtDuringExecutionOfCherryPickCommand;
+ /***/ public String exceptionCaughtDuringExecutionOfCommand;
/***/ public String exceptionCaughtDuringExecutionOfCommitCommand;
/***/ public String exceptionCaughtDuringExecutionOfFetchCommand;
/***/ public String exceptionCaughtDuringExecutionOfLsRemoteCommand;
@@ -329,7 +335,6 @@ public class JGitText extends TranslationBundle {
/***/ public String exceptionCaughtDuringExecutionOfRevertCommand;
/***/ public String exceptionCaughtDuringExecutionOfRmCommand;
/***/ public String exceptionCaughtDuringExecutionOfTagCommand;
- /***/ public String exceptionCaughtDuringExcecutionOfCommand;
/***/ public String exceptionHookExecutionInterrupted;
/***/ public String exceptionOccurredDuringAddingOfOptionToALogCommand;
/***/ public String exceptionOccurredDuringReadingOfGIT_DIR;
@@ -367,6 +372,8 @@ public class JGitText extends TranslationBundle {
/***/ public String gitmodulesNotFound;
/***/ public String headRequiredToStash;
/***/ public String hoursAgo;
+ /***/ public String httpConfigCannotNormalizeURL;
+ /***/ public String httpConfigInvalidURL;
/***/ public String hugeIndexesAreNotSupportedByJgitYet;
/***/ public String hunkBelongsToAnotherFile;
/***/ public String hunkDisconnectedFromFile;
@@ -425,11 +432,16 @@ public class JGitText extends TranslationBundle {
/***/ public String invalidPathPeriodAtEndWindows;
/***/ public String invalidPathSpaceAtEndWindows;
/***/ public String invalidPathReservedOnWindows;
+ /***/ public String invalidRedirectLocation;
/***/ public String invalidReflogRevision;
/***/ public String invalidRefName;
+ /***/ public String invalidReftableBlock;
+ /***/ public String invalidReftableCRC;
+ /***/ public String invalidReftableFile;
/***/ public String invalidRemote;
/***/ public String invalidShallowObject;
/***/ public String invalidStageForPath;
+ /***/ public String invalidSystemProperty;
/***/ public String invalidTagOption;
/***/ public String invalidTimeout;
/***/ public String invalidTimeUnitValue2;
@@ -469,8 +481,11 @@ public class JGitText extends TranslationBundle {
/***/ public String mergeRecursiveTooManyMergeBasesFor;
/***/ public String messageAndTaggerNotAllowedInUnannotatedTags;
/***/ public String minutesAgo;
+ /***/ public String mismatchOffset;
+ /***/ public String mismatchCRC;
/***/ public String missingAccesskey;
/***/ public String missingConfigurationForKey;
+ /***/ public String missingCRC;
/***/ public String missingDeltaBase;
/***/ public String missingForwardImageInGITBinaryPatch;
/***/ public String missingObject;
@@ -488,6 +503,7 @@ public class JGitText extends TranslationBundle {
/***/ public String needPackOut;
/***/ public String needsAtLeastOneEntry;
/***/ public String needsWorkdir;
+ /***/ public String newIdMustNotBeNull;
/***/ public String newlineInQuotesNotAllowed;
/***/ public String noApplyInDelete;
/***/ public String noClosingBracket;
@@ -521,6 +537,7 @@ public class JGitText extends TranslationBundle {
/***/ public String objectNotFoundIn;
/***/ public String obtainingCommitsForCherryPick;
/***/ public String offsetWrittenDeltaBaseForObjectNotFoundInAPack;
+ /***/ public String oldIdMustNotBeNull;
/***/ public String onlyAlreadyUpToDateAndFastForwardMergesAreAvailable;
/***/ public String onlyOneFetchSupported;
/***/ public String onlyOneOperationCallPerConnectionIsSupported;
@@ -528,6 +545,7 @@ public class JGitText extends TranslationBundle {
/***/ public String openingConnection;
/***/ public String operationCanceled;
/***/ public String outputHasAlreadyBeenStarted;
+ /***/ public String overflowedReftableBlock;
/***/ public String packChecksumMismatch;
/***/ public String packCorruptedWhileWritingToFilesystem;
/***/ public String packDoesNotMatchIndex;
@@ -555,6 +573,7 @@ public class JGitText extends TranslationBundle {
/***/ public String pathIsNotInWorkingDir;
/***/ public String pathNotConfigured;
/***/ public String peeledLineBeforeRef;
+ /***/ public String peeledRefIsRequired;
/***/ public String peerDidNotSupplyACompleteObjectGraph;
/***/ public String personIdentEmailNonNull;
/***/ public String personIdentNameNonNull;
@@ -583,6 +602,11 @@ public class JGitText extends TranslationBundle {
/***/ public String receivePackInvalidLimit;
/***/ public String receivePackTooLarge;
/***/ public String receivingObjects;
+ /***/ public String redirectBlocked;
+ /***/ public String redirectHttp;
+ /***/ public String redirectLimitExceeded;
+ /***/ public String redirectLocationMissing;
+ /***/ public String redirectsOff;
/***/ public String refAlreadyExists;
/***/ public String refAlreadyExists1;
/***/ public String reflogEntryNotFound;
@@ -648,6 +672,15 @@ public class JGitText extends TranslationBundle {
/***/ public String sourceRefDoesntResolveToAnyObject;
/***/ public String sourceRefNotSpecifiedForRefspec;
/***/ public String squashCommitNotUpdatingHEAD;
+ /***/ public String sshUserNameError;
+ /***/ public String sslFailureExceptionMessage;
+ /***/ public String sslFailureInfo;
+ /***/ public String sslFailureCause;
+ /***/ public String sslFailureTrustExplanation;
+ /***/ public String sslTrustAlways;
+ /***/ public String sslTrustForRepo;
+ /***/ public String sslTrustNow;
+ /***/ public String sslVerifyCannotSave;
/***/ public String staleRevFlagsOn;
/***/ public String startingReadStageWithoutWrittenRequestDataPendingIsNotSupported;
/***/ public String stashApplyConflict;
@@ -659,6 +692,7 @@ public class JGitText extends TranslationBundle {
/***/ public String stashDropDeleteRefFailed;
/***/ public String stashDropFailed;
/***/ public String stashDropMissingReflog;
+ /***/ public String stashDropNotSupported;
/***/ public String stashFailed;
/***/ public String stashResolveFailed;
/***/ public String statelessRPCRequiresOptionToBeEnabled;
@@ -676,6 +710,7 @@ public class JGitText extends TranslationBundle {
/***/ public String tagOnRepoWithoutHEADCurrentlyNotSupported;
/***/ public String transactionAborted;
/***/ public String theFactoryMustNotBeNull;
+ /***/ public String threadInterruptedWhileRunning;
/***/ public String timeIsUncertain;
/***/ public String timerAlreadyTerminated;
/***/ public String tooManyCommands;
@@ -712,10 +747,12 @@ public class JGitText extends TranslationBundle {
/***/ public String unableToStore;
/***/ public String unableToWrite;
/***/ public String unauthorized;
+ /***/ public String underflowedReftableBlock;
/***/ public String unencodeableFile;
/***/ public String unexpectedCompareResult;
/***/ public String unexpectedEndOfConfigFile;
/***/ public String unexpectedEndOfInput;
+ /***/ public String unexpectedEofInPack;
/***/ public String unexpectedHunkTrailer;
/***/ public String unexpectedOddResult;
/***/ public String unexpectedRefReport;
@@ -726,6 +763,7 @@ public class JGitText extends TranslationBundle {
/***/ public String unknownHost;
/***/ public String unknownIndexVersionOrCorruptIndex;
/***/ public String unknownObject;
+ /***/ public String unknownObjectInIndex;
/***/ public String unknownObjectType;
/***/ public String unknownObjectType2;
/***/ public String unknownRepositoryFormat;
@@ -748,7 +786,9 @@ public class JGitText extends TranslationBundle {
/***/ public String unsupportedOperationNotAddAtEnd;
/***/ public String unsupportedPackIndexVersion;
/***/ public String unsupportedPackVersion;
+ /***/ public String unsupportedReftableVersion;
/***/ public String unsupportedRepositoryDescription;
+ /***/ public String updateRequiresOldIdAndNewId;
/***/ public String updatingHeadFailed;
/***/ public String updatingReferences;
/***/ public String updatingRefFailed;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckError.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckError.java
new file mode 100644
index 0000000000..588ed9bf8a
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckError.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.internal.fsck;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.errors.CorruptPackIndexException;
+import org.eclipse.jgit.errors.CorruptPackIndexException.ErrorType;
+import org.eclipse.jgit.lib.ObjectChecker;
+import org.eclipse.jgit.lib.ObjectId;
+
+/** Holds all fsck errors of a git repository. */
+public class FsckError {
+ /** Represents a corrupt object. */
+ public static class CorruptObject {
+ final ObjectId id;
+
+ final int type;
+
+ ObjectChecker.ErrorType errorType;
+
+ /**
+ * @param id
+ * the object identifier.
+ * @param type
+ * type of the object.
+ */
+ public CorruptObject(ObjectId id, int type) {
+ this.id = id;
+ this.type = type;
+ }
+
+ void setErrorType(ObjectChecker.ErrorType errorType) {
+ this.errorType = errorType;
+ }
+
+ /** @return identifier of the object. */
+ public ObjectId getId() {
+ return id;
+ }
+
+ /** @return type of the object. */
+ public int getType() {
+ return type;
+ }
+
+ /** @return error type of the corruption. */
+ @Nullable
+ public ObjectChecker.ErrorType getErrorType() {
+ return errorType;
+ }
+ }
+
+ /** Represents a corrupt pack index file. */
+ public static class CorruptIndex {
+ String fileName;
+
+ CorruptPackIndexException.ErrorType errorType;
+
+ /**
+ * @param fileName
+ * the file name of the pack index.
+ * @param errorType
+ * the type of error as reported in
+ * {@link CorruptPackIndexException}.
+ */
+ public CorruptIndex(String fileName, ErrorType errorType) {
+ this.fileName = fileName;
+ this.errorType = errorType;
+ }
+
+ /** @return the file name of the index file. */
+ public String getFileName() {
+ return fileName;
+ }
+
+ /** @return the error type of the corruption. */
+ public ErrorType getErrorType() {
+ return errorType;
+ }
+ }
+
+ private final Set<CorruptObject> corruptObjects = new HashSet<>();
+
+ private final Set<ObjectId> missingObjects = new HashSet<>();
+
+ private final Set<CorruptIndex> corruptIndices = new HashSet<>();
+
+ private final Set<String> nonCommitHeads = new HashSet<>();
+
+ /** @return corrupt objects from all pack files. */
+ public Set<CorruptObject> getCorruptObjects() {
+ return corruptObjects;
+ }
+
+ /** @return missing objects that should be present in pack files. */
+ public Set<ObjectId> getMissingObjects() {
+ return missingObjects;
+ }
+
+ /** @return corrupt index files associated with the packs. */
+ public Set<CorruptIndex> getCorruptIndices() {
+ return corruptIndices;
+ }
+
+ /** @return refs under refs/heads/* that point to non-commit objects. */
+ public Set<String> getNonCommitHeads() {
+ return nonCommitHeads;
+ }
+}
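A minimal sketch of how a caller might walk an FsckError once a check has produced one; only the getters shown above are used, and the message strings are illustrative.

    import org.eclipse.jgit.internal.fsck.FsckError;
    import org.eclipse.jgit.internal.fsck.FsckError.CorruptIndex;
    import org.eclipse.jgit.internal.fsck.FsckError.CorruptObject;
    import org.eclipse.jgit.lib.ObjectId;

    class FsckReport {
        static void print(FsckError errors) {
            for (CorruptObject o : errors.getCorruptObjects()) {
                System.err.println("corrupt object " + o.getId().name()
                        + " (type " + o.getType() + "): " + o.getErrorType());
            }
            for (ObjectId id : errors.getMissingObjects()) {
                System.err.println("missing object " + id.name());
            }
            for (CorruptIndex idx : errors.getCorruptIndices()) {
                System.err.println("corrupt index " + idx.getFileName() + ": "
                        + idx.getErrorType());
            }
            for (String head : errors.getNonCommitHeads()) {
                System.err.println(head + " does not point to a commit");
            }
        }
    }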
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckPackParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckPackParser.java
new file mode 100644
index 0000000000..3a678a72df
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckPackParser.java
@@ -0,0 +1,336 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.fsck;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
+import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.zip.CRC32;
+
+import org.eclipse.jgit.errors.CorruptObjectException;
+import org.eclipse.jgit.errors.CorruptPackIndexException;
+import org.eclipse.jgit.errors.CorruptPackIndexException.ErrorType;
+import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.fsck.FsckError.CorruptObject;
+import org.eclipse.jgit.internal.storage.dfs.ReadableChannel;
+import org.eclipse.jgit.internal.storage.file.PackIndex;
+import org.eclipse.jgit.internal.storage.file.PackIndex.MutableEntry;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.ObjectChecker;
+import org.eclipse.jgit.lib.ObjectDatabase;
+import org.eclipse.jgit.lib.ObjectIdOwnerMap;
+import org.eclipse.jgit.transport.PackParser;
+import org.eclipse.jgit.transport.PackedObjectInfo;
+
+/** A read-only pack parser for object validity checking. */
+public class FsckPackParser extends PackParser {
+ private final CRC32 crc;
+
+ private final ReadableChannel channel;
+
+ private final Set<CorruptObject> corruptObjects = new HashSet<>();
+
+ private long expectedObjectCount = -1L;
+
+ private long offset;
+
+ private int blockSize;
+
+ /**
+ * @param db
+ * the object database which stores repository's data.
+ * @param channel
+ * readable channel of the pack file.
+ */
+ public FsckPackParser(ObjectDatabase db, ReadableChannel channel) {
+ super(db, Channels.newInputStream(channel));
+ this.channel = channel;
+ setCheckObjectCollisions(false);
+ this.crc = new CRC32();
+ this.blockSize = channel.blockSize() > 0 ? channel.blockSize() : 65536;
+ }
+
+ @Override
+ protected void onPackHeader(long objCnt) throws IOException {
+ if (expectedObjectCount >= 0) {
+ // Some DFS pack files don't have the correct object count in their
+ // headers, e.g. INSERT/RECEIVE packs. Overwrite the expected object
+ // count after parsing the pack header.
+ setExpectedObjectCount(expectedObjectCount);
+ }
+ }
+
+ @Override
+ protected void onBeginWholeObject(long streamPosition, int type,
+ long inflatedSize) throws IOException {
+ crc.reset();
+ }
+
+ @Override
+ protected void onObjectHeader(Source src, byte[] raw, int pos, int len)
+ throws IOException {
+ crc.update(raw, pos, len);
+ }
+
+ @Override
+ protected void onObjectData(Source src, byte[] raw, int pos, int len)
+ throws IOException {
+ crc.update(raw, pos, len);
+ }
+
+ @Override
+ protected void onEndWholeObject(PackedObjectInfo info) throws IOException {
+ info.setCRC((int) crc.getValue());
+ }
+
+ @Override
+ protected void onBeginOfsDelta(long deltaStreamPosition,
+ long baseStreamPosition, long inflatedSize) throws IOException {
+ crc.reset();
+ }
+
+ @Override
+ protected void onBeginRefDelta(long deltaStreamPosition, AnyObjectId baseId,
+ long inflatedSize) throws IOException {
+ crc.reset();
+ }
+
+ @Override
+ protected UnresolvedDelta onEndDelta() throws IOException {
+ UnresolvedDelta delta = new UnresolvedDelta();
+ delta.setCRC((int) crc.getValue());
+ return delta;
+ }
+
+ @Override
+ protected void onInflatedObjectData(PackedObjectInfo obj, int typeCode,
+ byte[] data) throws IOException {
+ // FsckPackParser ignores this event.
+ }
+
+ @Override
+ protected void verifySafeObject(final AnyObjectId id, final int type,
+ final byte[] data) {
+ try {
+ super.verifySafeObject(id, type, data);
+ } catch (CorruptObjectException e) {
+ // catch the exception and continue parsing the pack file
+ CorruptObject o = new CorruptObject(id.toObjectId(), type);
+ if (e.getErrorType() != null) {
+ o.setErrorType(e.getErrorType());
+ }
+ corruptObjects.add(o);
+ }
+ }
+
+ @Override
+ protected void onPackFooter(byte[] hash) throws IOException {
+ // Do nothing.
+ }
+
+ @Override
+ protected boolean onAppendBase(int typeCode, byte[] data,
+ PackedObjectInfo info) throws IOException {
+ // Do nothing.
+ return false;
+ }
+
+ @Override
+ protected void onEndThinPack() throws IOException {
+ // Do nothing.
+ }
+
+ @Override
+ protected ObjectTypeAndSize seekDatabase(PackedObjectInfo obj,
+ ObjectTypeAndSize info) throws IOException {
+ crc.reset();
+ offset = obj.getOffset();
+ return readObjectHeader(info);
+ }
+
+ @Override
+ protected ObjectTypeAndSize seekDatabase(UnresolvedDelta delta,
+ ObjectTypeAndSize info) throws IOException {
+ crc.reset();
+ offset = delta.getOffset();
+ return readObjectHeader(info);
+ }
+
+ @Override
+ protected int readDatabase(byte[] dst, int pos, int cnt)
+ throws IOException {
+ // read from input instead of database.
+ int n = read(offset, dst, pos, cnt);
+ if (n > 0) {
+ offset += n;
+ }
+ return n;
+ }
+
+ int read(long channelPosition, byte[] dst, int pos, int cnt)
+ throws IOException {
+ long block = channelPosition / blockSize;
+ byte[] bytes = readFromChannel(block);
+ if (bytes == null) {
+ return -1;
+ }
+ int offs = (int) (channelPosition - block * blockSize);
+ int bytesToCopy = Math.min(cnt, bytes.length - offs);
+ if (bytesToCopy < 1) {
+ return -1;
+ }
+ System.arraycopy(bytes, offs, dst, pos, bytesToCopy);
+ return bytesToCopy;
+ }
+
+ private byte[] readFromChannel(long block) throws IOException {
+ channel.position(block * blockSize);
+ ByteBuffer buf = ByteBuffer.allocate(blockSize);
+ int totalBytesRead = 0;
+ while (totalBytesRead < blockSize) {
+ int bytesRead = channel.read(buf);
+ if (bytesRead == -1) {
+ if (totalBytesRead == 0) {
+ return null;
+ }
+ return Arrays.copyOf(buf.array(), totalBytesRead);
+ }
+ totalBytesRead += bytesRead;
+ }
+ return buf.array();
+ }
+
+ @Override
+ protected boolean checkCRC(int oldCRC) {
+ return oldCRC == (int) crc.getValue();
+ }
+
+ @Override
+ protected void onStoreStream(byte[] raw, int pos, int len)
+ throws IOException {
+ // Do nothing.
+ }
+
+ /**
+ * @return corrupt objects that were reported by {@link ObjectChecker}.
+ */
+ public Set<CorruptObject> getCorruptObjects() {
+ return corruptObjects;
+ }
+
+ /**
+ * Verify the existing index file with all objects from the pack.
+ *
+ * @param idx
+ * the index file associated with the pack
+ * @throws CorruptPackIndexException
+ * when the index file is corrupt.
+ */
+ public void verifyIndex(PackIndex idx)
+ throws CorruptPackIndexException {
+ ObjectIdOwnerMap<ObjFromPack> inPack = new ObjectIdOwnerMap<>();
+ for (int i = 0; i < getObjectCount(); i++) {
+ PackedObjectInfo entry = getObject(i);
+ inPack.add(new ObjFromPack(entry));
+
+ long offs = idx.findOffset(entry);
+ if (offs == -1) {
+ throw new CorruptPackIndexException(
+ MessageFormat.format(JGitText.get().missingObject,
+ Integer.valueOf(entry.getType()),
+ entry.getName()),
+ ErrorType.MISSING_OBJ);
+ } else if (offs != entry.getOffset()) {
+ throw new CorruptPackIndexException(MessageFormat
+ .format(JGitText.get().mismatchOffset, entry.getName()),
+ ErrorType.MISMATCH_OFFSET);
+ }
+
+ try {
+ if (idx.hasCRC32Support()
+ && (int) idx.findCRC32(entry) != entry.getCRC()) {
+ throw new CorruptPackIndexException(
+ MessageFormat.format(JGitText.get().mismatchCRC,
+ entry.getName()),
+ ErrorType.MISMATCH_CRC);
+ }
+ } catch (MissingObjectException e) {
+ throw new CorruptPackIndexException(MessageFormat
+ .format(JGitText.get().missingCRC, entry.getName()),
+ ErrorType.MISSING_CRC);
+ }
+ }
+
+ for (MutableEntry entry : idx) {
+ if (!inPack.contains(entry.toObjectId())) {
+ throw new CorruptPackIndexException(MessageFormat.format(
+ JGitText.get().unknownObjectInIndex, entry.name()),
+ ErrorType.UNKNOWN_OBJ);
+ }
+ }
+ }
+
+ /**
+ * Set the object count used to overwrite the expected object count from
+ * the pack header.
+ *
+ * @param objectCount
+ * the actual expected object count.
+ */
+ public void overwriteObjectCount(long objectCount) {
+ this.expectedObjectCount = objectCount;
+ }
+
+ static class ObjFromPack extends ObjectIdOwnerMap.Entry {
+ ObjFromPack(AnyObjectId id) {
+ super(id);
+ }
+ }
+}
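The read() helper above translates an absolute pack offset into a block index plus an offset within that block; a small worked example with the 64 KiB fallback block size used in the constructor:

    // blockSize = 65536, channelPosition = 70000
    long block = 70000L / 65536;                 // 1 -> the second block is fetched
    int offs = (int) (70000L - block * 65536);   // 4464 -> copying starts 4464 bytes in
    // At most min(cnt, 65536 - 4464) bytes are copied before the next call advances
    // the offset and the following block is read.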
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/package-info.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/package-info.java
new file mode 100644
index 0000000000..361b61fb0e
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Git fsck support.
+ */
+package org.eclipse.jgit.internal.fsck;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/BlockBasedFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/BlockBasedFile.java
new file mode 100644
index 0000000000..b9758bd64e
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/BlockBasedFile.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.text.MessageFormat;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.errors.PackInvalidException;
+import org.eclipse.jgit.internal.storage.pack.PackExt;
+
+/** Block based file stored in {@link DfsBlockCache}. */
+abstract class BlockBasedFile {
+ /** Cache that owns this file and its data. */
+ final DfsBlockCache cache;
+
+ /** Unique identity of this file while in-memory. */
+ final DfsStreamKey key;
+
+ /** Description of the associated pack file's storage. */
+ final DfsPackDescription desc;
+ final PackExt ext;
+
+ /**
+ * Preferred alignment for loading blocks from the backing file.
+ * <p>
+ * It is initialized to 0 and filled in on the first read made from the
+ * file. Block sizes may be odd, e.g. 4091, caused by the underlying DFS
+ * storing 4091 user bytes and 5 bytes block metadata into a lower level
+ * 4096 byte block on disk.
+ */
+ volatile int blockSize;
+
+ /**
+ * Total number of bytes in this pack file.
+ * <p>
+ * This field initializes to -1 and gets populated when a block is loaded.
+ */
+ volatile long length;
+
+ /** True once corruption has been detected that cannot be worked around. */
+ volatile boolean invalid;
+
+ BlockBasedFile(DfsBlockCache cache, DfsPackDescription desc, PackExt ext) {
+ this.cache = cache;
+ this.key = desc.getStreamKey(ext);
+ this.desc = desc;
+ this.ext = ext;
+ }
+
+ String getFileName() {
+ return desc.getFileName(ext);
+ }
+
+ boolean invalid() {
+ return invalid;
+ }
+
+ void setInvalid() {
+ invalid = true;
+ }
+
+ void setBlockSize(int newSize) {
+ blockSize = newSize;
+ }
+
+ long alignToBlock(long pos) {
+ int size = blockSize;
+ if (size == 0)
+ size = cache.getBlockSize();
+ return (pos / size) * size;
+ }
+
+ int blockSize(ReadableChannel rc) {
+ // If the block alignment is not yet known, discover it. Prefer the
+ // larger size from either the cache or the file itself.
+ int size = blockSize;
+ if (size == 0) {
+ size = rc.blockSize();
+ if (size <= 0)
+ size = cache.getBlockSize();
+ else if (size < cache.getBlockSize())
+ size = (cache.getBlockSize() / size) * size;
+ blockSize = size;
+ }
+ return size;
+ }
+
+ DfsBlock getOrLoadBlock(long pos, DfsReader ctx) throws IOException {
+ return cache.getOrLoad(this, pos, ctx, null);
+ }
+
+ DfsBlock readOneBlock(long pos, DfsReader ctx,
+ @Nullable ReadableChannel fileChannel) throws IOException {
+ if (invalid)
+ throw new PackInvalidException(getFileName());
+
+ ctx.stats.readBlock++;
+ long start = System.nanoTime();
+ ReadableChannel rc = fileChannel != null ? fileChannel
+ : ctx.db.openFile(desc, ext);
+ try {
+ int size = blockSize(rc);
+ pos = (pos / size) * size;
+
+ // If the size of the file is not yet known, try to discover it.
+ // Channels may choose to return -1 to indicate they don't
+ // know the length yet; in that case read up to the size unit
+ // given by the caller, then recheck the length.
+ long len = length;
+ if (len < 0) {
+ len = rc.size();
+ if (0 <= len)
+ length = len;
+ }
+
+ if (0 <= len && len < pos + size)
+ size = (int) (len - pos);
+ if (size <= 0)
+ throw new EOFException(MessageFormat.format(
+ DfsText.get().shortReadOfBlock, Long.valueOf(pos),
+ getFileName(), Long.valueOf(0), Long.valueOf(0)));
+
+ byte[] buf = new byte[size];
+ rc.position(pos);
+ int cnt = read(rc, ByteBuffer.wrap(buf, 0, size));
+ ctx.stats.readBlockBytes += cnt;
+ if (cnt != size) {
+ if (0 <= len) {
+ throw new EOFException(MessageFormat.format(
+ DfsText.get().shortReadOfBlock, Long.valueOf(pos),
+ getFileName(), Integer.valueOf(size),
+ Integer.valueOf(cnt)));
+ }
+
+ // Assume the entire thing was read in a single shot, compact
+ // the buffer to only the space required.
+ byte[] n = new byte[cnt];
+ System.arraycopy(buf, 0, n, 0, n.length);
+ buf = n;
+ } else if (len < 0) {
+ // With no length at the start of the read, the channel should
+ // have the length available at the end.
+ length = len = rc.size();
+ }
+
+ return new DfsBlock(key, pos, buf);
+ } finally {
+ if (rc != fileChannel) {
+ rc.close();
+ }
+ ctx.stats.readBlockMicros += elapsedMicros(start);
+ }
+ }
+
+ static int read(ReadableChannel rc, ByteBuffer buf) throws IOException {
+ int n;
+ do {
+ n = rc.read(buf);
+ } while (0 < n && buf.hasRemaining());
+ return buf.position();
+ }
+
+ static long elapsedMicros(long start) {
+ return (System.nanoTime() - start) / 1000L;
+ }
+}
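To make the block-size negotiation above concrete, a worked example with the odd DFS block size mentioned in the field comment (4091 user bytes) and a 64 KiB cache block size; the numbers are illustrative only:

    // blockSize(rc): the cache block size is rounded down to a multiple of the
    // file's own block size.
    int fileBlock = 4091;
    int cacheBlock = 65536;
    int size = (cacheBlock / fileBlock) * fileBlock;   // 16 * 4091 = 65456
    // alignToBlock(70000) with that size:
    long aligned = (70000L / size) * size;             // 65456, the start of block #1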
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DeltaBaseCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DeltaBaseCache.java
index 64a63d7c75..bd4b4d23f4 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DeltaBaseCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DeltaBaseCache.java
@@ -75,7 +75,7 @@ final class DeltaBaseCache {
table = new Entry[1 << TABLE_BITS];
}
- Entry get(DfsPackKey key, long position) {
+ Entry get(DfsStreamKey key, long position) {
Entry e = table[hash(position)];
for (; e != null; e = e.tableNext) {
if (e.offset == position && key.equals(e.pack)) {
@@ -86,7 +86,7 @@ final class DeltaBaseCache {
return null;
}
- void put(DfsPackKey key, long offset, int objectType, byte[] data) {
+ void put(DfsStreamKey key, long offset, int objectType, byte[] data) {
if (data.length > maxByteCount)
return; // Too large to cache.
@@ -189,7 +189,7 @@ final class DeltaBaseCache {
}
static class Entry {
- final DfsPackKey pack;
+ final DfsStreamKey pack;
final long offset;
final int type;
final byte[] data;
@@ -198,7 +198,7 @@ final class DeltaBaseCache {
Entry lruPrev;
Entry lruNext;
- Entry(DfsPackKey key, long offset, int type, byte[] data) {
+ Entry(DfsStreamKey key, long offset, int type, byte[] data) {
this.pack = key;
this.offset = offset;
this.type = type;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlock.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlock.java
index 4a33fb87e1..62a9be3e5c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlock.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlock.java
@@ -46,24 +46,22 @@
package org.eclipse.jgit.internal.storage.dfs;
import java.io.IOException;
+import java.nio.ByteBuffer;
import java.util.zip.CRC32;
import java.util.zip.DataFormatException;
import java.util.zip.Inflater;
import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
-/** A cached slice of a {@link DfsPackFile}. */
+/** A cached slice of a {@link BlockBasedFile}. */
final class DfsBlock {
- final DfsPackKey pack;
-
+ final DfsStreamKey stream;
final long start;
-
final long end;
-
private final byte[] block;
- DfsBlock(DfsPackKey p, long pos, byte[] buf) {
- pack = p;
+ DfsBlock(DfsStreamKey p, long pos, byte[] buf) {
+ stream = p;
start = pos;
end = pos + buf.length;
block = buf;
@@ -73,8 +71,14 @@ final class DfsBlock {
return block.length;
}
- boolean contains(DfsPackKey want, long pos) {
- return pack == want && start <= pos && pos < end;
+ ByteBuffer zeroCopyByteBuffer(int n) {
+ ByteBuffer b = ByteBuffer.wrap(block);
+ b.position(n);
+ return b;
+ }
+
+ boolean contains(DfsStreamKey want, long pos) {
+ return stream.equals(want) && start <= pos && pos < end;
}
int copy(long pos, byte[] dstbuf, int dstoff, int cnt) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
index 6fff656e7b..45202b5b08 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
@@ -45,23 +45,20 @@
package org.eclipse.jgit.internal.storage.dfs;
import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.concurrent.locks.ReentrantLock;
+import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.internal.JGitText;
/**
- * Caches slices of a {@link DfsPackFile} in memory for faster read access.
+ * Caches slices of a {@link BlockBasedFile} in memory for faster read access.
* <p>
* The DfsBlockCache serves as a Java based "buffer cache", loading segments of
- * a DfsPackFile into the JVM heap prior to use. As JGit often wants to do reads
- * of only tiny slices of a file, the DfsBlockCache tries to smooth out these
- * tiny reads into larger block-sized IO operations.
+ * a BlockBasedFile into the JVM heap prior to use. As JGit often wants to do
+ * reads of only tiny slices of a file, the DfsBlockCache tries to smooth out
+ * these tiny reads into larger block-sized IO operations.
* <p>
* Whenever a cache miss occurs, loading is invoked by exactly one thread for
* the given <code>(DfsPackKey,position)</code> key tuple. This is ensured by an
@@ -108,14 +105,7 @@ public final class DfsBlockCache {
* settings, usually too low of a limit.
*/
public static void reconfigure(DfsBlockCacheConfig cfg) {
- DfsBlockCache nc = new DfsBlockCache(cfg);
- DfsBlockCache oc = cache;
- cache = nc;
-
- if (oc != null) {
- for (DfsPackFile pack : oc.getPackFiles())
- pack.key.cachedSize.set(0);
- }
+ cache = new DfsBlockCache(cfg);
}
/** @return the currently active DfsBlockCache. */
@@ -153,12 +143,6 @@ public final class DfsBlockCache {
/** As {@link #blockSize} is a power of 2, bits to shift for a / blockSize. */
private final int blockSizeShift;
- /** Cache of pack files, indexed by description. */
- private final Map<DfsPackDescription, DfsPackFile> packCache;
-
- /** View of pack files in the pack cache. */
- private final Collection<DfsPackFile> packFiles;
-
/** Number of times a block was found in the cache. */
private final AtomicLong statHit;
@@ -194,13 +178,12 @@ public final class DfsBlockCache {
blockSizeShift = Integer.numberOfTrailingZeros(blockSize);
clockLock = new ReentrantLock(true /* fair */);
- clockHand = new Ref<>(new DfsPackKey(), -1, 0, null);
+ String none = ""; //$NON-NLS-1$
+ clockHand = new Ref<>(
+ DfsStreamKey.of(new DfsRepositoryDescription(none), none),
+ -1, 0, null);
clockHand.next = clockHand;
- packCache = new ConcurrentHashMap<>(
- 16, 0.75f, 1);
- packFiles = Collections.unmodifiableCollection(packCache.values());
-
statHit = new AtomicLong();
statMiss = new AtomicLong();
}
@@ -249,38 +232,6 @@ public final class DfsBlockCache {
return statEvict;
}
- /**
- * Get the pack files stored in this cache.
- *
- * @return a collection of pack files, some of which may not actually be
- * present; the caller should check the pack's cached size.
- */
- public Collection<DfsPackFile> getPackFiles() {
- return packFiles;
- }
-
- DfsPackFile getOrCreate(DfsPackDescription dsc, DfsPackKey key) {
- // TODO This table grows without bound. It needs to clean up
- // entries that aren't in cache anymore, and aren't being used
- // by a live DfsObjDatabase reference.
-
- DfsPackFile pack = packCache.get(dsc);
- if (pack != null && !pack.invalid()) {
- return pack;
- }
-
- // 'pack' either didn't exist or was invalid. Compute a new
- // entry atomically (guaranteed by ConcurrentHashMap).
- return packCache.compute(dsc, (k, v) -> {
- if (v != null && !v.invalid()) { // valid value added by
- return v; // another thread
- } else {
- return new DfsPackFile(
- this, dsc, key != null ? key : new DfsPackKey());
- }
- });
- }
-
private int hash(int packHash, long off) {
return packHash + (int) (off >>> blockSizeShift);
}
@@ -302,26 +253,28 @@ public final class DfsBlockCache {
/**
* Lookup a cached object, creating and loading it if it doesn't exist.
*
- * @param pack
+ * @param file
* the pack that "contains" the cached object.
* @param position
* offset within <code>pack</code> of the object.
* @param ctx
* current thread's reader.
+ * @param fileChannel
+ * optional channel to read {@code pack}.
* @return the object reference.
* @throws IOException
* the reference was not in the cache and could not be loaded.
*/
- DfsBlock getOrLoad(DfsPackFile pack, long position, DfsReader ctx)
- throws IOException {
+ DfsBlock getOrLoad(BlockBasedFile file, long position, DfsReader ctx,
+ @Nullable ReadableChannel fileChannel) throws IOException {
final long requestedPosition = position;
- position = pack.alignToBlock(position);
+ position = file.alignToBlock(position);
- DfsPackKey key = pack.key;
+ DfsStreamKey key = file.key;
int slot = slot(key, position);
HashEntry e1 = table.get(slot);
DfsBlock v = scan(e1, key, position);
- if (v != null) {
+ if (v != null && v.contains(key, requestedPosition)) {
ctx.stats.blockCacheHit++;
statHit.incrementAndGet();
return v;
@@ -345,7 +298,7 @@ public final class DfsBlockCache {
statMiss.incrementAndGet();
boolean credit = true;
try {
- v = pack.readOneBlock(position, ctx);
+ v = file.readOneBlock(requestedPosition, ctx, fileChannel);
credit = false;
} finally {
if (credit)
@@ -358,7 +311,6 @@ public final class DfsBlockCache {
e2 = table.get(slot);
}
- key.cachedSize.addAndGet(v.size());
Ref<DfsBlock> ref = new Ref<>(key, position, v.size(), v);
ref.hot = true;
for (;;) {
@@ -374,9 +326,9 @@ public final class DfsBlockCache {
// If the block size changed from the default, it is possible the block
// that was loaded is the wrong block for the requested position.
- if (v.contains(pack.key, requestedPosition))
+ if (v.contains(file.key, requestedPosition))
return v;
- return getOrLoad(pack, requestedPosition, ctx);
+ return getOrLoad(file, requestedPosition, ctx, fileChannel);
}
@SuppressWarnings("unchecked")
@@ -406,7 +358,6 @@ public final class DfsBlockCache {
dead.next = null;
dead.value = null;
live -= dead.size;
- dead.pack.cachedSize.addAndGet(-dead.size);
statEvict++;
} while (maxBytes < live);
clockHand = prev;
@@ -439,10 +390,14 @@ public final class DfsBlockCache {
}
void put(DfsBlock v) {
- put(v.pack, v.start, v.size(), v);
+ put(v.stream, v.start, v.size(), v);
}
- <T> Ref<T> put(DfsPackKey key, long pos, int size, T v) {
+ <T> Ref<T> putRef(DfsStreamKey key, long size, T v) {
+ return put(key, 0, (int) Math.min(size, Integer.MAX_VALUE), v);
+ }
+
+ <T> Ref<T> put(DfsStreamKey key, long pos, int size, T v) {
int slot = slot(key, pos);
HashEntry e1 = table.get(slot);
Ref<T> ref = scanRef(e1, key, pos);
@@ -462,7 +417,6 @@ public final class DfsBlockCache {
}
}
- key.cachedSize.addAndGet(size);
ref = new Ref<>(key, pos, size, v);
ref.hot = true;
for (;;) {
@@ -478,12 +432,12 @@ public final class DfsBlockCache {
return ref;
}
- boolean contains(DfsPackKey key, long position) {
+ boolean contains(DfsStreamKey key, long position) {
return scan(table.get(slot(key, position)), key, position) != null;
}
@SuppressWarnings("unchecked")
- <T> T get(DfsPackKey key, long position) {
+ <T> T get(DfsStreamKey key, long position) {
T val = (T) scan(table.get(slot(key, position)), key, position);
if (val == null)
statMiss.incrementAndGet();
@@ -492,31 +446,36 @@ public final class DfsBlockCache {
return val;
}
- private <T> T scan(HashEntry n, DfsPackKey pack, long position) {
- Ref<T> r = scanRef(n, pack, position);
+ private <T> T scan(HashEntry n, DfsStreamKey key, long position) {
+ Ref<T> r = scanRef(n, key, position);
return r != null ? r.get() : null;
}
+ <T> Ref<T> getRef(DfsStreamKey key) {
+ Ref<T> r = scanRef(table.get(slot(key, 0)), key, 0);
+ if (r != null)
+ statHit.incrementAndGet();
+ else
+ statMiss.incrementAndGet();
+ return r;
+ }
+
@SuppressWarnings("unchecked")
- private <T> Ref<T> scanRef(HashEntry n, DfsPackKey pack, long position) {
+ private <T> Ref<T> scanRef(HashEntry n, DfsStreamKey key, long position) {
for (; n != null; n = n.next) {
Ref<T> r = n.ref;
- if (r.pack == pack && r.position == position)
+ if (r.position == position && r.key.equals(key))
return r.get() != null ? r : null;
}
return null;
}
- void remove(DfsPackFile pack) {
- packCache.remove(pack.getPackDescription());
- }
-
- private int slot(DfsPackKey pack, long position) {
- return (hash(pack.hash, position) >>> 1) % tableSize;
+ private int slot(DfsStreamKey key, long position) {
+ return (hash(key.hash, position) >>> 1) % tableSize;
}
- private ReentrantLock lockFor(DfsPackKey pack, long position) {
- return loadLocks[(hash(pack.hash, position) >>> 1) % loadLocks.length];
+ private ReentrantLock lockFor(DfsStreamKey key, long position) {
+ return loadLocks[(hash(key.hash, position) >>> 1) % loadLocks.length];
}
private static HashEntry clean(HashEntry top) {
@@ -542,15 +501,15 @@ public final class DfsBlockCache {
}
static final class Ref<T> {
- final DfsPackKey pack;
+ final DfsStreamKey key;
final long position;
final int size;
volatile T value;
Ref next;
volatile boolean hot;
- Ref(DfsPackKey pack, long position, int size, T v) {
- this.pack = pack;
+ Ref(DfsStreamKey key, long position, int size, T v) {
+ this.key = key;
this.position = position;
this.size = size;
this.value = v;
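A rough sketch of the new stream-keyed caching of non-block values via putRef/getRef. The members used here are package-private, so this only works from within org.eclipse.jgit.internal.storage.dfs; getInstance() as the cache accessor and loadIndexBytes() are assumptions for illustration.

    DfsBlockCache cache = DfsBlockCache.getInstance();
    DfsStreamKey key = DfsStreamKey.of(new DfsRepositoryDescription("demo"), "pack-1.idx");
    byte[] index = loadIndexBytes();                    // hypothetical helper
    DfsBlockCache.Ref<byte[]> ref = cache.putRef(key, index.length, index);
    DfsBlockCache.Ref<byte[]> hit = cache.getRef(key);  // found by key equality at position 0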
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsFsck.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsFsck.java
new file mode 100644
index 0000000000..75eade2273
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsFsck.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
+import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+import org.eclipse.jgit.errors.CorruptPackIndexException;
+import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.fsck.FsckError;
+import org.eclipse.jgit.internal.fsck.FsckError.CorruptIndex;
+import org.eclipse.jgit.internal.fsck.FsckPackParser;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.NullProgressMonitor;
+import org.eclipse.jgit.lib.ObjectChecker;
+import org.eclipse.jgit.lib.ProgressMonitor;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.revwalk.ObjectWalk;
+import org.eclipse.jgit.revwalk.RevObject;
+
+/** Verify the validity and connectivity of a DFS repository. */
+public class DfsFsck {
+ private final DfsRepository repo;
+ private final DfsObjDatabase objdb;
+ private ObjectChecker objChecker = new ObjectChecker();
+
+ /**
+ * Initialize DFS fsck.
+ *
+ * @param repository
+ * the dfs repository to check.
+ */
+ public DfsFsck(DfsRepository repository) {
+ repo = repository;
+ objdb = repo.getObjectDatabase();
+ }
+
+ /**
+ * Verify the integrity and connectivity of all objects in the object
+ * database.
+ *
+ * @param pm
+ * callback to provide progress feedback during the check.
+ * @return any errors found in the repository.
+ * @throws IOException
+ *             if an IO error is encountered during the check.
+ */
+ public FsckError check(ProgressMonitor pm) throws IOException {
+ if (pm == null) {
+ pm = NullProgressMonitor.INSTANCE;
+ }
+
+ FsckError errors = new FsckError();
+ checkPacks(pm, errors);
+ checkConnectivity(pm, errors);
+ return errors;
+ }
+
+ private void checkPacks(ProgressMonitor pm, FsckError errors)
+ throws IOException, FileNotFoundException {
+ try (DfsReader ctx = objdb.newReader()) {
+ for (DfsPackFile pack : objdb.getPacks()) {
+ DfsPackDescription packDesc = pack.getPackDescription();
+ try (ReadableChannel rc = objdb.openFile(packDesc, PACK)) {
+ verifyPack(pm, errors, ctx, pack, rc);
+ } catch (MissingObjectException e) {
+ errors.getMissingObjects().add(e.getObjectId());
+ } catch (CorruptPackIndexException e) {
+ errors.getCorruptIndices().add(new CorruptIndex(
+ pack.getPackDescription().getFileName(INDEX),
+ e.getErrorType()));
+ }
+ }
+ }
+ }
+
+ private void verifyPack(ProgressMonitor pm, FsckError errors, DfsReader ctx,
+ DfsPackFile pack, ReadableChannel ch)
+ throws IOException, CorruptPackIndexException {
+ FsckPackParser fpp = new FsckPackParser(objdb, ch);
+ fpp.setObjectChecker(objChecker);
+ fpp.overwriteObjectCount(pack.getPackDescription().getObjectCount());
+ fpp.parse(pm);
+ errors.getCorruptObjects().addAll(fpp.getCorruptObjects());
+
+ fpp.verifyIndex(pack.getPackIndex(ctx));
+ }
+
+ private void checkConnectivity(ProgressMonitor pm, FsckError errors)
+ throws IOException {
+ pm.beginTask(JGitText.get().countingObjects, ProgressMonitor.UNKNOWN);
+ try (ObjectWalk ow = new ObjectWalk(repo)) {
+ for (Ref r : repo.getAllRefs().values()) {
+ RevObject tip;
+ try {
+ tip = ow.parseAny(r.getObjectId());
+ if (r.getLeaf().getName().startsWith(Constants.R_HEADS)
+ && tip.getType() != Constants.OBJ_COMMIT) {
+ // heads should only point to a commit object
+ errors.getNonCommitHeads().add(r.getLeaf().getName());
+ }
+ } catch (MissingObjectException e) {
+ errors.getMissingObjects().add(e.getObjectId());
+ continue;
+ }
+ ow.markStart(tip);
+ }
+ try {
+ ow.checkConnectivity();
+ } catch (MissingObjectException e) {
+ errors.getMissingObjects().add(e.getObjectId());
+ }
+ }
+ pm.endTask();
+ }
+
+ /**
+ * Use a customized object checker instead of the default one. The caller
+ * can specify a skip list to ignore some errors.
+ *
+ * @param objChecker
+ * A customized object checker.
+ */
+ public void setObjectChecker(ObjectChecker objChecker) {
+ this.objChecker = objChecker;
+ }
+}
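For context (not part of the change): a minimal sketch of driving the new checker from caller code, assuming an already-open DfsRepository; the FsckExample wrapper and its method name are illustrative only.

import java.io.IOException;

import org.eclipse.jgit.internal.fsck.FsckError;
import org.eclipse.jgit.internal.storage.dfs.DfsFsck;
import org.eclipse.jgit.internal.storage.dfs.DfsRepository;
import org.eclipse.jgit.lib.NullProgressMonitor;

class FsckExample {
	// Runs fsck and reports whether any problem category is non-empty.
	static boolean hasErrors(DfsRepository repo) throws IOException {
		DfsFsck fsck = new DfsFsck(repo);
		FsckError errors = fsck.check(NullProgressMonitor.INSTANCE);
		return !errors.getMissingObjects().isEmpty()
				|| !errors.getCorruptObjects().isEmpty()
				|| !errors.getCorruptIndices().isEmpty()
				|| !errors.getNonCommitHeads().isEmpty();
	}
}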
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java
index 55f9cc2127..304a93128f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollector.java
@@ -50,13 +50,16 @@ import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.GC
import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.INSERT;
import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.RECEIVE;
import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.UNREACHABLE_GARBAGE;
+import static org.eclipse.jgit.internal.storage.dfs.DfsPackCompactor.configureReftable;
import static org.eclipse.jgit.internal.storage.pack.PackExt.BITMAP_INDEX;
import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
+import static org.eclipse.jgit.internal.storage.pack.PackExt.REFTABLE;
import static org.eclipse.jgit.internal.storage.pack.PackWriter.NONE;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.EnumSet;
@@ -72,6 +75,9 @@ import org.eclipse.jgit.internal.storage.file.PackIndex;
import org.eclipse.jgit.internal.storage.file.PackReverseIndex;
import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.internal.storage.pack.PackWriter;
+import org.eclipse.jgit.internal.storage.reftable.ReftableCompactor;
+import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;
+import org.eclipse.jgit.internal.storage.reftable.ReftableWriter;
import org.eclipse.jgit.internal.storage.reftree.RefTreeNames;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
@@ -94,14 +100,16 @@ public class DfsGarbageCollector {
private final DfsObjDatabase objdb;
private final List<DfsPackDescription> newPackDesc;
-
private final List<PackStatistics> newPackStats;
-
private final List<ObjectIdSet> newPackObj;
private DfsReader ctx;
private PackConfig packConfig;
+ private ReftableConfig reftableConfig;
+ private boolean convertToReftable = true;
+ private long reftableInitialMinUpdateIndex = 1;
+ private long reftableInitialMaxUpdateIndex = 1;
// See packIsCoalesceableGarbage(), below, for how these two variables
// interact.
@@ -110,8 +118,10 @@ public class DfsGarbageCollector {
private long startTimeMillis;
private List<DfsPackFile> packsBefore;
+ private List<DfsReftable> reftablesBefore;
private List<DfsPackFile> expiredGarbagePacks;
+ private Collection<Ref> refsBefore;
private Set<ObjectId> allHeadsAndTags;
private Set<ObjectId> allTags;
private Set<ObjectId> nonHeads;
@@ -151,6 +161,60 @@ public class DfsGarbageCollector {
return this;
}
+ /**
+ * @param cfg
+ * configuration to write a reftable. Reftable writing is
+ * disabled (default) when {@code cfg} is {@code null}.
+ * @return {@code this}
+ */
+ public DfsGarbageCollector setReftableConfig(ReftableConfig cfg) {
+ reftableConfig = cfg;
+ return this;
+ }
+
+ /**
+ * @param convert
+ * if {@code true}, {@link #setReftableConfig(ReftableConfig)} has
+ * been set non-null, and a GC reftable doesn't yet exist, the
+ * garbage collector will make one by scanning the existing
+ * references and writing a new reftable. Default is {@code true}.
+ * @return {@code this}
+ */
+ public DfsGarbageCollector setConvertToReftable(boolean convert) {
+ convertToReftable = convert;
+ return this;
+ }
+
+ /**
+ * Set minUpdateIndex for the initial reftable created during conversion.
+ *
+ * @param u
+ * minUpdateIndex for the initial reftable created by scanning
+ * {@link DfsRefDatabase#getRefs(String)}. Ignored unless caller
+ * has also set {@link #setReftableConfig(ReftableConfig)}.
+ * Defaults to {@code 1}. Must be {@code u >= 0}.
+ * @return {@code this}
+ */
+ public DfsGarbageCollector setReftableInitialMinUpdateIndex(long u) {
+ reftableInitialMinUpdateIndex = Math.max(u, 0);
+ return this;
+ }
+
+ /**
+ * Set maxUpdateIndex for the initial reftable created during conversion.
+ *
+ * @param u
+ * maxUpdateIndex for the initial reftable created by scanning
+ * {@link DfsRefDatabase#getRefs(String)}. Ignored unless the caller
+ * has also set {@link #setReftableConfig(ReftableConfig)}.
+ * Defaults to {@code 1}. Must be {@code u >= 0}.
+ * @return {@code this}
+ */
+ public DfsGarbageCollector setReftableInitialMaxUpdateIndex(long u) {
+ reftableInitialMaxUpdateIndex = Math.max(0, u);
+ return this;
+ }
+
/** @return garbage packs smaller than this size will be repacked. */
public long getCoalesceGarbageLimit() {
return coalesceGarbageLimit;
@@ -240,8 +304,9 @@ public class DfsGarbageCollector {
refdb.refresh();
objdb.clearCache();
- Collection<Ref> refsBefore = getAllRefs();
+ refsBefore = getAllRefs();
readPacksBefore();
+ readReftablesBefore();
Set<ObjectId> allHeads = new HashSet<>();
allHeadsAndTags = new HashSet<>();
@@ -274,6 +339,12 @@ public class DfsGarbageCollector {
// Hoist all branch tips and tags earlier in the pack file
tagTargets.addAll(allHeadsAndTags);
+ // Combine the GC_REST objects into the GC pack if requested
+ if (packConfig.getSinglePack()) {
+ allHeadsAndTags.addAll(nonHeads);
+ nonHeads.clear();
+ }
+
boolean rollback = true;
try {
packHeads(pm);
@@ -327,6 +398,11 @@ public class DfsGarbageCollector {
}
}
+ private void readReftablesBefore() throws IOException {
+ DfsReftable[] tables = objdb.getReftables();
+ reftablesBefore = new ArrayList<>(Arrays.asList(tables));
+ }
+
private boolean packIsExpiredGarbage(DfsPackDescription d, long now) {
// Consider the garbage pack as expired when it's older than
// garbagePackTtl. This check gives concurrent inserter threads
@@ -401,7 +477,7 @@ public class DfsGarbageCollector {
}
/** @return all of the source packs that fed into this compaction. */
- public List<DfsPackDescription> getSourcePacks() {
+ public Set<DfsPackDescription> getSourcePacks() {
return toPrune();
}
@@ -415,28 +491,37 @@ public class DfsGarbageCollector {
return newPackStats;
}
- private List<DfsPackDescription> toPrune() {
- int cnt = packsBefore.size();
- List<DfsPackDescription> all = new ArrayList<>(cnt);
+ private Set<DfsPackDescription> toPrune() {
+ Set<DfsPackDescription> toPrune = new HashSet<>();
for (DfsPackFile pack : packsBefore) {
- all.add(pack.getPackDescription());
+ toPrune.add(pack.getPackDescription());
+ }
+ if (reftableConfig != null) {
+ for (DfsReftable table : reftablesBefore) {
+ toPrune.add(table.getPackDescription());
+ }
}
for (DfsPackFile pack : expiredGarbagePacks) {
- all.add(pack.getPackDescription());
+ toPrune.add(pack.getPackDescription());
}
- return all;
+ return toPrune;
}
private void packHeads(ProgressMonitor pm) throws IOException {
- if (allHeadsAndTags.isEmpty())
+ if (allHeadsAndTags.isEmpty()) {
+ writeReftable();
return;
+ }
try (PackWriter pw = newPackWriter()) {
pw.setTagTargets(tagTargets);
pw.preparePack(pm, allHeadsAndTags, NONE, NONE, allTags);
- if (0 < pw.getObjectCount())
- writePack(GC, pw, pm,
- estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC));
+ if (0 < pw.getObjectCount()) {
+ long estSize = estimateGcPackSize(INSERT, RECEIVE, COMPACT, GC);
+ writePack(GC, pw, pm, estSize);
+ } else {
+ writeReftable();
+ }
}
}
@@ -554,25 +639,32 @@ public class DfsGarbageCollector {
estimatedPackSize);
newPackDesc.add(pack);
+ if (source == GC && reftableConfig != null) {
+ writeReftable(pack);
+ }
+
try (DfsOutputStream out = objdb.writeFile(pack, PACK)) {
pw.writePack(pm, pm, out);
pack.addFileExt(PACK);
+ pack.setBlockSize(PACK, out.blockSize());
}
- try (CountingOutputStream cnt =
- new CountingOutputStream(objdb.writeFile(pack, INDEX))) {
+ try (DfsOutputStream out = objdb.writeFile(pack, INDEX)) {
+ CountingOutputStream cnt = new CountingOutputStream(out);
pw.writeIndex(cnt);
pack.addFileExt(INDEX);
pack.setFileSize(INDEX, cnt.getCount());
+ pack.setBlockSize(INDEX, out.blockSize());
pack.setIndexVersion(pw.getIndexVersion());
}
if (pw.prepareBitmapIndex(pm)) {
- try (CountingOutputStream cnt = new CountingOutputStream(
- objdb.writeFile(pack, BITMAP_INDEX))) {
+ try (DfsOutputStream out = objdb.writeFile(pack, BITMAP_INDEX)) {
+ CountingOutputStream cnt = new CountingOutputStream(out);
pw.writeBitmapIndex(cnt);
pack.addFileExt(BITMAP_INDEX);
pack.setFileSize(BITMAP_INDEX, cnt.getCount());
+ pack.setBlockSize(BITMAP_INDEX, out.blockSize());
}
}
@@ -581,8 +673,62 @@ public class DfsGarbageCollector {
pack.setLastModified(startTimeMillis);
newPackStats.add(stats);
newPackObj.add(pw.getObjectSet());
-
- DfsBlockCache.getInstance().getOrCreate(pack, null);
return pack;
}
+
+ private void writeReftable() throws IOException {
+ if (reftableConfig != null) {
+ DfsPackDescription pack = objdb.newPack(GC);
+ newPackDesc.add(pack);
+ writeReftable(pack);
+ }
+ }
+
+ private void writeReftable(DfsPackDescription pack) throws IOException {
+ if (convertToReftable && !hasGcReftable()) {
+ writeReftable(pack, refsBefore);
+ return;
+ }
+
+ try (ReftableStack stack = ReftableStack.open(ctx, reftablesBefore)) {
+ ReftableCompactor compact = new ReftableCompactor();
+ compact.addAll(stack.readers());
+ compact.setIncludeDeletes(false);
+ compactReftable(pack, compact);
+ }
+ }
+
+ private boolean hasGcReftable() {
+ for (DfsReftable table : reftablesBefore) {
+ if (table.getPackDescription().getPackSource() == GC) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ private void writeReftable(DfsPackDescription pack, Collection<Ref> refs)
+ throws IOException {
+ try (DfsOutputStream out = objdb.writeFile(pack, REFTABLE)) {
+ ReftableConfig cfg = configureReftable(reftableConfig, out);
+ ReftableWriter writer = new ReftableWriter(cfg)
+ .setMinUpdateIndex(reftableInitialMinUpdateIndex)
+ .setMaxUpdateIndex(reftableInitialMaxUpdateIndex)
+ .begin(out)
+ .sortAndWriteRefs(refs)
+ .finish();
+ pack.addFileExt(REFTABLE);
+ pack.setReftableStats(writer.getStats());
+ }
+ }
+
+ private void compactReftable(DfsPackDescription pack,
+ ReftableCompactor compact) throws IOException {
+ try (DfsOutputStream out = objdb.writeFile(pack, REFTABLE)) {
+ compact.setConfig(configureReftable(reftableConfig, out));
+ compact.compact(out);
+ pack.addFileExt(REFTABLE);
+ pack.setReftableStats(compact.getStats());
+ }
+ }
}
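A sketch of how a caller might enable the new reftable output during GC, assuming the existing DfsGarbageCollector#pack(ProgressMonitor) entry point and a no-arg ReftableConfig constructor; the GcExample wrapper is illustrative.

import java.io.IOException;

import org.eclipse.jgit.internal.storage.dfs.DfsGarbageCollector;
import org.eclipse.jgit.internal.storage.dfs.DfsRepository;
import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;
import org.eclipse.jgit.lib.NullProgressMonitor;

class GcExample {
	static void gcWithReftable(DfsRepository repo) throws IOException {
		DfsGarbageCollector gc = new DfsGarbageCollector(repo);
		gc.setReftableConfig(new ReftableConfig()); // null leaves reftable writing disabled
		gc.setConvertToReftable(true);              // scan refs if no GC reftable exists yet
		gc.setReftableInitialMinUpdateIndex(1);     // defaults shown explicitly
		gc.setReftableInitialMaxUpdateIndex(1);
		gc.pack(NullProgressMonitor.INSTANCE);
	}
}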
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
index e65c9fda7a..19e86522c6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsInserter.java
@@ -104,7 +104,7 @@ public class DfsInserter extends ObjectInserter {
ObjectIdOwnerMap<PackedObjectInfo> objectMap;
DfsBlockCache cache;
- DfsPackKey packKey;
+ DfsStreamKey packKey;
DfsPackDescription packDsc;
PackStream packOut;
private boolean rollback;
@@ -221,7 +221,7 @@ public class DfsInserter extends ObjectInserter {
db.commitPack(Collections.singletonList(packDsc), null);
rollback = false;
- DfsPackFile p = cache.getOrCreate(packDsc, packKey);
+ DfsPackFile p = new DfsPackFile(cache, packDsc);
if (index != null)
p.setPackIndex(index);
db.addPack(p);
@@ -281,8 +281,10 @@ public class DfsInserter extends ObjectInserter {
rollback = true;
packDsc = db.newPack(DfsObjDatabase.PackSource.INSERT);
- packOut = new PackStream(db.writeFile(packDsc, PACK));
- packKey = new DfsPackKey();
+ DfsOutputStream dfsOut = db.writeFile(packDsc, PACK);
+ packDsc.setBlockSize(PACK, dfsOut.blockSize());
+ packOut = new PackStream(dfsOut);
+ packKey = packDsc.getStreamKey(PACK);
// Write the header as though it were a single object pack.
byte[] buf = packOut.hdrBuf;
@@ -312,13 +314,14 @@ public class DfsInserter extends ObjectInserter {
packIndex = PackIndex.read(buf.openInputStream());
}
- DfsOutputStream os = db.writeFile(pack, INDEX);
- try (CountingOutputStream cnt = new CountingOutputStream(os)) {
+ try (DfsOutputStream os = db.writeFile(pack, INDEX)) {
+ CountingOutputStream cnt = new CountingOutputStream(os);
if (buf != null)
buf.writeTo(cnt, null);
else
index(cnt, packHash, list);
pack.addFileExt(INDEX);
+ pack.setBlockSize(INDEX, os.blockSize());
pack.setFileSize(INDEX, cnt.getCount());
} finally {
if (buf != null) {
@@ -497,7 +500,7 @@ public class DfsInserter extends ObjectInserter {
inf.setInput(currBuf, s, n);
return n;
}
- throw new EOFException(DfsText.get().unexpectedEofInPack);
+ throw new EOFException(JGitText.get().unexpectedEofInPack);
}
private DfsBlock getOrLoadBlock(long pos) throws IOException {
@@ -510,7 +513,7 @@ public class DfsInserter extends ObjectInserter {
for (int p = 0; p < blockSize;) {
int n = out.read(s + p, ByteBuffer.wrap(d, p, blockSize - p));
if (n <= 0)
- throw new EOFException(DfsText.get().unexpectedEofInPack);
+ throw new EOFException(JGitText.get().unexpectedEofInPack);
p += n;
}
b = new DfsBlock(packKey, s, d);
@@ -566,13 +569,13 @@ public class DfsInserter extends ObjectInserter {
byte[] buf = buffer();
int cnt = packOut.read(obj.getOffset(), buf, 0, 20);
if (cnt <= 0)
- throw new EOFException(DfsText.get().unexpectedEofInPack);
+ throw new EOFException(JGitText.get().unexpectedEofInPack);
int c = buf[0] & 0xff;
int type = (c >> 4) & 7;
if (type == OBJ_OFS_DELTA || type == OBJ_REF_DELTA)
throw new IOException(MessageFormat.format(
- DfsText.get().cannotReadBackDelta, Integer.toString(type)));
+ JGitText.get().cannotReadBackDelta, Integer.toString(type)));
if (typeHint != OBJ_ANY && type != typeHint) {
throw new IncorrectObjectTypeException(objectId.copy(), typeHint);
}
@@ -582,7 +585,7 @@ public class DfsInserter extends ObjectInserter {
int shift = 4;
while ((c & 0x80) != 0) {
if (ptr >= cnt)
- throw new EOFException(DfsText.get().unexpectedEofInPack);
+ throw new EOFException(JGitText.get().unexpectedEofInPack);
c = buf[ptr++] & 0xff;
sz += ((long) (c & 0x7f)) << shift;
shift += 7;
@@ -633,11 +636,11 @@ public class DfsInserter extends ObjectInserter {
private final int type;
private final long size;
- private final DfsPackKey srcPack;
+ private final DfsStreamKey srcPack;
private final long pos;
StreamLoader(ObjectId id, int type, long sz,
- DfsPackKey key, long pos) {
+ DfsStreamKey key, long pos) {
this.id = id;
this.type = type;
this.size = sz;
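The same try-with-resources pattern now recurs in the inserter, the garbage collector and the compactor: write the file, then record both its size and its block size on the description. A condensed sketch of it, with IndexWriteSketch as an illustrative name, placed in the dfs package so the package-visible types resolve:

package org.eclipse.jgit.internal.storage.dfs;

import java.io.IOException;

import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.internal.storage.pack.PackWriter;
import org.eclipse.jgit.util.io.CountingOutputStream;

class IndexWriteSketch {
	// Write the pack index and record both its size and its block size on
	// the description, so later readers do not need to probe the storage.
	static void writeIndex(DfsObjDatabase db, DfsPackDescription pack,
			PackWriter pw) throws IOException {
		try (DfsOutputStream out = db.writeFile(pack, PackExt.INDEX)) {
			CountingOutputStream cnt = new CountingOutputStream(out);
			pw.writeIndex(cnt);
			pack.addFileExt(PackExt.INDEX);
			pack.setFileSize(PackExt.INDEX, cnt.getCount());
			pack.setBlockSize(PackExt.INDEX, out.blockSize());
		}
	}
}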
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
index 32ee6c288e..9439822016 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsObjDatabase.java
@@ -48,6 +48,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -61,7 +62,9 @@ import org.eclipse.jgit.lib.ObjectReader;
/** Manages objects stored in {@link DfsPackFile} on a storage system. */
public abstract class DfsObjDatabase extends ObjectDatabase {
- private static final PackList NO_PACKS = new PackList(new DfsPackFile[0]) {
+ private static final PackList NO_PACKS = new PackList(
+ new DfsPackFile[0],
+ new DfsReftable[0]) {
@Override
boolean dirty() {
return true;
@@ -192,6 +195,18 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
}
/**
+ * Scan and list all available reftable files in the repository.
+ *
+ * @return list of available reftables. The returned array is shared with
+ * the implementation and must not be modified by the caller.
+ * @throws IOException
+ * the pack list cannot be initialized.
+ */
+ public DfsReftable[] getReftables() throws IOException {
+ return getPackList().reftables;
+ }
+
+ /**
* Scan and list all available pack files in the repository.
*
* @return list of available packs, with some additional metadata. The
@@ -220,6 +235,16 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
}
/**
+ * List currently known reftable files in the repository, without scanning.
+ *
+ * @return list of available reftables. The returned array is shared with
+ * the implementation and must not be modified by the caller.
+ */
+ public DfsReftable[] getCurrentReftables() {
+ return getCurrentPackList().reftables;
+ }
+
+ /**
* List currently known pack files in the repository, without scanning.
*
* @return list of available packs, with some additional metadata. The
@@ -428,7 +453,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
DfsPackFile[] packs = new DfsPackFile[1 + o.packs.length];
packs[0] = newPack;
System.arraycopy(o.packs, 0, packs, 1, o.packs.length);
- n = new PackListImpl(packs);
+ n = new PackListImpl(packs, o.reftables);
} while (!packList.compareAndSet(o, n));
}
@@ -454,60 +479,93 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
private PackList scanPacksImpl(PackList old) throws IOException {
DfsBlockCache cache = DfsBlockCache.getInstance();
- Map<DfsPackDescription, DfsPackFile> forReuse = reuseMap(old);
+ Map<DfsPackDescription, DfsPackFile> packs = packMap(old);
+ Map<DfsPackDescription, DfsReftable> reftables = reftableMap(old);
+
List<DfsPackDescription> scanned = listPacks();
Collections.sort(scanned);
- List<DfsPackFile> list = new ArrayList<>(scanned.size());
+ List<DfsPackFile> newPacks = new ArrayList<>(scanned.size());
+ List<DfsReftable> newReftables = new ArrayList<>(scanned.size());
boolean foundNew = false;
for (DfsPackDescription dsc : scanned) {
- DfsPackFile oldPack = forReuse.remove(dsc);
+ DfsPackFile oldPack = packs.remove(dsc);
if (oldPack != null) {
- list.add(oldPack);
- } else {
- list.add(cache.getOrCreate(dsc, null));
+ newPacks.add(oldPack);
+ } else if (dsc.hasFileExt(PackExt.PACK)) {
+ newPacks.add(new DfsPackFile(cache, dsc));
+ foundNew = true;
+ }
+
+ DfsReftable oldReftable = reftables.remove(dsc);
+ if (oldReftable != null) {
+ newReftables.add(oldReftable);
+ } else if (dsc.hasFileExt(PackExt.REFTABLE)) {
+ newReftables.add(new DfsReftable(cache, dsc));
foundNew = true;
}
}
- for (DfsPackFile p : forReuse.values())
- p.close();
- if (list.isEmpty())
- return new PackListImpl(NO_PACKS.packs);
+ if (newPacks.isEmpty())
+ return new PackListImpl(NO_PACKS.packs, NO_PACKS.reftables);
if (!foundNew) {
old.clearDirty();
return old;
}
- return new PackListImpl(list.toArray(new DfsPackFile[list.size()]));
+ Collections.sort(newReftables, reftableComparator());
+ return new PackListImpl(
+ newPacks.toArray(new DfsPackFile[0]),
+ newReftables.toArray(new DfsReftable[0]));
}
- private static Map<DfsPackDescription, DfsPackFile> reuseMap(PackList old) {
- Map<DfsPackDescription, DfsPackFile> forReuse
- = new HashMap<>();
+ private static Map<DfsPackDescription, DfsPackFile> packMap(PackList old) {
+ Map<DfsPackDescription, DfsPackFile> forReuse = new HashMap<>();
for (DfsPackFile p : old.packs) {
- if (p.invalid()) {
- // The pack instance is corrupted, and cannot be safely used
- // again. Do not include it in our reuse map.
- //
- p.close();
- continue;
+ if (!p.invalid()) {
+ forReuse.put(p.desc, p);
}
+ }
+ return forReuse;
+ }
- DfsPackFile prior = forReuse.put(p.getPackDescription(), p);
- if (prior != null) {
- // This should never occur. It should be impossible for us
- // to have two pack files with the same name, as all of them
- // came out of the same directory. If it does, we promised to
- // close any PackFiles we did not reuse, so close the second,
- // readers are likely to be actively using the first.
- //
- forReuse.put(prior.getPackDescription(), prior);
- p.close();
+ private static Map<DfsPackDescription, DfsReftable> reftableMap(PackList old) {
+ Map<DfsPackDescription, DfsReftable> forReuse = new HashMap<>();
+ for (DfsReftable p : old.reftables) {
+ if (!p.invalid()) {
+ forReuse.put(p.desc, p);
}
}
return forReuse;
}
+ /** @return comparator to sort {@link DfsReftable} by priority. */
+ protected Comparator<DfsReftable> reftableComparator() {
+ return (fa, fb) -> {
+ DfsPackDescription a = fa.getPackDescription();
+ DfsPackDescription b = fb.getPackDescription();
+
+ // GC, COMPACT reftables first by higher category.
+ int c = category(b) - category(a);
+ if (c != 0) {
+ return c;
+ }
+
+ // Lower maxUpdateIndex first.
+ c = Long.signum(a.getMaxUpdateIndex() - b.getMaxUpdateIndex());
+ if (c != 0) {
+ return c;
+ }
+
+ // Older reftable first.
+ return Long.signum(a.getLastModified() - b.getLastModified());
+ };
+ }
+
+ static int category(DfsPackDescription d) {
+ PackSource s = d.getPackSource();
+ return s != null ? s.category : 0;
+ }
+
/** Clears the cached list of packs, forcing them to be scanned again. */
protected void clearCache() {
packList.set(NO_PACKS);
@@ -515,12 +573,7 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
@Override
public void close() {
- // PackList packs = packList.get();
packList.set(NO_PACKS);
-
- // TODO Close packs if they aren't cached.
- // for (DfsPackFile p : packs.packs)
- // p.close();
}
/** Snapshot of packs scanned in a single pass. */
@@ -528,10 +581,14 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
/** All known packs, sorted. */
public final DfsPackFile[] packs;
+ /** All known reftables, sorted. */
+ public final DfsReftable[] reftables;
+
private long lastModified = -1;
- PackList(DfsPackFile[] packs) {
+ PackList(DfsPackFile[] packs, DfsReftable[] reftables) {
this.packs = packs;
+ this.reftables = reftables;
}
/** @return last modified time of all packs, in milliseconds. */
@@ -562,8 +619,8 @@ public abstract class DfsObjDatabase extends ObjectDatabase {
private static final class PackListImpl extends PackList {
private volatile boolean dirty;
- PackListImpl(DfsPackFile[] packs) {
- super(packs);
+ PackListImpl(DfsPackFile[] packs, DfsReftable[] reftables) {
+ super(packs, reftables);
}
@Override
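A sketch of how a caller could inspect the reftable list exposed by the new accessors; printReftables is an illustrative name, and the class sits in the dfs package so the package-visible types resolve.

package org.eclipse.jgit.internal.storage.dfs;

import java.io.IOException;

import org.eclipse.jgit.internal.storage.pack.PackExt;

class ReftableListSketch {
	// getReftables() returns tables sorted by reftableComparator(): GC and
	// COMPACT tables first, then lower maxUpdateIndex, then older lastModified.
	static void printReftables(DfsObjDatabase objdb) throws IOException {
		for (DfsReftable t : objdb.getReftables()) {
			DfsPackDescription d = t.getPackDescription();
			System.out.println(d.getFileName(PackExt.REFTABLE)
					+ " [" + d.getMinUpdateIndex()
					+ ", " + d.getMaxUpdateIndex() + "]");
		}
	}
}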
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java
index f7c87a4e79..99663eb738 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackCompactor.java
@@ -44,21 +44,29 @@
package org.eclipse.jgit.internal.storage.dfs;
import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.COMPACT;
+import static org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource.GC;
import static org.eclipse.jgit.internal.storage.pack.PackExt.INDEX;
import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
+import static org.eclipse.jgit.internal.storage.pack.PackExt.REFTABLE;
import static org.eclipse.jgit.internal.storage.pack.StoredObjectRepresentation.PACK_DELTA;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
+import java.util.HashSet;
+import java.util.Iterator;
import java.util.List;
+import java.util.Set;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.file.PackIndex;
import org.eclipse.jgit.internal.storage.file.PackReverseIndex;
import org.eclipse.jgit.internal.storage.pack.PackWriter;
+import org.eclipse.jgit.internal.storage.reftable.ReftableCompactor;
+import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectId;
@@ -89,16 +97,15 @@ import org.eclipse.jgit.util.io.CountingOutputStream;
*/
public class DfsPackCompactor {
private final DfsRepository repo;
-
private final List<DfsPackFile> srcPacks;
-
+ private final List<DfsReftable> srcReftables;
private final List<ObjectIdSet> exclude;
- private final List<DfsPackDescription> newPacks;
-
- private final List<PackStatistics> newStats;
+ private PackStatistics newStats;
+ private DfsPackDescription outDesc;
private int autoAddSize;
+ private ReftableConfig reftableConfig;
private RevWalk rw;
private RevFlag added;
@@ -114,9 +121,19 @@ public class DfsPackCompactor {
repo = repository;
autoAddSize = 5 * 1024 * 1024; // 5 MiB
srcPacks = new ArrayList<>();
+ srcReftables = new ArrayList<>();
exclude = new ArrayList<>(4);
- newPacks = new ArrayList<>(1);
- newStats = new ArrayList<>(1);
+ }
+
+ /**
+ * @param cfg
+ * configuration to write a reftable. Reftable compacting is
+ * disabled (default) when {@code cfg} is {@code null}.
+ * @return {@code this}
+ */
+ public DfsPackCompactor setReftableConfig(ReftableConfig cfg) {
+ reftableConfig = cfg;
+ return this;
}
/**
@@ -137,7 +154,19 @@ public class DfsPackCompactor {
}
/**
- * Automatically select packs to be included, and add them.
+ * Add a reftable to be compacted.
+ *
+ * @param table
+ * a reftable to combine.
+ * @return {@code this}
+ */
+ public DfsPackCompactor add(DfsReftable table) {
+ srcReftables.add(table);
+ return this;
+ }
+
+ /**
+ * Automatically select packs and reftables to be included, and add them.
* <p>
* Packs are selected based on size, smaller packs get included while bigger
* ones are omitted.
@@ -155,6 +184,16 @@ public class DfsPackCompactor {
else
exclude(pack);
}
+
+ if (reftableConfig != null) {
+ for (DfsReftable table : objdb.getReftables()) {
+ DfsPackDescription d = table.getPackDescription();
+ if (d.getPackSource() != GC
+ && d.getFileSize(REFTABLE) < autoAddSize) {
+ add(table);
+ }
+ }
+ }
return this;
}
@@ -197,58 +236,68 @@ public class DfsPackCompactor {
* the packs cannot be compacted.
*/
public void compact(ProgressMonitor pm) throws IOException {
- if (pm == null)
+ if (pm == null) {
pm = NullProgressMonitor.INSTANCE;
+ }
DfsObjDatabase objdb = repo.getObjectDatabase();
try (DfsReader ctx = objdb.newReader()) {
- PackConfig pc = new PackConfig(repo);
- pc.setIndexVersion(2);
- pc.setDeltaCompress(false);
- pc.setReuseDeltas(true);
- pc.setReuseObjects(true);
+ if (reftableConfig != null && !srcReftables.isEmpty()) {
+ compactReftables(ctx);
+ }
+ compactPacks(ctx, pm);
+
+ List<DfsPackDescription> commit = getNewPacks();
+ Collection<DfsPackDescription> remove = toPrune();
+ if (!commit.isEmpty() || !remove.isEmpty()) {
+ objdb.commitPack(commit, remove);
+ }
+ } finally {
+ rw = null;
+ }
+ }
+
+ private void compactPacks(DfsReader ctx, ProgressMonitor pm)
+ throws IOException, IncorrectObjectTypeException {
+ DfsObjDatabase objdb = repo.getObjectDatabase();
+ PackConfig pc = new PackConfig(repo);
+ pc.setIndexVersion(2);
+ pc.setDeltaCompress(false);
+ pc.setReuseDeltas(true);
+ pc.setReuseObjects(true);
+
+ PackWriter pw = new PackWriter(pc, ctx);
+ try {
+ pw.setDeltaBaseAsOffset(true);
+ pw.setReuseDeltaCommits(false);
- PackWriter pw = new PackWriter(pc, ctx);
+ addObjectsToPack(pw, ctx, pm);
+ if (pw.getObjectCount() == 0) {
+ return;
+ }
+
+ boolean rollback = true;
+ initOutDesc(objdb);
try {
- pw.setDeltaBaseAsOffset(true);
- pw.setReuseDeltaCommits(false);
-
- addObjectsToPack(pw, ctx, pm);
- if (pw.getObjectCount() == 0) {
- List<DfsPackDescription> remove = toPrune();
- if (remove.size() > 0)
- objdb.commitPack(
- Collections.<DfsPackDescription>emptyList(),
- remove);
- return;
- }
+ writePack(objdb, outDesc, pw, pm);
+ writeIndex(objdb, outDesc, pw);
- boolean rollback = true;
- DfsPackDescription pack = objdb.newPack(COMPACT,
- estimatePackSize());
- try {
- writePack(objdb, pack, pw, pm);
- writeIndex(objdb, pack, pw);
-
- PackStatistics stats = pw.getStatistics();
- pw.close();
- pw = null;
-
- pack.setPackStats(stats);
- objdb.commitPack(Collections.singletonList(pack), toPrune());
- newPacks.add(pack);
- newStats.add(stats);
- rollback = false;
- } finally {
- if (rollback)
- objdb.rollbackPack(Collections.singletonList(pack));
- }
+ PackStatistics stats = pw.getStatistics();
+ pw.close();
+ pw = null;
+
+ outDesc.setPackStats(stats);
+ newStats = stats;
+ rollback = false;
} finally {
- if (pw != null)
- pw.close();
+ if (rollback) {
+ objdb.rollbackPack(Collections.singletonList(outDesc));
+ }
}
} finally {
- rw = null;
+ if (pw != null) {
+ pw.close();
+ }
}
}
@@ -263,27 +312,81 @@ public class DfsPackCompactor {
return size;
}
+ private void compactReftables(DfsReader ctx) throws IOException {
+ DfsObjDatabase objdb = repo.getObjectDatabase();
+ Collections.sort(srcReftables, objdb.reftableComparator());
+
+ try (ReftableStack stack = ReftableStack.open(ctx, srcReftables)) {
+ initOutDesc(objdb);
+ ReftableCompactor compact = new ReftableCompactor();
+ compact.addAll(stack.readers());
+ compact.setIncludeDeletes(true);
+ writeReftable(objdb, outDesc, compact);
+ }
+ }
+
+ private void initOutDesc(DfsObjDatabase objdb) throws IOException {
+ if (outDesc == null) {
+ outDesc = objdb.newPack(COMPACT, estimatePackSize());
+ }
+ }
+
/** @return all of the source packs that fed into this compaction. */
- public List<DfsPackDescription> getSourcePacks() {
- return toPrune();
+ public Collection<DfsPackDescription> getSourcePacks() {
+ Set<DfsPackDescription> src = new HashSet<>();
+ for (DfsPackFile pack : srcPacks) {
+ src.add(pack.getPackDescription());
+ }
+ for (DfsReftable table : srcReftables) {
+ src.add(table.getPackDescription());
+ }
+ return src;
}
/** @return new packs created by this compaction. */
public List<DfsPackDescription> getNewPacks() {
- return newPacks;
+ return outDesc != null
+ ? Collections.singletonList(outDesc)
+ : Collections.emptyList();
}
/** @return statistics corresponding to the {@link #getNewPacks()}. */
public List<PackStatistics> getNewPackStatistics() {
- return newStats;
+ return newStats != null
+ ? Collections.singletonList(newStats)
+ : Collections.emptyList();
}
- private List<DfsPackDescription> toPrune() {
- int cnt = srcPacks.size();
- List<DfsPackDescription> all = new ArrayList<>(cnt);
- for (DfsPackFile pack : srcPacks)
- all.add(pack.getPackDescription());
- return all;
+ private Collection<DfsPackDescription> toPrune() {
+ Set<DfsPackDescription> packs = new HashSet<>();
+ for (DfsPackFile pack : srcPacks) {
+ packs.add(pack.getPackDescription());
+ }
+
+ Set<DfsPackDescription> reftables = new HashSet<>();
+ for (DfsReftable table : srcReftables) {
+ reftables.add(table.getPackDescription());
+ }
+
+ for (Iterator<DfsPackDescription> i = packs.iterator(); i.hasNext();) {
+ DfsPackDescription d = i.next();
+ if (d.hasFileExt(REFTABLE) && !reftables.contains(d)) {
+ i.remove();
+ }
+ }
+
+ for (Iterator<DfsPackDescription> i = reftables.iterator();
+ i.hasNext();) {
+ DfsPackDescription d = i.next();
+ if (d.hasFileExt(PACK) && !packs.contains(d)) {
+ i.remove();
+ }
+ }
+
+ Set<DfsPackDescription> toPrune = new HashSet<>();
+ toPrune.addAll(packs);
+ toPrune.addAll(reftables);
+ return toPrune;
}
private void addObjectsToPack(PackWriter pw, DfsReader ctx,
@@ -370,30 +473,47 @@ public class DfsPackCompactor {
private static void writePack(DfsObjDatabase objdb,
DfsPackDescription pack,
PackWriter pw, ProgressMonitor pm) throws IOException {
- DfsOutputStream out = objdb.writeFile(pack, PACK);
- try {
+ try (DfsOutputStream out = objdb.writeFile(pack, PACK)) {
pw.writePack(pm, pm, out);
pack.addFileExt(PACK);
- } finally {
- out.close();
+ pack.setBlockSize(PACK, out.blockSize());
}
}
private static void writeIndex(DfsObjDatabase objdb,
DfsPackDescription pack,
PackWriter pw) throws IOException {
- DfsOutputStream out = objdb.writeFile(pack, INDEX);
- try {
+ try (DfsOutputStream out = objdb.writeFile(pack, INDEX)) {
CountingOutputStream cnt = new CountingOutputStream(out);
pw.writeIndex(cnt);
pack.addFileExt(INDEX);
pack.setFileSize(INDEX, cnt.getCount());
+ pack.setBlockSize(INDEX, out.blockSize());
pack.setIndexVersion(pw.getIndexVersion());
- } finally {
- out.close();
}
}
+ private void writeReftable(DfsObjDatabase objdb, DfsPackDescription pack,
+ ReftableCompactor compact) throws IOException {
+ try (DfsOutputStream out = objdb.writeFile(pack, REFTABLE)) {
+ compact.setConfig(configureReftable(reftableConfig, out));
+ compact.compact(out);
+ pack.addFileExt(REFTABLE);
+ pack.setReftableStats(compact.getStats());
+ }
+ }
+
+ static ReftableConfig configureReftable(ReftableConfig cfg,
+ DfsOutputStream out) {
+ int bs = out.blockSize();
+ if (bs > 0) {
+ cfg = new ReftableConfig(cfg);
+ cfg.setRefBlockSize(bs);
+ cfg.setAlignBlocks(true);
+ }
+ return cfg;
+ }
+
private static class ObjectIdWithOffset extends ObjectId {
final long offset;
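A sketch of combining small packs and reftables with the updated compactor, assuming the existing autoAdd() and compact(ProgressMonitor) entry points; the CompactExample wrapper is illustrative. Note that setReftableConfig must be called before autoAdd() so the reftable selection branch is taken.

import java.io.IOException;

import org.eclipse.jgit.internal.storage.dfs.DfsPackCompactor;
import org.eclipse.jgit.internal.storage.dfs.DfsRepository;
import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;
import org.eclipse.jgit.lib.NullProgressMonitor;

class CompactExample {
	static void compact(DfsRepository repo) throws IOException {
		DfsPackCompactor compactor = new DfsPackCompactor(repo)
				.setReftableConfig(new ReftableConfig()) // also pull in small non-GC reftables
				.autoAdd();                              // select packs below the auto-add size
		compactor.compact(NullProgressMonitor.INSTANCE);
	}
}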
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackDescription.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackDescription.java
index e825f1a8be..e865e6b542 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackDescription.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackDescription.java
@@ -44,12 +44,13 @@
package org.eclipse.jgit.internal.storage.dfs;
import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;
+import static org.eclipse.jgit.internal.storage.pack.PackExt.REFTABLE;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.Arrays;
import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource;
import org.eclipse.jgit.internal.storage.pack.PackExt;
+import org.eclipse.jgit.internal.storage.reftable.ReftableWriter;
import org.eclipse.jgit.storage.pack.PackStatistics;
/**
@@ -62,25 +63,20 @@ import org.eclipse.jgit.storage.pack.PackStatistics;
*/
public class DfsPackDescription implements Comparable<DfsPackDescription> {
private final DfsRepositoryDescription repoDesc;
-
private final String packName;
-
private PackSource packSource;
-
private long lastModified;
-
- private final Map<PackExt, Long> sizeMap;
-
+ private long[] sizeMap;
+ private int[] blockSizeMap;
private long objectCount;
-
private long deltaCount;
+ private long minUpdateIndex;
+ private long maxUpdateIndex;
- private PackStatistics stats;
-
+ private PackStatistics packStats;
+ private ReftableWriter.Stats refStats;
private int extensions;
-
private int indexVersion;
-
private long estimatedPackSize;
/**
@@ -102,7 +98,10 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
this.repoDesc = repoDesc;
int dot = name.lastIndexOf('.');
this.packName = (dot < 0) ? name : name.substring(0, dot);
- this.sizeMap = new HashMap<>(PackExt.values().length * 2);
+
+ int extCnt = PackExt.values().length;
+ sizeMap = new long[extCnt];
+ blockSizeMap = new int[extCnt];
}
/** @return description of the repository. */
@@ -138,6 +137,15 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
return packName + '.' + ext.getExtension();
}
+ /**
+ * @param ext
+ * the file extension.
+ * @return cache key for use by the block cache.
+ */
+ public DfsStreamKey getStreamKey(PackExt ext) {
+ return DfsStreamKey.of(getRepositoryDescription(), getFileName(ext));
+ }
+
/** @return the source of the pack. */
public PackSource getPackSource() {
return packSource;
@@ -168,6 +176,36 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
return this;
}
+ /** @return minUpdateIndex for the reftable, if present. */
+ public long getMinUpdateIndex() {
+ return minUpdateIndex;
+ }
+
+ /**
+ * @param min
+ * minUpdateIndex for the reftable, or 0.
+ * @return {@code this}
+ */
+ public DfsPackDescription setMinUpdateIndex(long min) {
+ minUpdateIndex = min;
+ return this;
+ }
+
+ /** @return maxUpdateIndex for the reftable, if present. */
+ public long getMaxUpdateIndex() {
+ return maxUpdateIndex;
+ }
+
+ /**
+ * @param max
+ * maxUpdateIndex for the reftable, or 0.
+ * @return {@code this}
+ */
+ public DfsPackDescription setMaxUpdateIndex(long max) {
+ maxUpdateIndex = max;
+ return this;
+ }
+
/**
* @param ext
* the file extension.
@@ -177,7 +215,11 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
* @return {@code this}
*/
public DfsPackDescription setFileSize(PackExt ext, long bytes) {
- sizeMap.put(ext, Long.valueOf(Math.max(0, bytes)));
+ int i = ext.getPosition();
+ if (i >= sizeMap.length) {
+ sizeMap = Arrays.copyOf(sizeMap, i + 1);
+ }
+ sizeMap[i] = Math.max(0, bytes);
return this;
}
@@ -187,8 +229,36 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
* @return size of the file, in bytes. If 0 the file size is not yet known.
*/
public long getFileSize(PackExt ext) {
- Long size = sizeMap.get(ext);
- return size == null ? 0 : size.longValue();
+ int i = ext.getPosition();
+ return i < sizeMap.length ? sizeMap[i] : 0;
+ }
+
+ /**
+ * @param ext
+ * the file extension.
+ * @return blockSize of the file, in bytes. If 0 the block size is not
+ * yet known and may be discovered when opening the file.
+ */
+ public int getBlockSize(PackExt ext) {
+ int i = ext.getPosition();
+ return i < blockSizeMap.length ? blockSizeMap[i] : 0;
+ }
+
+ /**
+ * @param ext
+ * the file extension.
+ * @param blockSize
+ * blockSize of the file, in bytes. If 0 the blockSize is not
+ * known and will be determined on first read.
+ * @return {@code this}
+ */
+ public DfsPackDescription setBlockSize(PackExt ext, int blockSize) {
+ int i = ext.getPosition();
+ if (i >= blockSizeMap.length) {
+ blockSizeMap = Arrays.copyOf(blockSizeMap, i + 1);
+ }
+ blockSizeMap[i] = Math.max(0, blockSize);
+ return this;
}
/**
@@ -247,24 +317,38 @@ public class DfsPackDescription implements Comparable<DfsPackDescription> {
* is being committed to the repository.
*/
public PackStatistics getPackStats() {
- return stats;
+ return packStats;
}
DfsPackDescription setPackStats(PackStatistics stats) {
- this.stats = stats;
+ this.packStats = stats;
setFileSize(PACK, stats.getTotalBytes());
setObjectCount(stats.getTotalObjects());
setDeltaCount(stats.getTotalDeltas());
return this;
}
+ /** @return stats from the sibling reftable, if created. */
+ public ReftableWriter.Stats getReftableStats() {
+ return refStats;
+ }
+
+ void setReftableStats(ReftableWriter.Stats stats) {
+ this.refStats = stats;
+ setMinUpdateIndex(stats.minUpdateIndex());
+ setMaxUpdateIndex(stats.maxUpdateIndex());
+ setFileSize(REFTABLE, stats.totalBytes());
+ setBlockSize(REFTABLE, stats.refBlockSize());
+ }
+
/**
* Discard the pack statistics, if it was populated.
*
* @return {@code this}
*/
public DfsPackDescription clearPackStats() {
- stats = null;
+ packStats = null;
+ refStats = null;
return this;
}
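A sketch of the new per-extension metadata and cache keying, assuming the existing (repository description, file name) constructor; names here are illustrative, and the class sits in the dfs package so the types resolve.

package org.eclipse.jgit.internal.storage.dfs;

import static org.eclipse.jgit.internal.storage.pack.PackExt.PACK;

class DescriptionSketch {
	static DfsStreamKey describe(DfsRepositoryDescription repoDesc) {
		DfsPackDescription desc = new DfsPackDescription(repoDesc, "pack-example.pack");
		desc.setFileSize(PACK, 1 << 20); // 1 MiB pack
		desc.setBlockSize(PACK, 4096);   // preferred read alignment; 0 if not yet known
		// Cache keys are now derived from repository + file name per extension,
		// replacing the old per-DfsPackFile DfsPackKey.
		return desc.getStreamKey(PACK);
	}
}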
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
index ae2e7e4127..dfb41e204f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
@@ -72,7 +72,6 @@ import org.eclipse.jgit.internal.storage.file.PackBitmapIndex;
import org.eclipse.jgit.internal.storage.file.PackIndex;
import org.eclipse.jgit.internal.storage.file.PackReverseIndex;
import org.eclipse.jgit.internal.storage.pack.BinaryDelta;
-import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.internal.storage.pack.PackOutputStream;
import org.eclipse.jgit.internal.storage.pack.StoredObjectRepresentation;
import org.eclipse.jgit.lib.AbbreviatedObjectId;
@@ -88,53 +87,7 @@ import org.eclipse.jgit.util.LongList;
delta packed format yielding high compression of lots of objects where some
* objects are similar.
*/
-public final class DfsPackFile {
- /**
- * File offset used to cache {@link #index} in {@link DfsBlockCache}.
- * <p>
- * To better manage memory, the forward index is stored as a single block in
- * the block cache under this file position. A negative value is used
- * because it cannot occur in a normal pack file, and it is less likely to
- * collide with a valid data block from the file as the high bits will all
- * be set when treated as an unsigned long by the cache code.
- */
- private static final long POS_INDEX = -1;
-
- /** Offset used to cache {@link #reverseIndex}. See {@link #POS_INDEX}. */
- private static final long POS_REVERSE_INDEX = -2;
-
- /** Offset used to cache {@link #bitmapIndex}. See {@link #POS_INDEX}. */
- private static final long POS_BITMAP_INDEX = -3;
-
- /** Cache that owns this pack file and its data. */
- private final DfsBlockCache cache;
-
- /** Description of the pack file's storage. */
- private final DfsPackDescription packDesc;
-
- /** Unique identity of this pack while in-memory. */
- final DfsPackKey key;
-
- /**
- * Total number of bytes in this pack file.
- * <p>
- * This field initializes to -1 and gets populated when a block is loaded.
- */
- volatile long length;
-
- /**
- * Preferred alignment for loading blocks from the backing file.
- * <p>
- * It is initialized to 0 and filled in on the first read made from the
- * file. Block sizes may be odd, e.g. 4091, caused by the underling DFS
- * storing 4091 user bytes and 5 bytes block metadata into a lower level
- * 4096 byte block on disk.
- */
- private volatile int blockSize;
-
- /** True once corruption has been detected that cannot be worked around. */
- private volatile boolean invalid;
-
+public final class DfsPackFile extends BlockBasedFile {
/**
* Lock for initialization of {@link #index} and {@link #corruptObjects}.
* <p>
@@ -167,22 +120,22 @@ public final class DfsPackFile {
* cache that owns the pack data.
* @param desc
* description of the pack within the DFS.
- * @param key
- * interned key used to identify blocks in the block cache.
*/
- DfsPackFile(DfsBlockCache cache, DfsPackDescription desc, DfsPackKey key) {
- this.cache = cache;
- this.packDesc = desc;
- this.key = key;
-
- length = desc.getFileSize(PACK);
- if (length <= 0)
- length = -1;
+ DfsPackFile(DfsBlockCache cache, DfsPackDescription desc) {
+ super(cache, desc, PACK);
+
+ int bs = desc.getBlockSize(PACK);
+ if (bs > 0) {
+ setBlockSize(bs);
+ }
+
+ long sz = desc.getFileSize(PACK);
+ length = sz > 0 ? sz : -1;
}
/** @return description that was originally used to configure this pack file. */
public DfsPackDescription getPackDescription() {
- return packDesc;
+ return desc;
}
/**
@@ -193,24 +146,11 @@ public final class DfsPackFile {
return idxref != null && idxref.has();
}
- /** @return bytes cached in memory for this pack, excluding the index. */
- public long getCachedSize() {
- return key.cachedSize.get();
- }
-
- String getPackName() {
- return packDesc.getFileName(PACK);
- }
-
- void setBlockSize(int newSize) {
- blockSize = newSize;
- }
-
void setPackIndex(PackIndex idx) {
long objCnt = idx.getObjectCount();
int recSize = Constants.OBJECT_ID_LENGTH + 8;
- int sz = (int) Math.min(objCnt * recSize, Integer.MAX_VALUE);
- index = cache.put(key, POS_INDEX, sz, idx);
+ long sz = objCnt * recSize;
+ index = cache.putRef(desc.getStreamKey(INDEX), sz, idx);
}
/**
@@ -236,7 +176,7 @@ public final class DfsPackFile {
}
if (invalid)
- throw new PackInvalidException(getPackName());
+ throw new PackInvalidException(getFileName());
Repository.getGlobalListenerList()
.dispatch(new BeforeDfsPackIndexLoadedEvent(this));
@@ -249,11 +189,21 @@ public final class DfsPackFile {
return idx;
}
+ DfsStreamKey idxKey = desc.getStreamKey(INDEX);
+ idxref = cache.getRef(idxKey);
+ if (idxref != null) {
+ PackIndex idx = idxref.get();
+ if (idx != null) {
+ index = idxref;
+ return idx;
+ }
+ }
+
PackIndex idx;
try {
ctx.stats.readIdx++;
long start = System.nanoTime();
- ReadableChannel rc = ctx.db.openFile(packDesc, INDEX);
+ ReadableChannel rc = ctx.db.openFile(desc, INDEX);
try {
InputStream in = Channels.newInputStream(rc);
int wantSize = 8192;
@@ -270,18 +220,14 @@ public final class DfsPackFile {
}
} catch (EOFException e) {
invalid = true;
- IOException e2 = new IOException(MessageFormat.format(
+ throw new IOException(MessageFormat.format(
DfsText.get().shortReadOfIndex,
- packDesc.getFileName(INDEX)));
- e2.initCause(e);
- throw e2;
+ desc.getFileName(INDEX)), e);
} catch (IOException e) {
invalid = true;
- IOException e2 = new IOException(MessageFormat.format(
+ throw new IOException(MessageFormat.format(
DfsText.get().cannotReadIndex,
- packDesc.getFileName(INDEX)));
- e2.initCause(e);
- throw e2;
+ desc.getFileName(INDEX)), e);
}
setPackIndex(idx);
@@ -289,17 +235,14 @@ public final class DfsPackFile {
}
}
- private static long elapsedMicros(long start) {
- return (System.nanoTime() - start) / 1000L;
- }
-
final boolean isGarbage() {
- return packDesc.getPackSource() == UNREACHABLE_GARBAGE;
+ return desc.getPackSource() == UNREACHABLE_GARBAGE;
}
PackBitmapIndex getBitmapIndex(DfsReader ctx) throws IOException {
- if (invalid || isGarbage())
+ if (invalid || isGarbage() || !desc.hasFileExt(BITMAP_INDEX))
return null;
+
DfsBlockCache.Ref<PackBitmapIndex> idxref = bitmapIndex;
if (idxref != null) {
PackBitmapIndex idx = idxref.get();
@@ -307,9 +250,6 @@ public final class DfsPackFile {
return idx;
}
- if (!packDesc.hasFileExt(PackExt.BITMAP_INDEX))
- return null;
-
synchronized (initLock) {
idxref = bitmapIndex;
if (idxref != null) {
@@ -318,12 +258,22 @@ public final class DfsPackFile {
return idx;
}
+ DfsStreamKey bitmapKey = desc.getStreamKey(BITMAP_INDEX);
+ idxref = cache.getRef(bitmapKey);
+ if (idxref != null) {
+ PackBitmapIndex idx = idxref.get();
+ if (idx != null) {
+ bitmapIndex = idxref;
+ return idx;
+ }
+ }
+
long size;
PackBitmapIndex idx;
try {
ctx.stats.readBitmap++;
long start = System.nanoTime();
- ReadableChannel rc = ctx.db.openFile(packDesc, BITMAP_INDEX);
+ ReadableChannel rc = ctx.db.openFile(desc, BITMAP_INDEX);
try {
InputStream in = Channels.newInputStream(rc);
int wantSize = 8192;
@@ -342,21 +292,16 @@ public final class DfsPackFile {
ctx.stats.readIdxMicros += elapsedMicros(start);
}
} catch (EOFException e) {
- IOException e2 = new IOException(MessageFormat.format(
+ throw new IOException(MessageFormat.format(
DfsText.get().shortReadOfIndex,
- packDesc.getFileName(BITMAP_INDEX)));
- e2.initCause(e);
- throw e2;
+ desc.getFileName(BITMAP_INDEX)), e);
} catch (IOException e) {
- IOException e2 = new IOException(MessageFormat.format(
+ throw new IOException(MessageFormat.format(
DfsText.get().cannotReadIndex,
- packDesc.getFileName(BITMAP_INDEX)));
- e2.initCause(e);
- throw e2;
+ desc.getFileName(BITMAP_INDEX)), e);
}
- bitmapIndex = cache.put(key, POS_BITMAP_INDEX,
- (int) Math.min(size, Integer.MAX_VALUE), idx);
+ bitmapIndex = cache.putRef(bitmapKey, size, idx);
return idx;
}
}
@@ -377,11 +322,21 @@ public final class DfsPackFile {
return revidx;
}
+ DfsStreamKey revKey =
+ new DfsStreamKey.ForReverseIndex(desc.getStreamKey(INDEX));
+ revref = cache.getRef(revKey);
+ if (revref != null) {
+ PackReverseIndex idx = revref.get();
+ if (idx != null) {
+ reverseIndex = revref;
+ return idx;
+ }
+ }
+
PackIndex idx = idx(ctx);
PackReverseIndex revidx = new PackReverseIndex(idx);
- int sz = (int) Math.min(
- idx.getObjectCount() * 8, Integer.MAX_VALUE);
- reverseIndex = cache.put(key, POS_REVERSE_INDEX, sz, revidx);
+ long cnt = idx.getObjectCount();
+ reverseIndex = cache.putRef(revKey, cnt * 8, revidx);
return revidx;
}
}
@@ -430,13 +385,6 @@ public final class DfsPackFile {
idx(ctx).resolve(matches, id, matchLimit);
}
- /** Release all memory used by this DfsPackFile instance. */
- public void close() {
- cache.remove(this);
- index = null;
- reverseIndex = null;
- }
-
/**
* Obtain the total number of objects available in this pack. This method
* relies on pack index, giving number of effectively available objects.
@@ -489,21 +437,42 @@ public final class DfsPackFile {
private void copyPackThroughCache(PackOutputStream out, DfsReader ctx)
throws IOException {
- long position = 12;
- long remaining = length - (12 + 20);
- while (0 < remaining) {
- DfsBlock b = cache.getOrLoad(this, position, ctx);
- int ptr = (int) (position - b.start);
- int n = (int) Math.min(b.size() - ptr, remaining);
- b.write(out, position, n);
- position += n;
- remaining -= n;
+ ReadableChannel rc = null;
+ try {
+ long position = 12;
+ long remaining = length - (12 + 20);
+ while (0 < remaining) {
+ DfsBlock b;
+ if (rc != null) {
+ b = cache.getOrLoad(this, position, ctx, rc);
+ } else {
+ b = cache.get(key, alignToBlock(position));
+ if (b == null) {
+ rc = ctx.db.openFile(desc, PACK);
+ int sz = ctx.getOptions().getStreamPackBufferSize();
+ if (sz > 0) {
+ rc.setReadAheadBytes(sz);
+ }
+ b = cache.getOrLoad(this, position, ctx, rc);
+ }
+ }
+
+ int ptr = (int) (position - b.start);
+ int n = (int) Math.min(b.size() - ptr, remaining);
+ b.write(out, position, n);
+ position += n;
+ remaining -= n;
+ }
+ } finally {
+ if (rc != null) {
+ rc.close();
+ }
}
}
private long copyPackBypassCache(PackOutputStream out, DfsReader ctx)
throws IOException {
- try (ReadableChannel rc = ctx.db.openFile(packDesc, PACK)) {
+ try (ReadableChannel rc = ctx.db.openFile(desc, PACK)) {
ByteBuffer buf = newCopyBuffer(out, rc);
if (ctx.getOptions().getStreamPackBufferSize() > 0)
rc.setReadAheadBytes(ctx.getOptions().getStreamPackBufferSize());
@@ -642,7 +611,7 @@ public final class DfsPackFile {
setCorrupt(src.offset);
throw new CorruptObjectException(MessageFormat.format(
JGitText.get().objectAtHasBadZlibStream,
- Long.valueOf(src.offset), getPackName()));
+ Long.valueOf(src.offset), getFileName()));
}
} else if (validate) {
assert(crc1 != null);
@@ -684,7 +653,7 @@ public final class DfsPackFile {
CorruptObjectException corruptObject = new CorruptObjectException(
MessageFormat.format(
JGitText.get().objectAtHasBadZlibStream,
- Long.valueOf(src.offset), getPackName()));
+ Long.valueOf(src.offset), getFileName()));
corruptObject.initCause(dataFormat);
StoredObjectRepresentationNotAvailableException gone;
@@ -746,24 +715,16 @@ public final class DfsPackFile {
if (crc2.getValue() != expectedCRC) {
throw new CorruptObjectException(MessageFormat.format(
JGitText.get().objectAtHasBadZlibStream,
- Long.valueOf(src.offset), getPackName()));
+ Long.valueOf(src.offset), getFileName()));
}
}
}
}
- boolean invalid() {
- return invalid;
- }
-
- void setInvalid() {
- invalid = true;
- }
-
private IOException packfileIsTruncated() {
invalid = true;
return new IOException(MessageFormat.format(
- JGitText.get().packfileIsTruncated, getPackName()));
+ JGitText.get().packfileIsTruncated, getFileName()));
}
private void readFully(long position, byte[] dstbuf, int dstoff, int cnt,
@@ -772,103 +733,6 @@ public final class DfsPackFile {
throw new EOFException();
}
- long alignToBlock(long pos) {
- int size = blockSize;
- if (size == 0)
- size = cache.getBlockSize();
- return (pos / size) * size;
- }
-
- DfsBlock getOrLoadBlock(long pos, DfsReader ctx) throws IOException {
- return cache.getOrLoad(this, pos, ctx);
- }
-
- DfsBlock readOneBlock(long pos, DfsReader ctx)
- throws IOException {
- if (invalid)
- throw new PackInvalidException(getPackName());
-
- ctx.stats.readBlock++;
- long start = System.nanoTime();
- ReadableChannel rc = ctx.db.openFile(packDesc, PACK);
- try {
- int size = blockSize(rc);
- pos = (pos / size) * size;
-
- // If the size of the file is not yet known, try to discover it.
- // Channels may choose to return -1 to indicate they don't
- // know the length yet, in this case read up to the size unit
- // given by the caller, then recheck the length.
- long len = length;
- if (len < 0) {
- len = rc.size();
- if (0 <= len)
- length = len;
- }
-
- if (0 <= len && len < pos + size)
- size = (int) (len - pos);
- if (size <= 0)
- throw new EOFException(MessageFormat.format(
- DfsText.get().shortReadOfBlock, Long.valueOf(pos),
- getPackName(), Long.valueOf(0), Long.valueOf(0)));
-
- byte[] buf = new byte[size];
- rc.position(pos);
- int cnt = read(rc, ByteBuffer.wrap(buf, 0, size));
- ctx.stats.readBlockBytes += cnt;
- if (cnt != size) {
- if (0 <= len) {
- throw new EOFException(MessageFormat.format(
- DfsText.get().shortReadOfBlock,
- Long.valueOf(pos),
- getPackName(),
- Integer.valueOf(size),
- Integer.valueOf(cnt)));
- }
-
- // Assume the entire thing was read in a single shot, compact
- // the buffer to only the space required.
- byte[] n = new byte[cnt];
- System.arraycopy(buf, 0, n, 0, n.length);
- buf = n;
- } else if (len < 0) {
- // With no length at the start of the read, the channel should
- // have the length available at the end.
- length = len = rc.size();
- }
-
- return new DfsBlock(key, pos, buf);
- } finally {
- rc.close();
- ctx.stats.readBlockMicros += elapsedMicros(start);
- }
- }
-
- private int blockSize(ReadableChannel rc) {
- // If the block alignment is not yet known, discover it. Prefer the
- // larger size from either the cache or the file itself.
- int size = blockSize;
- if (size == 0) {
- size = rc.blockSize();
- if (size <= 0)
- size = cache.getBlockSize();
- else if (size < cache.getBlockSize())
- size = (cache.getBlockSize() / size) * size;
- blockSize = size;
- }
- return size;
- }
-
- private static int read(ReadableChannel rc, ByteBuffer buf)
- throws IOException {
- int n;
- do {
- n = rc.read(buf);
- } while (0 < n && buf.hasRemaining());
- return buf.position();
- }
-
ObjectLoader load(DfsReader ctx, long pos)
throws IOException {
try {
@@ -1005,7 +869,7 @@ public final class DfsPackFile {
CorruptObjectException coe = new CorruptObjectException(
MessageFormat.format(
JGitText.get().objectAtHasBadZlibStream, Long.valueOf(pos),
- getPackName()));
+ getFileName()));
coe.initCause(dfe);
throw coe;
}
@@ -1153,7 +1017,7 @@ public final class DfsPackFile {
CorruptObjectException coe = new CorruptObjectException(
MessageFormat.format(
JGitText.get().objectAtHasBadZlibStream, Long.valueOf(pos),
- getPackName()));
+ getFileName()));
coe.initCause(dfe);
throw coe;
}
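The block-reading helpers removed above always round a requested position down to the start of its cache block before loading. A minimal worked sketch of that alignment arithmetic, using plain Java and illustrative numbers only:

    // Mirrors the (pos / size) * size expression in the removed alignToBlock().
    long pos = 70_000;           // requested offset within the stream
    int blockSize = 64 * 1024;   // 65,536-byte cache blocks
    long blockStart = (pos / blockSize) * blockSize; // -> 65,536
    int offsetInBlock = (int) (pos - blockStart);    // -> 4,464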
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java
index 6430ea9c2a..fd99db1e28 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackParser.java
@@ -94,7 +94,7 @@ public class DfsPackParser extends PackParser {
private DfsPackDescription packDsc;
/** Key used during delta resolution reading delta chains. */
- private DfsPackKey packKey;
+ private DfsStreamKey packKey;
/** If the index was small enough, the entire index after writing. */
private PackIndex packIndex;
@@ -150,12 +150,13 @@ public class DfsPackParser extends PackParser {
readBlock = null;
packDsc.addFileExt(PACK);
packDsc.setFileSize(PACK, packEnd);
+ packDsc.setBlockSize(PACK, blockSize);
writePackIndex();
objdb.commitPack(Collections.singletonList(packDsc), null);
rollback = false;
- DfsPackFile p = blockCache.getOrCreate(packDsc, packKey);
+ DfsPackFile p = new DfsPackFile(blockCache, packDsc);
p.setBlockSize(blockSize);
if (packIndex != null)
p.setPackIndex(packIndex);
@@ -206,9 +207,9 @@ public class DfsPackParser extends PackParser {
}
packDsc = objdb.newPack(DfsObjDatabase.PackSource.RECEIVE);
- packKey = new DfsPackKey();
-
out = objdb.writeFile(packDsc, PACK);
+ packKey = packDsc.getStreamKey(PACK);
+
int size = out.blockSize();
if (size <= 0)
size = blockCache.getBlockSize();
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
index d611469afc..3c8422077b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
@@ -655,7 +655,7 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
/**
* Copy bytes from the window to a caller supplied buffer.
*
- * @param pack
+ * @param file
* the file the desired window is stored within.
* @param position
* position within the file to read from.
@@ -674,24 +674,24 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
* this cursor does not match the provider or id and the proper
* window could not be acquired through the provider's cache.
*/
- int copy(DfsPackFile pack, long position, byte[] dstbuf, int dstoff, int cnt)
- throws IOException {
+ int copy(BlockBasedFile file, long position, byte[] dstbuf, int dstoff,
+ int cnt) throws IOException {
if (cnt == 0)
return 0;
- long length = pack.length;
+ long length = file.length;
if (0 <= length && length <= position)
return 0;
int need = cnt;
do {
- pin(pack, position);
+ pin(file, position);
int r = block.copy(position, dstbuf, dstoff, need);
position += r;
dstoff += r;
need -= r;
if (length < 0)
- length = pack.length;
+ length = file.length;
} while (0 < need && position < length);
return cnt - need;
}
@@ -756,15 +756,14 @@ public class DfsReader extends ObjectReader implements ObjectReuseAsIs {
inf.reset();
}
- void pin(DfsPackFile pack, long position) throws IOException {
- DfsBlock b = block;
- if (b == null || !b.contains(pack.key, position)) {
+ void pin(BlockBasedFile file, long position) throws IOException {
+ if (block == null || !block.contains(file.key, position)) {
// If memory is low, we may need what is in our window field to
// be cleaned up by the GC during the get for the next window.
// So we always clear it, even though we are just going to set
// it again.
block = null;
- block = pack.getOrLoadBlock(position, this);
+ block = file.getOrLoadBlock(position, this);
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftable.java
new file mode 100644
index 0000000000..5a8ea92a84
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReftable.java
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import static org.eclipse.jgit.internal.storage.pack.PackExt.REFTABLE;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.internal.storage.reftable.ReftableReader;
+
+/** A reftable stored in {@link DfsBlockCache}. */
+public class DfsReftable extends BlockBasedFile {
+ /**
+ * Construct a reader for an existing reftable.
+ *
+ * @param desc
+ * description of the reftable within the DFS.
+ */
+ public DfsReftable(DfsPackDescription desc) {
+ this(DfsBlockCache.getInstance(), desc);
+ }
+
+ /**
+ * Construct a reader for an existing reftable.
+ *
+ * @param cache
+ * cache that will store the reftable data.
+ * @param desc
+ * description of the reftable within the DFS.
+ */
+ public DfsReftable(DfsBlockCache cache, DfsPackDescription desc) {
+ super(cache, desc, REFTABLE);
+
+ int bs = desc.getBlockSize(REFTABLE);
+ if (bs > 0) {
+ setBlockSize(bs);
+ }
+
+ long sz = desc.getFileSize(REFTABLE);
+ length = sz > 0 ? sz : -1;
+ }
+
+ /** @return description that was originally used to configure this file. */
+ public DfsPackDescription getPackDescription() {
+ return desc;
+ }
+
+ /**
+ * Open reader on the reftable.
+ * <p>
+ * The returned reader is not thread safe.
+ *
+ * @param ctx
+ * reader to access the DFS storage.
+ * @return cursor to read the table; caller must close.
+ * @throws IOException
+ * table cannot be opened.
+ */
+ public ReftableReader open(DfsReader ctx) throws IOException {
+ return new ReftableReader(new CacheSource(this, cache, ctx));
+ }
+
+ private static final class CacheSource extends BlockSource {
+ private final DfsReftable file;
+ private final DfsBlockCache cache;
+ private final DfsReader ctx;
+ private ReadableChannel ch;
+ private int readAhead;
+
+ CacheSource(DfsReftable file, DfsBlockCache cache, DfsReader ctx) {
+ this.file = file;
+ this.cache = cache;
+ this.ctx = ctx;
+ }
+
+ @Override
+ public ByteBuffer read(long pos, int cnt) throws IOException {
+ if (ch == null && readAhead > 0 && notInCache(pos)) {
+ open().setReadAheadBytes(readAhead);
+ }
+
+ DfsBlock block = cache.getOrLoad(file, pos, ctx, ch);
+ if (block.start == pos && block.size() >= cnt) {
+ return block.zeroCopyByteBuffer(cnt);
+ }
+
+ byte[] dst = new byte[cnt];
+ ByteBuffer buf = ByteBuffer.wrap(dst);
+ buf.position(ctx.copy(file, pos, dst, 0, cnt));
+ return buf;
+ }
+
+ private boolean notInCache(long pos) {
+ return cache.get(file.key, file.alignToBlock(pos)) == null;
+ }
+
+ @Override
+ public long size() throws IOException {
+ long n = file.length;
+ if (n < 0) {
+ n = open().size();
+ file.length = n;
+ }
+ return n;
+ }
+
+ @Override
+ public void adviseSequentialRead(long start, long end) {
+ int sz = ctx.getOptions().getStreamPackBufferSize();
+ if (sz > 0) {
+ readAhead = (int) Math.min(sz, end - start);
+ }
+ }
+
+ private ReadableChannel open() throws IOException {
+ if (ch == null) {
+ ch = ctx.db.openFile(file.desc, file.ext);
+ }
+ return ch;
+ }
+
+ @Override
+ public void close() {
+ if (ch != null) {
+ try {
+ ch.close();
+ } catch (IOException e) {
+ // Ignore read close failures.
+ } finally {
+ ch = null;
+ }
+ }
+ }
+ }
+}
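A minimal caller-side sketch for the new class; the DfsReader ctx and the processRefs(...) step are assumptions standing in for whatever the caller already has:

    // Open the reftable through the block cache; the reader must be closed.
    ReftableReader reader = reftable.open(ctx);
    try {
        processRefs(reader); // hypothetical use of the returned cursor
    } finally {
        reader.close();
    }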
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsStreamKey.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsStreamKey.java
new file mode 100644
index 0000000000..54a74899ea
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsStreamKey.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2011, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import java.util.Arrays;
+
+/** Key used by {@link DfsBlockCache} to disambiguate streams. */
+public abstract class DfsStreamKey {
+ /**
+ * @param repo
+ * description of the containing repository.
+ * @param name
+ * compute the key from a string name.
+ * @return key for {@code name}
+ */
+ public static DfsStreamKey of(DfsRepositoryDescription repo, String name) {
+ return new ByteArrayDfsStreamKey(repo, name.getBytes(UTF_8));
+ }
+
+ final int hash;
+
+ /**
+ * @param hash
+ * hash of the other identifying components of the key.
+ */
+ protected DfsStreamKey(int hash) {
+ // Multiply by 31 here so we can more directly combine with another
+ // value without doing the multiply there.
+ this.hash = hash * 31;
+ }
+
+ @Override
+ public int hashCode() {
+ return hash;
+ }
+
+ @Override
+ public abstract boolean equals(Object o);
+
+ @SuppressWarnings("boxing")
+ @Override
+ public String toString() {
+ return String.format("DfsStreamKey[hash=%08x]", hash); //$NON-NLS-1$
+ }
+
+ private static final class ByteArrayDfsStreamKey extends DfsStreamKey {
+ private final DfsRepositoryDescription repo;
+ private final byte[] name;
+
+ ByteArrayDfsStreamKey(DfsRepositoryDescription repo, byte[] name) {
+ super(repo.hashCode() * 31 + Arrays.hashCode(name));
+ this.repo = repo;
+ this.name = name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o instanceof ByteArrayDfsStreamKey) {
+ ByteArrayDfsStreamKey k = (ByteArrayDfsStreamKey) o;
+ return hash == k.hash
+ && repo.equals(k.repo)
+ && Arrays.equals(name, k.name);
+ }
+ return false;
+ }
+ }
+
+ static final class ForReverseIndex extends DfsStreamKey {
+ private final DfsStreamKey idxKey;
+
+ ForReverseIndex(DfsStreamKey idxKey) {
+ super(idxKey.hash + 1);
+ this.idxKey = idxKey;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ return o instanceof ForReverseIndex
+ && idxKey.equals(((ForReverseIndex) o).idxKey);
+ }
+ }
+}
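Because the constructor pre-multiplies the hash by 31, subclasses fold in further components by plain addition, as ForReverseIndex does above. A hedged sketch of a hypothetical key built the same way (same package assumed, since hash is package-visible):

    // Hypothetical key deriving a related stream from an existing pack key.
    final class ForSideStream extends DfsStreamKey {
        private final DfsStreamKey packKey;

        ForSideStream(DfsStreamKey packKey) {
            super(packKey.hash + 2); // parent hash is already multiplied by 31
            this.packKey = packKey;
        }

        @Override
        public boolean equals(Object o) {
            return o instanceof ForSideStream
                    && packKey.equals(((ForSideStream) o).packKey);
        }
    }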
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsText.java
index 862454759e..dedcab04d8 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsText.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsText.java
@@ -55,9 +55,7 @@ public class DfsText extends TranslationBundle {
// @formatter:off
/***/ public String cannotReadIndex;
- /***/ public String cannotReadBackDelta;
/***/ public String shortReadOfBlock;
/***/ public String shortReadOfIndex;
- /***/ public String unexpectedEofInPack;
/***/ public String willNotStoreEmptyPack;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java
index 527e46b733..1e31878b39 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/InMemoryRepository.java
@@ -53,7 +53,7 @@ public class InMemoryRepository extends DfsRepository {
static final AtomicInteger packId = new AtomicInteger();
- private final DfsObjDatabase objdb;
+ private final MemObjDatabase objdb;
private final RefDatabase refdb;
private String gitwebDescription;
private boolean performsAtomicTransactions = true;
@@ -75,7 +75,7 @@ public class InMemoryRepository extends DfsRepository {
}
@Override
- public DfsObjDatabase getObjectDatabase() {
+ public MemObjDatabase getObjectDatabase() {
return objdb;
}
@@ -106,13 +106,23 @@ public class InMemoryRepository extends DfsRepository {
gitwebDescription = d;
}
- private class MemObjDatabase extends DfsObjDatabase {
+ /** DfsObjDatabase used by InMemoryRepository. */
+ public static class MemObjDatabase extends DfsObjDatabase {
private List<DfsPackDescription> packs = new ArrayList<>();
+ private int blockSize;
MemObjDatabase(DfsRepository repo) {
super(repo, new DfsReaderOptions());
}
+ /**
+ * @param blockSize
+ * force a different block size for testing.
+ */
+ public void setReadableChannelBlockSizeForTest(int blockSize) {
+ this.blockSize = blockSize;
+ }
+
@Override
protected synchronized List<DfsPackDescription> listPacks() {
return packs;
@@ -152,7 +162,7 @@ public class InMemoryRepository extends DfsRepository {
byte[] file = memPack.fileMap.get(ext);
if (file == null)
throw new FileNotFoundException(desc.getFileName(ext));
- return new ByteArrayReadableChannel(file);
+ return new ByteArrayReadableChannel(file, blockSize);
}
@Override
@@ -216,13 +226,13 @@ public class InMemoryRepository extends DfsRepository {
private static class ByteArrayReadableChannel implements ReadableChannel {
private final byte[] data;
-
+ private final int blockSize;
private int position;
-
private boolean open = true;
- ByteArrayReadableChannel(byte[] buf) {
+ ByteArrayReadableChannel(byte[] buf, int blockSize) {
data = buf;
+ this.blockSize = blockSize;
}
@Override
@@ -262,7 +272,7 @@ public class InMemoryRepository extends DfsRepository {
@Override
public int blockSize() {
- return 0;
+ return blockSize;
}
@Override
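The new setter only matters for tests that need to exercise non-default channel block sizes; a minimal sketch (the 512-byte value is illustrative):

    // Force ByteArrayReadableChannel to report a small block size in a test.
    InMemoryRepository repo = new InMemoryRepository(
            new DfsRepositoryDescription("block-size-test"));
    repo.getObjectDatabase().setReadableChannelBlockSizeForTest(512);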
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableStack.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableStack.java
new file mode 100644
index 0000000000..8d1cc989da
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ReftableStack.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.eclipse.jgit.internal.storage.reftable.Reftable;
+
+/** Tracks multiple open {@link Reftable} instances. */
+public class ReftableStack implements AutoCloseable {
+ /**
+ * Opens a stack of tables for reading.
+ *
+ * @param ctx
+ * context to read the tables with. This {@code ctx} will be
+ * retained by the stack and each of the table readers.
+ * @param tables
+ * the tables to open.
+ * @return stack reference to close the tables.
+ * @throws IOException
+ * a table could not be opened
+ */
+ public static ReftableStack open(DfsReader ctx, List<DfsReftable> tables)
+ throws IOException {
+ ReftableStack stack = new ReftableStack(tables.size());
+ boolean close = true;
+ try {
+ for (DfsReftable t : tables) {
+ stack.tables.add(t.open(ctx));
+ }
+ close = false;
+ return stack;
+ } finally {
+ if (close) {
+ stack.close();
+ }
+ }
+ }
+
+ private final List<Reftable> tables;
+
+ private ReftableStack(int tableCnt) {
+ this.tables = new ArrayList<>(tableCnt);
+ }
+
+ /**
+ * @return unmodifiable list of tables, in the same order the files were
+ * passed to {@link #open(DfsReader, List)}.
+ */
+ public List<Reftable> readers() {
+ return Collections.unmodifiableList(tables);
+ }
+
+ @Override
+ public void close() {
+ for (Reftable t : tables) {
+ try {
+ t.close();
+ } catch (IOException e) {
+ // Ignore close failures.
+ }
+ }
+ }
+}
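Since ReftableStack implements AutoCloseable, callers can release every reader in one place; a minimal sketch assuming ctx and tables are already available:

    // Open all tables in order and close them together.
    try (ReftableStack stack = ReftableStack.open(ctx, tables)) {
        for (Reftable table : stack.readers()) {
            scan(table); // hypothetical per-table processing
        }
    }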
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/CheckoutEntryImpl.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/CheckoutEntryImpl.java
index 4b4337d1ed..2eacb7a793 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/CheckoutEntryImpl.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/CheckoutEntryImpl.java
@@ -74,4 +74,4 @@ public class CheckoutEntryImpl implements CheckoutEntry {
public String getToBranch() {
return to;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
index 6a674aa658..646feac8cf 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
@@ -216,7 +216,7 @@ public class FileRepository extends Repository {
ConfigConstants.CONFIG_KEY_REPO_FORMAT_VERSION, 0);
String reftype = repoConfig.getString(
- "extensions", null, "refsStorage"); //$NON-NLS-1$ //$NON-NLS-2$
+ "extensions", null, "refStorage"); //$NON-NLS-1$ //$NON-NLS-2$
if (repositoryFormatVersion >= 1 && reftype != null) {
if (StringUtils.equalsIgnoreCase(reftype, "reftree")) { //$NON-NLS-1$
refs = new RefTreeDatabase(this, new RefDirectory(this));
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
index 7ff209fb81..9300f022b7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
@@ -106,10 +106,10 @@ import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Ref.Storage;
-import org.eclipse.jgit.lib.internal.WorkQueue;
import org.eclipse.jgit.lib.RefDatabase;
import org.eclipse.jgit.lib.ReflogEntry;
import org.eclipse.jgit.lib.ReflogReader;
+import org.eclipse.jgit.lib.internal.WorkQueue;
import org.eclipse.jgit.revwalk.ObjectWalk;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevWalk;
@@ -851,6 +851,12 @@ public class GC {
tagTargets.addAll(allHeadsAndTags);
nonHeads.addAll(indexObjects);
+ // Combine the GC_REST objects into the GC pack if requested
+ if (pconfig != null && pconfig.getSinglePack()) {
+ allHeadsAndTags.addAll(nonHeads);
+ nonHeads.clear();
+ }
+
List<PackFile> ret = new ArrayList<>(2);
PackFile heads = null;
if (!allHeadsAndTags.isEmpty()) {
@@ -884,6 +890,7 @@ public class GC {
prunePacked();
deleteEmptyRefsFolders();
deleteOrphans();
+ deleteTempPacksIdx();
lastPackedRefs = refsBefore;
lastRepackTime = time;
@@ -991,6 +998,28 @@ public class GC {
}
}
+ private void deleteTempPacksIdx() {
+ Path packDir = Paths.get(repo.getObjectsDirectory().getAbsolutePath(),
+ "pack"); //$NON-NLS-1$
+ Instant threshold = Instant.now().minus(1, ChronoUnit.DAYS);
+ try (DirectoryStream<Path> stream =
+ Files.newDirectoryStream(packDir, "gc_*_tmp")) { //$NON-NLS-1$
+ stream.forEach(t -> {
+ try {
+ Instant lastModified = Files.getLastModifiedTime(t)
+ .toInstant();
+ if (lastModified.isBefore(threshold)) {
+ Files.deleteIfExists(t);
+ }
+ } catch (IOException e) {
+ LOG.error(e.getMessage(), e);
+ }
+ });
+ } catch (IOException e) {
+ LOG.error(e.getMessage(), e);
+ }
+ }
+
/**
* @param ref
* the ref which log should be inspected
@@ -1205,16 +1234,7 @@ public class GC {
// rename the temporary files to real files
File realPack = nameFor(id, ".pack"); //$NON-NLS-1$
- // if the packfile already exists (because we are rewriting a
- // packfile for the same set of objects maybe with different
- // PackConfig) then make sure we get rid of all handles on the file.
- // Windows will not allow for rename otherwise.
- if (realPack.exists())
- for (PackFile p : repo.getObjectDatabase().getPacks())
- if (realPack.getPath().equals(p.getPackFile().getPath())) {
- p.close();
- break;
- }
+ repo.getObjectDatabase().closeAllPackHandles(realPack);
tmpPack.setReadOnly();
FileUtils.rename(tmpPack, realPack, StandardCopyOption.ATOMIC_MOVE);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java
index 35049d4063..e5fc0c5bad 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java
@@ -43,11 +43,6 @@
package org.eclipse.jgit.internal.storage.file;
-import org.eclipse.jgit.api.errors.JGitInternalException;
-import org.eclipse.jgit.lib.ConfigConstants;
-import org.eclipse.jgit.util.GitDateParser;
-import org.eclipse.jgit.util.SystemReader;
-
import static java.nio.charset.StandardCharsets.UTF_8;
import java.io.File;
@@ -58,6 +53,11 @@ import java.nio.file.attribute.FileTime;
import java.text.ParseException;
import java.time.Instant;
+import org.eclipse.jgit.api.errors.JGitInternalException;
+import org.eclipse.jgit.lib.ConfigConstants;
+import org.eclipse.jgit.util.GitDateParser;
+import org.eclipse.jgit.util.SystemReader;
+
/**
* This class manages the gc.log file for a {@link FileRepository}.
*/
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
index bda5cbeba4..3f82e2a20d 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
@@ -78,4 +78,4 @@ public class InfoAttributesNode extends AttributesNode {
return r.getRules().isEmpty() ? null : r;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java
index 15c5280517..56f42b3761 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java
@@ -385,7 +385,7 @@ public class LockFile {
};
}
- private void requireLock() {
+ void requireLock() {
if (os == null) {
unlock();
throw new IllegalStateException(MessageFormat.format(JGitText.get().lockOnNotHeld, ref));
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java
index d953b87c4b..153c7dd925 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java
@@ -220,6 +220,16 @@ public class ObjectDirectory extends FileObjectDatabase {
return new ObjectDirectoryInserter(this, config);
}
+ /**
+ * Create a new inserter that inserts all objects as pack files, not loose
+ * objects.
+ *
+ * @return new inserter.
+ */
+ public PackInserter newPackInserter() {
+ return new PackInserter(this);
+ }
+
@Override
public void close() {
unpackedObjectCache.clear();
@@ -814,8 +824,6 @@ public class ObjectDirectory extends FileObjectDatabase {
final PackFile[] oldList = o.packs;
final String name = pf.getPackFile().getName();
for (PackFile p : oldList) {
- if (PackFile.SORT.compare(pf, p) < 0)
- break;
if (name.equals(p.getPackFile().getName()))
return;
}
@@ -971,6 +979,21 @@ public class ObjectDirectory extends FileObjectDatabase {
return nameSet;
}
+ void closeAllPackHandles(File packFile) {
+ // if the packfile already exists (because we are rewriting a
+ // packfile for the same set of objects maybe with different
+ // PackConfig) then make sure we get rid of all handles on the file.
+ // Windows will not allow for rename otherwise.
+ if (packFile.exists()) {
+ for (PackFile p : getPacks()) {
+ if (packFile.getPath().equals(p.getPackFile().getPath())) {
+ p.close();
+ break;
+ }
+ }
+ }
+ }
+
AlternateHandle[] myAlternates() {
AlternateHandle[] alt = alternates.get();
if (alt == null) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInputStream.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInputStream.java
index 154809bad8..962f765aae 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInputStream.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInputStream.java
@@ -82,4 +82,4 @@ class PackInputStream extends InputStream {
public void close() {
wc.close();
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java
new file mode 100644
index 0000000000..ff959e8c95
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java
@@ -0,0 +1,708 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.file;
+
+import static java.nio.file.StandardCopyOption.ATOMIC_MOVE;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+import static org.eclipse.jgit.lib.Constants.OBJ_OFS_DELTA;
+import static org.eclipse.jgit.lib.Constants.OBJ_REF_DELTA;
+
+import java.io.BufferedInputStream;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FilterInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.RandomAccessFile;
+import java.nio.channels.Channels;
+import java.text.MessageFormat;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.zip.CRC32;
+import java.util.zip.DataFormatException;
+import java.util.zip.Deflater;
+import java.util.zip.DeflaterOutputStream;
+import java.util.zip.Inflater;
+import java.util.zip.InflaterInputStream;
+
+import org.eclipse.jgit.errors.CorruptObjectException;
+import org.eclipse.jgit.errors.IncorrectObjectTypeException;
+import org.eclipse.jgit.errors.LargeObjectException;
+import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.lib.AbbreviatedObjectId;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.InflaterCache;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdOwnerMap;
+import org.eclipse.jgit.lib.ObjectInserter;
+import org.eclipse.jgit.lib.ObjectLoader;
+import org.eclipse.jgit.lib.ObjectReader;
+import org.eclipse.jgit.lib.ObjectStream;
+import org.eclipse.jgit.transport.PackParser;
+import org.eclipse.jgit.transport.PackedObjectInfo;
+import org.eclipse.jgit.util.BlockList;
+import org.eclipse.jgit.util.FileUtils;
+import org.eclipse.jgit.util.IO;
+import org.eclipse.jgit.util.NB;
+import org.eclipse.jgit.util.io.CountingOutputStream;
+import org.eclipse.jgit.util.sha1.SHA1;
+
+/**
+ * Object inserter that inserts one pack per call to {@link #flush()}, and never
+ * inserts loose objects.
+ */
+public class PackInserter extends ObjectInserter {
+ /** Always produce version 2 indexes, to get CRC data. */
+ private static final int INDEX_VERSION = 2;
+
+ private final ObjectDirectory db;
+
+ private List<PackedObjectInfo> objectList;
+ private ObjectIdOwnerMap<PackedObjectInfo> objectMap;
+ private boolean rollback;
+ private boolean checkExisting = true;
+
+ private int compression = Deflater.BEST_COMPRESSION;
+ private File tmpPack;
+ private PackStream packOut;
+ private Inflater cachedInflater;
+
+ PackInserter(ObjectDirectory db) {
+ this.db = db;
+ }
+
+ /**
+ * @param check
+ * if false, will write out possibly-duplicate objects without
+ * first checking whether they exist in the repo; default is true.
+ */
+ public void checkExisting(boolean check) {
+ checkExisting = check;
+ }
+
+ /**
+ * @param compression
+ * compression level for zlib deflater.
+ */
+ public void setCompressionLevel(int compression) {
+ this.compression = compression;
+ }
+
+ int getBufferSize() {
+ return buffer().length;
+ }
+
+ @Override
+ public ObjectId insert(int type, byte[] data, int off, int len)
+ throws IOException {
+ ObjectId id = idFor(type, data, off, len);
+ if (objectMap != null && objectMap.contains(id)) {
+ return id;
+ }
+ // Ignore loose objects, which are potentially unreachable.
+ if (checkExisting && db.hasPackedObject(id)) {
+ return id;
+ }
+
+ long offset = beginObject(type, len);
+ packOut.compress.write(data, off, len);
+ packOut.compress.finish();
+ return endObject(id, offset);
+ }
+
+ @Override
+ public ObjectId insert(int type, long len, InputStream in)
+ throws IOException {
+ byte[] buf = buffer();
+ if (len <= buf.length) {
+ IO.readFully(in, buf, 0, (int) len);
+ return insert(type, buf, 0, (int) len);
+ }
+
+ long offset = beginObject(type, len);
+ SHA1 md = digest();
+ md.update(Constants.encodedTypeString(type));
+ md.update((byte) ' ');
+ md.update(Constants.encodeASCII(len));
+ md.update((byte) 0);
+
+ while (0 < len) {
+ int n = in.read(buf, 0, (int) Math.min(buf.length, len));
+ if (n <= 0) {
+ throw new EOFException();
+ }
+ md.update(buf, 0, n);
+ packOut.compress.write(buf, 0, n);
+ len -= n;
+ }
+ packOut.compress.finish();
+ return endObject(md.toObjectId(), offset);
+ }
+
+ private long beginObject(int type, long len) throws IOException {
+ if (packOut == null) {
+ beginPack();
+ }
+ long offset = packOut.getOffset();
+ packOut.beginObject(type, len);
+ return offset;
+ }
+
+ private ObjectId endObject(ObjectId id, long offset) {
+ PackedObjectInfo obj = new PackedObjectInfo(id);
+ obj.setOffset(offset);
+ obj.setCRC((int) packOut.crc32.getValue());
+ objectList.add(obj);
+ objectMap.addIfAbsent(obj);
+ return id;
+ }
+
+ private static File idxFor(File packFile) {
+ String p = packFile.getName();
+ return new File(
+ packFile.getParentFile(),
+ p.substring(0, p.lastIndexOf('.')) + ".idx"); //$NON-NLS-1$
+ }
+
+ private void beginPack() throws IOException {
+ objectList = new BlockList<>();
+ objectMap = new ObjectIdOwnerMap<>();
+
+ rollback = true;
+ tmpPack = File.createTempFile("insert_", ".pack", db.getDirectory()); //$NON-NLS-1$ //$NON-NLS-2$
+ packOut = new PackStream(tmpPack);
+
+ // Write the header as though it were a single object pack.
+ packOut.write(packOut.hdrBuf, 0, writePackHeader(packOut.hdrBuf, 1));
+ }
+
+ private static int writePackHeader(byte[] buf, int objectCount) {
+ System.arraycopy(Constants.PACK_SIGNATURE, 0, buf, 0, 4);
+ NB.encodeInt32(buf, 4, 2); // Always use pack version 2.
+ NB.encodeInt32(buf, 8, objectCount);
+ return 12;
+ }
+
+ @Override
+ public PackParser newPackParser(InputStream in) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public ObjectReader newReader() {
+ return new Reader();
+ }
+
+ @Override
+ public void flush() throws IOException {
+ if (tmpPack == null) {
+ return;
+ }
+
+ if (packOut == null) {
+ throw new IOException();
+ }
+
+ byte[] packHash;
+ try {
+ packHash = packOut.finishPack();
+ } finally {
+ packOut = null;
+ }
+
+ Collections.sort(objectList);
+ File tmpIdx = idxFor(tmpPack);
+ writePackIndex(tmpIdx, packHash, objectList);
+
+ File realPack = new File(
+ new File(db.getDirectory(), "pack"), //$NON-NLS-1$
+ "pack-" + computeName(objectList).name() + ".pack"); //$NON-NLS-1$ //$NON-NLS-2$
+ db.closeAllPackHandles(realPack);
+ tmpPack.setReadOnly();
+ FileUtils.rename(tmpPack, realPack, ATOMIC_MOVE);
+
+ File realIdx = idxFor(realPack);
+ tmpIdx.setReadOnly();
+ try {
+ FileUtils.rename(tmpIdx, realIdx, ATOMIC_MOVE);
+ } catch (IOException e) {
+ File newIdx = new File(
+ realIdx.getParentFile(), realIdx.getName() + ".new"); //$NON-NLS-1$
+ try {
+ FileUtils.rename(tmpIdx, newIdx, ATOMIC_MOVE);
+ } catch (IOException e2) {
+ newIdx = tmpIdx;
+ e = e2;
+ }
+ throw new IOException(MessageFormat.format(
+ JGitText.get().panicCantRenameIndexFile, newIdx,
+ realIdx), e);
+ }
+
+ db.openPack(realPack);
+ rollback = false;
+ clear();
+ }
+
+ private static void writePackIndex(File idx, byte[] packHash,
+ List<PackedObjectInfo> list) throws IOException {
+ try (OutputStream os = new FileOutputStream(idx)) {
+ PackIndexWriter w = PackIndexWriter.createVersion(os, INDEX_VERSION);
+ w.write(list, packHash);
+ }
+ }
+
+ private ObjectId computeName(List<PackedObjectInfo> list) {
+ SHA1 md = digest().reset();
+ byte[] buf = buffer();
+ for (PackedObjectInfo otp : list) {
+ otp.copyRawTo(buf, 0);
+ md.update(buf, 0, OBJECT_ID_LENGTH);
+ }
+ return ObjectId.fromRaw(md.digest());
+ }
+
+ @Override
+ public void close() {
+ try {
+ if (packOut != null) {
+ try {
+ packOut.close();
+ } catch (IOException err) {
+ // Ignore a close failure, the pack should be removed.
+ }
+ }
+ if (rollback && tmpPack != null) {
+ try {
+ FileUtils.delete(tmpPack);
+ } catch (IOException e) {
+ // Still delete idx.
+ }
+ try {
+ FileUtils.delete(idxFor(tmpPack));
+ } catch (IOException e) {
+ // Ignore error deleting temp idx.
+ }
+ rollback = false;
+ }
+ } finally {
+ clear();
+ try {
+ InflaterCache.release(cachedInflater);
+ } finally {
+ cachedInflater = null;
+ }
+ }
+ }
+
+ private void clear() {
+ objectList = null;
+ objectMap = null;
+ tmpPack = null;
+ packOut = null;
+ }
+
+ private Inflater inflater() {
+ if (cachedInflater == null) {
+ cachedInflater = InflaterCache.get();
+ } else {
+ cachedInflater.reset();
+ }
+ return cachedInflater;
+ }
+
+ /**
+ * Stream that writes to a pack file.
+ * <p>
+ * Backed by two views of the same open file descriptor: a random-access file,
+ * and an output stream. Seeking in the file causes subsequent writes to the
+ * output stream to occur wherever the file pointer is pointing, so we need to
+ * take care to always seek to the end of the file before writing a new
+ * object.
+ * <p>
+ * Callers should always use {@link #seek(long)} to seek, rather than reaching
+ * into the file member. As long as this contract is followed, calls to {@link
+ * #write(byte[], int, int)} are guaranteed to write at the end of the file,
+ * even if there have been intermediate seeks.
+ */
+ private class PackStream extends OutputStream {
+ final byte[] hdrBuf;
+ final CRC32 crc32;
+ final DeflaterOutputStream compress;
+
+ private final RandomAccessFile file;
+ private final CountingOutputStream out;
+ private final Deflater deflater;
+
+ private boolean atEnd;
+
+ PackStream(File pack) throws IOException {
+ file = new RandomAccessFile(pack, "rw"); //$NON-NLS-1$
+ out = new CountingOutputStream(new FileOutputStream(file.getFD()));
+ deflater = new Deflater(compression);
+ compress = new DeflaterOutputStream(this, deflater, 8192);
+ hdrBuf = new byte[32];
+ crc32 = new CRC32();
+ atEnd = true;
+ }
+
+ long getOffset() {
+ // This value is accurate as long as we only ever write to the end of the
+ // file, and don't seek back to overwrite any previous segments. Although
+ // this is subtle, storing the stream counter this way is still preferable
+ // to returning file.length() here, as it avoids a syscall and possible
+ // IOException.
+ return out.getCount();
+ }
+
+ void seek(long offset) throws IOException {
+ file.seek(offset);
+ atEnd = false;
+ }
+
+ void beginObject(int objectType, long length) throws IOException {
+ crc32.reset();
+ deflater.reset();
+ write(hdrBuf, 0, encodeTypeSize(objectType, length));
+ }
+
+ private int encodeTypeSize(int type, long rawLength) {
+ long nextLength = rawLength >>> 4;
+ hdrBuf[0] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (type << 4) | (rawLength & 0x0F));
+ rawLength = nextLength;
+ int n = 1;
+ while (rawLength > 0) {
+ nextLength >>>= 7;
+ hdrBuf[n++] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (rawLength & 0x7F));
+ rawLength = nextLength;
+ }
+ return n;
+ }
+
+ @Override
+ public void write(final int b) throws IOException {
+ hdrBuf[0] = (byte) b;
+ write(hdrBuf, 0, 1);
+ }
+
+ @Override
+ public void write(byte[] data, int off, int len) throws IOException {
+ crc32.update(data, off, len);
+ if (!atEnd) {
+ file.seek(file.length());
+ atEnd = true;
+ }
+ out.write(data, off, len);
+ }
+
+ byte[] finishPack() throws IOException {
+ // Overwrite placeholder header with actual object count, then hash. This
+ // method intentionally uses direct seek/write calls rather than the
+ // wrappers which keep track of atEnd. This leaves atEnd, the file
+ // pointer, and out's counter in an inconsistent state; that's ok, since
+ // this method closes the file anyway.
+ try {
+ file.seek(0);
+ out.write(hdrBuf, 0, writePackHeader(hdrBuf, objectList.size()));
+
+ byte[] buf = buffer();
+ SHA1 md = digest().reset();
+ file.seek(0);
+ while (true) {
+ int r = file.read(buf);
+ if (r < 0) {
+ break;
+ }
+ md.update(buf, 0, r);
+ }
+ byte[] packHash = md.digest();
+ out.write(packHash, 0, packHash.length);
+ return packHash;
+ } finally {
+ close();
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ deflater.end();
+ try {
+ out.close();
+ } finally {
+ file.close();
+ }
+ }
+
+ byte[] inflate(long filePos, int len) throws IOException, DataFormatException {
+ byte[] dstbuf;
+ try {
+ dstbuf = new byte[len];
+ } catch (OutOfMemoryError noMemory) {
+ return null; // Caller will switch to large object streaming.
+ }
+
+ byte[] srcbuf = buffer();
+ Inflater inf = inflater();
+ filePos += setInput(filePos, inf, srcbuf);
+ for (int dstoff = 0;;) {
+ int n = inf.inflate(dstbuf, dstoff, dstbuf.length - dstoff);
+ dstoff += n;
+ if (inf.finished()) {
+ return dstbuf;
+ }
+ if (inf.needsInput()) {
+ filePos += setInput(filePos, inf, srcbuf);
+ } else if (n == 0) {
+ throw new DataFormatException();
+ }
+ }
+ }
+
+ private int setInput(long filePos, Inflater inf, byte[] buf)
+ throws IOException {
+ if (file.getFilePointer() != filePos) {
+ seek(filePos);
+ }
+ int n = file.read(buf);
+ if (n < 0) {
+ throw new EOFException(JGitText.get().unexpectedEofInPack);
+ }
+ inf.setInput(buf, 0, n);
+ return n;
+ }
+ }
+
+ private class Reader extends ObjectReader {
+ private final ObjectReader ctx;
+
+ private Reader() {
+ ctx = db.newReader();
+ setStreamFileThreshold(ctx.getStreamFileThreshold());
+ }
+
+ @Override
+ public ObjectReader newReader() {
+ return db.newReader();
+ }
+
+ @Override
+ public ObjectInserter getCreatedFromInserter() {
+ return PackInserter.this;
+ }
+
+ @Override
+ public Collection<ObjectId> resolve(AbbreviatedObjectId id)
+ throws IOException {
+ Collection<ObjectId> stored = ctx.resolve(id);
+ if (objectList == null) {
+ return stored;
+ }
+
+ Set<ObjectId> r = new HashSet<>(stored.size() + 2);
+ r.addAll(stored);
+ for (PackedObjectInfo obj : objectList) {
+ if (id.prefixCompare(obj) == 0) {
+ r.add(obj.copy());
+ }
+ }
+ return r;
+ }
+
+ @Override
+ public ObjectLoader open(AnyObjectId objectId, int typeHint)
+ throws MissingObjectException, IncorrectObjectTypeException,
+ IOException {
+ if (objectMap == null) {
+ return ctx.open(objectId, typeHint);
+ }
+
+ PackedObjectInfo obj = objectMap.get(objectId);
+ if (obj == null) {
+ return ctx.open(objectId, typeHint);
+ }
+
+ byte[] buf = buffer();
+ packOut.seek(obj.getOffset());
+ int cnt = packOut.file.read(buf, 0, 20);
+ if (cnt <= 0) {
+ throw new EOFException(JGitText.get().unexpectedEofInPack);
+ }
+
+ int c = buf[0] & 0xff;
+ int type = (c >> 4) & 7;
+ if (type == OBJ_OFS_DELTA || type == OBJ_REF_DELTA) {
+ throw new IOException(MessageFormat.format(
+ JGitText.get().cannotReadBackDelta, Integer.toString(type)));
+ }
+ if (typeHint != OBJ_ANY && type != typeHint) {
+ throw new IncorrectObjectTypeException(objectId.copy(), typeHint);
+ }
+
+ long sz = c & 0x0f;
+ int ptr = 1;
+ int shift = 4;
+ while ((c & 0x80) != 0) {
+ if (ptr >= cnt) {
+ throw new EOFException(JGitText.get().unexpectedEofInPack);
+ }
+ c = buf[ptr++] & 0xff;
+ sz += ((long) (c & 0x7f)) << shift;
+ shift += 7;
+ }
+
+ long zpos = obj.getOffset() + ptr;
+ if (sz < getStreamFileThreshold()) {
+ byte[] data = inflate(obj, zpos, (int) sz);
+ if (data != null) {
+ return new ObjectLoader.SmallObject(type, data);
+ }
+ }
+ return new StreamLoader(type, sz, zpos);
+ }
+
+ private byte[] inflate(PackedObjectInfo obj, long zpos, int sz)
+ throws IOException, CorruptObjectException {
+ try {
+ return packOut.inflate(zpos, sz);
+ } catch (DataFormatException dfe) {
+ CorruptObjectException coe = new CorruptObjectException(
+ MessageFormat.format(
+ JGitText.get().objectAtHasBadZlibStream,
+ Long.valueOf(obj.getOffset()),
+ tmpPack.getAbsolutePath()));
+ coe.initCause(dfe);
+ throw coe;
+ }
+ }
+
+ @Override
+ public Set<ObjectId> getShallowCommits() throws IOException {
+ return ctx.getShallowCommits();
+ }
+
+ @Override
+ public void close() {
+ ctx.close();
+ }
+
+ private class StreamLoader extends ObjectLoader {
+ private final int type;
+ private final long size;
+ private final long pos;
+
+ StreamLoader(int type, long size, long pos) {
+ this.type = type;
+ this.size = size;
+ this.pos = pos;
+ }
+
+ @Override
+ public ObjectStream openStream()
+ throws MissingObjectException, IOException {
+ int bufsz = buffer().length;
+ packOut.seek(pos);
+
+ InputStream fileStream = new FilterInputStream(
+ Channels.newInputStream(packOut.file.getChannel())) {
+ // atEnd was already set to false by the previous seek, but it's
+ // technically possible for a caller to call insert on the
+ // inserter in the middle of reading from this stream. Behavior is
+ // undefined in this case, so it would arguably be ok to ignore,
+ // but it's not hard to at least make an attempt to not corrupt
+ // the data.
+ @Override
+ public int read() throws IOException {
+ packOut.atEnd = false;
+ return super.read();
+ }
+
+ @Override
+ public int read(byte[] b) throws IOException {
+ packOut.atEnd = false;
+ return super.read(b);
+ }
+
+ @Override
+ public int read(byte[] b, int off, int len) throws IOException {
+ packOut.atEnd = false;
+					return super.read(b, off, len);
+ }
+
+ @Override
+ public void close() {
+ // Never close underlying RandomAccessFile, which lasts the
+ // lifetime of the enclosing PackStream.
+ }
+ };
+ return new ObjectStream.Filter(
+ type, size,
+ new BufferedInputStream(
+ new InflaterInputStream(fileStream, inflater(), bufsz), bufsz));
+ }
+
+ @Override
+ public int getType() {
+ return type;
+ }
+
+ @Override
+ public long getSize() {
+ return size;
+ }
+
+ @Override
+ public byte[] getCachedBytes() throws LargeObjectException {
+ throw new LargeObjectException.ExceedsLimit(
+ getStreamFileThreshold(), size);
+ }
+ }
+ }
+}
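A minimal usage sketch, assuming an ObjectDirectory db and some blob bytes in data; everything inserted before flush() ends up in one new pack, never as a loose object:

    // Insert a blob through the pack-only inserter, then commit the pack.
    try (PackInserter ins = db.newPackInserter()) {
        ins.checkExisting(false); // optional: skip the duplicate lookup
        ObjectId id = ins.insert(Constants.OBJ_BLOB, data);
        ins.flush(); // writes pack + index and registers the new pack
    }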
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java
new file mode 100644
index 0000000000..c1f5476496
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java
@@ -0,0 +1,531 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.file;
+
+import static java.util.stream.Collectors.toList;
+import static org.eclipse.jgit.transport.ReceiveCommand.Result.LOCK_FAILURE;
+import static org.eclipse.jgit.transport.ReceiveCommand.Result.NOT_ATTEMPTED;
+import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_NONFASTFORWARD;
+
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.errors.LockFailedException;
+import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.file.RefDirectory.PackedRefList;
+import org.eclipse.jgit.lib.BatchRefUpdate;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdRef;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.ProgressMonitor;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.RefDatabase;
+import org.eclipse.jgit.lib.ReflogEntry;
+import org.eclipse.jgit.revwalk.RevObject;
+import org.eclipse.jgit.revwalk.RevTag;
+import org.eclipse.jgit.revwalk.RevWalk;
+import org.eclipse.jgit.transport.ReceiveCommand;
+import org.eclipse.jgit.util.RefList;
+
+/**
+ * Implementation of {@link BatchRefUpdate} that uses the {@code packed-refs}
+ * file to support atomically updating multiple refs.
+ * <p>
+ * The algorithm is designed to be compatible with traditional single ref
+ * updates operating on single refs only. Regardless of success or failure, the
+ * results are atomic: from the perspective of any reader, either all updates in
+ * the batch will be visible, or none will. In the case of process failure
+ * during any of the following steps, removal of stale lock files is always
+ * safe, and will never result in an inconsistent state, although the update may
+ * or may not have been applied.
+ * <p>
+ * The algorithm is:
+ * <ol>
+ * <li>Pack loose refs involved in the transaction using the normal pack-refs
+ * operation. This ensures that creating lock files in the following step
+ * succeeds even if a batch contains both a delete of {@code refs/x} (loose) and
+ * a create of {@code refs/x/y}.</li>
+ * <li>Create locks for all loose refs involved in the transaction, even if they
+ * are not currently loose.</li>
+ * <li>Pack loose refs again, this time while holding all lock files (see {@link
+ * RefDirectory#pack(Map)}), without deleting them afterwards. This covers a
+ * potential race where new loose refs were created after the initial packing
+ * step. If no new loose refs were created during this race, this step does not
+ * modify any files on disk. Keep the merged state in memory.</li>
+ * <li>Update the in-memory packed refs with the commands in the batch, possibly
+ * failing the whole batch if any old ref values do not match.</li>
+ * <li>If the update succeeds, lock {@code packed-refs} and commit by atomically
+ * renaming the lock file.</li>
+ * <li>Delete loose ref lock files.</li>
+ * </ol>
+ *
+ * Because the packed-refs file format is a sorted list, this algorithm is
+ * linear in the total number of refs, regardless of the batch size. This can be
+ * a significant slowdown on repositories with large numbers of refs; callers
+ * that prefer speed over atomicity should use {@code setAtomic(false)}. As an
+ * optimization, an update containing a single ref update does not use the
+ * packed-refs protocol.
+ */
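A hedged caller-side sketch of how this path is reached (standard JGit APIs, assuming the repository's ref database is the file-based RefDirectory and oldA/newA etc. are known ObjectIds):

    // Queue two updates and apply them atomically via packed-refs.
    BatchRefUpdate batch = repo.getRefDatabase().newBatchUpdate();
    batch.setAtomic(true);
    batch.addCommand(new ReceiveCommand(oldA, newA, "refs/heads/a"));
    batch.addCommand(new ReceiveCommand(oldB, newB, "refs/heads/b"));
    try (RevWalk rw = new RevWalk(repo)) {
        batch.execute(rw, NullProgressMonitor.INSTANCE);
    }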
+class PackedBatchRefUpdate extends BatchRefUpdate {
+ private RefDirectory refdb;
+
+ PackedBatchRefUpdate(RefDirectory refdb) {
+ super(refdb);
+ this.refdb = refdb;
+ }
+
+ @Override
+ public void execute(RevWalk walk, ProgressMonitor monitor,
+ List<String> options) throws IOException {
+ if (!isAtomic()) {
+ // Use default one-by-one implementation.
+ super.execute(walk, monitor, options);
+ return;
+ }
+ List<ReceiveCommand> pending =
+ ReceiveCommand.filter(getCommands(), NOT_ATTEMPTED);
+ if (pending.isEmpty()) {
+ return;
+ }
+ if (pending.size() == 1) {
+ // Single-ref updates are always atomic, no need for packed-refs.
+ super.execute(walk, monitor, options);
+ return;
+ }
+
+ // Required implementation details copied from super.execute.
+ if (!blockUntilTimestamps(MAX_WAIT)) {
+ return;
+ }
+ if (options != null) {
+ setPushOptions(options);
+ }
+ // End required implementation details.
+
+ // Check for conflicting names before attempting to acquire locks, since
+ // lockfile creation may fail on file/directory conflicts.
+ if (!checkConflictingNames(pending)) {
+ return;
+ }
+
+ if (!checkObjectExistence(walk, pending)) {
+ return;
+ }
+
+ if (!checkNonFastForwards(walk, pending)) {
+ return;
+ }
+
+ // Pack refs normally, so we can create lock files even in the case where
+ // refs/x is deleted and refs/x/y is created in this batch.
+ try {
+ refdb.pack(
+ pending.stream().map(ReceiveCommand::getRefName).collect(toList()));
+ } catch (LockFailedException e) {
+ lockFailure(pending.get(0), pending);
+ return;
+ }
+
+ Map<String, LockFile> locks = null;
+ refdb.inProcessPackedRefsLock.lock();
+ try {
+ PackedRefList oldPackedList;
+ if (!refdb.isInClone()) {
+ locks = lockLooseRefs(pending);
+ if (locks == null) {
+ return;
+ }
+ oldPackedList = refdb.pack(locks);
+ } else {
+ // During a clone, locking isn't needed since no refs exist yet.
+ // This also helps to avoid problems with refs that differ only in
+ // case on a case-insensitive filesystem (bug 528497).
+ oldPackedList = refdb.getPackedRefs();
+ }
+ RefList<Ref> newRefs = applyUpdates(walk, oldPackedList, pending);
+ if (newRefs == null) {
+ return;
+ }
+ LockFile packedRefsLock = refdb.lockPackedRefs();
+ if (packedRefsLock == null) {
+ lockFailure(pending.get(0), pending);
+ return;
+ }
+ // commitPackedRefs removes lock file (by renaming over real file).
+ refdb.commitPackedRefs(packedRefsLock, newRefs, oldPackedList,
+ true);
+ } finally {
+ try {
+ unlockAll(locks);
+ } finally {
+ refdb.inProcessPackedRefsLock.unlock();
+ }
+ }
+
+ refdb.fireRefsChanged();
+ pending.forEach(c -> c.setResult(ReceiveCommand.Result.OK));
+ writeReflog(pending);
+ }
+
+ private boolean checkConflictingNames(List<ReceiveCommand> commands)
+ throws IOException {
+ Set<String> takenNames = new HashSet<>();
+ Set<String> takenPrefixes = new HashSet<>();
+ Set<String> deletes = new HashSet<>();
+ for (ReceiveCommand cmd : commands) {
+ if (cmd.getType() != ReceiveCommand.Type.DELETE) {
+ takenNames.add(cmd.getRefName());
+ addPrefixesTo(cmd.getRefName(), takenPrefixes);
+ } else {
+ deletes.add(cmd.getRefName());
+ }
+ }
+ Set<String> initialRefs = refdb.getRefs(RefDatabase.ALL).keySet();
+ for (String name : initialRefs) {
+ if (!deletes.contains(name)) {
+ takenNames.add(name);
+ addPrefixesTo(name, takenPrefixes);
+ }
+ }
+
+ for (ReceiveCommand cmd : commands) {
+ if (cmd.getType() != ReceiveCommand.Type.DELETE &&
+ takenPrefixes.contains(cmd.getRefName())) {
+ // This ref is a prefix of some other ref. This check doesn't apply when
+ // this command is a delete, because if the ref is deleted nobody will
+ // ever be creating a loose ref with that name.
+ lockFailure(cmd, commands);
+ return false;
+ }
+ for (String prefix : getPrefixes(cmd.getRefName())) {
+ if (takenNames.contains(prefix)) {
+ // A prefix of this ref is already a refname. This check does apply
+ // when this command is a delete, because we would need to create the
+ // refname as a directory in order to create a lockfile for the
+ // to-be-deleted ref.
+ lockFailure(cmd, commands);
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
+ private boolean checkObjectExistence(RevWalk walk,
+ List<ReceiveCommand> commands) throws IOException {
+ for (ReceiveCommand cmd : commands) {
+ try {
+ if (!cmd.getNewId().equals(ObjectId.zeroId())) {
+ walk.parseAny(cmd.getNewId());
+ }
+ } catch (MissingObjectException e) {
+ // ReceiveCommand#setResult(Result) converts REJECTED to
+ // REJECTED_NONFASTFORWARD, even though that result is also used for a
+ // missing object. Eagerly handle this case so we can set the right
+ // result.
+ reject(cmd, ReceiveCommand.Result.REJECTED_MISSING_OBJECT, commands);
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private boolean checkNonFastForwards(RevWalk walk,
+ List<ReceiveCommand> commands) throws IOException {
+ if (isAllowNonFastForwards()) {
+ return true;
+ }
+ for (ReceiveCommand cmd : commands) {
+ cmd.updateType(walk);
+ if (cmd.getType() == ReceiveCommand.Type.UPDATE_NONFASTFORWARD) {
+ reject(cmd, REJECTED_NONFASTFORWARD, commands);
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Lock loose refs corresponding to a list of commands.
+ *
+ * @param commands
+ * commands that we intend to execute.
+ * @return map of ref name in the input commands to lock file. Always contains
+ * one entry for each ref in the input list. All locks are acquired
+ * before returning. If any lock could not be acquired: the
+ * return value is null; no locks are held; and all commands that were
+ * pending are set to fail with {@code LOCK_FAILURE}.
+ * @throws IOException
+ * an error occurred other than a failure to acquire; no locks are
+ * held if this exception is thrown.
+ */
+ @Nullable
+ private Map<String, LockFile> lockLooseRefs(List<ReceiveCommand> commands)
+ throws IOException {
+ ReceiveCommand failed = null;
+ Map<String, LockFile> locks = new HashMap<>();
+ try {
+ RETRY: for (int ms : refdb.getRetrySleepMs()) {
+ failed = null;
+ // Release all locks before trying again, to prevent deadlock.
+ unlockAll(locks);
+ locks.clear();
+ RefDirectory.sleep(ms);
+
+ for (ReceiveCommand c : commands) {
+ String name = c.getRefName();
+ LockFile lock = new LockFile(refdb.fileFor(name));
+ if (locks.put(name, lock) != null) {
+ throw new IOException(
+ MessageFormat.format(JGitText.get().duplicateRef, name));
+ }
+ if (!lock.lock()) {
+ failed = c;
+ continue RETRY;
+ }
+ }
+ Map<String, LockFile> result = locks;
+ locks = null;
+ return result;
+ }
+ } finally {
+ unlockAll(locks);
+ }
+ lockFailure(failed != null ? failed : commands.get(0), commands);
+ return null;
+ }
+
+ private static RefList<Ref> applyUpdates(RevWalk walk, RefList<Ref> refs,
+ List<ReceiveCommand> commands) throws IOException {
+ int nDeletes = 0;
+ List<ReceiveCommand> adds = new ArrayList<>(commands.size());
+ for (ReceiveCommand c : commands) {
+ if (c.getType() == ReceiveCommand.Type.CREATE) {
+ adds.add(c);
+ } else if (c.getType() == ReceiveCommand.Type.DELETE) {
+ nDeletes++;
+ }
+ }
+ int addIdx = 0;
+
+ // Construct a new RefList by linearly scanning the old list, and merging in
+ // any updates.
+ Map<String, ReceiveCommand> byName = byName(commands);
+ RefList.Builder<Ref> b =
+ new RefList.Builder<>(refs.size() - nDeletes + adds.size());
+ for (Ref ref : refs) {
+ String name = ref.getName();
+ ReceiveCommand cmd = byName.remove(name);
+ if (cmd == null) {
+ b.add(ref);
+ continue;
+ }
+ if (!cmd.getOldId().equals(ref.getObjectId())) {
+ lockFailure(cmd, commands);
+ return null;
+ }
+
+ // Consume any adds between the last and current ref.
+ while (addIdx < adds.size()) {
+ ReceiveCommand currAdd = adds.get(addIdx);
+ if (currAdd.getRefName().compareTo(name) < 0) {
+ b.add(peeledRef(walk, currAdd));
+ byName.remove(currAdd.getRefName());
+ } else {
+ break;
+ }
+ addIdx++;
+ }
+
+ if (cmd.getType() != ReceiveCommand.Type.DELETE) {
+ b.add(peeledRef(walk, cmd));
+ }
+ }
+
+ // All remaining adds are valid, since the refs didn't exist.
+ while (addIdx < adds.size()) {
+ ReceiveCommand cmd = adds.get(addIdx++);
+ byName.remove(cmd.getRefName());
+ b.add(peeledRef(walk, cmd));
+ }
+
+ // Any remaining updates/deletes do not correspond to any existing refs, so
+ // they are lock failures.
+ if (!byName.isEmpty()) {
+ lockFailure(byName.values().iterator().next(), commands);
+ return null;
+ }
+
+ return b.toRefList();
+ }
+
+ private void writeReflog(List<ReceiveCommand> commands) {
+ PersonIdent ident = getRefLogIdent();
+ if (ident == null) {
+ ident = new PersonIdent(refdb.getRepository());
+ }
+ for (ReceiveCommand cmd : commands) {
+ // Assume any pending commands have already been executed atomically.
+ if (cmd.getResult() != ReceiveCommand.Result.OK) {
+ continue;
+ }
+ String name = cmd.getRefName();
+
+ if (cmd.getType() == ReceiveCommand.Type.DELETE) {
+ try {
+ RefDirectory.delete(refdb.logFor(name), RefDirectory.levelsIn(name));
+ } catch (IOException e) {
+ // Ignore failures, see below.
+ }
+ continue;
+ }
+
+ if (isRefLogDisabled(cmd)) {
+ continue;
+ }
+
+ String msg = getRefLogMessage(cmd);
+ if (isRefLogIncludingResult(cmd)) {
+ String strResult = toResultString(cmd);
+ if (strResult != null) {
+ msg = msg.isEmpty()
+ ? strResult : msg + ": " + strResult; //$NON-NLS-1$
+ }
+ }
+ try {
+ new ReflogWriter(refdb, isForceRefLog(cmd))
+ .log(name, cmd.getOldId(), cmd.getNewId(), ident, msg);
+ } catch (IOException e) {
+ // Ignore failures, but continue attempting to write more reflogs.
+ //
+ // In this storage format, it is impossible to atomically write the
+ // reflog with the ref updates, so we have to choose between:
+ // a. Propagating this exception and claiming failure, even though the
+ // actual ref updates succeeded.
+ // b. Ignoring failures writing the reflog, so we claim success if and
+ // only if the ref updates succeeded.
+ // We choose (b) in order to surprise callers the least.
+ //
+ // Possible future improvements:
+ // * Log a warning to a logger.
+ // * Retry a fixed number of times in case the error was transient.
+ }
+ }
+ }
+
+ private String toResultString(ReceiveCommand cmd) {
+ switch (cmd.getType()) {
+ case CREATE:
+ return ReflogEntry.PREFIX_CREATED;
+ case UPDATE:
+ // Match the behavior of a single RefUpdate. In that case, setting the
+ // force bit completely bypasses the potentially expensive isMergedInto
+ // check, by design, so the reflog message may be inaccurate.
+ //
+ // Similarly, this class bypasses the isMergedInto checks when the force
+ // bit is set, meaning we can't actually distinguish between UPDATE and
+ // UPDATE_NONFASTFORWARD when isAllowNonFastForwards() returns true.
+ return isAllowNonFastForwards()
+ ? ReflogEntry.PREFIX_FORCED_UPDATE : ReflogEntry.PREFIX_FAST_FORWARD;
+ case UPDATE_NONFASTFORWARD:
+ return ReflogEntry.PREFIX_FORCED_UPDATE;
+ default:
+ return null;
+ }
+ }
+
+ private static Map<String, ReceiveCommand> byName(
+ List<ReceiveCommand> commands) {
+ Map<String, ReceiveCommand> ret = new LinkedHashMap<>();
+ for (ReceiveCommand cmd : commands) {
+ ret.put(cmd.getRefName(), cmd);
+ }
+ return ret;
+ }
+
+ private static Ref peeledRef(RevWalk walk, ReceiveCommand cmd)
+ throws IOException {
+ ObjectId newId = cmd.getNewId().copy();
+ RevObject obj = walk.parseAny(newId);
+ if (obj instanceof RevTag) {
+ return new ObjectIdRef.PeeledTag(
+ Ref.Storage.PACKED, cmd.getRefName(), newId, walk.peel(obj).copy());
+ }
+ return new ObjectIdRef.PeeledNonTag(
+ Ref.Storage.PACKED, cmd.getRefName(), newId);
+ }
+
+ private static void unlockAll(@Nullable Map<?, LockFile> locks) {
+ if (locks != null) {
+ locks.values().forEach(LockFile::unlock);
+ }
+ }
+
+ private static void lockFailure(ReceiveCommand cmd,
+ List<ReceiveCommand> commands) {
+ reject(cmd, LOCK_FAILURE, commands);
+ }
+
+ private static void reject(ReceiveCommand cmd, ReceiveCommand.Result result,
+ List<ReceiveCommand> commands) {
+ cmd.setResult(result);
+ for (ReceiveCommand c2 : commands) {
+ if (c2.getResult() == ReceiveCommand.Result.OK) {
+ // Undo OK status so ReceiveCommand#abort aborts it. Assumes this method
+ // is always called before committing any updates to disk.
+ c2.setResult(ReceiveCommand.Result.NOT_ATTEMPTED);
+ }
+ }
+ ReceiveCommand.abort(commands);
+ }
+}
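The class comment above walks through the packed-refs algorithm step by step; from a caller's perspective the entry point is the ordinary BatchRefUpdate API, since RefDirectory.newBatchUpdate() now returns this class. The sketch below is illustrative only: the class name AtomicBatchExample, the ref names and the object ids are placeholders, and error handling is reduced to checking each command's result.

    import java.io.IOException;

    import org.eclipse.jgit.lib.BatchRefUpdate;
    import org.eclipse.jgit.lib.NullProgressMonitor;
    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.lib.Repository;
    import org.eclipse.jgit.revwalk.RevWalk;
    import org.eclipse.jgit.transport.ReceiveCommand;

    class AtomicBatchExample {
        // Create two branches in one atomic batch; either both appear or neither.
        static void createTwoRefs(Repository repo, ObjectId a, ObjectId b)
                throws IOException {
            BatchRefUpdate batch = repo.getRefDatabase().newBatchUpdate();
            batch.setAtomic(true);
            batch.addCommand(
                    new ReceiveCommand(ObjectId.zeroId(), a, "refs/heads/topic1"));
            batch.addCommand(
                    new ReceiveCommand(ObjectId.zeroId(), b, "refs/heads/topic2"));
            try (RevWalk rw = new RevWalk(repo)) {
                batch.execute(rw, NullProgressMonitor.INSTANCE);
            }
            // Each command reports its own result; OK means it was applied.
            for (ReceiveCommand cmd : batch.getCommands()) {
                if (cmd.getResult() != ReceiveCommand.Result.OK) {
                    throw new IOException("batch rejected: " + cmd.getResult());
                }
            }
        }
    }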
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
index 8338b2c8a2..4003b27c31 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
@@ -48,6 +48,7 @@ package org.eclipse.jgit.internal.storage.file;
import static org.eclipse.jgit.lib.Constants.CHARSET;
import static org.eclipse.jgit.lib.Constants.HEAD;
+import static org.eclipse.jgit.lib.Constants.LOGS;
import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH;
import static org.eclipse.jgit.lib.Constants.PACKED_REFS;
import static org.eclipse.jgit.lib.Constants.R_HEADS;
@@ -63,19 +64,24 @@ import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.io.InterruptedIOException;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.Files;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.text.MessageFormat;
import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.ReentrantLock;
import org.eclipse.jgit.annotations.NonNull;
+import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.errors.InvalidObjectIdException;
import org.eclipse.jgit.errors.LockFailedException;
import org.eclipse.jgit.errors.MissingObjectException;
@@ -138,15 +144,21 @@ public class RefDirectory extends RefDatabase {
Constants.MERGE_HEAD, Constants.FETCH_HEAD, Constants.ORIG_HEAD,
Constants.CHERRY_PICK_HEAD };
+ @SuppressWarnings("boxing")
+ private static final List<Integer> RETRY_SLEEP_MS =
+ Collections.unmodifiableList(Arrays.asList(0, 100, 200, 400, 800, 1600));
+
private final FileRepository parent;
private final File gitDir;
final File refsDir;
- private final ReflogWriter logWriter;
+ final File packedRefsFile;
+
+ final File logsDir;
- private final File packedRefsFile;
+ final File logsRefsDir;
/**
* Immutable sorted list of loose references.
@@ -162,6 +174,22 @@ public class RefDirectory extends RefDatabase {
final AtomicReference<PackedRefList> packedRefs = new AtomicReference<>();
/**
+ * Lock for coordinating operations within a single process that may contend
+ * on the {@code packed-refs} file.
+ * <p>
+ * All operations that write {@code packed-refs} must still acquire a
+ * {@link LockFile} on {@link #packedRefsFile}, even after they have acquired
+ * this lock, since there may be multiple {@link RefDirectory} instances or
+ * other processes operating on the same repo on disk.
+ * <p>
+ * This lock exists so multiple threads in the same process can wait in a fair
+ * queue without trying, failing, and retrying to acquire the on-disk lock. If
+ * {@code RepositoryCache} is used, this lock instance will be used by all
+ * threads.
+ */
+ final ReentrantLock inProcessPackedRefsLock = new ReentrantLock(true);
+
+ /**
* Number of modifications made to this database.
* <p>
* This counter is incremented when a change is made, or detected from the
@@ -177,24 +205,43 @@ public class RefDirectory extends RefDatabase {
*/
private final AtomicInteger lastNotifiedModCnt = new AtomicInteger();
+ private List<Integer> retrySleepMs = RETRY_SLEEP_MS;
+
RefDirectory(final FileRepository db) {
final FS fs = db.getFS();
parent = db;
gitDir = db.getDirectory();
- logWriter = new ReflogWriter(db);
refsDir = fs.resolve(gitDir, R_REFS);
+ logsDir = fs.resolve(gitDir, LOGS);
+ logsRefsDir = fs.resolve(gitDir, LOGS + '/' + R_REFS);
packedRefsFile = fs.resolve(gitDir, PACKED_REFS);
looseRefs.set(RefList.<LooseRef> emptyList());
- packedRefs.set(PackedRefList.NO_PACKED_REFS);
+ packedRefs.set(NO_PACKED_REFS);
}
Repository getRepository() {
return parent;
}
- ReflogWriter getLogWriter() {
- return logWriter;
+ ReflogWriter newLogWriter(boolean force) {
+ return new ReflogWriter(this, force);
+ }
+
+ /**
+ * Locate the log file on disk for a single reference name.
+ *
+ * @param name
+ * name of the ref, relative to the Git repository top level
+ * directory (so typically starts with refs/).
+ * @return the log file location.
+ */
+ public File logFor(String name) {
+ if (name.startsWith(R_REFS)) {
+ name = name.substring(R_REFS.length());
+ return new File(logsRefsDir, name);
+ }
+ return new File(logsDir, name);
}
@Override
@@ -202,7 +249,7 @@ public class RefDirectory extends RefDatabase {
FileUtils.mkdir(refsDir);
FileUtils.mkdir(new File(refsDir, R_HEADS.substring(R_REFS.length())));
FileUtils.mkdir(new File(refsDir, R_TAGS.substring(R_REFS.length())));
- logWriter.create();
+ newLogWriter(false).create();
}
@Override
@@ -212,7 +259,7 @@ public class RefDirectory extends RefDatabase {
private void clearReferences() {
looseRefs.set(RefList.<LooseRef> emptyList());
- packedRefs.set(PackedRefList.NO_PACKED_REFS);
+ packedRefs.set(NO_PACKED_REFS);
}
@Override
@@ -565,6 +612,16 @@ public class RefDirectory extends RefDatabase {
return new RefDirectoryRename(from, to);
}
+ @Override
+ public PackedBatchRefUpdate newBatchUpdate() {
+ return new PackedBatchRefUpdate(this);
+ }
+
+ @Override
+ public boolean performsAtomicTransactions() {
+ return true;
+ }
+
void stored(RefDirectoryUpdate update, FileSnapshot snapshot) {
final ObjectId target = update.getNewObjectId().copy();
final Ref leaf = update.getRef().getLeaf();
@@ -583,6 +640,9 @@ public class RefDirectory extends RefDatabase {
void delete(RefDirectoryUpdate update) throws IOException {
Ref dst = update.getRef();
+ if (!update.isDetachingSymbolicRef()) {
+ dst = dst.getLeaf();
+ }
String name = dst.getName();
// Write the packed-refs file using an atomic update. We might
@@ -590,16 +650,20 @@ public class RefDirectory extends RefDatabase {
// we don't miss an edit made externally.
final PackedRefList packed = getPackedRefs();
if (packed.contains(name)) {
- LockFile lck = new LockFile(packedRefsFile);
- if (!lck.lock())
- throw new LockFailedException(packedRefsFile);
+ inProcessPackedRefsLock.lock();
try {
- PackedRefList cur = readPackedRefs();
- int idx = cur.find(name);
- if (0 <= idx)
- commitPackedRefs(lck, cur.remove(idx), packed);
+ LockFile lck = lockPackedRefsOrThrow();
+ try {
+ PackedRefList cur = readPackedRefs();
+ int idx = cur.find(name);
+ if (0 <= idx) {
+ commitPackedRefs(lck, cur.remove(idx), packed, true);
+ }
+ } finally {
+ lck.unlock();
+ }
} finally {
- lck.unlock();
+ inProcessPackedRefsLock.unlock();
}
}
@@ -613,7 +677,7 @@ public class RefDirectory extends RefDatabase {
} while (!looseRefs.compareAndSet(curLoose, newLoose));
int levels = levelsIn(name) - 2;
- delete(logWriter.logFor(name), levels);
+ delete(logFor(name), levels);
if (dst.getStorage().isLoose()) {
update.unlock();
delete(fileFor(name), levels);
@@ -635,75 +699,145 @@ public class RefDirectory extends RefDatabase {
* @throws IOException
*/
public void pack(List<String> refs) throws IOException {
- if (refs.size() == 0)
- return;
+ pack(refs, Collections.emptyMap());
+ }
+
+ PackedRefList pack(Map<String, LockFile> heldLocks) throws IOException {
+ return pack(heldLocks.keySet(), heldLocks);
+ }
+
+ private PackedRefList pack(Collection<String> refs,
+ Map<String, LockFile> heldLocks) throws IOException {
+ for (LockFile ol : heldLocks.values()) {
+ ol.requireLock();
+ }
+ if (refs.size() == 0) {
+ return null;
+ }
FS fs = parent.getFS();
// Lock the packed refs file and read the content
- LockFile lck = new LockFile(packedRefsFile);
- if (!lck.lock())
- throw new IOException(MessageFormat.format(
- JGitText.get().cannotLock, packedRefsFile));
-
+ inProcessPackedRefsLock.lock();
try {
- final PackedRefList packed = getPackedRefs();
- RefList<Ref> cur = readPackedRefs();
-
- // Iterate over all refs to be packed
- for (String refName : refs) {
- Ref ref = readRef(refName, cur);
- if (ref.isSymbolic())
- continue; // can't pack symbolic refs
- // Add/Update it to packed-refs
- int idx = cur.find(refName);
- if (idx >= 0)
- cur = cur.set(idx, peeledPackedRef(ref));
- else
- cur = cur.add(idx, peeledPackedRef(ref));
- }
+ LockFile lck = lockPackedRefsOrThrow();
+ try {
+ final PackedRefList packed = getPackedRefs();
+ RefList<Ref> cur = readPackedRefs();
+
+ // Iterate over all refs to be packed
+ boolean dirty = false;
+ for (String refName : refs) {
+ Ref oldRef = readRef(refName, cur);
+ if (oldRef == null) {
+ continue; // A non-existent ref is already correctly packed.
+ }
+ if (oldRef.isSymbolic()) {
+ continue; // can't pack symbolic refs
+ }
+ // Add/Update it to packed-refs
+ Ref newRef = peeledPackedRef(oldRef);
+ if (newRef == oldRef) {
+ // No-op; peeledPackedRef returns the input ref only if it's already
+ // packed, and readRef returns a packed ref only if there is no
+ // loose ref.
+ continue;
+ }
- // The new content for packed-refs is collected. Persist it.
- commitPackedRefs(lck, cur, packed);
+ dirty = true;
+ int idx = cur.find(refName);
+ if (idx >= 0) {
+ cur = cur.set(idx, newRef);
+ } else {
+ cur = cur.add(idx, newRef);
+ }
+ }
+ if (!dirty) {
+ // All requested refs were already packed accurately
+ return packed;
+ }
- // Now delete the loose refs which are now packed
- for (String refName : refs) {
- // Lock the loose ref
- File refFile = fileFor(refName);
- if (!fs.exists(refFile))
- continue;
- LockFile rLck = new LockFile(refFile);
- if (!rLck.lock())
- continue;
- try {
- LooseRef currentLooseRef = scanRef(null, refName);
- if (currentLooseRef == null || currentLooseRef.isSymbolic())
+ // The new content for packed-refs is collected. Persist it.
+ PackedRefList result = commitPackedRefs(lck, cur, packed,
+ false);
+
+ // Now delete the loose refs which are now packed
+ for (String refName : refs) {
+ // Lock the loose ref
+ File refFile = fileFor(refName);
+ if (!fs.exists(refFile)) {
continue;
- Ref packedRef = cur.get(refName);
- ObjectId clr_oid = currentLooseRef.getObjectId();
- if (clr_oid != null
- && clr_oid.equals(packedRef.getObjectId())) {
- RefList<LooseRef> curLoose, newLoose;
- do {
- curLoose = looseRefs.get();
- int idx = curLoose.find(refName);
- if (idx < 0)
- break;
- newLoose = curLoose.remove(idx);
- } while (!looseRefs.compareAndSet(curLoose, newLoose));
- int levels = levelsIn(refName) - 2;
- delete(refFile, levels, rLck);
}
- } finally {
- rLck.unlock();
+
+ LockFile rLck = heldLocks.get(refName);
+ boolean shouldUnlock;
+ if (rLck == null) {
+ rLck = new LockFile(refFile);
+ if (!rLck.lock()) {
+ continue;
+ }
+ shouldUnlock = true;
+ } else {
+ shouldUnlock = false;
+ }
+
+ try {
+ LooseRef currentLooseRef = scanRef(null, refName);
+ if (currentLooseRef == null || currentLooseRef.isSymbolic()) {
+ continue;
+ }
+ Ref packedRef = cur.get(refName);
+ ObjectId clr_oid = currentLooseRef.getObjectId();
+ if (clr_oid != null
+ && clr_oid.equals(packedRef.getObjectId())) {
+ RefList<LooseRef> curLoose, newLoose;
+ do {
+ curLoose = looseRefs.get();
+ int idx = curLoose.find(refName);
+ if (idx < 0) {
+ break;
+ }
+ newLoose = curLoose.remove(idx);
+ } while (!looseRefs.compareAndSet(curLoose, newLoose));
+ int levels = levelsIn(refName) - 2;
+ delete(refFile, levels, rLck);
+ }
+ } finally {
+ if (shouldUnlock) {
+ rLck.unlock();
+ }
+ }
}
+ // Don't fire refsChanged. The refs have not changed, only their
+ // storage.
+ return result;
+ } finally {
+ lck.unlock();
}
- // Don't fire refsChanged. The refs have not change, only their
- // storage.
} finally {
- lck.unlock();
+ inProcessPackedRefsLock.unlock();
}
}
+ @Nullable
+ LockFile lockPackedRefs() throws IOException {
+ LockFile lck = new LockFile(packedRefsFile);
+ for (int ms : getRetrySleepMs()) {
+ sleep(ms);
+ if (lck.lock()) {
+ return lck;
+ }
+ }
+ return null;
+ }
+
+ private LockFile lockPackedRefsOrThrow() throws IOException {
+ LockFile lck = lockPackedRefs();
+ if (lck == null) {
+ throw new LockFailedException(packedRefsFile);
+ }
+ return lck;
+ }
+
/**
* Make sure a ref is peeled and has the Storage PACKED. If the given ref
* has this attributes simply return it. Otherwise create a new peeled
@@ -732,9 +866,9 @@ public class RefDirectory extends RefDatabase {
}
}
- void log(final RefUpdate update, final String msg, final boolean deref)
+ void log(boolean force, RefUpdate update, String msg, boolean deref)
throws IOException {
- logWriter.log(update, msg, deref);
+ newLogWriter(force).log(update, msg, deref);
}
private Ref resolve(final Ref ref, int depth, String prefix,
@@ -769,7 +903,7 @@ public class RefDirectory extends RefDatabase {
return ref;
}
- private PackedRefList getPackedRefs() throws IOException {
+ PackedRefList getPackedRefs() throws IOException {
boolean trustFolderStat = getRepository().getConfig().getBoolean(
ConfigConstants.CONFIG_CORE_SECTION,
ConfigConstants.CONFIG_KEY_TRUSTFOLDERSTAT, true);
@@ -803,7 +937,7 @@ public class RefDirectory extends RefDatabase {
throw noPackedRefs;
}
// Ignore it and leave the new list empty.
- return PackedRefList.NO_PACKED_REFS;
+ return NO_PACKED_REFS;
}
try {
return new PackedRefList(parsePackedRefs(br), snapshot,
@@ -884,8 +1018,12 @@ public class RefDirectory extends RefDatabase {
return new StringBuilder(end - off).append(src, off, end).toString();
}
- private void commitPackedRefs(final LockFile lck, final RefList<Ref> refs,
- final PackedRefList oldPackedList) throws IOException {
+ PackedRefList commitPackedRefs(final LockFile lck, final RefList<Ref> refs,
+ final PackedRefList oldPackedList, boolean changed)
+ throws IOException {
+ // Can't just return packedRefs.get() from this method; it might have been
+ // updated again after writePackedRefs() returns.
+ AtomicReference<PackedRefList> result = new AtomicReference<>();
new RefWriter(refs) {
@Override
protected void writeFile(String name, byte[] content)
@@ -907,10 +1045,31 @@ public class RefDirectory extends RefDatabase {
throw new ObjectWritingException(MessageFormat.format(JGitText.get().unableToWrite, name));
byte[] digest = Constants.newMessageDigest().digest(content);
- packedRefs.compareAndSet(oldPackedList, new PackedRefList(refs,
- lck.getCommitSnapshot(), ObjectId.fromRaw(digest)));
+ PackedRefList newPackedList = new PackedRefList(
+ refs, lck.getCommitSnapshot(), ObjectId.fromRaw(digest));
+
+ // This thread holds the file lock, so no other thread or process should
+ // be able to modify the packed-refs file on disk. If the list changed,
+ // it means something is very wrong, so throw an exception.
+ //
+ // However, we can't use a naive compareAndSet to check whether the
+ // update was successful, because another thread might _read_ the
+ // packed refs file that was written out by this thread while holding
+ // the lock, and update the packedRefs reference to point to that. So
+ // compare the actual contents instead.
+ PackedRefList afterUpdate = packedRefs.updateAndGet(
+ p -> p.id.equals(oldPackedList.id) ? newPackedList : p);
+ if (!afterUpdate.id.equals(newPackedList.id)) {
+ throw new ObjectWritingException(
+ MessageFormat.format(JGitText.get().unableToWrite, name));
+ }
+ if (changed) {
+ modCnt.incrementAndGet();
+ }
+ result.set(newPackedList);
}
}.writePackedRefs();
+ return result.get();
}
private Ref readRef(String name, RefList<Ref> packed) throws IOException {
@@ -1031,8 +1190,31 @@ public class RefDirectory extends RefDatabase {
&& buf[4] == ' ';
}
+ /**
+ * Detect whether we are in the middle of a clone command execution.
+ *
+ * @return {@code true} if we are currently cloning a repository
+ * @throws IOException
+ */
+ boolean isInClone() throws IOException {
+ return hasDanglingHead() && !packedRefsFile.exists() && !hasLooseRef();
+ }
+
+ private boolean hasDanglingHead() throws IOException {
+ Ref head = exactRef(Constants.HEAD);
+ if (head != null) {
+ ObjectId id = head.getObjectId();
+ return id == null || id.equals(ObjectId.zeroId());
+ }
+ return false;
+ }
+
+ private boolean hasLooseRef() throws IOException {
+ return Files.walk(refsDir.toPath()).anyMatch(Files::isRegularFile);
+ }
+
/** If the parent should fire listeners, fires them. */
- private void fireRefsChanged() {
+ void fireRefsChanged() {
final int last = lastNotifiedModCnt.get();
final int curr = modCnt.get();
if (last != curr && lastNotifiedModCnt.compareAndSet(last, curr) && last != 0)
@@ -1107,22 +1289,80 @@ public class RefDirectory extends RefDatabase {
}
}
- private static class PackedRefList extends RefList<Ref> {
- static final PackedRefList NO_PACKED_REFS = new PackedRefList(
- RefList.emptyList(), FileSnapshot.MISSING_FILE,
- ObjectId.zeroId());
+ /**
+ * Get times to sleep while retrying a possibly contentious operation.
+ * <p>
+ * For retrying an operation that might have high contention, such as locking
+ * the {@code packed-refs} file, the caller may implement a retry loop using
+ * the returned values:
+ *
+ * <pre>
+ * for (int toSleepMs : getRetrySleepMs()) {
+ * sleep(toSleepMs);
+ * if (isSuccessful(doSomething())) {
+ * return success;
+ * }
+ * }
+ * return failure;
+ * </pre>
+ *
+ * The first value in the returned iterable is 0, and the caller should treat
+ * a fully-consumed iterator as a timeout.
+ *
+ * @return iterable of times, in milliseconds, that the caller should sleep
+ * before attempting an operation.
+ */
+ Iterable<Integer> getRetrySleepMs() {
+ return retrySleepMs;
+ }
- final FileSnapshot snapshot;
+ void setRetrySleepMs(List<Integer> retrySleepMs) {
+ if (retrySleepMs == null || retrySleepMs.isEmpty()
+ || retrySleepMs.get(0).intValue() != 0) {
+ throw new IllegalArgumentException();
+ }
+ this.retrySleepMs = retrySleepMs;
+ }
- final ObjectId id;
+ /**
+ * Sleep with {@link Thread#sleep(long)}, converting {@link
+ * InterruptedException} to {@link InterruptedIOException}.
+ *
+ * @param ms
+ * time to sleep, in milliseconds; zero or negative is a no-op.
+ * @throws InterruptedIOException
+ * if sleeping was interrupted.
+ */
+ static void sleep(long ms) throws InterruptedIOException {
+ if (ms <= 0) {
+ return;
+ }
+ try {
+ Thread.sleep(ms);
+ } catch (InterruptedException e) {
+ InterruptedIOException ie = new InterruptedIOException();
+ ie.initCause(e);
+ throw ie;
+ }
+ }
+
+ static class PackedRefList extends RefList<Ref> {
- PackedRefList(RefList<Ref> src, FileSnapshot s, ObjectId i) {
+ private final FileSnapshot snapshot;
+
+ private final ObjectId id;
+
+ private PackedRefList(RefList<Ref> src, FileSnapshot s, ObjectId i) {
super(src);
snapshot = s;
id = i;
}
}
+ private static final PackedRefList NO_PACKED_REFS = new PackedRefList(
+ RefList.emptyList(), FileSnapshot.MISSING_FILE,
+ ObjectId.zeroId());
+
private static LooseSymbolicRef newSymbolicRef(FileSnapshot snapshot,
String name, String target) {
Ref dst = new ObjectIdRef.Unpeeled(NEW, target, null);
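The inProcessPackedRefsLock javadoc above describes a two-level scheme: a fair in-process ReentrantLock so threads in one JVM queue instead of spinning, plus the on-disk packed-refs LockFile that still excludes other RefDirectory instances and other processes. A minimal sketch of that nesting, assuming a hypothetical doWrite callback in place of the real packed-refs rewrite:

    import java.io.File;
    import java.io.IOException;
    import java.util.concurrent.locks.ReentrantLock;

    import org.eclipse.jgit.errors.LockFailedException;
    import org.eclipse.jgit.internal.storage.file.LockFile;

    class PackedRefsLockingSketch {
        static void withBothLocks(ReentrantLock inProcessLock,
                File packedRefsFile, Runnable doWrite) throws IOException {
            inProcessLock.lock(); // fair queue for threads in this process
            try {
                LockFile lck = new LockFile(packedRefsFile);
                if (!lck.lock()) { // still required: excludes other processes
                    throw new LockFailedException(packedRefsFile);
                }
                try {
                    doWrite.run(); // rewrite and commit packed-refs here
                } finally {
                    lck.unlock();
                }
            } finally {
                inProcessLock.unlock();
            }
        }
    }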
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryRename.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryRename.java
index 4b803a5144..09456c85c6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryRename.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryRename.java
@@ -188,8 +188,8 @@ class RefDirectoryRename extends RefRename {
}
private boolean renameLog(RefUpdate src, RefUpdate dst) {
- File srcLog = refdb.getLogWriter().logFor(src.getName());
- File dstLog = refdb.getLogWriter().logFor(dst.getName());
+ File srcLog = refdb.logFor(src.getName());
+ File dstLog = refdb.logFor(dst.getName());
if (!srcLog.exists())
return true;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryUpdate.java
index 3c1916b642..7ab30faf10 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryUpdate.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryUpdate.java
@@ -50,6 +50,7 @@ import java.io.IOException;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
+import org.eclipse.jgit.lib.ReflogEntry;
import org.eclipse.jgit.lib.Repository;
/** Updates any reference stored by {@link RefDirectory}. */
@@ -119,7 +120,7 @@ class RefDirectoryUpdate extends RefUpdate {
msg = strResult;
}
}
- database.log(this, msg, shouldDeref);
+ database.log(isForceRefLog(), this, msg, shouldDeref);
}
if (!lock.commit())
return Result.LOCK_FAILURE;
@@ -127,14 +128,14 @@ class RefDirectoryUpdate extends RefUpdate {
return status;
}
- private String toResultString(final Result status) {
+ private String toResultString(Result status) {
switch (status) {
case FORCED:
- return "forced-update"; //$NON-NLS-1$
+ return ReflogEntry.PREFIX_FORCED_UPDATE;
case FAST_FORWARD:
- return "fast forward"; //$NON-NLS-1$
+ return ReflogEntry.PREFIX_FAST_FORWARD;
case NEW:
- return "created"; //$NON-NLS-1$
+ return ReflogEntry.PREFIX_CREATED;
default:
return null;
}
@@ -158,7 +159,7 @@ class RefDirectoryUpdate extends RefUpdate {
String msg = getRefLogMessage();
if (msg != null)
- database.log(this, msg, false);
+ database.log(isForceRefLog(), this, msg, false);
if (!lock.commit())
return Result.LOCK_FAILURE;
database.storedSymbolicRef(this, lock.getCommitSnapshot(), target);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogEntryImpl.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogEntryImpl.java
index 16b2a460e1..8723a8b384 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogEntryImpl.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogEntryImpl.java
@@ -139,4 +139,4 @@ public class ReflogEntryImpl implements Serializable, ReflogEntry {
else
return null;
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogWriter.java
index 892c1c85ad..f0bb9c519e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogWriter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogWriter.java
@@ -46,12 +46,11 @@
package org.eclipse.jgit.internal.storage.file;
import static org.eclipse.jgit.lib.Constants.HEAD;
-import static org.eclipse.jgit.lib.Constants.LOGS;
import static org.eclipse.jgit.lib.Constants.LOCK_SUFFIX;
import static org.eclipse.jgit.lib.Constants.R_HEADS;
+import static org.eclipse.jgit.lib.Constants.R_NOTES;
import static org.eclipse.jgit.lib.Constants.R_REFS;
import static org.eclipse.jgit.lib.Constants.R_REMOTES;
-import static org.eclipse.jgit.lib.Constants.R_STASH;
import java.io.File;
import java.io.FileNotFoundException;
@@ -69,110 +68,75 @@ import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.ReflogEntry;
-import org.eclipse.jgit.lib.Repository;
-import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FileUtils;
/**
- * Utility for writing reflog entries
+ * Utility for writing reflog entries using the traditional one-file-per-log
+ * format.
*/
public class ReflogWriter {
/**
- * Get the ref name to be used for when locking a ref's log for rewriting
+ * Get the ref name to be used when locking a ref's log for rewriting.
*
* @param name
* name of the ref, relative to the Git repository top level
* directory (so typically starts with refs/).
- * @return the name of the ref's lock ref
+ * @return the name of the ref's lock ref.
*/
- public static String refLockFor(final String name) {
+ public static String refLockFor(String name) {
return name + LOCK_SUFFIX;
}
- private final Repository parent;
-
- private final File logsDir;
-
- private final File logsRefsDir;
+ private final RefDirectory refdb;
private final boolean forceWrite;
/**
- * Create write for repository
+ * Create writer for ref directory.
*
- * @param repository
+ * @param refdb
*/
- public ReflogWriter(final Repository repository) {
- this(repository, false);
+ public ReflogWriter(RefDirectory refdb) {
+ this(refdb, false);
}
/**
- * Create write for repository
+ * Create writer for ref directory.
*
- * @param repository
+ * @param refdb
* @param forceWrite
* true to write to disk all entries logged, false to respect the
- * repository's config and current log file status
+ * repository's config and current log file status.
*/
- public ReflogWriter(final Repository repository, final boolean forceWrite) {
- final FS fs = repository.getFS();
- parent = repository;
- File gitDir = repository.getDirectory();
- logsDir = fs.resolve(gitDir, LOGS);
- logsRefsDir = fs.resolve(gitDir, LOGS + '/' + R_REFS);
+ public ReflogWriter(RefDirectory refdb, boolean forceWrite) {
+ this.refdb = refdb;
this.forceWrite = forceWrite;
}
/**
- * Get repository that reflog is being written for
- *
- * @return file repository
- */
- public Repository getRepository() {
- return parent;
- }
-
- /**
- * Create the log directories
+ * Create the log directories.
*
* @throws IOException
- * @return this writer
+ * @return this writer.
*/
public ReflogWriter create() throws IOException {
- FileUtils.mkdir(logsDir);
- FileUtils.mkdir(logsRefsDir);
- FileUtils.mkdir(new File(logsRefsDir,
- R_HEADS.substring(R_REFS.length())));
+ FileUtils.mkdir(refdb.logsDir);
+ FileUtils.mkdir(refdb.logsRefsDir);
+ FileUtils.mkdir(
+ new File(refdb.logsRefsDir, R_HEADS.substring(R_REFS.length())));
return this;
}
/**
- * Locate the log file on disk for a single reference name.
- *
- * @param name
- * name of the ref, relative to the Git repository top level
- * directory (so typically starts with refs/).
- * @return the log file location.
- */
- public File logFor(String name) {
- if (name.startsWith(R_REFS)) {
- name = name.substring(R_REFS.length());
- return new File(logsRefsDir, name);
- }
- return new File(logsDir, name);
- }
-
- /**
- * Write the given {@link ReflogEntry} entry to the ref's log
+ * Write the given entry to the ref's log.
*
* @param refName
- *
* @param entry
* @return this writer
* @throws IOException
*/
- public ReflogWriter log(final String refName, final ReflogEntry entry)
+ public ReflogWriter log(String refName, ReflogEntry entry)
throws IOException {
return log(refName, entry.getOldId(), entry.getNewId(), entry.getWho(),
entry.getComment());
@@ -189,15 +153,14 @@ public class ReflogWriter {
* @return this writer
* @throws IOException
*/
- public ReflogWriter log(final String refName, final ObjectId oldId,
- final ObjectId newId, final PersonIdent ident, final String message)
- throws IOException {
+ public ReflogWriter log(String refName, ObjectId oldId,
+ ObjectId newId, PersonIdent ident, String message) throws IOException {
byte[] encoded = encode(oldId, newId, ident, message);
return log(refName, encoded);
}
/**
- * Write the given ref update to the ref's log
+ * Write the given ref update to the ref's log.
*
* @param update
* @param msg
@@ -205,19 +168,19 @@ public class ReflogWriter {
* @return this writer
* @throws IOException
*/
- public ReflogWriter log(final RefUpdate update, final String msg,
- final boolean deref) throws IOException {
- final ObjectId oldId = update.getOldObjectId();
- final ObjectId newId = update.getNewObjectId();
- final Ref ref = update.getRef();
+ public ReflogWriter log(RefUpdate update, String msg,
+ boolean deref) throws IOException {
+ ObjectId oldId = update.getOldObjectId();
+ ObjectId newId = update.getNewObjectId();
+ Ref ref = update.getRef();
PersonIdent ident = update.getRefLogIdent();
if (ident == null)
- ident = new PersonIdent(parent);
+ ident = new PersonIdent(refdb.getRepository());
else
ident = new PersonIdent(ident);
- final byte[] rec = encode(oldId, newId, ident, msg);
+ byte[] rec = encode(oldId, newId, ident, msg);
if (deref && ref.isSymbolic()) {
log(ref.getName(), rec);
log(ref.getLeaf().getName(), rec);
@@ -229,33 +192,34 @@ public class ReflogWriter {
private byte[] encode(ObjectId oldId, ObjectId newId, PersonIdent ident,
String message) {
- final StringBuilder r = new StringBuilder();
+ StringBuilder r = new StringBuilder();
r.append(ObjectId.toString(oldId));
r.append(' ');
r.append(ObjectId.toString(newId));
r.append(' ');
r.append(ident.toExternalString());
r.append('\t');
- r.append(message.replace("\r\n", " ").replace("\n", " ")); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+ r.append(
+ message.replace("\r\n", " ") //$NON-NLS-1$ //$NON-NLS-2$
+ .replace("\n", " ")); //$NON-NLS-1$ //$NON-NLS-2$
r.append('\n');
return Constants.encode(r.toString());
}
- private ReflogWriter log(final String refName, final byte[] rec)
- throws IOException {
- final File log = logFor(refName);
- final boolean write = forceWrite
+ private ReflogWriter log(String refName, byte[] rec) throws IOException {
+ File log = refdb.logFor(refName);
+ boolean write = forceWrite
|| (isLogAllRefUpdates() && shouldAutoCreateLog(refName))
|| log.isFile();
if (!write)
return this;
- WriteConfig wc = getRepository().getConfig().get(WriteConfig.KEY);
+ WriteConfig wc = refdb.getRepository().getConfig().get(WriteConfig.KEY);
FileOutputStream out;
try {
out = new FileOutputStream(log, true);
} catch (FileNotFoundException err) {
- final File dir = log.getParentFile();
+ File dir = log.getParentFile();
if (dir.exists())
throw err;
if (!dir.mkdirs() && !dir.isDirectory())
@@ -279,13 +243,14 @@ public class ReflogWriter {
}
private boolean isLogAllRefUpdates() {
- return parent.getConfig().get(CoreConfig.KEY).isLogAllRefUpdates();
+ return refdb.getRepository().getConfig().get(CoreConfig.KEY)
+ .isLogAllRefUpdates();
}
- private boolean shouldAutoCreateLog(final String refName) {
- return refName.equals(HEAD) //
- || refName.startsWith(R_HEADS) //
- || refName.startsWith(R_REMOTES) //
- || refName.equals(R_STASH);
+ private boolean shouldAutoCreateLog(String refName) {
+ return refName.equals(HEAD)
+ || refName.startsWith(R_HEADS)
+ || refName.startsWith(R_REMOTES)
+ || refName.startsWith(R_NOTES);
}
-}
+} \ No newline at end of file
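After this refactoring a ReflogWriter is built from a RefDirectory and a force flag, which is how PackedBatchRefUpdate appends entries once the packed-refs commit has succeeded. A usage sketch, where the class name, ref name, object ids, ident and message are placeholders:

    import java.io.IOException;

    import org.eclipse.jgit.internal.storage.file.RefDirectory;
    import org.eclipse.jgit.internal.storage.file.ReflogWriter;
    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.lib.PersonIdent;

    class ReflogWriterSketch {
        static void append(RefDirectory refdb, ObjectId oldId, ObjectId newId)
                throws IOException {
            // force=true writes even if the log file does not exist yet,
            // mirroring the isForceRefLog() handling in the batch update.
            new ReflogWriter(refdb, true).log("refs/heads/master", oldId, newId,
                    new PersonIdent("A U Thor", "author@example.com"),
                    "forced-update");
        }
    }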
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SimpleDataOutput.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SimpleDataOutput.java
index 373a49465b..5fe0429246 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SimpleDataOutput.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SimpleDataOutput.java
@@ -136,4 +136,4 @@ class SimpleDataOutput implements DataOutput {
public void writeUTF(String s) throws IOException {
throw new UnsupportedOperationException();
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCache.java
index a525c85116..61960068be 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCache.java
@@ -496,31 +496,16 @@ public class WindowCache {
private void gc() {
Ref r;
while ((r = (Ref) queue.poll()) != null) {
- // Sun's Java 5 and 6 implementation have a bug where a Reference
- // can be enqueued and dequeued twice on the same reference queue
- // due to a race condition within ReferenceQueue.enqueue(Reference).
- //
- // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6837858
- //
- // We CANNOT permit a Reference to come through us twice, as it will
- // skew the resource counters we maintain. Our canClear() check here
- // provides a way to skip the redundant dequeues, if any.
- //
- if (r.canClear()) {
- clear(r);
-
- boolean found = false;
- final int s = slot(r.pack, r.position);
- final Entry e1 = table.get(s);
- for (Entry n = e1; n != null; n = n.next) {
- if (n.ref == r) {
- n.dead = true;
- found = true;
- break;
- }
- }
- if (found)
+ clear(r);
+
+ final int s = slot(r.pack, r.position);
+ final Entry e1 = table.get(s);
+ for (Entry n = e1; n != null; n = n.next) {
+ if (n.ref == r) {
+ n.dead = true;
table.compareAndSet(s, e1, clean(e1));
+ break;
+ }
}
}
}
@@ -581,8 +566,6 @@ public class WindowCache {
long lastAccess;
- private boolean cleared;
-
protected Ref(final PackFile pack, final long position,
final ByteWindow v, final ReferenceQueue<ByteWindow> queue) {
super(v, queue);
@@ -590,13 +573,6 @@ public class WindowCache {
this.position = position;
this.size = v.size();
}
-
- final synchronized boolean canClear() {
- if (cleared)
- return false;
- cleared = true;
- return true;
- }
}
private static final class Lock {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WriteConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WriteConfig.java
index 1e2b239324..d9cbbd8f82 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WriteConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WriteConfig.java
@@ -49,12 +49,7 @@ import org.eclipse.jgit.lib.CoreConfig;
class WriteConfig {
/** Key for {@link Config#get(SectionParser)}. */
- static final Config.SectionParser<WriteConfig> KEY = new SectionParser<WriteConfig>() {
- @Override
- public WriteConfig parse(final Config cfg) {
- return new WriteConfig(cfg);
- }
- };
+ static final Config.SectionParser<WriteConfig> KEY = WriteConfig::new;
private final int compression;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/io/BlockSource.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/io/BlockSource.java
new file mode 100644
index 0000000000..0a5f9c1a72
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/io/BlockSource.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.io;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+
+/**
+ * Provides content blocks of a file.
+ * <p>
+ * {@code BlockSource} implementations must decide whether they will be
+ * thread-safe.
+ */
+public abstract class BlockSource implements AutoCloseable {
+ /**
+ * Wrap a byte array as a {@code BlockSource}.
+ *
+ * @param content
+ * input content, as a byte array.
+ * @return block source to read from {@code content}.
+ */
+ public static BlockSource from(byte[] content) {
+ return new BlockSource() {
+ @Override
+ public ByteBuffer read(long pos, int cnt) {
+ ByteBuffer buf = ByteBuffer.allocate(cnt);
+ if (pos < content.length) {
+ int p = (int) pos;
+ int n = Math.min(cnt, content.length - p);
+ buf.put(content, p, n);
+ }
+ return buf;
+ }
+
+ @Override
+ public long size() {
+ return content.length;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing.
+ }
+ };
+ }
+
+ /**
+ * Read from a {@code FileInputStream}.
+ * <p>
+ * The returned {@code BlockSource} is not thread-safe, as it must seek the
+ * file channel to read a block.
+ *
+ * @param in
+ * the file. The {@code BlockSource} will close {@code in}.
+ * @return wrapper for {@code in}.
+ */
+ public static BlockSource from(FileInputStream in) {
+ return from(in.getChannel());
+ }
+
+ /**
+ * Read from a {@code FileChannel}.
+ * <p>
+ * The returned {@code BlockSource} is not thread-safe, as it must seek the
+ * file channel to read a block.
+ *
+ * @param ch
+ * the file. The {@code BlockSource} will close {@code ch}.
+ * @return wrapper for {@code ch}.
+ */
+ public static BlockSource from(FileChannel ch) {
+ return new BlockSource() {
+ @Override
+ public ByteBuffer read(long pos, int blockSize) throws IOException {
+ ByteBuffer b = ByteBuffer.allocate(blockSize);
+ ch.position(pos);
+ int n;
+ do {
+ n = ch.read(b);
+ } while (n > 0 && b.position() < blockSize);
+ return b;
+ }
+
+ @Override
+ public long size() throws IOException {
+ return ch.size();
+ }
+
+ @Override
+ public void close() {
+ try {
+ ch.close();
+ } catch (IOException e) {
+ // Ignore close failures of read-only files.
+ }
+ }
+ };
+ }
+
+ /**
+ * Read a block from the file.
+ * <p>
+ * To reduce copying, the returned ByteBuffer should have an accessible
+ * array and {@code arrayOffset() == 0}. The caller will discard the
+ * ByteBuffer and directly use the backing array.
+ *
+ * @param position
+ * position of the block in the file, specified in bytes from the
+ * beginning of the file.
+ * @param blockSize
+ * size to read.
+ * @return buffer containing the block content.
+ * @throws IOException
+ * if block cannot be read.
+ */
+ public abstract ByteBuffer read(long position, int blockSize)
+ throws IOException;
+
+ /**
+ * Determine the size of the file.
+ *
+ * @return total number of bytes in the file.
+ * @throws IOException
+ * if size cannot be obtained.
+ */
+ public abstract long size() throws IOException;
+
+ /**
+ * Advise the {@code BlockSource} that a sequential scan is starting.
+ *
+ * @param startPos
+ * starting position.
+ * @param endPos
+ * ending position.
+ */
+ public void adviseSequentialRead(long startPos, long endPos) {
+ // Do nothing by default.
+ }
+
+ @Override
+ public abstract void close();
+}
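BlockSource above is the small read abstraction the new reftable code (BlockReader, below) pulls its blocks through. An illustrative scan over the byte-array variant; the class name and the 4096-byte block size are arbitrary choices for this sketch:

    import java.io.IOException;
    import java.nio.ByteBuffer;

    import org.eclipse.jgit.internal.storage.io.BlockSource;

    class BlockSourceSketch {
        static void scan(byte[] content) throws IOException {
            int blockSize = 4096; // arbitrary for this sketch
            try (BlockSource src = BlockSource.from(content)) {
                for (long pos = 0; pos < src.size(); pos += blockSize) {
                    ByteBuffer block = src.read(pos, blockSize);
                    // position() is the number of bytes copied; the last block
                    // of the array may be shorter than blockSize.
                    int n = block.position();
                    // ... parse block.array()[0..n) here ...
                }
            }
        }
    }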
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaIndexScanner.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaIndexScanner.java
index 7e1087827b..969d02b946 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaIndexScanner.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaIndexScanner.java
@@ -127,4 +127,4 @@ class DeltaIndexScanner {
sz <<= 1;
return sz;
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/ObjectToPack.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/ObjectToPack.java
index a0896577f3..bc7a603c4a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/ObjectToPack.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/ObjectToPack.java
@@ -182,6 +182,7 @@ public class ObjectToPack extends PackedObjectInfo {
}
/** @return the type of this object. */
+ @Override
public final int getType() {
return (flags >> TYPE_SHIFT) & 0x7;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java
index 248692f93f..e8bbf78a15 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java
@@ -59,6 +59,9 @@ public class PackExt {
/** A pack bitmap index file extension. */
public static final PackExt BITMAP_INDEX = newPackExt("bitmap"); //$NON-NLS-1$
+ /** A reftable file. */
+ public static final PackExt REFTABLE = newPackExt("ref"); //$NON-NLS-1$
+
/** @return all of the PackExt values. */
public static PackExt[] values() {
return VALUES;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java
new file mode 100644
index 0000000000..ce2ba4a2e1
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java
@@ -0,0 +1,589 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.eclipse.jgit.internal.storage.reftable.BlockWriter.compare;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.INDEX_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_DATA;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_NONE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.OBJ_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.REF_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_1ID;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_2ID;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_NONE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_SYMREF;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_TYPE_MASK;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.reverseUpdateIndex;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+import static org.eclipse.jgit.lib.Ref.Storage.NEW;
+import static org.eclipse.jgit.lib.Ref.Storage.PACKED;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.zip.DataFormatException;
+import java.util.zip.Inflater;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.lib.CheckoutEntry;
+import org.eclipse.jgit.lib.InflaterCache;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdRef;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.ReflogEntry;
+import org.eclipse.jgit.lib.SymbolicRef;
+import org.eclipse.jgit.util.LongList;
+import org.eclipse.jgit.util.NB;
+import org.eclipse.jgit.util.RawParseUtils;
+
+/** Reads a single block for {@link ReftableReader}. */
+class BlockReader {
+ private byte blockType;
+ private long endPosition;
+ private boolean truncated;
+
+ private byte[] buf;
+ private int bufLen;
+ private int ptr;
+
+ private int keysStart;
+ private int keysEnd;
+
+ private int restartCnt;
+ private int restartTbl;
+
+ private byte[] nameBuf = new byte[256];
+ private int nameLen;
+ private int valueType;
+
+ byte type() {
+ return blockType;
+ }
+
+ boolean truncated() {
+ return truncated;
+ }
+
+ long endPosition() {
+ return endPosition;
+ }
+
+ boolean next() {
+ return ptr < keysEnd;
+ }
+
+ void parseKey() {
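+ // Keys are prefix-compressed: the first varint is the number of bytes
+ // shared with the previous key (already present in nameBuf); the second
+ // packs the suffix length in its upper bits with the value type in the
+ // low three bits. Only the suffix bytes are copied out of the block.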
+ int pfx = readVarint32();
+ valueType = readVarint32();
+ int sfx = valueType >>> 3;
+ if (pfx + sfx > nameBuf.length) {
+ int n = Math.max(pfx + sfx, nameBuf.length * 2);
+ nameBuf = Arrays.copyOf(nameBuf, n);
+ }
+ System.arraycopy(buf, ptr, nameBuf, pfx, sfx);
+ ptr += sfx;
+ nameLen = pfx + sfx;
+ }
+
+ String name() {
+ int len = nameLen;
+ if (blockType == LOG_BLOCK_TYPE) {
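+ // Log keys end with '\0' and an 8-byte reversed update index;
+ // drop those 9 bytes to expose the reference name.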
+ len -= 9;
+ }
+ return RawParseUtils.decode(UTF_8, nameBuf, 0, len);
+ }
+
+ boolean match(byte[] match, boolean matchIsPrefix) {
+ int len = nameLen;
+ if (blockType == LOG_BLOCK_TYPE) {
+ len -= 9;
+ }
+ if (matchIsPrefix) {
+ return len >= match.length
+ && compare(
+ match, 0, match.length,
+ nameBuf, 0, match.length) == 0;
+ }
+ return compare(match, 0, match.length, nameBuf, 0, len) == 0;
+ }
+
+ long readPositionFromIndex() throws IOException {
+ if (blockType != INDEX_BLOCK_TYPE) {
+ throw invalidBlock();
+ }
+
+ readVarint32(); // skip prefix length
+ int n = readVarint32() >>> 3;
+ ptr += n; // skip name
+ return readVarint64();
+ }
+
+ long readUpdateIndexDelta() {
+ return readVarint64();
+ }
+
+ Ref readRef() throws IOException {
+ String name = RawParseUtils.decode(UTF_8, nameBuf, 0, nameLen);
+ switch (valueType & VALUE_TYPE_MASK) {
+ case VALUE_NONE: // delete
+ return newRef(name);
+
+ case VALUE_1ID:
+ return new ObjectIdRef.PeeledNonTag(PACKED, name, readValueId());
+
+ case VALUE_2ID: { // annotated tag
+ ObjectId id1 = readValueId();
+ ObjectId id2 = readValueId();
+ return new ObjectIdRef.PeeledTag(PACKED, name, id1, id2);
+ }
+
+ case VALUE_SYMREF: {
+ String val = readValueString();
+ return new SymbolicRef(name, newRef(val));
+ }
+
+ default:
+ throw invalidBlock();
+ }
+ }
+
+ @Nullable
+ LongList readBlockPositionList() {
+ int n = valueType & VALUE_TYPE_MASK;
+ if (n == 0) {
+ n = readVarint32();
+ if (n == 0) {
+ return null;
+ }
+ }
+
+ LongList b = new LongList(n);
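+ // The first position is absolute; later positions are stored as
+ // deltas from the prior one, so accumulate them back.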
+ b.add(readVarint64());
+ for (int j = 1; j < n; j++) {
+ long prior = b.get(j - 1);
+ b.add(prior + readVarint64());
+ }
+ return b;
+ }
+
+ long readLogUpdateIndex() {
+ return reverseUpdateIndex(NB.decodeUInt64(nameBuf, nameLen - 8));
+ }
+
+ @Nullable
+ ReflogEntry readLogEntry() {
+ if ((valueType & VALUE_TYPE_MASK) == LOG_NONE) {
+ return null;
+ }
+
+ ObjectId oldId = readValueId();
+ ObjectId newId = readValueId();
+ PersonIdent who = readPersonIdent();
+ String msg = readValueString();
+
+ return new ReflogEntry() {
+ @Override
+ public ObjectId getOldId() {
+ return oldId;
+ }
+
+ @Override
+ public ObjectId getNewId() {
+ return newId;
+ }
+
+ @Override
+ public PersonIdent getWho() {
+ return who;
+ }
+
+ @Override
+ public String getComment() {
+ return msg;
+ }
+
+ @Override
+ public CheckoutEntry parseCheckout() {
+ return null;
+ }
+ };
+ }
+
+ private ObjectId readValueId() {
+ ObjectId id = ObjectId.fromRaw(buf, ptr);
+ ptr += OBJECT_ID_LENGTH;
+ return id;
+ }
+
+ private String readValueString() {
+ int len = readVarint32();
+ int end = ptr + len;
+ String s = RawParseUtils.decode(UTF_8, buf, ptr, end);
+ ptr = end;
+ return s;
+ }
+
+ private PersonIdent readPersonIdent() {
+ String name = readValueString();
+ String email = readValueString();
+ long ms = readVarint64() * 1000;
+ int tz = readInt16();
+ return new PersonIdent(name, email, ms, tz);
+ }
+
+ void readBlock(BlockSource src, long pos, int fileBlockSize)
+ throws IOException {
+ readBlockIntoBuf(src, pos, fileBlockSize);
+ parseBlockStart(src, pos, fileBlockSize);
+ }
+
+ private void readBlockIntoBuf(BlockSource src, long pos, int size)
+ throws IOException {
+ ByteBuffer b = src.read(pos, size);
+ bufLen = b.position();
+ if (bufLen <= 0) {
+ throw invalidBlock();
+ }
+ if (b.hasArray() && b.arrayOffset() == 0) {
+ buf = b.array();
+ } else {
+ buf = new byte[bufLen];
+ b.flip();
+ b.get(buf);
+ }
+ endPosition = pos + bufLen;
+ }
+
+ private void parseBlockStart(BlockSource src, long pos, int fileBlockSize)
+ throws IOException {
+ ptr = 0;
+ if (pos == 0) {
+ if (bufLen == FILE_HEADER_LEN) {
+ setupEmptyFileBlock();
+ return;
+ }
+ ptr += FILE_HEADER_LEN; // first block begins with file header
+ }
+
+ int typeAndSize = NB.decodeInt32(buf, ptr);
+ ptr += 4;
+
+ blockType = (byte) (typeAndSize >>> 24);
+ int blockLen = decodeBlockLen(typeAndSize);
+ if (blockType == LOG_BLOCK_TYPE) {
+ // Log blocks must be inflated after the header.
+ long deflatedSize = inflateBuf(src, pos, blockLen, fileBlockSize);
+ endPosition = pos + 4 + deflatedSize;
+ }
+ if (bufLen < blockLen) {
+ if (blockType != INDEX_BLOCK_TYPE) {
+ throw invalidBlock();
+ }
+ // It's OK during sequential scan for an index block to have been
+ // partially read and be truncated in-memory. This happens when
+ // the index block is larger than the file's blockSize. Caller
+ // will break out of its scan loop once it sees the blockType.
+ truncated = true;
+ } else if (bufLen > blockLen) {
+ bufLen = blockLen;
+ }
+
+ if (blockType != FILE_BLOCK_TYPE) {
+ restartCnt = NB.decodeUInt16(buf, bufLen - 2);
+ restartTbl = bufLen - (restartCnt * 3 + 2);
+ keysStart = ptr;
+ keysEnd = restartTbl;
+ } else {
+ keysStart = ptr;
+ keysEnd = ptr;
+ }
+ }
+
+ static int decodeBlockLen(int typeAndSize) {
+ return typeAndSize & 0xffffff;
+ }
+
+ private long inflateBuf(BlockSource src, long pos, int blockLen,
+ int fileBlockSize) throws IOException {
+ byte[] dst = new byte[blockLen];
+ System.arraycopy(buf, 0, dst, 0, 4);
+
+ long deflatedSize = 0;
+ Inflater inf = InflaterCache.get();
+ try {
+ inf.setInput(buf, ptr, bufLen - ptr);
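+ // The deflated log data may span several file blocks; whenever the
+ // inflater exhausts its input, read the next block and continue.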
+ for (int o = 4;;) {
+ int n = inf.inflate(dst, o, dst.length - o);
+ o += n;
+ if (inf.finished()) {
+ deflatedSize = inf.getBytesRead();
+ break;
+ } else if (n <= 0 && inf.needsInput()) {
+ long p = pos + 4 + inf.getBytesRead();
+ readBlockIntoBuf(src, p, fileBlockSize);
+ inf.setInput(buf, 0, bufLen);
+ } else if (n <= 0) {
+ throw invalidBlock();
+ }
+ }
+ } catch (DataFormatException e) {
+ throw invalidBlock(e);
+ } finally {
+ InflaterCache.release(inf);
+ }
+
+ buf = dst;
+ bufLen = dst.length;
+ return deflatedSize;
+ }
+
+ private void setupEmptyFileBlock() {
+ // An empty reftable has only the file header in the first block.
+ blockType = FILE_BLOCK_TYPE;
+ ptr = FILE_HEADER_LEN;
+ restartCnt = 0;
+ restartTbl = bufLen;
+ keysStart = bufLen;
+ keysEnd = bufLen;
+ }
+
+ void verifyIndex() throws IOException {
+ if (blockType != INDEX_BLOCK_TYPE || truncated) {
+ throw invalidBlock();
+ }
+ }
+
+ /**
+ * Finds a key in the block and positions the current pointer on its record.
+ * <p>
+ * As a side-effect this method arranges for the current pointer to be near
+ * or exactly on {@code key}, allowing other methods to access data from
+ * that current record:
+ * <ul>
+ * <li>{@link #name()}
+ * <li>{@link #match(byte[], boolean)}
+ * <li>{@link #readRef()}
+ * <li>{@link #readLogUpdateIndex()}
+ * <li>{@link #readLogEntry()}
+ * <li>{@link #readBlockPositionList()}
+ * </ul>
+ *
+ * @param key
+ * key to find.
+ * @return {@code <0} if the key occurs before the start of this block;
+ * {@code 0} if the block is positioned on the key; {@code >0} if
+ * the key occurs after the last key of this block.
+ */
+ int seekKey(byte[] key) {
+ int low = 0;
+ int end = restartCnt;
+ for (;;) {
+ int mid = (low + end) >>> 1;
+ int p = NB.decodeUInt24(buf, restartTbl + mid * 3);
+ ptr = p + 1; // skip 0 prefix length
+ int n = readVarint32() >>> 3;
+ int cmp = compare(key, 0, key.length, buf, ptr, n);
+ if (cmp < 0) {
+ end = mid;
+ } else if (cmp == 0) {
+ ptr = p;
+ return 0;
+ } else /* if (cmp > 0) */ {
+ low = mid + 1;
+ }
+ if (low >= end) {
+ return scanToKey(key, p, low, cmp);
+ }
+ }
+ }
+
+ /**
+ * Performs the linear search step within a restart interval.
+ * <p>
+ * Starts at a restart position whose key sorts before (or equal to)
+ * {@code key} and walks sequentially through the following prefix
+ * compressed records to find {@code key}.
+ *
+ * @param key
+ * key the caller wants to find.
+ * @param rPtr
+ * current record pointer from restart table binary search.
+ * @param rIdx
+ * current restart table index.
+ * @param rCmp
+ * result of compare from restart table binary search.
+ * @return {@code <0} if the key occurs before the start of this block;
+ * {@code 0} if the block is positioned on the key; {@code >0} if
+ * the key occurs after the last key of this block.
+ */
+ private int scanToKey(byte[] key, int rPtr, int rIdx, int rCmp) {
+ if (rCmp < 0) {
+ if (rIdx == 0) {
+ ptr = keysStart;
+ return -1;
+ }
+ ptr = NB.decodeUInt24(buf, restartTbl + (rIdx - 1) * 3);
+ } else {
+ ptr = rPtr;
+ }
+
+ int cmp;
+ do {
+ int savePtr = ptr;
+ parseKey();
+ cmp = compare(key, 0, key.length, nameBuf, 0, nameLen);
+ if (cmp <= 0) {
+ // cmp < 0, name should be in this block, but is not.
+ // cmp = 0, block is positioned at name.
+ ptr = savePtr;
+ return cmp < 0 && savePtr == keysStart ? -1 : 0;
+ }
+ skipValue();
+ } while (ptr < keysEnd);
+ return cmp;
+ }
+
+ void skipValue() {
+ switch (blockType) {
+ case REF_BLOCK_TYPE:
+ readVarint64(); // update_index_delta
+ switch (valueType & VALUE_TYPE_MASK) {
+ case VALUE_NONE:
+ return;
+ case VALUE_1ID:
+ ptr += OBJECT_ID_LENGTH;
+ return;
+ case VALUE_2ID:
+ ptr += 2 * OBJECT_ID_LENGTH;
+ return;
+ case VALUE_SYMREF:
+ skipString();
+ return;
+ }
+ break;
+
+ case OBJ_BLOCK_TYPE: {
+ int n = valueType & VALUE_TYPE_MASK;
+ if (n == 0) {
+ n = readVarint32();
+ }
+ while (n-- > 0) {
+ readVarint32();
+ }
+ return;
+ }
+
+ case INDEX_BLOCK_TYPE:
+ readVarint32();
+ return;
+
+ case LOG_BLOCK_TYPE:
+ if ((valueType & VALUE_TYPE_MASK) == LOG_NONE) {
+ return;
+ } else if ((valueType & VALUE_TYPE_MASK) == LOG_DATA) {
+ ptr += 2 * OBJECT_ID_LENGTH; // oldId, newId
+ skipString(); // name
+ skipString(); // email
+ readVarint64(); // time
+ ptr += 2; // tz
+ skipString(); // msg
+ return;
+ }
+ }
+
+ throw new IllegalStateException();
+ }
+
+ private void skipString() {
+ int n = readVarint32(); // string length
+ ptr += n;
+ }
+
+ private short readInt16() {
+ short v = (short) NB.decodeUInt16(buf, ptr);
+ ptr += 2;
+ return v;
+ }
+
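+ // Varints store 7 bits per byte; each continuation byte adds one to
+ // the accumulated value before shifting in the next 7 bits.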
+ private int readVarint32() {
+ byte c = buf[ptr++];
+ int val = c & 0x7f;
+ while ((c & 0x80) != 0) {
+ c = buf[ptr++];
+ val++;
+ val <<= 7;
+ val |= (c & 0x7f);
+ }
+ return val;
+ }
+
+ private long readVarint64() {
+ byte c = buf[ptr++];
+ long val = c & 0x7f;
+ while ((c & 0x80) != 0) {
+ c = buf[ptr++];
+ val++;
+ val <<= 7;
+ val |= (c & 0x7f);
+ }
+ return val;
+ }
+
+ private static Ref newRef(String name) {
+ return new ObjectIdRef.Unpeeled(NEW, name, null);
+ }
+
+ private static IOException invalidBlock() {
+ return invalidBlock(null);
+ }
+
+ private static IOException invalidBlock(Throwable cause) {
+ return new IOException(JGitText.get().invalidReftableBlock, cause);
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockSizeTooSmallException.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockSizeTooSmallException.java
new file mode 100644
index 0000000000..cb0f988b23
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockSizeTooSmallException.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+
+/** Thrown if {@link ReftableWriter} cannot fit a reference. */
+public class BlockSizeTooSmallException extends IOException {
+ private static final long serialVersionUID = 1L;
+
+ private final int minBlockSize;
+
+ BlockSizeTooSmallException(int b) {
+ minBlockSize = b;
+ }
+
+ /** @return minimum block size in bytes reftable requires to write a ref. */
+ public int getMinimumBlockSize() {
+ return minBlockSize;
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java
new file mode 100644
index 0000000000..b3173e838c
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java
@@ -0,0 +1,605 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.INDEX_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_DATA;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_NONE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.MAX_RESTARTS;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.OBJ_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.REF_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_1ID;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_2ID;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_NONE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_SYMREF;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_TYPE_MASK;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.reverseUpdateIndex;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableOutputStream.computeVarintSize;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+import static org.eclipse.jgit.lib.Ref.Storage.NEW;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.util.IntList;
+import org.eclipse.jgit.util.LongList;
+import org.eclipse.jgit.util.NB;
+
+/** Formats and writes blocks for {@link ReftableWriter}. */
+class BlockWriter {
+ private final byte blockType;
+ private final byte keyType;
+ private final List<Entry> entries;
+ private final int blockLimitBytes;
+ private final int restartInterval;
+
+ private int entriesSumBytes;
+ private int restartCnt;
+
+ BlockWriter(byte type, byte kt, int bs, int ri) {
+ blockType = type;
+ keyType = kt;
+ blockLimitBytes = bs;
+ restartInterval = ri;
+ entries = new ArrayList<>(estimateEntryCount(type, kt, bs));
+ }
+
+ private static int estimateEntryCount(byte blockType, byte keyType,
+ int blockLimitBytes) {
+ double avgBytesPerEntry;
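+ // Rough per-entry size estimates, used only to pre-size the entry
+ // list; the real block limit is enforced by computeBlockBytes()
+ // as entries are added.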
+ switch (blockType) {
+ case REF_BLOCK_TYPE:
+ default:
+ avgBytesPerEntry = 35.31;
+ break;
+
+ case OBJ_BLOCK_TYPE:
+ avgBytesPerEntry = 4.19;
+ break;
+
+ case LOG_BLOCK_TYPE:
+ avgBytesPerEntry = 101.14;
+ break;
+
+ case INDEX_BLOCK_TYPE:
+ switch (keyType) {
+ case REF_BLOCK_TYPE:
+ case LOG_BLOCK_TYPE:
+ default:
+ avgBytesPerEntry = 27.44;
+ break;
+
+ case OBJ_BLOCK_TYPE:
+ avgBytesPerEntry = 11.57;
+ break;
+ }
+ }
+
+ int cnt = (int) (Math.ceil(blockLimitBytes / avgBytesPerEntry));
+ return Math.min(cnt, 4096);
+ }
+
+ byte blockType() {
+ return blockType;
+ }
+
+ boolean padBetweenBlocks() {
+ return padBetweenBlocks(blockType)
+ || (blockType == INDEX_BLOCK_TYPE && padBetweenBlocks(keyType));
+ }
+
+ static boolean padBetweenBlocks(byte type) {
+ return type == REF_BLOCK_TYPE || type == OBJ_BLOCK_TYPE;
+ }
+
+ byte[] lastKey() {
+ return entries.get(entries.size() - 1).key;
+ }
+
+ int currentSize() {
+ return computeBlockBytes(0, false);
+ }
+
+ void mustAdd(Entry entry) throws BlockSizeTooSmallException {
+ if (!tryAdd(entry, true)) {
+ // Insanely long names need a larger block size.
+ throw blockSizeTooSmall(entry);
+ }
+ }
+
+ boolean tryAdd(Entry entry) {
+ if (entry instanceof ObjEntry
+ && computeBlockBytes(entry.sizeBytes(), 1) > blockLimitBytes) {
+ // If the ObjEntry has so many ref block pointers that its
+ // encoding overflows any block, reconfigure it to tell readers to
+ // instead scan all refs for this ObjectId. That significantly
+ // shrinks the entry to a very small size, which may now fit into
+ // this block.
+ ((ObjEntry) entry).markScanRequired();
+ }
+
+ if (tryAdd(entry, true)) {
+ return true;
+ } else if (nextShouldBeRestart()) {
+ // It was time for another restart, but the entry doesn't fit
+ // with its complete key, as the block is nearly full. Try to
+ // force it to fit with prefix compression rather than waste
+ // the tail of the block with padding.
+ return tryAdd(entry, false);
+ }
+ return false;
+ }
+
+ private boolean tryAdd(Entry entry, boolean tryRestart) {
+ byte[] key = entry.key;
+ int prefixLen = 0;
+ boolean restart = tryRestart && nextShouldBeRestart();
+ if (!restart) {
+ Entry priorEntry = entries.get(entries.size() - 1);
+ byte[] prior = priorEntry.key;
+ prefixLen = commonPrefix(prior, prior.length, key);
+ if (prefixLen <= 5 /* "refs/" */ && keyType == REF_BLOCK_TYPE) {
+ // Force restart points at transitions between namespaces
+ // such as "refs/heads/" to "refs/tags/".
+ restart = true;
+ prefixLen = 0;
+ } else if (prefixLen == 0) {
+ restart = true;
+ }
+ }
+
+ entry.restart = restart;
+ entry.prefixLen = prefixLen;
+ int entryBytes = entry.sizeBytes();
+ if (computeBlockBytes(entryBytes, restart) > blockLimitBytes) {
+ return false;
+ }
+
+ entriesSumBytes += entryBytes;
+ entries.add(entry);
+ if (restart) {
+ restartCnt++;
+ }
+ return true;
+ }
+
+ private boolean nextShouldBeRestart() {
+ int cnt = entries.size();
+ return (cnt == 0 || ((cnt + 1) % restartInterval) == 0)
+ && restartCnt < MAX_RESTARTS;
+ }
+
+ private int computeBlockBytes(int entryBytes, boolean restart) {
+ return computeBlockBytes(
+ entriesSumBytes + entryBytes,
+ restartCnt + (restart ? 1 : 0));
+ }
+
+ private static int computeBlockBytes(int entryBytes, int restartCnt) {
+ return 4 // 4-byte block header
+ + entryBytes
+ + restartCnt * 3 // restart_offset
+ + 2; // 2-byte restart_count
+ }
+
+ void writeTo(ReftableOutputStream os) throws IOException {
+ os.beginBlock(blockType);
+ IntList restarts = new IntList(restartCnt);
+ for (Entry entry : entries) {
+ if (entry.restart) {
+ restarts.add(os.bytesWrittenInBlock());
+ }
+ entry.writeKey(os);
+ entry.writeValue(os);
+ }
+ if (restarts.size() == 0 || restarts.size() > MAX_RESTARTS) {
+ throw new IllegalStateException();
+ }
+ for (int i = 0; i < restarts.size(); i++) {
+ os.writeInt24(restarts.get(i));
+ }
+ os.writeInt16(restarts.size());
+ os.flushBlock();
+ }
+
+ private BlockSizeTooSmallException blockSizeTooSmall(Entry entry) {
+ // Compute size required to fit this entry by itself.
+ int min = FILE_HEADER_LEN + computeBlockBytes(entry.sizeBytes(), 1);
+ return new BlockSizeTooSmallException(min);
+ }
+
+ static int commonPrefix(byte[] a, int n, byte[] b) {
+ int len = Math.min(n, Math.min(a.length, b.length));
+ for (int i = 0; i < len; i++) {
+ if (a[i] != b[i]) {
+ return i;
+ }
+ }
+ return len;
+ }
+
+ static int encodeSuffixAndType(int sfx, int valueType) {
+ return (sfx << 3) | valueType;
+ }
+
+ static int compare(
+ byte[] a, int ai, int aLen,
+ byte[] b, int bi, int bLen) {
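+ // Bytes compare as unsigned values, so keys sort in raw byte order.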
+ int aEnd = ai + aLen;
+ int bEnd = bi + bLen;
+ while (ai < aEnd && bi < bEnd) {
+ int c = (a[ai++] & 0xff) - (b[bi++] & 0xff);
+ if (c != 0) {
+ return c;
+ }
+ }
+ return aLen - bLen;
+ }
+
+ static abstract class Entry {
+ static int compare(Entry ea, Entry eb) {
+ byte[] a = ea.key;
+ byte[] b = eb.key;
+ return BlockWriter.compare(a, 0, a.length, b, 0, b.length);
+ }
+
+ final byte[] key;
+ int prefixLen;
+ boolean restart;
+
+ Entry(byte[] key) {
+ this.key = key;
+ }
+
+ void writeKey(ReftableOutputStream os) {
+ int sfxLen = key.length - prefixLen;
+ os.writeVarint(prefixLen);
+ os.writeVarint(encodeSuffixAndType(sfxLen, valueType()));
+ os.write(key, prefixLen, sfxLen);
+ }
+
+ int sizeBytes() {
+ int sfxLen = key.length - prefixLen;
+ int sfx = encodeSuffixAndType(sfxLen, valueType());
+ return computeVarintSize(prefixLen)
+ + computeVarintSize(sfx)
+ + sfxLen
+ + valueSize();
+ }
+
+ abstract byte blockType();
+ abstract int valueType();
+ abstract int valueSize();
+ abstract void writeValue(ReftableOutputStream os) throws IOException;
+ }
+
+ static class IndexEntry extends Entry {
+ private final long blockPosition;
+
+ IndexEntry(byte[] key, long blockPosition) {
+ super(key);
+ this.blockPosition = blockPosition;
+ }
+
+ @Override
+ byte blockType() {
+ return INDEX_BLOCK_TYPE;
+ }
+
+ @Override
+ int valueType() {
+ return 0;
+ }
+
+ @Override
+ int valueSize() {
+ return computeVarintSize(blockPosition);
+ }
+
+ @Override
+ void writeValue(ReftableOutputStream os) {
+ os.writeVarint(blockPosition);
+ }
+ }
+
+ static class RefEntry extends Entry {
+ final Ref ref;
+ final long updateIndexDelta;
+
+ RefEntry(Ref ref, long updateIndexDelta) {
+ super(nameUtf8(ref));
+ this.ref = ref;
+ this.updateIndexDelta = updateIndexDelta;
+ }
+
+ @Override
+ byte blockType() {
+ return REF_BLOCK_TYPE;
+ }
+
+ @Override
+ int valueType() {
+ if (ref.isSymbolic()) {
+ return VALUE_SYMREF;
+ } else if (ref.getStorage() == NEW && ref.getObjectId() == null) {
+ return VALUE_NONE;
+ } else if (ref.getPeeledObjectId() != null) {
+ return VALUE_2ID;
+ } else {
+ return VALUE_1ID;
+ }
+ }
+
+ @Override
+ int valueSize() {
+ int n = computeVarintSize(updateIndexDelta);
+ switch (valueType()) {
+ case VALUE_NONE:
+ return n;
+ case VALUE_1ID:
+ return n + OBJECT_ID_LENGTH;
+ case VALUE_2ID:
+ return n + 2 * OBJECT_ID_LENGTH;
+ case VALUE_SYMREF:
+ if (ref.isSymbolic()) {
+ int nameLen = nameUtf8(ref.getTarget()).length;
+ return n + computeVarintSize(nameLen) + nameLen;
+ }
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ void writeValue(ReftableOutputStream os) throws IOException {
+ os.writeVarint(updateIndexDelta);
+ switch (valueType()) {
+ case VALUE_NONE:
+ return;
+
+ case VALUE_1ID: {
+ ObjectId id1 = ref.getObjectId();
+ if (!ref.isPeeled()) {
+ throw new IOException(JGitText.get().peeledRefIsRequired);
+ } else if (id1 == null) {
+ throw new IOException(JGitText.get().invalidId0);
+ }
+ os.writeId(id1);
+ return;
+ }
+
+ case VALUE_2ID: {
+ ObjectId id1 = ref.getObjectId();
+ ObjectId id2 = ref.getPeeledObjectId();
+ if (!ref.isPeeled()) {
+ throw new IOException(JGitText.get().peeledRefIsRequired);
+ } else if (id1 == null || id2 == null) {
+ throw new IOException(JGitText.get().invalidId0);
+ }
+ os.writeId(id1);
+ os.writeId(id2);
+ return;
+ }
+
+ case VALUE_SYMREF:
+ if (ref.isSymbolic()) {
+ os.writeVarintString(ref.getTarget().getName());
+ return;
+ }
+ }
+ throw new IllegalStateException();
+ }
+
+ private static byte[] nameUtf8(Ref ref) {
+ return ref.getName().getBytes(UTF_8);
+ }
+ }
+
+ static class ObjEntry extends Entry {
+ final LongList blockPos;
+
+ ObjEntry(int idLen, ObjectId id, LongList blockPos) {
+ super(key(idLen, id));
+ this.blockPos = blockPos;
+ }
+
+ private static byte[] key(int idLen, ObjectId id) {
+ byte[] key = new byte[OBJECT_ID_LENGTH];
+ id.copyRawTo(key, 0);
+ if (idLen < OBJECT_ID_LENGTH) {
+ return Arrays.copyOf(key, idLen);
+ }
+ return key;
+ }
+
+ void markScanRequired() {
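+ // Clearing the positions causes a 0 count to be written, which
+ // tells readers to scan all ref blocks for this ObjectId instead.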
+ blockPos.clear();
+ }
+
+ @Override
+ byte blockType() {
+ return OBJ_BLOCK_TYPE;
+ }
+
+ @Override
+ int valueType() {
+ int cnt = blockPos.size();
+ return cnt != 0 && cnt <= VALUE_TYPE_MASK ? cnt : 0;
+ }
+
+ @Override
+ int valueSize() {
+ int cnt = blockPos.size();
+ if (cnt == 0) {
+ return computeVarintSize(0);
+ }
+
+ int n = 0;
+ if (cnt > VALUE_TYPE_MASK) {
+ n += computeVarintSize(cnt);
+ }
+ n += computeVarintSize(blockPos.get(0));
+ for (int j = 1; j < cnt; j++) {
+ long prior = blockPos.get(j - 1);
+ long b = blockPos.get(j);
+ n += computeVarintSize(b - prior);
+ }
+ return n;
+ }
+
+ @Override
+ void writeValue(ReftableOutputStream os) throws IOException {
+ int cnt = blockPos.size();
+ if (cnt == 0) {
+ os.writeVarint(0);
+ return;
+ }
+
+ if (cnt > VALUE_TYPE_MASK) {
+ os.writeVarint(cnt);
+ }
+ os.writeVarint(blockPos.get(0));
+ for (int j = 1; j < cnt; j++) {
+ long prior = blockPos.get(j - 1);
+ long b = blockPos.get(j);
+ os.writeVarint(b - prior);
+ }
+ }
+ }
+
+ static class DeleteLogEntry extends Entry {
+ DeleteLogEntry(String refName, long updateIndex) {
+ super(LogEntry.key(refName, updateIndex));
+ }
+
+ @Override
+ byte blockType() {
+ return LOG_BLOCK_TYPE;
+ }
+
+ @Override
+ int valueType() {
+ return LOG_NONE;
+ }
+
+ @Override
+ int valueSize() {
+ return 0;
+ }
+
+ @Override
+ void writeValue(ReftableOutputStream os) {
+ // Nothing in a delete log record.
+ }
+ }
+
+ static class LogEntry extends Entry {
+ final ObjectId oldId;
+ final ObjectId newId;
+ final long timeSecs;
+ final short tz;
+ final byte[] name;
+ final byte[] email;
+ final byte[] msg;
+
+ LogEntry(String refName, long updateIndex, PersonIdent who,
+ ObjectId oldId, ObjectId newId, String message) {
+ super(key(refName, updateIndex));
+
+ this.oldId = oldId;
+ this.newId = newId;
+ this.timeSecs = who.getWhen().getTime() / 1000L;
+ this.tz = (short) who.getTimeZoneOffset();
+ this.name = who.getName().getBytes(UTF_8);
+ this.email = who.getEmailAddress().getBytes(UTF_8);
+ this.msg = message.getBytes(UTF_8);
+ }
+
+ static byte[] key(String ref, long index) {
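+ // Log keys are "<refname>\0<reversed update index>", so entries for
+ // the same reference sort with the highest update index first.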
+ byte[] name = ref.getBytes(UTF_8);
+ byte[] key = Arrays.copyOf(name, name.length + 1 + 8);
+ NB.encodeInt64(key, key.length - 8, reverseUpdateIndex(index));
+ return key;
+ }
+
+ @Override
+ byte blockType() {
+ return LOG_BLOCK_TYPE;
+ }
+
+ @Override
+ int valueType() {
+ return LOG_DATA;
+ }
+
+ @Override
+ int valueSize() {
+ return 2 * OBJECT_ID_LENGTH
+ + computeVarintSize(name.length) + name.length
+ + computeVarintSize(email.length) + email.length
+ + computeVarintSize(timeSecs)
+ + 2 // tz
+ + computeVarintSize(msg.length) + msg.length;
+ }
+
+ @Override
+ void writeValue(ReftableOutputStream os) {
+ os.writeId(oldId);
+ os.writeId(newId);
+ os.writeVarintString(name);
+ os.writeVarintString(email);
+ os.writeVarint(timeSecs);
+ os.writeInt16(tz);
+ os.writeVarintString(msg);
+ }
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/EmptyLogCursor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/EmptyLogCursor.java
new file mode 100644
index 0000000000..d7745891a5
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/EmptyLogCursor.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+
+import org.eclipse.jgit.lib.ReflogEntry;
+
+/** Empty {@link LogCursor} with no results. */
+class EmptyLogCursor extends LogCursor {
+ @Override
+ public boolean next() throws IOException {
+ return false;
+ }
+
+ @Override
+ public String getRefName() {
+ return null;
+ }
+
+ @Override
+ public long getUpdateIndex() {
+ return 0;
+ }
+
+ @Override
+ public ReflogEntry getReflogEntry() {
+ return null;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing.
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/LogCursor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/LogCursor.java
new file mode 100644
index 0000000000..c19968c098
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/LogCursor.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+
+import org.eclipse.jgit.lib.ReflogEntry;
+
+/** Iterator over logs inside a {@link Reftable}. */
+public abstract class LogCursor implements AutoCloseable {
+ /**
+ * Check if another log record is available.
+ *
+ * @return {@code true} if there is another result.
+ * @throws IOException
+ * logs cannot be read.
+ */
+ public abstract boolean next() throws IOException;
+
+ /** @return name of the current reference. */
+ public abstract String getRefName();
+
+ /** @return identifier of the transaction that created the log record. */
+ public abstract long getUpdateIndex();
+
+ /** @return current log entry. */
+ public abstract ReflogEntry getReflogEntry();
+
+ @Override
+ public abstract void close();
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/MergedReftable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/MergedReftable.java
new file mode 100644
index 0000000000..9fc6ae2bb4
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/MergedReftable.java
@@ -0,0 +1,376 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.PriorityQueue;
+
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.ReflogEntry;
+
+/**
+ * Merges multiple reference tables together.
+ * <p>
+ * A {@link MergedReftable} merge-joins multiple {@link ReftableReader} on the
+ * fly. Tables higher/later in the stack shadow lower/earlier tables, hiding
+ * references that been updated/replaced.
+ * <p>
+ * By default deleted references are skipped and not returned to the caller.
+ * {@link #setIncludeDeletes(boolean)} can be used to modify this behavior if
+ * the caller needs to preserve deletions during partial compaction.
+ * <p>
+ * A {@code MergedReftable} is not thread-safe.
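+ * <p>
+ * Illustrative use (a sketch; {@code tableStack} stands for any list of
+ * open readers):
+ *
+ * <pre>
+ * try (MergedReftable merged = new MergedReftable(tableStack);
+ * 		RefCursor rc = merged.allRefs()) {
+ * 	while (rc.next()) {
+ * 		Ref ref = rc.getRef();
+ * 		// use ref
+ * 	}
+ * }
+ * </pre>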
+ */
+public class MergedReftable extends Reftable {
+ private final Reftable[] tables;
+
+ /**
+ * Initialize a merged table reader.
+ * <p>
+ * The tables in {@code tableStack} will be closed when this
+ * {@code MergedReftable} is closed.
+ *
+ * @param tableStack
+ * stack of tables to read from. The base of the stack is at
+ * index 0; the most recent should be at the top of the stack at
+ * {@code tableStack.size() - 1}. The top of the stack (higher
+ * index) shadows the base of the stack (lower index).
+ */
+ public MergedReftable(List<Reftable> tableStack) {
+ tables = tableStack.toArray(new Reftable[0]);
+
+ // Tables must expose deletes to this instance to correctly
+ // shadow references from lower tables.
+ for (Reftable t : tables) {
+ t.setIncludeDeletes(true);
+ }
+ }
+
+ @Override
+ public RefCursor allRefs() throws IOException {
+ MergedRefCursor m = new MergedRefCursor();
+ for (int i = 0; i < tables.length; i++) {
+ m.add(new RefQueueEntry(tables[i].allRefs(), i));
+ }
+ return m;
+ }
+
+ @Override
+ public RefCursor seekRef(String name) throws IOException {
+ MergedRefCursor m = new MergedRefCursor();
+ for (int i = 0; i < tables.length; i++) {
+ m.add(new RefQueueEntry(tables[i].seekRef(name), i));
+ }
+ return m;
+ }
+
+ @Override
+ public RefCursor byObjectId(AnyObjectId name) throws IOException {
+ MergedRefCursor m = new MergedRefCursor();
+ for (int i = 0; i < tables.length; i++) {
+ m.add(new RefQueueEntry(tables[i].byObjectId(name), i));
+ }
+ return m;
+ }
+
+ @Override
+ public LogCursor allLogs() throws IOException {
+ MergedLogCursor m = new MergedLogCursor();
+ for (int i = 0; i < tables.length; i++) {
+ m.add(new LogQueueEntry(tables[i].allLogs(), i));
+ }
+ return m;
+ }
+
+ @Override
+ public LogCursor seekLog(String refName, long updateIdx)
+ throws IOException {
+ MergedLogCursor m = new MergedLogCursor();
+ for (int i = 0; i < tables.length; i++) {
+ m.add(new LogQueueEntry(tables[i].seekLog(refName, updateIdx), i));
+ }
+ return m;
+ }
+
+ @Override
+ public void close() throws IOException {
+ for (Reftable t : tables) {
+ t.close();
+ }
+ }
+
+ int queueSize() {
+ return Math.max(1, tables.length);
+ }
+
+ private class MergedRefCursor extends RefCursor {
+ private final PriorityQueue<RefQueueEntry> queue;
+ private RefQueueEntry head;
+ private Ref ref;
+ private long updateIndex;
+
+ MergedRefCursor() {
+ queue = new PriorityQueue<>(queueSize(), RefQueueEntry::compare);
+ }
+
+ void add(RefQueueEntry t) throws IOException {
+ // The common case is many iterations over the same RefQueueEntry
+ // for the bottom of the stack (scanning all refs). It's almost
+ // always less than the top of the queue. Avoid the queue's
+ // O(log N) insertion and removal costs for this common case.
+ if (!t.rc.next()) {
+ t.rc.close();
+ } else if (head == null) {
+ RefQueueEntry p = queue.peek();
+ if (p == null || RefQueueEntry.compare(t, p) < 0) {
+ head = t;
+ } else {
+ head = queue.poll();
+ queue.add(t);
+ }
+ } else if (RefQueueEntry.compare(t, head) > 0) {
+ queue.add(t);
+ } else {
+ queue.add(head);
+ head = t;
+ }
+ }
+
+ @Override
+ public boolean next() throws IOException {
+ for (;;) {
+ RefQueueEntry t = poll();
+ if (t == null) {
+ return false;
+ }
+
+ ref = t.rc.getRef();
+ updateIndex = t.rc.getUpdateIndex();
+ boolean include = includeDeletes || !t.rc.wasDeleted();
+ skipShadowedRefs(ref.getName());
+ add(t);
+ if (include) {
+ return true;
+ }
+ }
+ }
+
+ private RefQueueEntry poll() {
+ RefQueueEntry e = head;
+ if (e != null) {
+ head = null;
+ return e;
+ }
+ return queue.poll();
+ }
+
+ private void skipShadowedRefs(String name) throws IOException {
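+ // The winning entry for this name was already captured; advance any
+ // other cursor still positioned on the same name past its shadowed
+ // record.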
+ for (;;) {
+ RefQueueEntry t = head != null ? head : queue.peek();
+ if (t != null && name.equals(t.name())) {
+ add(poll());
+ } else {
+ break;
+ }
+ }
+ }
+
+ @Override
+ public Ref getRef() {
+ return ref;
+ }
+
+ @Override
+ public long getUpdateIndex() {
+ return updateIndex;
+ }
+
+ @Override
+ public void close() {
+ if (head != null) {
+ head.rc.close();
+ head = null;
+ }
+ while (!queue.isEmpty()) {
+ queue.remove().rc.close();
+ }
+ }
+ }
+
+ private static class RefQueueEntry {
+ static int compare(RefQueueEntry a, RefQueueEntry b) {
+ int cmp = a.name().compareTo(b.name());
+ if (cmp == 0) {
+ // higher updateIndex shadows lower updateIndex.
+ cmp = Long.signum(b.updateIndex() - a.updateIndex());
+ }
+ if (cmp == 0) {
+ // higher index shadows lower index, so higher index first.
+ cmp = b.stackIdx - a.stackIdx;
+ }
+ return cmp;
+ }
+
+ final RefCursor rc;
+ final int stackIdx;
+
+ RefQueueEntry(RefCursor rc, int stackIdx) {
+ this.rc = rc;
+ this.stackIdx = stackIdx;
+ }
+
+ String name() {
+ return rc.getRef().getName();
+ }
+
+ long updateIndex() {
+ return rc.getUpdateIndex();
+ }
+ }
+
+ private class MergedLogCursor extends LogCursor {
+ private final PriorityQueue<LogQueueEntry> queue;
+ private String refName;
+ private long updateIndex;
+ private ReflogEntry entry;
+
+ MergedLogCursor() {
+ queue = new PriorityQueue<>(queueSize(), LogQueueEntry::compare);
+ }
+
+ void add(LogQueueEntry t) throws IOException {
+ if (t.lc.next()) {
+ queue.add(t);
+ } else {
+ t.lc.close();
+ }
+ }
+
+ @Override
+ public boolean next() throws IOException {
+ for (;;) {
+ LogQueueEntry t = queue.poll();
+ if (t == null) {
+ return false;
+ }
+
+ refName = t.lc.getRefName();
+ updateIndex = t.lc.getUpdateIndex();
+ entry = t.lc.getReflogEntry();
+ boolean include = includeDeletes || entry != null;
+ skipShadowed(refName, updateIndex);
+ add(t);
+ if (include) {
+ return true;
+ }
+ }
+ }
+
+ private void skipShadowed(String name, long index) throws IOException {
+ for (;;) {
+ LogQueueEntry t = queue.peek();
+ if (t != null && name.equals(t.name()) && index == t.index()) {
+ add(queue.remove());
+ } else {
+ break;
+ }
+ }
+ }
+
+ @Override
+ public String getRefName() {
+ return refName;
+ }
+
+ @Override
+ public long getUpdateIndex() {
+ return updateIndex;
+ }
+
+ @Override
+ public ReflogEntry getReflogEntry() {
+ return entry;
+ }
+
+ @Override
+ public void close() {
+ while (!queue.isEmpty()) {
+ queue.remove().lc.close();
+ }
+ }
+ }
+
+ private static class LogQueueEntry {
+ static int compare(LogQueueEntry a, LogQueueEntry b) {
+ int cmp = a.name().compareTo(b.name());
+ if (cmp == 0) {
+ // higher update index sorts first.
+ cmp = Long.signum(b.index() - a.index());
+ }
+ if (cmp == 0) {
+ // higher index comes first.
+ cmp = b.stackIdx - a.stackIdx;
+ }
+ return cmp;
+ }
+
+ final LogCursor lc;
+ final int stackIdx;
+
+ LogQueueEntry(LogCursor lc, int stackIdx) {
+ this.lc = lc;
+ this.stackIdx = stackIdx;
+ }
+
+ String name() {
+ return lc.getRefName();
+ }
+
+ long index() {
+ return lc.getUpdateIndex();
+ }
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/RefCursor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/RefCursor.java
new file mode 100644
index 0000000000..d8e9c609f0
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/RefCursor.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+
+import org.eclipse.jgit.lib.Ref;
+
+/** Iterator over references inside a {@link Reftable}. */
+public abstract class RefCursor implements AutoCloseable {
+ /**
+ * Check if another reference is available.
+ *
+ * @return {@code true} if there is another result.
+ * @throws IOException
+ * references cannot be read.
+ */
+ public abstract boolean next() throws IOException;
+
+ /** @return reference at the current position. */
+ public abstract Ref getRef();
+
+ /** @return updateIndex that last modified the current reference. */
+ public abstract long getUpdateIndex();
+
+ /** @return {@code true} if the current reference was deleted. */
+ public boolean wasDeleted() {
+ Ref r = getRef();
+ return r.getStorage() == Ref.Storage.NEW && r.getObjectId() == null;
+ }
+
+ @Override
+ public abstract void close();
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/Reftable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/Reftable.java
new file mode 100644
index 0000000000..1189ed3b96
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/Reftable.java
@@ -0,0 +1,262 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static org.eclipse.jgit.lib.RefDatabase.MAX_SYMBOLIC_REF_DEPTH;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collection;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.SymbolicRef;
+
+/** Abstract table of references. */
+public abstract class Reftable implements AutoCloseable {
+ /**
+ * @param refs
+ * references to convert into a reftable; may be empty.
+ * @return a reader for the supplied references.
+ */
+ public static Reftable from(Collection<Ref> refs) {
+ try {
+ ReftableConfig cfg = new ReftableConfig();
+ cfg.setIndexObjects(false);
+ cfg.setAlignBlocks(false);
+ ByteArrayOutputStream buf = new ByteArrayOutputStream();
+ new ReftableWriter()
+ .setConfig(cfg)
+ .begin(buf)
+ .sortAndWriteRefs(refs)
+ .finish();
+ return new ReftableReader(BlockSource.from(buf.toByteArray()));
+ } catch (IOException e) {
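+ // Writing to an in-memory buffer is not expected to fail; surface
+ // any unexpected IOException as an unchecked exception.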
+ throw new RuntimeException(e);
+ }
+ }
+
+ /** {@code true} if deletions should be included in results. */
+ protected boolean includeDeletes;
+
+ /**
+ * @param deletes
+ * if {@code true} deleted references will be returned. If
+ * {@code false} (default behavior), deleted references will be
+ * skipped, and not returned.
+ */
+ public void setIncludeDeletes(boolean deletes) {
+ includeDeletes = deletes;
+ }
+
+ /**
+ * Seek to the first reference, to iterate in order.
+ *
+ * @return cursor to iterate.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ public abstract RefCursor allRefs() throws IOException;
+
+ /**
+ * Seek either to a reference, or a reference subtree.
+ * <p>
+ * If {@code refName} ends with {@code "/"} the method will seek to the
+ * subtree of all references starting with {@code refName} as a prefix. If
+ * no references start with this prefix, an empty cursor is returned.
+ * <p>
+ * Otherwise exactly {@code refName} will be looked for. If present, the
+ * returned cursor will iterate exactly one entry. If not found, an empty
+ * cursor is returned.
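+ * <p>
+ * For example, {@code seekRef("refs/heads/")} iterates every branch,
+ * while {@code seekRef("refs/heads/master")} returns at most one entry.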
+ *
+ * @param refName
+ * reference name or subtree to find.
+ * @return cursor to iterate; empty cursor if no references match.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ public abstract RefCursor seekRef(String refName) throws IOException;
+
+ /**
+ * Match references pointing to a specific object.
+ *
+ * @param id
+ * object to find.
+ * @return cursor to iterate; empty cursor if no references match.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ public abstract RefCursor byObjectId(AnyObjectId id) throws IOException;
+
+ /**
+ * Seek reader to read log records.
+ *
+ * @return cursor to iterate; empty cursor if no logs are present.
+ * @throws IOException
+ * if logs cannot be read.
+ */
+ public abstract LogCursor allLogs() throws IOException;
+
+ /**
+ * Read a single reference's log.
+ *
+ * @param refName
+ * exact name of the reference whose log to read.
+ * @return cursor to iterate; empty cursor if no logs match.
+ * @throws IOException
+ * if logs cannot be read.
+ */
+ public LogCursor seekLog(String refName) throws IOException {
+ return seekLog(refName, Long.MAX_VALUE);
+ }
+
+ /**
+ * Seek to an update index in a reference's log.
+ *
+ * @param refName
+ * exact name of the reference whose log to read.
+ * @param updateIndex
+ * most recent index to return first in the log cursor. Log
+ * records at or before {@code updateIndex} will be returned.
+ * @return cursor to iterate; empty cursor if no logs match.
+ * @throws IOException
+ * if logs cannot be read.
+ */
+ public abstract LogCursor seekLog(String refName, long updateIndex)
+ throws IOException;
+
+ /**
+ * Look up a reference, or null if not found.
+ *
+ * @param refName
+ * reference name to find.
+ * @return the reference, or {@code null} if not found.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ @Nullable
+ public Ref exactRef(String refName) throws IOException {
+ try (RefCursor rc = seekRef(refName)) {
+ return rc.next() ? rc.getRef() : null;
+ }
+ }
+
+ /**
+ * Test if a reference or reference subtree exists.
+ * <p>
+ * If {@code refName} ends with {@code "/"}, the method tests if any
+ * reference starts with {@code refName} as a prefix.
+ * <p>
+ * Otherwise, the method checks if {@code refName} exists.
+ *
+ * @param refName
+ * reference name or subtree to find.
+ * @return {@code true} if the reference exists, or at least one reference
+ * exists in the subtree.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ public boolean hasRef(String refName) throws IOException {
+ try (RefCursor rc = seekRef(refName)) {
+ return rc.next();
+ }
+ }
+
+ /**
+ * Test if any reference directly refers to the object.
+ *
+ * @param id
+ * ObjectId to find.
+ * @return {@code true} if any reference exists directly referencing
+ * {@code id}, or an annotated tag that peels to {@code id}.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ public boolean hasId(AnyObjectId id) throws IOException {
+ try (RefCursor rc = byObjectId(id)) {
+ return rc.next();
+ }
+ }
+
+ /**
+ * Resolve a symbolic reference to populate its value.
+ *
+ * @param symref
+ * reference to resolve.
+ * @return resolved {@code symref}, or {@code null} if the symbolic
+ * reference chain is too deep to resolve.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ @Nullable
+ public Ref resolve(Ref symref) throws IOException {
+ return resolve(symref, 0);
+ }
+
+ private Ref resolve(Ref ref, int depth) throws IOException {
+ if (!ref.isSymbolic()) {
+ return ref;
+ }
+
+ Ref dst = ref.getTarget();
+ if (MAX_SYMBOLIC_REF_DEPTH <= depth) {
+ return null; // claim it doesn't exist
+ }
+
+ dst = exactRef(dst.getName());
+ if (dst == null) {
+ return ref;
+ }
+
+ dst = resolve(dst, depth + 1);
+ if (dst == null) {
+ return null; // claim it doesn't exist
+ }
+ return new SymbolicRef(ref.getName(), dst);
+ }
+
+ @Override
+ public abstract void close() throws IOException;
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java
new file mode 100644
index 0000000000..c22157784c
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java
@@ -0,0 +1,281 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.eclipse.jgit.internal.storage.reftable.ReftableWriter.Stats;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.ReflogEntry;
+
+/**
+ * Merges reftables and compacts them into a single output.
+ * <p>
+ * For a partial compaction callers should set
+ * {@link #setIncludeDeletes(boolean)} to {@code true} to ensure the new
+ * reftable continues to use a delete marker
+ * to shadow any lower reftable that may have the reference present.
+ * <p>
+ * By default all log entries within the range defined by
+ * {@link #setMinUpdateIndex(long)} and {@link #setMaxUpdateIndex(long)} are
+ * copied, even if no references in the output file match the log records.
+ * Callers may truncate the log to a more recent time horizon with
+ * {@link #setOldestReflogTimeMillis(long)}, or disable the log altogether with
+ * {@code setOldestReflogTimeMillis(Long.MAX_VALUE)}.
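+ * <p>
+ * A rough usage sketch; {@code tables} and {@code out} are assumed to be
+ * supplied by the caller:
+ *
+ * <pre>{@code
+ * ReftableCompactor compactor = new ReftableCompactor()
+ *     .setConfig(new ReftableConfig())
+ *     .setIncludeDeletes(true); // partial compaction keeps delete markers
+ * compactor.addAll(tables);     // ordered oldest first, most recent last
+ * compactor.compact(out);       // caller closes out
+ * }</pre>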
+ */
+public class ReftableCompactor {
+ private final ReftableWriter writer = new ReftableWriter();
+ private final ArrayDeque<Reftable> tables = new ArrayDeque<>();
+
+ private long compactBytesLimit;
+ private long bytesToCompact;
+ private boolean includeDeletes;
+ private long minUpdateIndex;
+ private long maxUpdateIndex;
+ private long oldestReflogTimeMillis;
+ private Stats stats;
+
+ /**
+ * @param cfg
+ * configuration for the reftable.
+ * @return {@code this}
+ */
+ public ReftableCompactor setConfig(ReftableConfig cfg) {
+ writer.setConfig(cfg);
+ return this;
+ }
+
+ /**
+ * @param bytes
+ * limit on number of bytes from source tables to compact.
+ * @return {@code this}
+ */
+ public ReftableCompactor setCompactBytesLimit(long bytes) {
+ compactBytesLimit = bytes;
+ return this;
+ }
+
+ /**
+ * @param deletes
+ * {@code true} to include deletions in the output, which may be
+ * necessary for partial compaction.
+ * @return {@code this}
+ */
+ public ReftableCompactor setIncludeDeletes(boolean deletes) {
+ includeDeletes = deletes;
+ return this;
+ }
+
+ /**
+ * @param min
+ * the minimum update index for log entries that appear in the
+ * compacted reftable. This should be 1 higher than the prior
+ * reftable's {@code maxUpdateIndex} if this table will be used
+ * in a stack.
+ * @return {@code this}
+ */
+ public ReftableCompactor setMinUpdateIndex(long min) {
+ minUpdateIndex = min;
+ return this;
+ }
+
+ /**
+ * @param max
+ * the maximum update index for log entries that appear in the
+ * compacted reftable. This should be at least 1 higher than the
+ * prior reftable's {@code maxUpdateIndex} if this table will be
+ * used in a stack.
+ * @return {@code this}
+ */
+ public ReftableCompactor setMaxUpdateIndex(long max) {
+ maxUpdateIndex = max;
+ return this;
+ }
+
+ /**
+ * @param timeMillis
+ * oldest log time to preserve. Entries whose timestamps are
+ * {@code >= timeMillis} will be copied into the output file. Log
+ * entries that predate {@code timeMillis} will be discarded.
+ * Specified in Java standard milliseconds since the epoch.
+ * @return {@code this}
+ */
+ public ReftableCompactor setOldestReflogTimeMillis(long timeMillis) {
+ oldestReflogTimeMillis = timeMillis;
+ return this;
+ }
+
+ /**
+ * Add all of the tables, in the specified order.
+ * <p>
+ * Unconditionally adds all tables, ignoring the
+ * {@link #setCompactBytesLimit(long)}.
+ *
+ * @param readers
+ * tables to compact. Tables should be ordered oldest first/most
+ * recent last so that the more recent tables can shadow the
+ * older results. Caller is responsible for closing the readers.
+ * @throws IOException
+ * if the update indexes of a reader cannot be accessed.
+ */
+ public void addAll(List<? extends Reftable> readers) throws IOException {
+ tables.addAll(readers);
+ for (Reftable r : readers) {
+ if (r instanceof ReftableReader) {
+ adjustUpdateIndexes((ReftableReader) r);
+ }
+ }
+ }
+
+ /**
+ * Try to add this reader at the bottom of the stack.
+ * <p>
+ * A reader may be rejected by returning {@code false} if adding it would
+ * exceed the limit set by {@link #setCompactBytesLimit(long)}. When this
+ * happens the caller should stop trying to add tables, and execute the
+ * compaction.
+ *
+ * @param reader
+ * the reader to insert at the bottom of the stack. Caller is
+ * responsible for closing the reader.
+ * @return {@code true} if the compactor accepted this table; {@code false}
+ * if the compactor has reached its limit.
+ * @throws IOException
+ * if the size of {@code reader} or its update indexes cannot be read.
+ */
+ public boolean tryAddFirst(ReftableReader reader) throws IOException {
+ long sz = reader.size();
+ if (compactBytesLimit > 0 && bytesToCompact + sz > compactBytesLimit) {
+ return false;
+ }
+ bytesToCompact += sz;
+ adjustUpdateIndexes(reader);
+ tables.addFirst(reader);
+ return true;
+ }
+
+ private void adjustUpdateIndexes(ReftableReader reader) throws IOException {
+ if (minUpdateIndex == 0) {
+ minUpdateIndex = reader.minUpdateIndex();
+ } else {
+ minUpdateIndex = Math.min(minUpdateIndex, reader.minUpdateIndex());
+ }
+ maxUpdateIndex = Math.max(maxUpdateIndex, reader.maxUpdateIndex());
+ }
+
+ /**
+ * Write a compaction to {@code out}.
+ *
+ * @param out
+ * stream to write the compacted tables to. Caller is responsible
+ * for closing {@code out}.
+ * @throws IOException
+ * if tables cannot be read, or cannot be written.
+ */
+ public void compact(OutputStream out) throws IOException {
+ MergedReftable mr = new MergedReftable(new ArrayList<>(tables));
+ mr.setIncludeDeletes(includeDeletes);
+
+ writer.setMinUpdateIndex(minUpdateIndex);
+ writer.setMaxUpdateIndex(maxUpdateIndex);
+ writer.begin(out);
+ mergeRefs(mr);
+ mergeLogs(mr);
+ writer.finish();
+ stats = writer.getStats();
+ }
+
+ /** @return statistics of the last written reftable. */
+ public Stats getStats() {
+ return stats;
+ }
+
+ private void mergeRefs(MergedReftable mr) throws IOException {
+ try (RefCursor rc = mr.allRefs()) {
+ while (rc.next()) {
+ writer.writeRef(rc.getRef(), rc.getUpdateIndex());
+ }
+ }
+ }
+
+ private void mergeLogs(MergedReftable mr) throws IOException {
+ if (oldestReflogTimeMillis == Long.MAX_VALUE) {
+ return;
+ }
+
+ try (LogCursor lc = mr.allLogs()) {
+ while (lc.next()) {
+ long updateIndex = lc.getUpdateIndex();
+ if (updateIndex < minUpdateIndex
+ || updateIndex > maxUpdateIndex) {
+ // Cannot merge log records outside the header's range.
+ continue;
+ }
+
+ String refName = lc.getRefName();
+ ReflogEntry log = lc.getReflogEntry();
+ if (log == null) {
+ if (includeDeletes) {
+ writer.deleteLog(refName, updateIndex);
+ }
+ continue;
+ }
+
+ PersonIdent who = log.getWho();
+ if (who.getWhen().getTime() >= oldestReflogTimeMillis) {
+ writer.writeLog(
+ refName,
+ updateIndex,
+ who,
+ log.getOldId(),
+ log.getNewId(),
+ log.getComment());
+ }
+ }
+ }
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConfig.java
new file mode 100644
index 0000000000..f7a1fbe2af
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConfig.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.MAX_BLOCK_SIZE;
+
+import org.eclipse.jgit.lib.Config;
+import org.eclipse.jgit.lib.Repository;
+
+/** Configuration used by a reftable writer when constructing the stream. */
+public class ReftableConfig {
+ private int refBlockSize = 4 << 10;
+ private int logBlockSize;
+ private int restartInterval;
+ private int maxIndexLevels;
+ private boolean alignBlocks = true;
+ private boolean indexObjects = true;
+
+ /** Create a default configuration. */
+ public ReftableConfig() {
+ }
+
+ /**
+ * Create a configuration honoring the repository's settings.
+ *
+ * @param db
+ * the repository to read settings from. The repository is not
+ * retained by the new configuration; instead its settings are
+ * copied during construction.
+ */
+ public ReftableConfig(Repository db) {
+ fromConfig(db.getConfig());
+ }
+
+ /**
+ * Create a configuration honoring settings in a {@link Config}.
+ *
+ * @param cfg
+ * the source to read settings from. The source is not retained
+ * by the new configuration; instead its settings are copied
+ * during construction.
+ */
+ public ReftableConfig(Config cfg) {
+ fromConfig(cfg);
+ }
+
+ /**
+ * Copy an existing configuration to a new instance.
+ *
+ * @param cfg
+ * the source configuration to copy from.
+ */
+ public ReftableConfig(ReftableConfig cfg) {
+ this.refBlockSize = cfg.refBlockSize;
+ this.logBlockSize = cfg.logBlockSize;
+ this.restartInterval = cfg.restartInterval;
+ this.maxIndexLevels = cfg.maxIndexLevels;
+ this.alignBlocks = cfg.alignBlocks;
+ this.indexObjects = cfg.indexObjects;
+ }
+
+ /** @return desired output block size for references, in bytes */
+ public int getRefBlockSize() {
+ return refBlockSize;
+ }
+
+ /**
+ * @param szBytes
+ * desired output block size for references, in bytes.
+ */
+ public void setRefBlockSize(int szBytes) {
+ if (szBytes > MAX_BLOCK_SIZE) {
+ throw new IllegalArgumentException();
+ }
+ refBlockSize = Math.max(0, szBytes);
+ }
+
+ /**
+ * @return desired output block size for log entries, in bytes. If 0 the
+ * writer will default to {@code 2 * getRefBlockSize()}.
+ */
+ public int getLogBlockSize() {
+ return logBlockSize;
+ }
+
+ /**
+ * @param szBytes
+ * desired output block size for log entries, in bytes. If 0 the
+ * writer will default to {@code 2 * getRefBlockSize()}.
+ */
+ public void setLogBlockSize(int szBytes) {
+ if (szBytes > MAX_BLOCK_SIZE) {
+ throw new IllegalArgumentException();
+ }
+ logBlockSize = Math.max(0, szBytes);
+ }
+
+ /** @return number of references between binary search markers. */
+ public int getRestartInterval() {
+ return restartInterval;
+ }
+
+ /**
+ * @param interval
+ * number of references between binary search markers. If
+ * {@code interval} is 0 (default), the writer will select a
+ * default value based on the block size.
+ */
+ public void setRestartInterval(int interval) {
+ restartInterval = Math.max(0, interval);
+ }
+
+ /** @return maximum depth of the index; 0 for unlimited. */
+ public int getMaxIndexLevels() {
+ return maxIndexLevels;
+ }
+
+ /**
+ * @param levels
+ * maximum number of levels to use in indexes. Lower levels of
+ * the index respect {@link #getRefBlockSize()}, and the highest
+ * level may exceed that if the number of levels is limited.
+ */
+ public void setMaxIndexLevels(int levels) {
+ maxIndexLevels = Math.max(0, levels);
+ }
+
+ /** @return {@code true} if the writer should align blocks. */
+ public boolean isAlignBlocks() {
+ return alignBlocks;
+ }
+
+ /**
+ * @param align
+ * if {@code true} blocks are written aligned to multiples of
+ * {@link #getRefBlockSize()}. May increase file size due to NUL
+ * padding bytes added between blocks. Default is {@code true}.
+ */
+ public void setAlignBlocks(boolean align) {
+ alignBlocks = align;
+ }
+
+ /** @return {@code true} if the writer should index objects to refs. */
+ public boolean isIndexObjects() {
+ return indexObjects;
+ }
+
+ /**
+ * @param index
+ * if {@code true} the reftable may include additional storage to
+ * efficiently map from {@code ObjectId} to reference names. By
+ * default, {@code true}.
+ */
+ public void setIndexObjects(boolean index) {
+ indexObjects = index;
+ }
+
+ /**
+ * Update properties by setting fields from the configuration.
+ *
+ * If a property's corresponding variable is not defined in the supplied
+ * configuration, then it is left unmodified.
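+ * <p>
+ * For illustration, a caller might populate the {@code reftable} section
+ * of a {@link Config} as follows (values shown are arbitrary):
+ *
+ * <pre>{@code
+ * Config rc = new Config();
+ * rc.setInt("reftable", null, "blockSize", 8192);
+ * rc.setBoolean("reftable", null, "alignBlocks", false);
+ *
+ * ReftableConfig cfg = new ReftableConfig();
+ * cfg.fromConfig(rc); // other settings keep their current values
+ * }</pre>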
+ *
+ * @param rc
+ * configuration to read properties from.
+ */
+ public void fromConfig(Config rc) {
+ refBlockSize = rc.getInt("reftable", "blockSize", refBlockSize); //$NON-NLS-1$ //$NON-NLS-2$
+ logBlockSize = rc.getInt("reftable", "logBlockSize", logBlockSize); //$NON-NLS-1$ //$NON-NLS-2$
+ restartInterval = rc.getInt("reftable", "restartInterval", restartInterval); //$NON-NLS-1$ //$NON-NLS-2$
+ maxIndexLevels = rc.getInt("reftable", "indexLevels", maxIndexLevels); //$NON-NLS-1$ //$NON-NLS-2$
+ alignBlocks = rc.getBoolean("reftable", "alignBlocks", alignBlocks); //$NON-NLS-1$ //$NON-NLS-2$
+ indexObjects = rc.getBoolean("reftable", "indexObjects", indexObjects); //$NON-NLS-1$ //$NON-NLS-2$
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConstants.java
new file mode 100644
index 0000000000..0b89327582
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConstants.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+class ReftableConstants {
+ static final byte[] FILE_HEADER_MAGIC = { 'R', 'E', 'F', 'T' };
+ static final byte VERSION_1 = (byte) 1;
+
+ static final int FILE_HEADER_LEN = 24;
+ static final int FILE_FOOTER_LEN = 68;
+
+ static final byte FILE_BLOCK_TYPE = 'R';
+ static final byte REF_BLOCK_TYPE = 'r';
+ static final byte OBJ_BLOCK_TYPE = 'o';
+ static final byte LOG_BLOCK_TYPE = 'g';
+ static final byte INDEX_BLOCK_TYPE = 'i';
+
+ static final int VALUE_NONE = 0x0;
+ static final int VALUE_1ID = 0x1;
+ static final int VALUE_2ID = 0x2;
+ static final int VALUE_SYMREF = 0x3;
+ static final int VALUE_TYPE_MASK = 0x7;
+
+ static final int LOG_NONE = 0x0;
+ static final int LOG_DATA = 0x1;
+
+ static final int MAX_BLOCK_SIZE = (1 << 24) - 1;
+ static final int MAX_RESTARTS = 65535;
+
+ static boolean isFileHeaderMagic(byte[] buf, int o, int n) {
+ return (n - o) >= FILE_HEADER_MAGIC.length
+ && buf[o + 0] == FILE_HEADER_MAGIC[0]
+ && buf[o + 1] == FILE_HEADER_MAGIC[1]
+ && buf[o + 2] == FILE_HEADER_MAGIC[2]
+ && buf[o + 3] == FILE_HEADER_MAGIC[3];
+ }
+
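+ // Inverts an update index so that higher (more recent) values sort first.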
+ static long reverseUpdateIndex(long time) {
+ return 0xffffffffffffffffL - time;
+ }
+
+ private ReftableConstants() {
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableOutputStream.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableOutputStream.java
new file mode 100644
index 0000000000..a24619b2c5
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableOutputStream.java
@@ -0,0 +1,247 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.INDEX_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_BLOCK_TYPE;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Arrays;
+import java.util.zip.Deflater;
+import java.util.zip.DeflaterOutputStream;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.util.NB;
+import org.eclipse.jgit.util.io.CountingOutputStream;
+
+/**
+ * Wrapper to assist formatting a reftable to an {@link OutputStream}.
+ * <p>
+ * Internally buffers at block size boundaries, flushing only complete blocks to
+ * the {@code OutputStream}.
+ */
+class ReftableOutputStream extends OutputStream {
+ private final byte[] tmp = new byte[10];
+ private final CountingOutputStream out;
+ private final boolean alignBlocks;
+
+ private Deflater deflater;
+ private DeflaterOutputStream compressor;
+
+ private int blockType;
+ private int blockSize;
+ private int blockStart;
+ private byte[] blockBuf;
+ private int cur;
+ private long paddingUsed;
+
+ ReftableOutputStream(OutputStream os, int bs, boolean align) {
+ blockSize = bs;
+ blockBuf = new byte[bs];
+ alignBlocks = align;
+ out = new CountingOutputStream(os);
+ }
+
+ void setBlockSize(int bs) {
+ blockSize = bs;
+ }
+
+ @Override
+ public void write(int b) {
+ ensureBytesAvailableInBlockBuf(1);
+ blockBuf[cur++] = (byte) b;
+ }
+
+ @Override
+ public void write(byte[] b, int off, int cnt) {
+ ensureBytesAvailableInBlockBuf(cnt);
+ System.arraycopy(b, off, blockBuf, cur, cnt);
+ cur += cnt;
+ }
+
+ int bytesWrittenInBlock() {
+ return cur;
+ }
+
+ int bytesAvailableInBlock() {
+ return blockSize - cur;
+ }
+
+ long paddingUsed() {
+ return paddingUsed;
+ }
+
+ /** @return bytes flushed; excludes {@link #bytesWrittenInBlock()}. */
+ long size() {
+ return out.getCount();
+ }
+
+ static int computeVarintSize(long val) {
+ int n = 1;
+ for (; (val >>>= 7) != 0; n++) {
+ val--;
+ }
+ return n;
+ }
+
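+ // Writes val as big-endian 7-bit groups: all but the last byte carry the
+ // 0x80 continuation bit, and each higher-order group is stored minus one
+ // (mirroring computeVarintSize above).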
+ void writeVarint(long val) {
+ int n = tmp.length;
+ tmp[--n] = (byte) (val & 0x7f);
+ while ((val >>>= 7) != 0) {
+ tmp[--n] = (byte) (0x80 | (--val & 0x7F));
+ }
+ write(tmp, n, tmp.length - n);
+ }
+
+ void writeInt16(int val) {
+ ensureBytesAvailableInBlockBuf(2);
+ NB.encodeInt16(blockBuf, cur, val);
+ cur += 2;
+ }
+
+ void writeInt24(int val) {
+ ensureBytesAvailableInBlockBuf(3);
+ NB.encodeInt24(blockBuf, cur, val);
+ cur += 3;
+ }
+
+ void writeId(ObjectId id) {
+ ensureBytesAvailableInBlockBuf(OBJECT_ID_LENGTH);
+ id.copyRawTo(blockBuf, cur);
+ cur += OBJECT_ID_LENGTH;
+ }
+
+ void writeVarintString(String s) {
+ writeVarintString(s.getBytes(UTF_8));
+ }
+
+ void writeVarintString(byte[] msg) {
+ writeVarint(msg.length);
+ write(msg, 0, msg.length);
+ }
+
+ private void ensureBytesAvailableInBlockBuf(int cnt) {
+ if (cur + cnt > blockBuf.length) {
+ int n = Math.max(cur + cnt, blockBuf.length * 2);
+ blockBuf = Arrays.copyOf(blockBuf, n);
+ }
+ }
+
+ void flushFileHeader() throws IOException {
+ if (cur == FILE_HEADER_LEN && out.getCount() == 0) {
+ out.write(blockBuf, 0, cur);
+ cur = 0;
+ }
+ }
+
+ void beginBlock(byte type) {
+ blockType = type;
+ blockStart = cur;
+ cur += 4; // reserve space for 4-byte block header.
+ }
+
+ void flushBlock() throws IOException {
+ if (cur > blockSize && blockType != INDEX_BLOCK_TYPE) {
+ throw new IOException(JGitText.get().overflowedReftableBlock);
+ }
+ NB.encodeInt32(blockBuf, blockStart, (blockType << 24) | cur);
+
+ if (blockType == LOG_BLOCK_TYPE) {
+ // Log blocks are deflated after the block header.
+ out.write(blockBuf, 0, 4);
+ if (deflater != null) {
+ deflater.reset();
+ } else {
+ deflater = new Deflater(Deflater.BEST_COMPRESSION);
+ compressor = new DeflaterOutputStream(out, deflater);
+ }
+ compressor.write(blockBuf, 4, cur - 4);
+ compressor.finish();
+ } else {
+ // Other blocks are uncompressed.
+ out.write(blockBuf, 0, cur);
+ }
+
+ cur = 0;
+ blockType = 0;
+ blockStart = 0;
+ }
+
+ void padBetweenBlocksToNextBlock() throws IOException {
+ if (alignBlocks) {
+ long m = size() % blockSize;
+ if (m > 0) {
+ int pad = blockSize - (int) m;
+ ensureBytesAvailableInBlockBuf(pad);
+ Arrays.fill(blockBuf, 0, pad, (byte) 0);
+ out.write(blockBuf, 0, pad);
+ paddingUsed += pad;
+ }
+ }
+ }
+
+ int estimatePadBetweenBlocks(int currentBlockSize) {
+ if (alignBlocks) {
+ long m = (size() + currentBlockSize) % blockSize;
+ return m > 0 ? blockSize - (int) m : 0;
+ }
+ return 0;
+ }
+
+ void finishFile() throws IOException {
+ // File footer doesn't need patching for the block start.
+ // Just flush what has been buffered.
+ out.write(blockBuf, 0, cur);
+ cur = 0;
+
+ if (deflater != null) {
+ deflater.end();
+ }
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableReader.java
new file mode 100644
index 0000000000..407a77c7d3
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableReader.java
@@ -0,0 +1,683 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.eclipse.jgit.internal.storage.reftable.BlockReader.decodeBlockLen;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_FOOTER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.INDEX_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.REF_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VERSION_1;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.isFileHeaderMagic;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.zip.CRC32;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.LogEntry;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.ReflogEntry;
+import org.eclipse.jgit.util.LongList;
+import org.eclipse.jgit.util.LongMap;
+import org.eclipse.jgit.util.NB;
+
+/**
+ * Reads a reftable formatted file.
+ * <p>
+ * {@code ReftableReader} is not thread-safe. Concurrent readers need their own
+ * instance to read from the same file.
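+ * <p>
+ * A minimal read sketch, assuming the caller already has a
+ * {@link BlockSource} {@code src} for the file contents:
+ *
+ * <pre>{@code
+ * try (ReftableReader reader = new ReftableReader(src);
+ *     RefCursor rc = reader.seekRef("refs/heads/master")) {
+ *   if (rc.next()) {
+ *     Ref master = rc.getRef();
+ *   }
+ * }
+ * }</pre>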
+ */
+public class ReftableReader extends Reftable {
+ private final BlockSource src;
+
+ private int blockSize = -1;
+ private long minUpdateIndex;
+ private long maxUpdateIndex;
+
+ private long refEnd;
+ private long objPosition;
+ private long objEnd;
+ private long logPosition;
+ private long logEnd;
+ private int objIdLen;
+
+ private long refIndexPosition = -1;
+ private long objIndexPosition = -1;
+ private long logIndexPosition = -1;
+
+ private BlockReader refIndex;
+ private BlockReader objIndex;
+ private BlockReader logIndex;
+ private LongMap<BlockReader> indexCache;
+
+ /**
+ * Initialize a new reftable reader.
+ *
+ * @param src
+ * the file content to read.
+ */
+ public ReftableReader(BlockSource src) {
+ this.src = src;
+ }
+
+ /**
+ * @return the block size in bytes chosen for this file by the writer. Most
+ * reads from the {@link BlockSource} will be aligned to the block
+ * size.
+ * @throws IOException
+ * file cannot be read.
+ */
+ public int blockSize() throws IOException {
+ if (blockSize == -1) {
+ readFileHeader();
+ }
+ return blockSize;
+ }
+
+ /**
+ * @return the minimum update index for log entries that appear in this
+ * reftable. This should be 1 higher than the prior reftable's
+ * {@code maxUpdateIndex} if this table is used in a stack.
+ * @throws IOException
+ * file cannot be read.
+ */
+ public long minUpdateIndex() throws IOException {
+ if (blockSize == -1) {
+ readFileHeader();
+ }
+ return minUpdateIndex;
+ }
+
+ /**
+ * @return the maximum update index for log entries that appear in this
+ * reftable. This should be 1 higher than the prior reftable's
+ * {@code maxUpdateIndex} if this table is used in a stack.
+ * @throws IOException
+ * file cannot be read.
+ */
+ public long maxUpdateIndex() throws IOException {
+ if (blockSize == -1) {
+ readFileHeader();
+ }
+ return maxUpdateIndex;
+ }
+
+ @Override
+ public RefCursor allRefs() throws IOException {
+ if (blockSize == -1) {
+ readFileHeader();
+ }
+
+ long end = refEnd > 0 ? refEnd : (src.size() - FILE_FOOTER_LEN);
+ src.adviseSequentialRead(0, end);
+
+ RefCursorImpl i = new RefCursorImpl(end, null, false);
+ i.block = readBlock(0, end);
+ return i;
+ }
+
+ @Override
+ public RefCursor seekRef(String refName) throws IOException {
+ initRefIndex();
+
+ byte[] key = refName.getBytes(UTF_8);
+ boolean prefix = key[key.length - 1] == '/';
+
+ RefCursorImpl i = new RefCursorImpl(refEnd, key, prefix);
+ i.block = seek(REF_BLOCK_TYPE, key, refIndex, 0, refEnd);
+ return i;
+ }
+
+ @Override
+ public RefCursor byObjectId(AnyObjectId id) throws IOException {
+ initObjIndex();
+ ObjCursorImpl i = new ObjCursorImpl(refEnd, id);
+ if (objIndex != null) {
+ i.initSeek();
+ } else {
+ i.initScan();
+ }
+ return i;
+ }
+
+ @Override
+ public LogCursor allLogs() throws IOException {
+ initLogIndex();
+ if (logPosition > 0) {
+ src.adviseSequentialRead(logPosition, logEnd);
+ LogCursorImpl i = new LogCursorImpl(logEnd, null);
+ i.block = readBlock(logPosition, logEnd);
+ return i;
+ }
+ return new EmptyLogCursor();
+ }
+
+ @Override
+ public LogCursor seekLog(String refName, long updateIndex)
+ throws IOException {
+ initLogIndex();
+ if (logPosition > 0) {
+ byte[] key = LogEntry.key(refName, updateIndex);
+ byte[] match = refName.getBytes(UTF_8);
+ LogCursorImpl i = new LogCursorImpl(logEnd, match);
+ i.block = seek(LOG_BLOCK_TYPE, key, logIndex, logPosition, logEnd);
+ return i;
+ }
+ return new EmptyLogCursor();
+ }
+
+ private BlockReader seek(byte blockType, byte[] key, BlockReader idx,
+ long startPos, long endPos) throws IOException {
+ if (idx != null) {
+ // Walk through a possibly multi-level index to a leaf block.
+ BlockReader block = idx;
+ do {
+ if (block.seekKey(key) > 0) {
+ return null;
+ }
+ long pos = block.readPositionFromIndex();
+ block = readBlock(pos, endPos);
+ } while (block.type() == INDEX_BLOCK_TYPE);
+ block.seekKey(key);
+ return block;
+ }
+ return binarySearch(blockType, key, startPos, endPos);
+ }
+
+ private BlockReader binarySearch(byte blockType, byte[] key,
+ long startPos, long endPos) throws IOException {
+ if (blockSize == 0) {
+ BlockReader b = readBlock(startPos, endPos);
+ if (blockType != b.type()) {
+ return null;
+ }
+ b.seekKey(key);
+ return b;
+ }
+
+ int low = (int) (startPos / blockSize);
+ int end = blocksIn(startPos, endPos);
+ BlockReader block = null;
+ do {
+ int mid = (low + end) >>> 1;
+ block = readBlock(((long) mid) * blockSize, endPos);
+ if (blockType != block.type()) {
+ return null;
+ }
+ int cmp = block.seekKey(key);
+ if (cmp < 0) {
+ end = mid;
+ } else if (cmp == 0) {
+ break;
+ } else /* if (cmp > 0) */ {
+ low = mid + 1;
+ }
+ } while (low < end);
+ return block;
+ }
+
+ private void readFileHeader() throws IOException {
+ readHeaderOrFooter(0, FILE_HEADER_LEN);
+ }
+
+ private void readFileFooter() throws IOException {
+ int ftrLen = FILE_FOOTER_LEN;
+ byte[] ftr = readHeaderOrFooter(src.size() - ftrLen, ftrLen);
+
+ CRC32 crc = new CRC32();
+ crc.update(ftr, 0, ftrLen - 4);
+ if (crc.getValue() != NB.decodeUInt32(ftr, ftrLen - 4)) {
+ throw new IOException(JGitText.get().invalidReftableCRC);
+ }
+
+ refIndexPosition = NB.decodeInt64(ftr, 24);
+ long p = NB.decodeInt64(ftr, 32);
+ objPosition = p >>> 5;
+ objIdLen = (int) (p & 0x1f);
+ objIndexPosition = NB.decodeInt64(ftr, 40);
+ logPosition = NB.decodeInt64(ftr, 48);
+ logIndexPosition = NB.decodeInt64(ftr, 56);
+
+ if (refIndexPosition > 0) {
+ refEnd = refIndexPosition;
+ } else if (objPosition > 0) {
+ refEnd = objPosition;
+ } else if (logPosition > 0) {
+ refEnd = logPosition;
+ } else {
+ refEnd = src.size() - ftrLen;
+ }
+
+ if (objPosition > 0) {
+ if (objIndexPosition > 0) {
+ objEnd = objIndexPosition;
+ } else if (logPosition > 0) {
+ objEnd = logPosition;
+ } else {
+ objEnd = src.size() - ftrLen;
+ }
+ }
+
+ if (logPosition > 0) {
+ if (logIndexPosition > 0) {
+ logEnd = logIndexPosition;
+ } else {
+ logEnd = src.size() - ftrLen;
+ }
+ }
+ }
+
+ private byte[] readHeaderOrFooter(long pos, int len) throws IOException {
+ ByteBuffer buf = src.read(pos, len);
+ if (buf.position() != len) {
+ throw new IOException(JGitText.get().shortReadOfBlock);
+ }
+
+ byte[] tmp = new byte[len];
+ buf.flip();
+ buf.get(tmp);
+ if (!isFileHeaderMagic(tmp, 0, len)) {
+ throw new IOException(JGitText.get().invalidReftableFile);
+ }
+
+ int v = NB.decodeInt32(tmp, 4);
+ int version = v >>> 24;
+ if (VERSION_1 != version) {
+ throw new IOException(MessageFormat.format(
+ JGitText.get().unsupportedReftableVersion,
+ Integer.valueOf(version)));
+ }
+ if (blockSize == -1) {
+ blockSize = v & 0xffffff;
+ }
+ minUpdateIndex = NB.decodeInt64(tmp, 8);
+ maxUpdateIndex = NB.decodeInt64(tmp, 16);
+ return tmp;
+ }
+
+ private void initRefIndex() throws IOException {
+ if (refIndexPosition < 0) {
+ readFileFooter();
+ }
+ if (refIndex == null && refIndexPosition > 0) {
+ refIndex = readIndex(refIndexPosition);
+ }
+ }
+
+ private void initObjIndex() throws IOException {
+ if (objIndexPosition < 0) {
+ readFileFooter();
+ }
+ if (objIndex == null && objIndexPosition > 0) {
+ objIndex = readIndex(objIndexPosition);
+ }
+ }
+
+ private void initLogIndex() throws IOException {
+ if (logIndexPosition < 0) {
+ readFileFooter();
+ }
+ if (logIndex == null && logIndexPosition > 0) {
+ logIndex = readIndex(logIndexPosition);
+ }
+ }
+
+ private BlockReader readIndex(long pos) throws IOException {
+ int sz = readBlockLen(pos);
+ BlockReader i = new BlockReader();
+ i.readBlock(src, pos, sz);
+ i.verifyIndex();
+ return i;
+ }
+
+ private int readBlockLen(long pos) throws IOException {
+ int sz = pos == 0 ? FILE_HEADER_LEN + 4 : 4;
+ ByteBuffer tmp = src.read(pos, sz);
+ if (tmp.position() < sz) {
+ throw new IOException(JGitText.get().invalidReftableFile);
+ }
+ byte[] buf;
+ if (tmp.hasArray() && tmp.arrayOffset() == 0) {
+ buf = tmp.array();
+ } else {
+ buf = new byte[sz];
+ tmp.flip();
+ tmp.get(buf);
+ }
+ if (pos == 0 && buf[FILE_HEADER_LEN] == FILE_BLOCK_TYPE) {
+ return FILE_HEADER_LEN;
+ }
+ int p = pos == 0 ? FILE_HEADER_LEN : 0;
+ return decodeBlockLen(NB.decodeInt32(buf, p));
+ }
+
+ private BlockReader readBlock(long pos, long end) throws IOException {
+ if (indexCache != null) {
+ BlockReader b = indexCache.get(pos);
+ if (b != null) {
+ return b;
+ }
+ }
+
+ int sz = blockSize;
+ if (sz == 0) {
+ sz = readBlockLen(pos);
+ } else if (pos + sz > end) {
+ sz = (int) (end - pos); // last block may omit padding.
+ }
+
+ BlockReader b = new BlockReader();
+ b.readBlock(src, pos, sz);
+ if (b.type() == INDEX_BLOCK_TYPE && !b.truncated()) {
+ if (indexCache == null) {
+ indexCache = new LongMap<>();
+ }
+ indexCache.put(pos, b);
+ }
+ return b;
+ }
+
+ private int blocksIn(long pos, long end) {
+ int blocks = (int) ((end - pos) / blockSize);
+ return end % blockSize == 0 ? blocks : (blocks + 1);
+ }
+
+ /**
+ * Get size of the reftable, in bytes.
+ *
+ * @return size of the reftable, in bytes.
+ * @throws IOException
+ * size cannot be obtained.
+ */
+ public long size() throws IOException {
+ return src.size();
+ }
+
+ @Override
+ public void close() throws IOException {
+ src.close();
+ }
+
+ private class RefCursorImpl extends RefCursor {
+ private final long scanEnd;
+ private final byte[] match;
+ private final boolean prefix;
+
+ private Ref ref;
+ private long updateIndex;
+ BlockReader block;
+
+ RefCursorImpl(long scanEnd, byte[] match, boolean prefix) {
+ this.scanEnd = scanEnd;
+ this.match = match;
+ this.prefix = prefix;
+ }
+
+ @Override
+ public boolean next() throws IOException {
+ for (;;) {
+ if (block == null || block.type() != REF_BLOCK_TYPE) {
+ return false;
+ } else if (!block.next()) {
+ long pos = block.endPosition();
+ if (pos >= scanEnd) {
+ return false;
+ }
+ block = readBlock(pos, scanEnd);
+ continue;
+ }
+
+ block.parseKey();
+ if (match != null && !block.match(match, prefix)) {
+ block.skipValue();
+ return false;
+ }
+
+ updateIndex = minUpdateIndex + block.readUpdateIndexDelta();
+ ref = block.readRef();
+ if (!includeDeletes && wasDeleted()) {
+ continue;
+ }
+ return true;
+ }
+ }
+
+ @Override
+ public Ref getRef() {
+ return ref;
+ }
+
+ @Override
+ public long getUpdateIndex() {
+ return updateIndex;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing.
+ }
+ }
+
+ private class LogCursorImpl extends LogCursor {
+ private final long scanEnd;
+ private final byte[] match;
+
+ private String refName;
+ private long updateIndex;
+ private ReflogEntry entry;
+ BlockReader block;
+
+ LogCursorImpl(long scanEnd, byte[] match) {
+ this.scanEnd = scanEnd;
+ this.match = match;
+ }
+
+ @Override
+ public boolean next() throws IOException {
+ for (;;) {
+ if (block == null || block.type() != LOG_BLOCK_TYPE) {
+ return false;
+ } else if (!block.next()) {
+ long pos = block.endPosition();
+ if (pos >= scanEnd) {
+ return false;
+ }
+ block = readBlock(pos, scanEnd);
+ continue;
+ }
+
+ block.parseKey();
+ if (match != null && !block.match(match, false)) {
+ block.skipValue();
+ return false;
+ }
+
+ refName = block.name();
+ updateIndex = block.readLogUpdateIndex();
+ entry = block.readLogEntry();
+ if (entry == null && !includeDeletes) {
+ continue;
+ }
+ return true;
+ }
+ }
+
+ @Override
+ public String getRefName() {
+ return refName;
+ }
+
+ @Override
+ public long getUpdateIndex() {
+ return updateIndex;
+ }
+
+ @Override
+ public ReflogEntry getReflogEntry() {
+ return entry;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing.
+ }
+ }
+
+ static final LongList EMPTY_LONG_LIST = new LongList(0);
+
+ private class ObjCursorImpl extends RefCursor {
+ private final long scanEnd;
+ private final ObjectId match;
+
+ private Ref ref;
+ private long updateIndex;
+ private int listIdx;
+
+ private LongList blockPos;
+ private BlockReader block;
+
+ ObjCursorImpl(long scanEnd, AnyObjectId id) {
+ this.scanEnd = scanEnd;
+ this.match = id.copy();
+ }
+
+ void initSeek() throws IOException {
+ byte[] rawId = new byte[OBJECT_ID_LENGTH];
+ match.copyRawTo(rawId, 0);
+ byte[] key = Arrays.copyOf(rawId, objIdLen);
+
+ BlockReader b = objIndex;
+ do {
+ if (b.seekKey(key) > 0) {
+ blockPos = EMPTY_LONG_LIST;
+ return;
+ }
+ long pos = b.readPositionFromIndex();
+ b = readBlock(pos, objEnd);
+ } while (b.type() == INDEX_BLOCK_TYPE);
+ b.seekKey(key);
+ while (b.next()) {
+ b.parseKey();
+ if (b.match(key, false)) {
+ blockPos = b.readBlockPositionList();
+ if (blockPos == null) {
+ initScan();
+ return;
+ }
+ break;
+ }
+ b.skipValue();
+ }
+ if (blockPos == null) {
+ blockPos = EMPTY_LONG_LIST;
+ }
+ if (blockPos.size() > 0) {
+ long pos = blockPos.get(listIdx++);
+ block = readBlock(pos, scanEnd);
+ }
+ }
+
+ void initScan() throws IOException {
+ block = readBlock(0, scanEnd);
+ }
+
+ @Override
+ public boolean next() throws IOException {
+ for (;;) {
+ if (block == null || block.type() != REF_BLOCK_TYPE) {
+ return false;
+ } else if (!block.next()) {
+ long pos;
+ if (blockPos != null) {
+ if (listIdx >= blockPos.size()) {
+ return false;
+ }
+ pos = blockPos.get(listIdx++);
+ } else {
+ pos = block.endPosition();
+ }
+ if (pos >= scanEnd) {
+ return false;
+ }
+ block = readBlock(pos, scanEnd);
+ continue;
+ }
+
+ block.parseKey();
+ updateIndex = minUpdateIndex + block.readUpdateIndexDelta();
+ ref = block.readRef();
+ ObjectId id = ref.getObjectId();
+ if (id != null && match.equals(id)
+ && (includeDeletes || !wasDeleted())) {
+ return true;
+ }
+ }
+ }
+
+ @Override
+ public Ref getRef() {
+ return ref;
+ }
+
+ @Override
+ public long getUpdateIndex() {
+ return updateIndex;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing.
+ }
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableWriter.java
new file mode 100644
index 0000000000..0ac2445fc0
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableWriter.java
@@ -0,0 +1,813 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static java.lang.Math.log;
+import static org.eclipse.jgit.internal.storage.reftable.BlockWriter.padBetweenBlocks;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_FOOTER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_MAGIC;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.INDEX_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.MAX_BLOCK_SIZE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.MAX_RESTARTS;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.OBJ_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.REF_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VERSION_1;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.zip.CRC32;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.DeleteLogEntry;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.Entry;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.IndexEntry;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.LogEntry;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.ObjEntry;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.RefEntry;
+import org.eclipse.jgit.lib.AbbreviatedObjectId;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdOwnerMap;
+import org.eclipse.jgit.lib.ObjectIdSubclassMap;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.util.LongList;
+import org.eclipse.jgit.util.NB;
+
+/**
+ * Writes a reftable formatted file.
+ * <p>
+ * A reftable can be written in a streaming fashion, provided the caller sorts
+ * all references. A {@link ReftableWriter} is single-use, and not thread-safe.
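+ * <p>
+ * A rough usage sketch; {@code out} and {@code refsToPack} are assumed to
+ * be supplied by the caller, and the update indexes shown are arbitrary:
+ *
+ * <pre>{@code
+ * ReftableWriter writer = new ReftableWriter(new ReftableConfig());
+ * writer.setMinUpdateIndex(1).setMaxUpdateIndex(1);
+ * writer.begin(out);
+ * writer.sortAndWriteRefs(refsToPack); // sorts, then writes the ref section
+ * writer.finish();                     // writes indexes and footer; caller closes out
+ * }</pre>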
+ */
+public class ReftableWriter {
+ private ReftableConfig config;
+ private int refBlockSize;
+ private int logBlockSize;
+ private int restartInterval;
+ private int maxIndexLevels;
+ private boolean alignBlocks;
+ private boolean indexObjects;
+
+ private long minUpdateIndex;
+ private long maxUpdateIndex;
+
+ private ReftableOutputStream out;
+ private ObjectIdSubclassMap<RefList> obj2ref;
+
+ private BlockWriter cur;
+ private Section refs;
+ private Section objs;
+ private Section logs;
+ private int objIdLen;
+ private Stats stats;
+
+ /** Initialize a writer with a default configuration. */
+ public ReftableWriter() {
+ this(new ReftableConfig());
+ }
+
+ /**
+ * Initialize a writer with a specific configuration.
+ *
+ * @param cfg
+ * configuration for the writer.
+ */
+ public ReftableWriter(ReftableConfig cfg) {
+ config = cfg;
+ }
+
+ /**
+ * @param cfg
+ * configuration for the writer.
+ * @return {@code this}
+ */
+ public ReftableWriter setConfig(ReftableConfig cfg) {
+ this.config = cfg != null ? cfg : new ReftableConfig();
+ return this;
+ }
+
+ /**
+ * @param min
+ * the minimum update index for log entries that appear in this
+ * reftable. This should be 1 higher than the prior reftable's
+ * {@code maxUpdateIndex} if this table will be used in a stack.
+ * @return {@code this}
+ */
+ public ReftableWriter setMinUpdateIndex(long min) {
+ minUpdateIndex = min;
+ return this;
+ }
+
+ /**
+ * @param max
+ * the maximum update index for log entries that appear in this
+ * reftable. This should be at least 1 higher than the prior
+ * reftable's {@code maxUpdateIndex} if this table will be used
+ * in a stack.
+ * @return {@code this}
+ */
+ public ReftableWriter setMaxUpdateIndex(long max) {
+ maxUpdateIndex = max;
+ return this;
+ }
+
+ /**
+ * Begin writing the reftable.
+ *
+ * @param os
+ * stream to write the table to. Caller is responsible for
+ * closing the stream after invoking {@link #finish()}.
+ * @return {@code this}
+ * @throws IOException
+ * if reftable header cannot be written.
+ */
+ public ReftableWriter begin(OutputStream os) throws IOException {
+ refBlockSize = config.getRefBlockSize();
+ logBlockSize = config.getLogBlockSize();
+ restartInterval = config.getRestartInterval();
+ maxIndexLevels = config.getMaxIndexLevels();
+ alignBlocks = config.isAlignBlocks();
+ indexObjects = config.isIndexObjects();
+
+ if (refBlockSize <= 0) {
+ refBlockSize = 4 << 10;
+ } else if (refBlockSize > MAX_BLOCK_SIZE) {
+ throw new IllegalArgumentException();
+ }
+ if (logBlockSize <= 0) {
+ logBlockSize = 2 * refBlockSize;
+ }
+ if (restartInterval <= 0) {
+ restartInterval = refBlockSize < (60 << 10) ? 16 : 64;
+ }
+
+ out = new ReftableOutputStream(os, refBlockSize, alignBlocks);
+ refs = new Section(REF_BLOCK_TYPE);
+ if (indexObjects) {
+ obj2ref = new ObjectIdSubclassMap<>();
+ }
+ writeFileHeader();
+ return this;
+ }
+
+ /**
+ * Sort a collection of references and write them to the reftable.
+ *
+ * @param refsToPack
+ * references to sort and write.
+ * @return {@code this}
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public ReftableWriter sortAndWriteRefs(Collection<Ref> refsToPack)
+ throws IOException {
+ Iterator<RefEntry> itr = refsToPack.stream()
+ .map(r -> new RefEntry(r, maxUpdateIndex - minUpdateIndex))
+ .sorted(Entry::compare)
+ .iterator();
+ while (itr.hasNext()) {
+ RefEntry entry = itr.next();
+ long blockPos = refs.write(entry);
+ indexRef(entry.ref, blockPos);
+ }
+ return this;
+ }
+
+ /**
+ * Write one reference to the reftable.
+ * <p>
+ * References must be passed in sorted order.
+ *
+ * @param ref
+ * the reference to store.
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public void writeRef(Ref ref) throws IOException {
+ writeRef(ref, maxUpdateIndex);
+ }
+
+ /**
+ * Write one reference to the reftable.
+ * <p>
+ * References must be passed in sorted order.
+ *
+ * @param ref
+ * the reference to store.
+ * @param updateIndex
+ * the updateIndex that modified this reference. Must be
+ * {@code >= minUpdateIndex} for this file.
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public void writeRef(Ref ref, long updateIndex) throws IOException {
+ if (updateIndex < minUpdateIndex) {
+ throw new IllegalArgumentException();
+ }
+ long d = updateIndex - minUpdateIndex;
+ long blockPos = refs.write(new RefEntry(ref, d));
+ indexRef(ref, blockPos);
+ }
+
+ private void indexRef(Ref ref, long blockPos) {
+ if (indexObjects && !ref.isSymbolic()) {
+ indexId(ref.getObjectId(), blockPos);
+ indexId(ref.getPeeledObjectId(), blockPos);
+ }
+ }
+
+ private void indexId(ObjectId id, long blockPos) {
+ if (id != null) {
+ RefList l = obj2ref.get(id);
+ if (l == null) {
+ l = new RefList(id);
+ obj2ref.add(l);
+ }
+ l.addBlock(blockPos);
+ }
+ }
+
+ /**
+ * Write one reflog entry to the reftable.
+ * <p>
+ * Reflog entries must be written in reference name order, and within each
+ * reference in descending {@code updateIndex} (highest first) order.
+ *
+ * @param ref
+ * name of the reference.
+ * @param updateIndex
+ * identifier of the transaction that created the log record. The
+ * {@code updateIndex} must be unique within the scope of
+ * {@code ref}, and must be within the bounds defined by
+ * {@code minUpdateIndex <= updateIndex <= maxUpdateIndex}.
+ * @param who
+ * committer of the reflog entry.
+ * @param oldId
+ * prior id; pass {@link ObjectId#zeroId()} for creations.
+ * @param newId
+ * new id; pass {@link ObjectId#zeroId()} for deletions.
+ * @param message
+ * optional message (may be null).
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public void writeLog(String ref, long updateIndex, PersonIdent who,
+ ObjectId oldId, ObjectId newId, @Nullable String message)
+ throws IOException {
+ String msg = message != null ? message : ""; //$NON-NLS-1$
+ beginLog();
+ logs.write(new LogEntry(ref, updateIndex, who, oldId, newId, msg));
+ }
+
+ /**
+ * Record deletion of one reflog entry in this reftable.
+ *
+ * <p>
+ * The deletion can shadow an entry stored in a lower table in the stack.
+ * This is useful for {@code refs/stash} and dropping an entry from its
+ * reflog.
+ * <p>
+ * Deletion must be properly interleaved in sorted updateIndex order with
+ * any other logs written by
+ * {@link #writeLog(String, long, PersonIdent, ObjectId, ObjectId, String)}.
+ *
+ * @param ref
+ * the ref to delete (hide) a reflog entry from.
+ * @param updateIndex
+ * the update index that must be hidden.
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public void deleteLog(String ref, long updateIndex) throws IOException {
+ beginLog();
+ logs.write(new DeleteLogEntry(ref, updateIndex));
+ }
+
+ private void beginLog() throws IOException {
+ if (logs == null) {
+ finishRefAndObjSections(); // close prior ref blocks and their index, if present.
+ out.flushFileHeader();
+ out.setBlockSize(logBlockSize);
+ logs = new Section(LOG_BLOCK_TYPE);
+ }
+ }
+
+ /**
+ * @return an estimate of the current size in bytes of the reftable, if it
+ * was finished right now. Estimate is only accurate if
+ * {@link ReftableConfig#setIndexObjects(boolean)} is {@code false}
+ * and {@link ReftableConfig#setMaxIndexLevels(int)} is {@code 1}.
+ */
+ public long estimateTotalBytes() {
+ long bytes = out.size();
+ if (bytes == 0) {
+ bytes += FILE_HEADER_LEN;
+ }
+ if (cur != null) {
+ long curBlockPos = out.size();
+ int sz = cur.currentSize();
+ bytes += sz;
+
+ IndexBuilder idx = null;
+ if (cur.blockType() == REF_BLOCK_TYPE) {
+ idx = refs.idx;
+ } else if (cur.blockType() == LOG_BLOCK_TYPE) {
+ idx = logs.idx;
+ }
+ if (idx != null && shouldHaveIndex(idx)) {
+ if (idx == refs.idx) {
+ bytes += out.estimatePadBetweenBlocks(sz);
+ }
+ bytes += idx.estimateBytes(curBlockPos);
+ }
+ }
+ bytes += FILE_FOOTER_LEN;
+ return bytes;
+ }
+
+ /**
+ * Finish writing the reftable by writing its trailer.
+ *
+ * @return {@code this}
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public ReftableWriter finish() throws IOException {
+ finishRefAndObjSections();
+ finishLogSection();
+ writeFileFooter();
+ out.finishFile();
+
+ stats = new Stats(this, out);
+ out = null;
+ obj2ref = null;
+ cur = null;
+ refs = null;
+ objs = null;
+ logs = null;
+ return this;
+ }
+
+ private void finishRefAndObjSections() throws IOException {
+ if (cur != null && cur.blockType() == REF_BLOCK_TYPE) {
+ refs.finishSectionMaybeWriteIndex();
+ if (indexObjects && !obj2ref.isEmpty() && refs.idx.bytes > 0) {
+ writeObjBlocks();
+ }
+ obj2ref = null;
+ }
+ }
+
+ private void writeObjBlocks() throws IOException {
+ List<RefList> sorted = sortById(obj2ref);
+ obj2ref = null;
+ objIdLen = shortestUniqueAbbreviation(sorted);
+
+ out.padBetweenBlocksToNextBlock();
+ objs = new Section(OBJ_BLOCK_TYPE);
+ objs.entryCnt = sorted.size();
+ for (RefList l : sorted) {
+ objs.write(new ObjEntry(objIdLen, l, l.blockPos));
+ }
+ objs.finishSectionMaybeWriteIndex();
+ }
+
+ private void finishLogSection() throws IOException {
+ if (cur != null && cur.blockType() == LOG_BLOCK_TYPE) {
+ logs.finishSectionMaybeWriteIndex();
+ }
+ }
+
+ private boolean shouldHaveIndex(IndexBuilder idx) {
+ int threshold;
+ if (idx == refs.idx && alignBlocks) {
+ threshold = 4;
+ } else {
+ threshold = 1;
+ }
+ return idx.entries.size() + (cur != null ? 1 : 0) > threshold;
+ }
+
+ private void writeFileHeader() {
+ byte[] hdr = new byte[FILE_HEADER_LEN];
+ encodeHeader(hdr);
+ out.write(hdr, 0, FILE_HEADER_LEN);
+ }
+
+ private void encodeHeader(byte[] hdr) {
+ System.arraycopy(FILE_HEADER_MAGIC, 0, hdr, 0, 4);
+ int bs = alignBlocks ? refBlockSize : 0;
+ NB.encodeInt32(hdr, 4, (VERSION_1 << 24) | bs);
+ NB.encodeInt64(hdr, 8, minUpdateIndex);
+ NB.encodeInt64(hdr, 16, maxUpdateIndex);
+ }
+
+ private void writeFileFooter() {
+ int ftrLen = FILE_FOOTER_LEN;
+ byte[] ftr = new byte[ftrLen];
+ encodeHeader(ftr);
+
+ NB.encodeInt64(ftr, 24, indexPosition(refs));
+ NB.encodeInt64(ftr, 32, (firstBlockPosition(objs) << 5) | objIdLen);
+ NB.encodeInt64(ftr, 40, indexPosition(objs));
+ NB.encodeInt64(ftr, 48, firstBlockPosition(logs));
+ NB.encodeInt64(ftr, 56, indexPosition(logs));
+
+ CRC32 crc = new CRC32();
+ crc.update(ftr, 0, ftrLen - 4);
+ NB.encodeInt32(ftr, ftrLen - 4, (int) crc.getValue());
+
+ out.write(ftr, 0, ftrLen);
+ }
+
+ private static long firstBlockPosition(@Nullable Section s) {
+ return s != null ? s.firstBlockPosition : 0;
+ }
+
+ private static long indexPosition(@Nullable Section s) {
+ return s != null && s.idx != null ? s.idx.rootPosition : 0;
+ }
+
+ /** @return statistics of the last written reftable. */
+ public Stats getStats() {
+ return stats;
+ }
+
+ /** Statistics about a written reftable. */
+ public static class Stats {
+ private final int refBlockSize;
+ private final int logBlockSize;
+ private final int restartInterval;
+
+ private final long minUpdateIndex;
+ private final long maxUpdateIndex;
+
+ private final long refCnt;
+ private final long objCnt;
+ private final int objIdLen;
+ private final long logCnt;
+ private final long refBytes;
+ private final long objBytes;
+ private final long logBytes;
+ private final long paddingUsed;
+ private final long totalBytes;
+
+ private final int refIndexSize;
+ private final int refIndexLevels;
+ private final int objIndexSize;
+ private final int objIndexLevels;
+
+ Stats(ReftableWriter w, ReftableOutputStream o) {
+ refBlockSize = w.refBlockSize;
+ logBlockSize = w.logBlockSize;
+ restartInterval = w.restartInterval;
+
+ minUpdateIndex = w.minUpdateIndex;
+ maxUpdateIndex = w.maxUpdateIndex;
+ paddingUsed = o.paddingUsed();
+ totalBytes = o.size();
+
+ refCnt = w.refs.entryCnt;
+ refBytes = w.refs.bytes;
+
+ objCnt = w.objs != null ? w.objs.entryCnt : 0;
+ objBytes = w.objs != null ? w.objs.bytes : 0;
+ objIdLen = w.objIdLen;
+
+ logCnt = w.logs != null ? w.logs.entryCnt : 0;
+ logBytes = w.logs != null ? w.logs.bytes : 0;
+
+ IndexBuilder refIdx = w.refs.idx;
+ refIndexSize = refIdx.bytes;
+ refIndexLevels = refIdx.levels;
+
+ IndexBuilder objIdx = w.objs != null ? w.objs.idx : null;
+ objIndexSize = objIdx != null ? objIdx.bytes : 0;
+ objIndexLevels = objIdx != null ? objIdx.levels : 0;
+ }
+
+ /** @return number of bytes in a ref block. */
+ public int refBlockSize() {
+ return refBlockSize;
+ }
+
+ /** @return number of bytes to compress into a log block. */
+ public int logBlockSize() {
+ return logBlockSize;
+ }
+
+ /** @return number of references between binary search markers. */
+ public int restartInterval() {
+ return restartInterval;
+ }
+
+ /** @return smallest update index contained in this reftable. */
+ public long minUpdateIndex() {
+ return minUpdateIndex;
+ }
+
+ /** @return largest update index contained in this reftable. */
+ public long maxUpdateIndex() {
+ return maxUpdateIndex;
+ }
+
+ /** @return total number of references in the reftable. */
+ public long refCount() {
+ return refCnt;
+ }
+
+ /** @return number of unique objects in the reftable. */
+ public long objCount() {
+ return objCnt;
+ }
+
+ /** @return total number of log records in the reftable. */
+ public long logCount() {
+ return logCnt;
+ }
+
+ /** @return number of bytes for references, including ref index. */
+ public long refBytes() {
+ return refBytes;
+ }
+
+ /** @return number of bytes for objects, including object index. */
+ public long objBytes() {
+ return objBytes;
+ }
+
+ /** @return number of bytes for log, including log index. */
+ public long logBytes() {
+ return logBytes;
+ }
+
+ /** @return total number of bytes in the reftable. */
+ public long totalBytes() {
+ return totalBytes;
+ }
+
+ /** @return bytes of padding used to maintain block alignment. */
+ public long paddingBytes() {
+ return paddingUsed;
+ }
+
+ /** @return number of bytes in the ref index; 0 if no index was used. */
+ public int refIndexSize() {
+ return refIndexSize;
+ }
+
+ /** @return number of levels in the ref index. */
+ public int refIndexLevels() {
+ return refIndexLevels;
+ }
+
+ /** @return number of bytes in the object index; 0 if no index. */
+ public int objIndexSize() {
+ return objIndexSize;
+ }
+
+ /** @return number of levels in the object index. */
+ public int objIndexLevels() {
+ return objIndexLevels;
+ }
+
+ /**
+ * @return number of bytes required to uniquely identify all objects in
+ * the reftable. Unique abbreviations in hex would be
+ * {@code 2 * objIdLength()}.
+ */
+ public int objIdLength() {
+ return objIdLen;
+ }
+ }
+
+ private static List<RefList> sortById(ObjectIdSubclassMap<RefList> m) {
+ List<RefList> s = new ArrayList<>(m.size());
+ for (RefList l : m) {
+ s.add(l);
+ }
+ Collections.sort(s);
+ return s;
+ }
+
+ private static int shortestUniqueAbbreviation(List<RefList> in) {
+ // Estimate minimum number of bytes necessary for unique abbreviations.
+ int bytes = Math.max(2, (int) (log(in.size()) / log(8)));
+ Set<AbbreviatedObjectId> tmp = new HashSet<>((int) (in.size() * 0.75f));
+ retry: for (;;) {
+ int hexLen = bytes * 2;
+ for (ObjectId id : in) {
+ AbbreviatedObjectId a = id.abbreviate(hexLen);
+ if (!tmp.add(a)) {
+ if (++bytes >= OBJECT_ID_LENGTH) {
+ return OBJECT_ID_LENGTH;
+ }
+ tmp.clear();
+ continue retry;
+ }
+ }
+ return bytes;
+ }
+ }
+
+ private static class RefList extends ObjectIdOwnerMap.Entry {
+ final LongList blockPos = new LongList(2);
+
+ RefList(AnyObjectId id) {
+ super(id);
+ }
+
+ void addBlock(long pos) {
+ if (!blockPos.contains(pos)) {
+ blockPos.add(pos);
+ }
+ }
+ }
+
+ private class Section {
+ final IndexBuilder idx;
+ final long firstBlockPosition;
+
+ long entryCnt;
+ long bytes;
+
+ Section(byte keyType) {
+ idx = new IndexBuilder(keyType);
+ firstBlockPosition = out.size();
+ }
+
+ long write(BlockWriter.Entry entry) throws IOException {
+ if (cur == null) {
+ beginBlock(entry);
+ } else if (!cur.tryAdd(entry)) {
+ flushCurBlock();
+ if (cur.padBetweenBlocks()) {
+ out.padBetweenBlocksToNextBlock();
+ }
+ beginBlock(entry);
+ }
+ entryCnt++;
+ return out.size();
+ }
+
+ private void beginBlock(BlockWriter.Entry entry)
+ throws BlockSizeTooSmallException {
+ byte blockType = entry.blockType();
+ int bs = out.bytesAvailableInBlock();
+ cur = new BlockWriter(blockType, idx.keyType, bs, restartInterval);
+ cur.mustAdd(entry);
+ }
+
+ void flushCurBlock() throws IOException {
+ idx.entries.add(new IndexEntry(cur.lastKey(), out.size()));
+ cur.writeTo(out);
+ }
+
+ void finishSectionMaybeWriteIndex() throws IOException {
+ flushCurBlock();
+ cur = null;
+ if (shouldHaveIndex(idx)) {
+ idx.writeIndex();
+ }
+ bytes = out.size() - firstBlockPosition;
+ }
+ }
+
+ private class IndexBuilder {
+ final byte keyType;
+ List<IndexEntry> entries = new ArrayList<>();
+ long rootPosition;
+ int bytes;
+ int levels;
+
+ IndexBuilder(byte kt) {
+ keyType = kt;
+ }
+
+ int estimateBytes(long curBlockPos) {
+ BlockWriter b = new BlockWriter(
+ INDEX_BLOCK_TYPE, keyType,
+ MAX_BLOCK_SIZE,
+ Math.max(restartInterval, entries.size() / MAX_RESTARTS));
+ try {
+ for (Entry e : entries) {
+ b.mustAdd(e);
+ }
+ if (cur != null) {
+ b.mustAdd(new IndexEntry(cur.lastKey(), curBlockPos));
+ }
+ } catch (BlockSizeTooSmallException e) {
+ return b.currentSize();
+ }
+ return b.currentSize();
+ }
+
+ void writeIndex() throws IOException {
+ if (padBetweenBlocks(keyType)) {
+ out.padBetweenBlocksToNextBlock();
+ }
+ long startPos = out.size();
+ writeMultiLevelIndex(entries);
+ bytes = (int) (out.size() - startPos);
+ entries = null;
+ }
+
+ private void writeMultiLevelIndex(List<IndexEntry> keys)
+ throws IOException {
+ levels = 1;
+ while (maxIndexLevels == 0 || levels < maxIndexLevels) {
+ keys = writeOneLevel(keys);
+ if (keys == null) {
+ return;
+ }
+ levels++;
+ }
+
+ // When maxIndexLevels has restricted the writer, write one
+ // index block with the entire remaining set of keys.
+ BlockWriter b = new BlockWriter(
+ INDEX_BLOCK_TYPE, keyType,
+ MAX_BLOCK_SIZE,
+ Math.max(restartInterval, keys.size() / MAX_RESTARTS));
+ for (Entry e : keys) {
+ b.mustAdd(e);
+ }
+ rootPosition = out.size();
+ b.writeTo(out);
+ }
+
+ private List<IndexEntry> writeOneLevel(List<IndexEntry> keys)
+ throws IOException {
+ Section thisLevel = new Section(keyType);
+ for (Entry e : keys) {
+ thisLevel.write(e);
+ }
+ if (!thisLevel.idx.entries.isEmpty()) {
+ thisLevel.flushCurBlock();
+ if (cur.padBetweenBlocks()) {
+ out.padBetweenBlocksToNextBlock();
+ }
+ cur = null;
+ return thisLevel.idx.entries;
+ }
+
+ // The current block fit the entire level; make it the root.
+ rootPosition = out.size();
+ cur.writeTo(out);
+ cur = null;
+ return null;
+ }
+ }
+}
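
A minimal usage sketch of the writer above, with imports and exception handling omitted; the no-argument constructor and the setMinUpdateIndex/setMaxUpdateIndex setters are assumed from the earlier part of ReftableWriter not shown in this hunk:

    // Sketch only: constructor and update-index setters are assumed.
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    ReftableWriter writer = new ReftableWriter()
            .setMinUpdateIndex(1)
            .setMaxUpdateIndex(1);
    writer.begin(buf);
    writer.sortAndWriteRefs(refs);   // refs: a Collection<Ref>; sorted by the writer
    writer.finish();                 // writes obj/log sections, indexes and the footer
    ReftableWriter.Stats stats = writer.getStats();
    System.out.println(stats.refCount() + " refs in " + stats.totalBytes() + " bytes");

Any writeLog or deleteLog calls would go between sortAndWriteRefs and finish, ordered by reference name and descending update index as the javadoc above requires.
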
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java
index 29a379e4a6..0567051a90 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java
@@ -336,7 +336,7 @@ public final class AbbreviatedObjectId implements Serializable {
@Override
public int hashCode() {
- return w2;
+ return w1;
}
@Override
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
index de1003bdde..825c1f704a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
@@ -738,4 +738,4 @@ public class BaseRepositoryBuilder<B extends BaseRepositoryBuilder, R extends Re
protected final B self() {
return (B) this;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java
index 3f6995de83..bcf9065dd2 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java
@@ -58,6 +58,8 @@ import java.util.HashSet;
import java.util.List;
import java.util.concurrent.TimeoutException;
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.RefUpdate.Result;
import org.eclipse.jgit.revwalk.RevWalk;
@@ -80,8 +82,10 @@ public class BatchRefUpdate {
* clock skew between machines on the same LAN using an NTP server also on
* the same LAN should be under 5 seconds. 5 seconds is also not that long
* for a large `git push` operation to complete.
+ *
+ * @since 4.9
*/
- private static final Duration MAX_WAIT = Duration.ofSeconds(5);
+ protected static final Duration MAX_WAIT = Duration.ofSeconds(5);
private final RefDatabase refdb;
@@ -100,6 +104,12 @@ public class BatchRefUpdate {
/** Should the result value be appended to {@link #refLogMessage}. */
private boolean refLogIncludeResult;
+ /**
+ * Should reflogs be written even if the configured default for this ref is
+ * not to write it.
+ */
+ private boolean forceRefLog;
+
/** Push certificate associated with this update. */
private PushCertificate pushCert;
@@ -173,25 +183,42 @@ public class BatchRefUpdate {
* @return message the caller wants to include in the reflog; null if the
* update should not be logged.
*/
+ @Nullable
public String getRefLogMessage() {
return refLogMessage;
}
- /** @return {@code true} if the ref log message should show the result. */
+ /**
+ * Check whether the reflog message should include the result of the update,
+ * such as fast-forward or force-update.
+ * <p>
+ * Describes the default for commands in this batch that do not override it
+ * with {@link ReceiveCommand#setRefLogMessage(String, boolean)}.
+ *
+ * @return true if the message should include the result.
+ */
public boolean isRefLogIncludingResult() {
return refLogIncludeResult;
}
/**
* Set the message to include in the reflog.
+ * <p>
+ * Repository implementations may limit which reflogs are written by default,
+ * based on the project configuration. If a repo is not configured to write
+ * logs for this ref by default, setting the message alone may have no effect.
+ * To indicate that the repo should write logs for this update in spite of
+ * configured defaults, use {@link #setForceRefLog(boolean)}.
+ * <p>
+ * Describes the default for commands in this batch that do not override it
+ * with {@link ReceiveCommand#setRefLogMessage(String, boolean)}.
*
* @param msg
- * the message to describe this change. It may be null if
- * appendStatus is null in order not to append to the reflog
+ * the message to describe this change. If null and appendStatus is
+ * false, the reflog will not be updated.
* @param appendStatus
* true if the status of the ref change (fast-forward or
- * forced-update) should be appended to the user supplied
- * message.
+ * forced-update) should be appended to the user supplied message.
* @return {@code this}.
*/
public BatchRefUpdate setRefLogMessage(String msg, boolean appendStatus) {
@@ -209,6 +236,8 @@ public class BatchRefUpdate {
/**
* Don't record this update in the ref's associated reflog.
+ * <p>
+ * Equivalent to {@code setRefLogMessage(null, false)}.
*
* @return {@code this}.
*/
@@ -218,12 +247,38 @@ public class BatchRefUpdate {
return this;
}
- /** @return true if log has been disabled by {@link #disableRefLog()}. */
+ /**
+ * Force writing a reflog for the updated ref.
+ *
+ * @param force whether to force.
+ * @return {@code this}
+ * @since 4.9
+ */
+ public BatchRefUpdate setForceRefLog(boolean force) {
+ forceRefLog = force;
+ return this;
+ }
+
+ /**
+ * Check whether log has been disabled by {@link #disableRefLog()}.
+ *
+ * @return true if disabled.
+ */
public boolean isRefLogDisabled() {
return refLogMessage == null;
}
/**
+ * Check whether the reflog should be written regardless of repo defaults.
+ *
+ * @return whether force writing is enabled.
+ * @since 4.9
+ */
+ protected boolean isForceRefLog() {
+ return forceRefLog;
+ }
+
+ /**
* Request that all updates in this batch be performed atomically.
* <p>
* When atomic updates are used, either all commands apply successfully, or
@@ -323,14 +378,29 @@ public class BatchRefUpdate {
/**
* Gets the list of option strings associated with this update.
*
- * @return pushOptions
+ * @return push options that were passed to {@link #execute}; prior to calling
+ * {@link #execute}, always returns null.
* @since 4.5
*/
+ @Nullable
public List<String> getPushOptions() {
return pushOptions;
}
/**
+ * Set push options associated with this update.
+ * <p>
+ * Implementations must call this at the top of {@link #execute(RevWalk,
+ * ProgressMonitor, List)}.
+ *
+ * @param options options passed to {@code execute}.
+ * @since 4.9
+ */
+ protected void setPushOptions(List<String> options) {
+ pushOptions = options;
+ }
+
+ /**
* @return list of timestamps the batch must wait for.
* @since 4.6
*/
@@ -396,7 +466,7 @@ public class BatchRefUpdate {
}
if (options != null) {
- pushOptions = options;
+ setPushOptions(options);
}
monitor.beginTask(JGitText.get().updatingReferences, commands.size());
@@ -407,6 +477,11 @@ public class BatchRefUpdate {
for (ReceiveCommand cmd : commands) {
try {
if (cmd.getResult() == NOT_ATTEMPTED) {
+ if (isMissing(walk, cmd.getOldId())
+ || isMissing(walk, cmd.getNewId())) {
+ cmd.setResult(ReceiveCommand.Result.REJECTED_MISSING_OBJECT);
+ continue;
+ }
cmd.updateType(walk);
switch (cmd.getType()) {
case CREATE:
@@ -462,7 +537,7 @@ public class BatchRefUpdate {
break SWITCH;
}
ru.setCheckConflicting(false);
- addRefToPrefixes(takenPrefixes, cmd.getRefName());
+ takenPrefixes.addAll(getPrefixes(cmd.getRefName()));
takenNames.add(cmd.getRefName());
cmd.setResult(ru.update(walk));
}
@@ -478,6 +553,19 @@ public class BatchRefUpdate {
monitor.endTask();
}
+ private static boolean isMissing(RevWalk walk, ObjectId id)
+ throws IOException {
+ if (id.equals(ObjectId.zeroId())) {
+ return false; // Explicit add or delete is not missing.
+ }
+ try {
+ walk.parseAny(id);
+ return false;
+ } catch (MissingObjectException e) {
+ return true;
+ }
+ }
+
/**
* Wait for timestamps to be in the past, aborting commands on timeout.
*
@@ -523,29 +611,45 @@ public class BatchRefUpdate {
execute(walk, monitor, null);
}
- private static Collection<String> getTakenPrefixes(
- final Collection<String> names) {
+ private static Collection<String> getTakenPrefixes(Collection<String> names) {
Collection<String> ref = new HashSet<>();
- for (String name : names)
- ref.addAll(getPrefixes(name));
+ for (String name : names) {
+ addPrefixesTo(name, ref);
+ }
return ref;
}
- private static void addRefToPrefixes(Collection<String> prefixes,
- String name) {
- for (String prefix : getPrefixes(name)) {
- prefixes.add(prefix);
- }
+ /**
+ * Get all path prefixes of a ref name.
+ *
+ * @param name
+ * ref name.
+ * @return path prefixes of the ref name. For {@code refs/heads/foo}, returns
+ * {@code refs} and {@code refs/heads}.
+ * @since 4.9
+ */
+ protected static Collection<String> getPrefixes(String name) {
+ Collection<String> ret = new HashSet<>();
+ addPrefixesTo(name, ret);
+ return ret;
}
- static Collection<String> getPrefixes(String s) {
- Collection<String> ret = new HashSet<>();
- int p1 = s.indexOf('/');
+ /**
+ * Add prefixes of a ref name to an existing collection.
+ *
+ * @param name
+ * ref name.
+ * @param out
+ * collection to add the prefixes to. For {@code refs/heads/foo},
+ * adds {@code refs} and {@code refs/heads}.
+ * @since 4.9
+ */
+ protected static void addPrefixesTo(String name, Collection<String> out) {
+ int p1 = name.indexOf('/');
while (p1 > 0) {
- ret.add(s.substring(0, p1));
- p1 = s.indexOf('/', p1 + 1);
+ out.add(name.substring(0, p1));
+ p1 = name.indexOf('/', p1 + 1);
}
- return ret;
}
/**
@@ -560,11 +664,12 @@ public class BatchRefUpdate {
*/
protected RefUpdate newUpdate(ReceiveCommand cmd) throws IOException {
RefUpdate ru = refdb.newUpdate(cmd.getRefName(), false);
- if (isRefLogDisabled())
+ if (isRefLogDisabled(cmd)) {
ru.disableRefLog();
- else {
+ } else {
ru.setRefLogIdent(refLogIdent);
- ru.setRefLogMessage(refLogMessage, refLogIncludeResult);
+ ru.setRefLogMessage(getRefLogMessage(cmd), isRefLogIncludingResult(cmd));
+ ru.setForceRefLog(isForceRefLog(cmd));
}
ru.setPushCertificate(pushCert);
switch (cmd.getType()) {
@@ -585,6 +690,62 @@ public class BatchRefUpdate {
}
}
+ /**
+ * Check whether reflog is disabled for a command.
+ *
+ * @param cmd
+ * specific command.
+ * @return whether the reflog is disabled, taking into account the state from
+ * this instance as well as overrides in the given command.
+ * @since 4.9
+ */
+ protected boolean isRefLogDisabled(ReceiveCommand cmd) {
+ return cmd.hasCustomRefLog() ? cmd.isRefLogDisabled() : isRefLogDisabled();
+ }
+
+ /**
+ * Get reflog message for a command.
+ *
+ * @param cmd
+ * specific command.
+ * @return reflog message, taking into account the state from this instance as
+ * well as overrides in the given command.
+ * @since 4.9
+ */
+ protected String getRefLogMessage(ReceiveCommand cmd) {
+ return cmd.hasCustomRefLog() ? cmd.getRefLogMessage() : getRefLogMessage();
+ }
+
+ /**
+ * Check whether the reflog message for a command should include the result.
+ *
+ * @param cmd
+ * specific command.
+ * @return whether the reflog message should show the result, taking into
+ * account the state from this instance as well as overrides in the
+ * given command.
+ * @since 4.9
+ */
+ protected boolean isRefLogIncludingResult(ReceiveCommand cmd) {
+ return cmd.hasCustomRefLog()
+ ? cmd.isRefLogIncludingResult() : isRefLogIncludingResult();
+ }
+
+ /**
+ * Check whether the reflog for a command should be written regardless of repo
+ * defaults.
+ *
+ * @param cmd
+ * specific command.
+ * @return whether force writing is enabled.
+ * @since 4.9
+ */
+ protected boolean isForceRefLog(ReceiveCommand cmd) {
+ Boolean isForceRefLog = cmd.isForceRefLog();
+ return isForceRefLog != null ? isForceRefLog.booleanValue()
+ : isForceRefLog();
+ }
+
@Override
public String toString() {
StringBuilder r = new StringBuilder();
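
A sketch of driving the new reflog controls from a caller; repo, ident, oldId and newId are assumed to exist, and exception handling is omitted:

    // Sketch: one command, forced reflog, result appended to the message.
    BatchRefUpdate batch = repo.getRefDatabase().newBatchUpdate()
            .setRefLogIdent(ident)
            .setRefLogMessage("push", true) // append fast-forward/forced-update result
            .setForceRefLog(true);          // write the log even if not configured for this ref
    batch.addCommand(new ReceiveCommand(oldId, newId, "refs/heads/master"));
    try (RevWalk walk = new RevWalk(repo)) {
        batch.execute(walk, NullProgressMonitor.INSTANCE);
    }

Individual commands can still carry their own reflog settings; the new isRefLogDisabled(cmd), getRefLogMessage(cmd) and isForceRefLog(cmd) helpers prefer the per-command values whenever ReceiveCommand reports a custom reflog.
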
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BitmapObject.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BitmapObject.java
index 345016c17d..4e0dc2cda1 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BitmapObject.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BitmapObject.java
@@ -62,4 +62,4 @@ public abstract class BitmapObject {
* @return unique hash of this object.
*/
public abstract ObjectId getObjectId();
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BlobObjectChecker.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BlobObjectChecker.java
new file mode 100644
index 0000000000..0fe63ae5b4
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BlobObjectChecker.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.lib;
+
+import org.eclipse.jgit.errors.CorruptObjectException;
+
+/**
+ * Verifies that a blob object is a valid object.
+ * <p>
+ * Unlike trees, commits and tags, blobs have no inherent validity rules.
+ * Implementers can optionally implement this checker to reject certain blobs.
+ *
+ * @since 4.9
+ */
+public interface BlobObjectChecker {
+ /** No-op implementation of {@link BlobObjectChecker}. */
+ public static final BlobObjectChecker NULL_CHECKER =
+ new BlobObjectChecker() {
+ @Override
+ public void update(byte[] in, int p, int len) {
+ // Empty implementation.
+ }
+
+ @Override
+ public void endBlob(AnyObjectId id) {
+ // Empty implementation.
+ }
+ };
+
+ /**
+ * Check a new fragment of the blob.
+ *
+ * @param in
+ * input array of bytes.
+ * @param offset
+ * offset to start at from {@code in}.
+ * @param len
+ * length of the fragment to check.
+ */
+ void update(byte[] in, int offset, int len);
+
+ /**
+ * Finalize the blob checking.
+ *
+ * @param id
+ * identity of the object being checked.
+ * @throws CorruptObjectException
+ * if any error was detected.
+ */
+ void endBlob(AnyObjectId id) throws CorruptObjectException;
+}
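
For illustration only, a checker implementing this interface might reject oversized blobs; the class name and size limit below are hypothetical:

    // Hypothetical implementation: reject blobs above a fixed size.
    class MaxSizeBlobChecker implements BlobObjectChecker {
        private final long limit;
        private long seen;

        MaxSizeBlobChecker(long limit) {
            this.limit = limit;
        }

        @Override
        public void update(byte[] in, int offset, int len) {
            seen += len; // a blob may be delivered in several fragments
        }

        @Override
        public void endBlob(AnyObjectId id) throws CorruptObjectException {
            if (seen > limit) {
                throw new CorruptObjectException(
                        id.name() + " exceeds " + limit + " bytes");
            }
        }
    }
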
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CheckoutEntry.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CheckoutEntry.java
index d6608cd1c6..34d0b14adf 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CheckoutEntry.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CheckoutEntry.java
@@ -17,4 +17,4 @@ public interface CheckoutEntry {
*/
public abstract String getToBranch();
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Config.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Config.java
index f45c71cf95..b0f5c2cf4e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Config.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Config.java
@@ -51,9 +51,6 @@
package org.eclipse.jgit.lib;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
@@ -62,8 +59,6 @@ import java.util.Locale;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.events.ConfigChangedEvent;
@@ -71,21 +66,24 @@ import org.eclipse.jgit.events.ConfigChangedListener;
import org.eclipse.jgit.events.ListenerHandle;
import org.eclipse.jgit.events.ListenerList;
import org.eclipse.jgit.internal.JGitText;
-import org.eclipse.jgit.util.IO;
-import org.eclipse.jgit.util.RawParseUtils;
-import org.eclipse.jgit.util.StringUtils;
-
+import org.eclipse.jgit.transport.RefSpec;
/**
* Git style {@code .config}, {@code .gitconfig}, {@code .gitmodules} file.
*/
public class Config {
+
private static final String[] EMPTY_STRING_ARRAY = {};
- private static final long KiB = 1024;
- private static final long MiB = 1024 * KiB;
- private static final long GiB = 1024 * MiB;
+
+ static final long KiB = 1024;
+ static final long MiB = 1024 * KiB;
+ static final long GiB = 1024 * MiB;
private static final int MAX_DEPTH = 10;
+ private static final TypedConfigGetter DEFAULT_GETTER = new DefaultTypedConfigGetter();
+
+ private static TypedConfigGetter typedGetter = DEFAULT_GETTER;
+
/** the change listeners */
private final ListenerList listeners = new ListenerList();
@@ -106,7 +104,7 @@ public class Config {
* must ensure it is a special copy of the empty string. It also must
* be treated like the empty string.
*/
- private static final String MAGIC_EMPTY_VALUE = new String();
+ static final String MAGIC_EMPTY_VALUE = new String();
/** Create a configuration with no default fallback. */
public Config() {
@@ -126,6 +124,18 @@ public class Config {
}
/**
+ * Globally sets a {@link TypedConfigGetter} that is subsequently used to
+ * read typed values from all git configs.
+ *
+ * @param getter
+ * to use; if {@code null} use the default getter.
+ * @since 4.9
+ */
+ public static void setTypedConfigGetter(TypedConfigGetter getter) {
+ typedGetter = getter == null ? DEFAULT_GETTER : getter;
+ }
+
+ /**
* Escape the value before saving
*
* @param x
@@ -206,7 +216,7 @@ public class Config {
*/
public int getInt(final String section, final String name,
final int defaultValue) {
- return getInt(section, null, name, defaultValue);
+ return typedGetter.getInt(this, section, null, name, defaultValue);
}
/**
@@ -224,11 +234,8 @@ public class Config {
*/
public int getInt(final String section, String subsection,
final String name, final int defaultValue) {
- final long val = getLong(section, subsection, name, defaultValue);
- if (Integer.MIN_VALUE <= val && val <= Integer.MAX_VALUE)
- return (int) val;
- throw new IllegalArgumentException(MessageFormat.format(JGitText.get().integerValueOutOfRange
- , section, name));
+ return typedGetter.getInt(this, section, subsection, name,
+ defaultValue);
}
/**
@@ -243,7 +250,7 @@ public class Config {
* @return an integer value from the configuration, or defaultValue.
*/
public long getLong(String section, String name, long defaultValue) {
- return getLong(section, null, name, defaultValue);
+ return typedGetter.getLong(this, section, null, name, defaultValue);
}
/**
@@ -261,37 +268,8 @@ public class Config {
*/
public long getLong(final String section, String subsection,
final String name, final long defaultValue) {
- final String str = getString(section, subsection, name);
- if (str == null)
- return defaultValue;
-
- String n = str.trim();
- if (n.length() == 0)
- return defaultValue;
-
- long mul = 1;
- switch (StringUtils.toLowerCase(n.charAt(n.length() - 1))) {
- case 'g':
- mul = GiB;
- break;
- case 'm':
- mul = MiB;
- break;
- case 'k':
- mul = KiB;
- break;
- }
- if (mul > 1)
- n = n.substring(0, n.length() - 1).trim();
- if (n.length() == 0)
- return defaultValue;
-
- try {
- return mul * Long.parseLong(n);
- } catch (NumberFormatException nfe) {
- throw new IllegalArgumentException(MessageFormat.format(JGitText.get().invalidIntegerValue
- , section, name, str));
- }
+ return typedGetter.getLong(this, section, subsection, name,
+ defaultValue);
}
/**
@@ -308,7 +286,7 @@ public class Config {
*/
public boolean getBoolean(final String section, final String name,
final boolean defaultValue) {
- return getBoolean(section, null, name, defaultValue);
+ return typedGetter.getBoolean(this, section, null, name, defaultValue);
}
/**
@@ -327,17 +305,8 @@ public class Config {
*/
public boolean getBoolean(final String section, String subsection,
final String name, final boolean defaultValue) {
- String n = getRawString(section, subsection, name);
- if (n == null)
- return defaultValue;
- if (MAGIC_EMPTY_VALUE == n)
- return true;
- try {
- return StringUtils.toBoolean(n);
- } catch (IllegalArgumentException err) {
- throw new IllegalArgumentException(MessageFormat.format(JGitText.get().invalidBooleanValue
- , section, name, n));
- }
+ return typedGetter.getBoolean(this, section, subsection, name,
+ defaultValue);
}
/**
@@ -358,7 +327,8 @@ public class Config {
public <T extends Enum<?>> T getEnum(final String section,
final String subsection, final String name, final T defaultValue) {
final T[] all = allValuesOf(defaultValue);
- return getEnum(all, section, subsection, name, defaultValue);
+ return typedGetter.getEnum(this, all, section, subsection, name,
+ defaultValue);
}
@SuppressWarnings("unchecked")
@@ -393,55 +363,8 @@ public class Config {
*/
public <T extends Enum<?>> T getEnum(final T[] all, final String section,
final String subsection, final String name, final T defaultValue) {
- String value = getString(section, subsection, name);
- if (value == null)
- return defaultValue;
-
- if (all[0] instanceof ConfigEnum) {
- for (T t : all) {
- if (((ConfigEnum) t).matchConfigValue(value))
- return t;
- }
- }
-
- String n = value.replace(' ', '_');
-
- // Because of c98abc9c0586c73ef7df4172644b7dd21c979e9d being used in
- // the real world before its breakage was fully understood, we must
- // also accept '-' as though it were ' '.
- n = n.replace('-', '_');
-
- T trueState = null;
- T falseState = null;
- for (T e : all) {
- if (StringUtils.equalsIgnoreCase(e.name(), n))
- return e;
- else if (StringUtils.equalsIgnoreCase(e.name(), "TRUE")) //$NON-NLS-1$
- trueState = e;
- else if (StringUtils.equalsIgnoreCase(e.name(), "FALSE")) //$NON-NLS-1$
- falseState = e;
- }
-
- // This is an odd little fallback. C Git sometimes allows boolean
- // values in a tri-state with other things. If we have both a true
- // and a false value in our enumeration, assume its one of those.
- //
- if (trueState != null && falseState != null) {
- try {
- return StringUtils.toBoolean(n) ? trueState : falseState;
- } catch (IllegalArgumentException err) {
- // Fall through and use our custom error below.
- }
- }
-
- if (subsection != null)
- throw new IllegalArgumentException(MessageFormat.format(
- JGitText.get().enumValueNotSupported3, section, subsection,
- name, value));
- else
- throw new IllegalArgumentException(
- MessageFormat.format(JGitText.get().enumValueNotSupported2,
- section, name, value));
+ return typedGetter.getEnum(this, all, section, subsection, name,
+ defaultValue);
}
/**
@@ -515,100 +438,25 @@ public class Config {
*/
public long getTimeUnit(String section, String subsection, String name,
long defaultValue, TimeUnit wantUnit) {
- String valueString = getString(section, subsection, name);
-
- if (valueString == null) {
- return defaultValue;
- }
-
- String s = valueString.trim();
- if (s.length() == 0) {
- return defaultValue;
- }
-
- if (s.startsWith("-")/* negative */) { //$NON-NLS-1$
- throw notTimeUnit(section, subsection, name, valueString);
- }
-
- Matcher m = Pattern.compile("^(0|[1-9][0-9]*)\\s*(.*)$") //$NON-NLS-1$
- .matcher(valueString);
- if (!m.matches()) {
- return defaultValue;
- }
-
- String digits = m.group(1);
- String unitName = m.group(2).trim();
-
- TimeUnit inputUnit;
- int inputMul;
-
- if (unitName.isEmpty()) {
- inputUnit = wantUnit;
- inputMul = 1;
-
- } else if (match(unitName, "ms", "milliseconds")) { //$NON-NLS-1$ //$NON-NLS-2$
- inputUnit = TimeUnit.MILLISECONDS;
- inputMul = 1;
-
- } else if (match(unitName, "s", "sec", "second", "seconds")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
- inputUnit = TimeUnit.SECONDS;
- inputMul = 1;
-
- } else if (match(unitName, "m", "min", "minute", "minutes")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
- inputUnit = TimeUnit.MINUTES;
- inputMul = 1;
-
- } else if (match(unitName, "h", "hr", "hour", "hours")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
- inputUnit = TimeUnit.HOURS;
- inputMul = 1;
-
- } else if (match(unitName, "d", "day", "days")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
- inputUnit = TimeUnit.DAYS;
- inputMul = 1;
-
- } else if (match(unitName, "w", "week", "weeks")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
- inputUnit = TimeUnit.DAYS;
- inputMul = 7;
-
- } else if (match(unitName, "mon", "month", "months")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
- inputUnit = TimeUnit.DAYS;
- inputMul = 30;
-
- } else if (match(unitName, "y", "year", "years")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
- inputUnit = TimeUnit.DAYS;
- inputMul = 365;
-
- } else {
- throw notTimeUnit(section, subsection, name, valueString);
- }
-
- try {
- return wantUnit.convert(Long.parseLong(digits) * inputMul,
- inputUnit);
- } catch (NumberFormatException nfe) {
- throw notTimeUnit(section, subsection, unitName, valueString);
- }
- }
-
- private static boolean match(final String a, final String... cases) {
- for (final String b : cases) {
- if (b != null && b.equalsIgnoreCase(a)) {
- return true;
- }
- }
- return false;
+ return typedGetter.getTimeUnit(this, section, subsection, name,
+ defaultValue, wantUnit);
}
- private IllegalArgumentException notTimeUnit(String section,
- String subsection, String name, String valueString) {
- if (subsection != null) {
- return new IllegalArgumentException(
- MessageFormat.format(JGitText.get().invalidTimeUnitValue3,
- section, subsection, name, valueString));
- }
- return new IllegalArgumentException(
- MessageFormat.format(JGitText.get().invalidTimeUnitValue2,
- section, name, valueString));
+ /**
+ * Parse a list of {@link RefSpec}s from the configuration.
+ *
+ * @param section
+ * section the key is in.
+ * @param subsection
+ * subsection the key is in, or null if not in a subsection.
+ * @param name
+ * the key name.
+ * @return a possibly empty list of {@link RefSpec}s
+ * @since 4.9
+ */
+ public List<RefSpec> getRefSpecs(String section, String subsection,
+ String name) {
+ return typedGetter.getRefSpecs(this, section, subsection, name);
}
/**
@@ -757,7 +605,7 @@ public class Config {
listeners.dispatch(new ConfigChangedEvent());
}
- private String getRawString(final String section, final String subsection,
+ String getRawString(final String section, final String subsection,
final String name) {
String[] lst = getRawStringList(section, subsection, name);
if (lst != null) {
@@ -1220,10 +1068,6 @@ public class Config {
e.value = MAGIC_EMPTY_VALUE;
} else
e.value = readValue(in, false, -1);
-
- if (e.section.equals("include")) { //$NON-NLS-1$
- addIncludedConfig(newEntries, e, depth);
- }
} else
throw new ConfigInvalidException(JGitText.get().invalidLineInConfigFile);
}
@@ -1231,36 +1075,6 @@ public class Config {
return newEntries;
}
- private void addIncludedConfig(final List<ConfigLine> newEntries,
- ConfigLine line, int depth) throws ConfigInvalidException {
- if (!line.name.equals("path") || //$NON-NLS-1$
- line.value == null || line.value.equals(MAGIC_EMPTY_VALUE)) {
- throw new ConfigInvalidException(
- JGitText.get().invalidLineInConfigFile);
- }
- File path = new File(line.value);
- try {
- byte[] bytes = IO.readFully(path);
- String decoded;
- if (isUtf8(bytes)) {
- decoded = RawParseUtils.decode(RawParseUtils.UTF8_CHARSET,
- bytes, 3, bytes.length);
- } else {
- decoded = RawParseUtils.decode(bytes);
- }
- newEntries.addAll(fromTextRecurse(decoded, depth + 1));
- } catch (FileNotFoundException fnfe) {
- if (path.exists()) {
- throw new ConfigInvalidException(MessageFormat
- .format(JGitText.get().cannotReadFile, path), fnfe);
- }
- } catch (IOException ioe) {
- throw new ConfigInvalidException(
- MessageFormat.format(JGitText.get().cannotReadFile, path),
- ioe);
- }
- }
-
private ConfigSnapshot newState() {
return new ConfigSnapshot(Collections.<ConfigLine> emptyList(),
getBaseState());
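
The typed accessors now funnel through the pluggable getter; a small sketch of the new getRefSpecs call (ConfigInvalidException from fromText is not handled here):

    // Sketch: parse fetch refspecs through the new typed accessor.
    Config cfg = new Config();
    cfg.fromText("[remote \"origin\"]\n"
            + "\tfetch = +refs/heads/*:refs/remotes/origin/*\n");
    List<RefSpec> specs = cfg.getRefSpecs("remote", "origin", "fetch");

Because the getter is stored in a static field, a replacement installed via setTypedConfigGetter affects every Config instance in the JVM, not just the one it was set on.
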
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
index 26181809ac..08c883a83e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
@@ -108,6 +108,12 @@ public class ConfigConstants {
public static final String CONFIG_PULL_SECTION = "pull";
/**
+ * The "merge" section
+ * @since 4.9
+ */
+ public static final String CONFIG_MERGE_SECTION = "merge";
+
+ /**
* The "filter" section
* @since 4.6
*/
@@ -372,6 +378,13 @@ public class ConfigConstants {
public static final String CONFIG_KEY_RENAMES = "renames";
/**
+ * The "inCoreLimit" key in the "merge" section. It's a size limit (in bytes)
+ * that controls whether file content is held in a {@code Heap} buffer or
+ * spooled to a {@code LocalFile} during the merge.
+ * @since 4.9
+ */
+ public static final String CONFIG_KEY_IN_CORE_LIMIT = "inCoreLimit";
+
+ /**
* The "prune" key
* @since 3.3
*/
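
A sketch of reading the new key; the repository variable and the 10 MiB fallback are illustrative only:

    // Sketch: look up merge.inCoreLimit with an illustrative default.
    int inCoreLimit = repo.getConfig().getInt(
            ConfigConstants.CONFIG_MERGE_SECTION,
            ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT,
            10 * 1024 * 1024);
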
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java
index bda1a2745f..5bfccda15c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java
@@ -1,7 +1,7 @@
/*
* Copyright (C) 2008, Google Inc.
* Copyright (C) 2008, Robin Rosenberg <robin.rosenberg@dewire.com>
- * Copyright (C) 2006-2012, Shawn O. Pearce <spearce@spearce.org>
+ * Copyright (C) 2006-2017, Shawn O. Pearce <spearce@spearce.org>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -429,6 +429,20 @@ public final class Constants {
public static final String HOOKS = "hooks";
/**
+ * Merge attribute.
+ *
+ * @since 4.9
+ */
+ public static final String ATTR_MERGE = "merge"; //$NON-NLS-1$
+
+ /**
+ * Value of the merge attribute selecting the built-in binary merge driver.
+ *
+ * @since 4.9
+ */
+ public static final String ATTR_BUILTIN_BINARY_MERGER = "binary"; //$NON-NLS-1$
+
+ /**
* Create a new digest function for objects.
*
* @return a new digest object.
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java
index 40aba636d1..fdbbe39393 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java
@@ -57,12 +57,7 @@ import org.eclipse.jgit.lib.Config.SectionParser;
*/
public class CoreConfig {
/** Key for {@link Config#get(SectionParser)}. */
- public static final Config.SectionParser<CoreConfig> KEY = new SectionParser<CoreConfig>() {
- @Override
- public CoreConfig parse(final Config cfg) {
- return new CoreConfig(cfg);
- }
- };
+ public static final Config.SectionParser<CoreConfig> KEY = CoreConfig::new;
/** Permissible values for {@code core.autocrlf}. */
public static enum AutoCRLF {
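
SectionParser declares a single parse(Config) method, so the constructor reference behaves exactly like the removed anonymous class and call sites are unchanged; for example (repo assumed):

    // Call site unchanged: Config.get(KEY) parses the core section on demand.
    CoreConfig core = repo.getConfig().get(CoreConfig.KEY);
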
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/DefaultTypedConfigGetter.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/DefaultTypedConfigGetter.java
new file mode 100644
index 0000000000..fd37747601
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/DefaultTypedConfigGetter.java
@@ -0,0 +1,299 @@
+/*
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.lib;
+
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.eclipse.jgit.annotations.NonNull;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.lib.Config.ConfigEnum;
+import org.eclipse.jgit.transport.RefSpec;
+import org.eclipse.jgit.util.StringUtils;
+
+/**
+ * A {@link TypedConfigGetter} that throws {@link IllegalArgumentException} on
+ * invalid values.
+ *
+ * @since 4.9
+ */
+public class DefaultTypedConfigGetter implements TypedConfigGetter {
+
+ @Override
+ public boolean getBoolean(Config config, String section, String subsection,
+ String name, boolean defaultValue) {
+ String n = config.getRawString(section, subsection, name);
+ if (n == null) {
+ return defaultValue;
+ }
+ if (Config.MAGIC_EMPTY_VALUE == n) {
+ return true;
+ }
+ try {
+ return StringUtils.toBoolean(n);
+ } catch (IllegalArgumentException err) {
+ throw new IllegalArgumentException(MessageFormat.format(
+ JGitText.get().invalidBooleanValue, section, name, n));
+ }
+ }
+
+ @Override
+ public <T extends Enum<?>> T getEnum(Config config, T[] all, String section,
+ String subsection, String name, T defaultValue) {
+ String value = config.getString(section, subsection, name);
+ if (value == null) {
+ return defaultValue;
+ }
+ if (all[0] instanceof ConfigEnum) {
+ for (T t : all) {
+ if (((ConfigEnum) t).matchConfigValue(value)) {
+ return t;
+ }
+ }
+ }
+
+ String n = value.replace(' ', '_');
+
+ // Because of c98abc9c0586c73ef7df4172644b7dd21c979e9d being used in
+ // the real world before its breakage was fully understood, we must
+ // also accept '-' as though it were ' '.
+ n = n.replace('-', '_');
+
+ T trueState = null;
+ T falseState = null;
+ for (T e : all) {
+ if (StringUtils.equalsIgnoreCase(e.name(), n)) {
+ return e;
+ } else if (StringUtils.equalsIgnoreCase(e.name(), "TRUE")) { //$NON-NLS-1$
+ trueState = e;
+ } else if (StringUtils.equalsIgnoreCase(e.name(), "FALSE")) { //$NON-NLS-1$
+ falseState = e;
+ }
+ }
+
+ // This is an odd little fallback. C Git sometimes allows boolean
+ // values in a tri-state with other things. If we have both a true
+ // and a false value in our enumeration, assume it's one of those.
+ //
+ if (trueState != null && falseState != null) {
+ try {
+ return StringUtils.toBoolean(n) ? trueState : falseState;
+ } catch (IllegalArgumentException err) {
+ // Fall through and use our custom error below.
+ }
+ }
+
+ if (subsection != null) {
+ throw new IllegalArgumentException(
+ MessageFormat.format(JGitText.get().enumValueNotSupported3,
+ section, subsection, name, value));
+ } else {
+ throw new IllegalArgumentException(
+ MessageFormat.format(JGitText.get().enumValueNotSupported2,
+ section, name, value));
+ }
+ }
+
+ @Override
+ public int getInt(Config config, String section, String subsection,
+ String name, int defaultValue) {
+ long val = config.getLong(section, subsection, name, defaultValue);
+ if (Integer.MIN_VALUE <= val && val <= Integer.MAX_VALUE) {
+ return (int) val;
+ }
+ throw new IllegalArgumentException(MessageFormat
+ .format(JGitText.get().integerValueOutOfRange, section, name));
+ }
+
+ @Override
+ public long getLong(Config config, String section, String subsection,
+ String name, long defaultValue) {
+ final String str = config.getString(section, subsection, name);
+ if (str == null) {
+ return defaultValue;
+ }
+ String n = str.trim();
+ if (n.length() == 0) {
+ return defaultValue;
+ }
+ long mul = 1;
+ switch (StringUtils.toLowerCase(n.charAt(n.length() - 1))) {
+ case 'g':
+ mul = Config.GiB;
+ break;
+ case 'm':
+ mul = Config.MiB;
+ break;
+ case 'k':
+ mul = Config.KiB;
+ break;
+ }
+ if (mul > 1) {
+ n = n.substring(0, n.length() - 1).trim();
+ }
+ if (n.length() == 0) {
+ return defaultValue;
+ }
+ try {
+ return mul * Long.parseLong(n);
+ } catch (NumberFormatException nfe) {
+ throw new IllegalArgumentException(MessageFormat.format(
+ JGitText.get().invalidIntegerValue, section, name, str));
+ }
+ }
+
+ @Override
+ public long getTimeUnit(Config config, String section, String subsection,
+ String name, long defaultValue, TimeUnit wantUnit) {
+ String valueString = config.getString(section, subsection, name);
+
+ if (valueString == null) {
+ return defaultValue;
+ }
+
+ String s = valueString.trim();
+ if (s.length() == 0) {
+ return defaultValue;
+ }
+
+ if (s.startsWith("-")/* negative */) { //$NON-NLS-1$
+ throw notTimeUnit(section, subsection, name, valueString);
+ }
+
+ Matcher m = Pattern.compile("^(0|[1-9][0-9]*)\\s*(.*)$") //$NON-NLS-1$
+ .matcher(valueString);
+ if (!m.matches()) {
+ return defaultValue;
+ }
+
+ String digits = m.group(1);
+ String unitName = m.group(2).trim();
+
+ TimeUnit inputUnit;
+ int inputMul;
+
+ if (unitName.isEmpty()) {
+ inputUnit = wantUnit;
+ inputMul = 1;
+
+ } else if (match(unitName, "ms", "milliseconds")) { //$NON-NLS-1$ //$NON-NLS-2$
+ inputUnit = TimeUnit.MILLISECONDS;
+ inputMul = 1;
+
+ } else if (match(unitName, "s", "sec", "second", "seconds")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+ inputUnit = TimeUnit.SECONDS;
+ inputMul = 1;
+
+ } else if (match(unitName, "m", "min", "minute", "minutes")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+ inputUnit = TimeUnit.MINUTES;
+ inputMul = 1;
+
+ } else if (match(unitName, "h", "hr", "hour", "hours")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+ inputUnit = TimeUnit.HOURS;
+ inputMul = 1;
+
+ } else if (match(unitName, "d", "day", "days")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ inputUnit = TimeUnit.DAYS;
+ inputMul = 1;
+
+ } else if (match(unitName, "w", "week", "weeks")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ inputUnit = TimeUnit.DAYS;
+ inputMul = 7;
+
+ } else if (match(unitName, "mon", "month", "months")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ inputUnit = TimeUnit.DAYS;
+ inputMul = 30;
+
+ } else if (match(unitName, "y", "year", "years")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ inputUnit = TimeUnit.DAYS;
+ inputMul = 365;
+
+ } else {
+ throw notTimeUnit(section, subsection, name, valueString);
+ }
+
+ try {
+ return wantUnit.convert(Long.parseLong(digits) * inputMul,
+ inputUnit);
+ } catch (NumberFormatException nfe) {
+ throw notTimeUnit(section, subsection, unitName, valueString);
+ }
+ }
+
+ private static boolean match(final String a, final String... cases) {
+ for (final String b : cases) {
+ if (b != null && b.equalsIgnoreCase(a)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ private static IllegalArgumentException notTimeUnit(String section,
+ String subsection, String name, String valueString) {
+ if (subsection != null) {
+ return new IllegalArgumentException(
+ MessageFormat.format(JGitText.get().invalidTimeUnitValue3,
+ section, subsection, name, valueString));
+ }
+ return new IllegalArgumentException(
+ MessageFormat.format(JGitText.get().invalidTimeUnitValue2,
+ section, name, valueString));
+ }
+
+ @Override
+ public @NonNull List<RefSpec> getRefSpecs(Config config, String section,
+ String subsection, String name) {
+ String[] values = config.getStringList(section, subsection, name);
+ List<RefSpec> result = new ArrayList<>(values.length);
+ for (String spec : values) {
+ result.add(new RefSpec(spec));
+ }
+ return result;
+ }
+}
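
The time-unit parsing above was carried over from Config; a sketch of the behaviour it preserves (the section and key names are made up, and ConfigInvalidException from fromText is not handled):

    // Sketch: "2 weeks" resolves to 14 when asked for days.
    Config cfg = new Config();
    cfg.fromText("[example]\n\ttimeout = 2 weeks\n");
    long days = cfg.getTimeUnit("example", null, "timeout", 0, TimeUnit.DAYS); // 14
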
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/FileMode.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/FileMode.java
index a489461f84..edbc709f48 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/FileMode.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/FileMode.java
@@ -83,7 +83,6 @@ public abstract class FileMode {
public static final int TYPE_MISSING = 0000000;
/** Mode indicating an entry is a tree (aka directory). */
- @SuppressWarnings("synthetic-access")
public static final FileMode TREE = new FileMode(TYPE_TREE,
Constants.OBJ_TREE) {
@Override
@@ -93,7 +92,6 @@ public abstract class FileMode {
};
/** Mode indicating an entry is a symbolic link. */
- @SuppressWarnings("synthetic-access")
public static final FileMode SYMLINK = new FileMode(TYPE_SYMLINK,
Constants.OBJ_BLOB) {
@Override
@@ -103,7 +101,6 @@ public abstract class FileMode {
};
/** Mode indicating an entry is a non-executable file. */
- @SuppressWarnings("synthetic-access")
public static final FileMode REGULAR_FILE = new FileMode(0100644,
Constants.OBJ_BLOB) {
@Override
@@ -113,7 +110,6 @@ public abstract class FileMode {
};
/** Mode indicating an entry is an executable file. */
- @SuppressWarnings("synthetic-access")
public static final FileMode EXECUTABLE_FILE = new FileMode(0100755,
Constants.OBJ_BLOB) {
@Override
@@ -123,7 +119,6 @@ public abstract class FileMode {
};
/** Mode indicating an entry is a submodule commit in another repository. */
- @SuppressWarnings("synthetic-access")
public static final FileMode GITLINK = new FileMode(TYPE_GITLINK,
Constants.OBJ_COMMIT) {
@Override
@@ -133,7 +128,6 @@ public abstract class FileMode {
};
/** Mode indicating an entry is missing during parallel walks. */
- @SuppressWarnings("synthetic-access")
public static final FileMode MISSING = new FileMode(TYPE_MISSING,
Constants.OBJ_BAD) {
@Override
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java
index e544b72a85..ea573a48d7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java
@@ -513,14 +513,10 @@ public class IndexDiff {
}
}
- for (int i = 0; i < treeWalk.getTreeCount(); i++) {
- Set<String> values = fileModes.get(treeWalk.getFileMode(i));
- String path = treeWalk.getPathString();
- if (path != null) {
- if (values == null)
- values = new HashSet<>();
- values.add(path);
- fileModes.put(treeWalk.getFileMode(i), values);
+ String path = treeWalk.getPathString();
+ if (path != null) {
+ for (int i = 0; i < treeWalk.getTreeCount(); i++) {
+ recordFileMode(path, treeWalk.getFileMode(i));
}
}
}
@@ -545,19 +541,21 @@ public class IndexDiff {
}
Repository subRepo = smw.getRepository();
if (subRepo != null) {
+ String subRepoPath = smw.getPath();
try {
ObjectId subHead = subRepo.resolve("HEAD"); //$NON-NLS-1$
if (subHead != null
- && !subHead.equals(smw.getObjectId()))
- modified.add(smw.getPath());
- else if (ignoreSubmoduleMode != IgnoreSubmoduleMode.DIRTY) {
+ && !subHead.equals(smw.getObjectId())) {
+ modified.add(subRepoPath);
+ recordFileMode(subRepoPath, FileMode.GITLINK);
+ } else if (ignoreSubmoduleMode != IgnoreSubmoduleMode.DIRTY) {
IndexDiff smid = submoduleIndexDiffs.get(smw
.getPath());
if (smid == null) {
smid = new IndexDiff(subRepo,
smw.getObjectId(),
wTreeIt.getWorkingTreeIterator(subRepo));
- submoduleIndexDiffs.put(smw.getPath(), smid);
+ submoduleIndexDiffs.put(subRepoPath, smid);
}
if (smid.diff()) {
if (ignoreSubmoduleMode == IgnoreSubmoduleMode.UNTRACKED
@@ -569,7 +567,8 @@ public class IndexDiff {
&& smid.getRemoved().isEmpty()) {
continue;
}
- modified.add(smw.getPath());
+ modified.add(subRepoPath);
+ recordFileMode(subRepoPath, FileMode.GITLINK);
}
}
} finally {
@@ -593,6 +592,17 @@ public class IndexDiff {
return true;
}
+ private void recordFileMode(String path, FileMode mode) {
+ Set<String> values = fileModes.get(mode);
+ if (path != null) {
+ if (values == null) {
+ values = new HashSet<>();
+ fileModes.put(mode, values);
+ }
+ values.add(path);
+ }
+ }
+
private boolean isEntryGitLink(AbstractTreeIterator ti) {
return ((ti != null) && (ti.getEntryRawMode() == FileMode.GITLINK
.getBits()));
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java
index 9d3aee1508..19c5c7eb45 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java
@@ -359,7 +359,13 @@ public class ObjectChecker {
checkTree(id, raw);
break;
case OBJ_BLOB:
- checkBlob(raw);
+ BlobObjectChecker checker = newBlobObjectChecker();
+ if (checker == null) {
+ checkBlob(raw);
+ } else {
+ checker.update(raw, 0, raw.length);
+ checker.endBlob(id);
+ }
break;
default:
report(UNKNOWN_TYPE, id, MessageFormat.format(
@@ -1067,8 +1073,22 @@ public class ObjectChecker {
}
/**
+ * Create a new {@link BlobObjectChecker}.
+ *
+	 * @return a new {@link BlobObjectChecker}, or null if none is provided.
+ * @since 4.9
+ */
+ @Nullable
+ public BlobObjectChecker newBlobObjectChecker() {
+ return null;
+ }
+
+ /**
* Check a blob for errors.
*
+	 * <p>PackParser does not always invoke this method for blobs; use
+	 * {@link #newBlobObjectChecker} instead to check blobs in those cases.
+ *
* @param raw
* the blob data. The array is never modified.
* @throws CorruptObjectException
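
A minimal sketch of how the new newBlobObjectChecker() hook could be used. The subclass name and the 10 MiB limit are illustrative only; the only BlobObjectChecker methods assumed are update() and endBlob(), which the hunk above already calls.

import org.eclipse.jgit.errors.CorruptObjectException;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.BlobObjectChecker;
import org.eclipse.jgit.lib.ObjectChecker;

class SizeLimitingObjectChecker extends ObjectChecker {
	@Override
	public BlobObjectChecker newBlobObjectChecker() {
		return new BlobObjectChecker() {
			private long size;

			@Override
			public void update(byte[] in, int p, int len) {
				size += len; // inspect each chunk as it streams in
			}

			@Override
			public void endBlob(AnyObjectId id)
					throws CorruptObjectException {
				if (size > 10 * 1024 * 1024) { // example limit only
					throw new CorruptObjectException(
							"blob " + id.name() + " exceeds example limit");
				}
			}
		};
	}
}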
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectInserter.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectInserter.java
index 857ec9b2df..b2ffbe6f6f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectInserter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectInserter.java
@@ -423,6 +423,13 @@ public abstract class ObjectInserter implements AutoCloseable {
* <p>
* The returned reader should return this inserter instance from {@link
* ObjectReader#getCreatedFromInserter()}.
+ * <p>
+ * Behavior is undefined if an insert method is called on the inserter in the
+ * middle of reading from an {@link ObjectStream} opened from this reader. For
+ * example, reading the remainder of the object may fail, or newly written
+ * data may even be corrupted. Interleaving whole object reads (including
+ * streaming reads) with inserts is fine, just not interleaving streaming
+ * <em>partial</em> object reads with inserts.
*
* @since 3.5
* @return reader for any object, including an object recently inserted by
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefUpdate.java
index fc334f0275..766b21da0e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefUpdate.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefUpdate.java
@@ -58,7 +58,13 @@ import org.eclipse.jgit.transport.PushCertificate;
* Creates, updates or deletes any reference.
*/
public abstract class RefUpdate {
- /** Status of an update request. */
+ /**
+ * Status of an update request.
+ * <p>
+ * New values may be added to this enum in the future. Callers may assume that
+ * unknown values are failures, and may generally treat them the same as
+ * {@link #REJECTED_OTHER_REASON}.
+ */
public static enum Result {
/** The ref update/delete has not been attempted by the caller. */
NOT_ATTEMPTED,
@@ -114,6 +120,10 @@ public abstract class RefUpdate {
* merged into the new value. The configuration did not allow a forced
* update/delete to take place, so ref still contains the old value. No
* previous history was lost.
+ * <p>
+ * <em>Note:</em> Despite the general name, this result only refers to the
+ * non-fast-forward case. For more general errors, see {@link
+ * #REJECTED_OTHER_REASON}.
*/
REJECTED,
@@ -139,7 +149,25 @@ public abstract class RefUpdate {
* The ref was renamed from another name
* <p>
*/
- RENAMED
+ RENAMED,
+
+ /**
+ * One or more objects aren't in the repository.
+ * <p>
+		 * This is a severe indication of either repository corruption on the
+ * server side, or a bug in the client wherein the client did not supply
+ * all required objects during the pack transfer.
+ *
+ * @since 4.9
+ */
+ REJECTED_MISSING_OBJECT,
+
+ /**
+ * Rejected for some other reason not covered by another enum value.
+ *
+ * @since 4.9
+ */
+ REJECTED_OTHER_REASON;
}
/** New value the caller wants this ref to have. */
@@ -157,6 +185,12 @@ public abstract class RefUpdate {
/** Should the Result value be appended to {@link #refLogMessage}. */
private boolean refLogIncludeResult;
+ /**
+ * Should reflogs be written even if the configured default for this ref is
+ * not to write it.
+ */
+ private boolean forceRefLog;
+
/** Old value of the ref, obtained after we lock it. */
private ObjectId oldValue;
@@ -278,6 +312,16 @@ public abstract class RefUpdate {
}
/**
+ * Return whether this update is actually detaching a symbolic ref.
+ *
+ * @return true if detaching a symref.
+ * @since 4.9
+ */
+ public boolean isDetachingSymbolicRef() {
+ return detachingSymbolicRef;
+ }
+
+ /**
* Set the new value the ref will update to.
*
* @param id
@@ -365,6 +409,12 @@ public abstract class RefUpdate {
/**
* Set the message to include in the reflog.
+ * <p>
+ * Repository implementations may limit which reflogs are written by default,
+ * based on the project configuration. If a repo is not configured to write
+ * logs for this ref by default, setting the message alone may have no effect.
+ * To indicate that the repo should write logs for this update in spite of
+ * configured defaults, use {@link #setForceRefLog(boolean)}.
*
* @param msg
* the message to describe this change. It may be null if
@@ -393,6 +443,26 @@ public abstract class RefUpdate {
}
/**
+ * Force writing a reflog for the updated ref.
+ *
+ * @param force whether to force.
+ * @since 4.9
+ */
+ public void setForceRefLog(boolean force) {
+ forceRefLog = force;
+ }
+
+ /**
+ * Check whether the reflog should be written regardless of repo defaults.
+ *
+ * @return whether force writing is enabled.
+ * @since 4.9
+ */
+ protected boolean isForceRefLog() {
+ return forceRefLog;
+ }
+
+ /**
* The old value of the ref, prior to the update being attempted.
* <p>
* This value may differ before and after the update method. Initially it is
@@ -627,34 +697,47 @@ public abstract class RefUpdate {
RevObject oldObj;
// don't make expensive conflict check if this is an existing Ref
- if (oldValue == null && checkConflicting && getRefDatabase().isNameConflicting(getName()))
+ if (oldValue == null && checkConflicting
+ && getRefDatabase().isNameConflicting(getName())) {
return Result.LOCK_FAILURE;
+ }
try {
// If we're detaching a symbolic reference, we should update the reference
// itself. Otherwise, we will update the leaf reference, which should be
// an ObjectIdRef.
- if (!tryLock(!detachingSymbolicRef))
+ if (!tryLock(!detachingSymbolicRef)) {
return Result.LOCK_FAILURE;
+ }
if (expValue != null) {
final ObjectId o;
o = oldValue != null ? oldValue : ObjectId.zeroId();
- if (!AnyObjectId.equals(expValue, o))
+ if (!AnyObjectId.equals(expValue, o)) {
return Result.LOCK_FAILURE;
+ }
}
- if (oldValue == null)
+ try {
+ newObj = safeParseNew(walk, newValue);
+ } catch (MissingObjectException e) {
+ return Result.REJECTED_MISSING_OBJECT;
+ }
+
+ if (oldValue == null) {
return store.execute(Result.NEW);
+ }
- newObj = safeParse(walk, newValue);
- oldObj = safeParse(walk, oldValue);
- if (newObj == oldObj && !detachingSymbolicRef)
+ oldObj = safeParseOld(walk, oldValue);
+ if (newObj == oldObj && !detachingSymbolicRef) {
return store.execute(Result.NO_CHANGE);
+ }
- if (isForceUpdate())
+ if (isForceUpdate()) {
return store.execute(Result.FORCED);
+ }
if (newObj instanceof RevCommit && oldObj instanceof RevCommit) {
- if (walk.isMergedInto((RevCommit) oldObj, (RevCommit) newObj))
+ if (walk.isMergedInto((RevCommit) oldObj, (RevCommit) newObj)) {
return store.execute(Result.FAST_FORWARD);
+ }
}
return Result.REJECTED;
@@ -674,16 +757,23 @@ public abstract class RefUpdate {
checkConflicting = check;
}
- private static RevObject safeParse(final RevWalk rw, final AnyObjectId id)
+ private static RevObject safeParseNew(RevWalk rw, AnyObjectId newId)
+ throws IOException {
+ if (newId == null || ObjectId.zeroId().equals(newId)) {
+ return null;
+ }
+ return rw.parseAny(newId);
+ }
+
+ private static RevObject safeParseOld(RevWalk rw, AnyObjectId oldId)
throws IOException {
try {
- return id != null ? rw.parseAny(id) : null;
+ return oldId != null ? rw.parseAny(oldId) : null;
} catch (MissingObjectException e) {
- // We can expect some objects to be missing, like if we are
- // trying to force a deletion of a branch and the object it
- // points to has been pruned from the database due to freak
- // corruption accidents (it happens with 'git new-work-dir').
- //
+ // We can expect some old objects to be missing, like if we are trying to
+ // force a deletion of a branch and the object it points to has been
+ // pruned from the database due to freak corruption accidents (it happens
+ // with 'git new-work-dir').
return null;
}
}
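
An illustrative sketch of a caller reacting to the expanded Result enum; the ref name is a placeholder, exception handling is elided, and the default branch follows the new Javadoc advice of treating unknown values like REJECTED_OTHER_REASON.

static void updateExample(Repository repo, ObjectId newId) throws IOException {
	RefUpdate u = repo.updateRef("refs/heads/example"); // placeholder ref
	u.setNewObjectId(newId);
	u.setForceRefLog(true); // write a reflog entry despite repo defaults
	u.setRefLogMessage("example update", false);
	switch (u.update()) {
	case NEW:
	case FAST_FORWARD:
	case FORCED:
	case NO_CHANGE:
		break; // success
	case REJECTED_MISSING_OBJECT:
		break; // newId depends on objects the repository does not have
	default:
		break; // REJECTED, REJECTED_OTHER_REASON, and any future values
	}
}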
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogEntry.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogEntry.java
index 0504646ee7..afa6521d67 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogEntry.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogEntry.java
@@ -50,6 +50,39 @@ package org.eclipse.jgit.lib;
public interface ReflogEntry {
/**
+ * Prefix used in reflog messages when the ref was first created.
+ * <p>
+ * Does not have a corresponding constant in C git, but is untranslated like
+ * the other constants.
+ *
+ * @since 4.9
+ */
+ public static final String PREFIX_CREATED = "created"; //$NON-NLS-1$
+
+ /**
+ * Prefix used in reflog messages when the ref was updated with a fast
+ * forward.
+ * <p>
+ * Untranslated, and exactly matches the
+ * <a href="https://git.kernel.org/pub/scm/git/git.git/tree/builtin/fetch.c?id=f3da2b79be9565779e4f76dc5812c68e156afdf0#n680">
+ * untranslated string in C git</a>.
+ *
+ * @since 4.9
+ */
+ public static final String PREFIX_FAST_FORWARD = "fast-forward"; //$NON-NLS-1$
+
+ /**
+ * Prefix used in reflog messages when the ref was force updated.
+ * <p>
+ * Untranslated, and exactly matches the
+ * <a href="https://git.kernel.org/pub/scm/git/git.git/tree/builtin/fetch.c?id=f3da2b79be9565779e4f76dc5812c68e156afdf0#n695">
+ * untranslated string in C git</a>.
+ *
+ * @since 4.9
+ */
+ public static final String PREFIX_FORCED_UPDATE = "forced-update"; //$NON-NLS-1$
+
+ /**
* @return the commit id before the change
*/
public abstract ObjectId getOldId();
@@ -75,4 +108,4 @@ public interface ReflogEntry {
*/
public abstract CheckoutEntry parseCheckout();
-} \ No newline at end of file
+}
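
A short usage sketch for the new prefix constants; the ref name is a placeholder and IOException handling is omitted.

ReflogReader reflog = repository.getReflogReader("refs/heads/master");
if (reflog != null) {
	for (ReflogEntry entry : reflog.getReverseEntries()) {
		String comment = entry.getComment();
		if (comment.startsWith(ReflogEntry.PREFIX_FORCED_UPDATE)) {
			// history of this ref was rewritten
		} else if (comment.startsWith(ReflogEntry.PREFIX_FAST_FORWARD)) {
			// plain fast-forward update
		} else if (comment.startsWith(ReflogEntry.PREFIX_CREATED)) {
			// first entry written for the ref
		}
	}
}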
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogReader.java
index fdab883fb3..d3f25369bd 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogReader.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogReader.java
@@ -86,4 +86,4 @@ public interface ReflogReader {
public abstract List<ReflogEntry> getReverseEntries(int max)
throws IOException;
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java
index bd23ab988d..fdf59667f5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java
@@ -108,7 +108,13 @@ import org.slf4j.LoggerFactory;
* A repository holds all objects and refs used for managing source code (could
* be any type of file, but source code is what SCM's are typically used for).
* <p>
- * This class is thread-safe.
+ * The thread-safety of a {@link Repository} very much depends on the concrete
+ * implementation. Applications working with a generic {@code Repository} type
+ * must not assume the instance is thread-safe.
+ * <ul>
+ * <li>{@code FileRepository} is thread-safe.
+ * <li>{@code DfsRepository} thread-safety is determined by its subclass.
+ * </ul>
*/
public abstract class Repository implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(Repository.class);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryBuilder.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryBuilder.java
index e989caf83c..95be2d1f0d 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryBuilder.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryBuilder.java
@@ -55,7 +55,7 @@ import java.io.File;
* <p>
* Single repository applications trying to be compatible with other Git
* implementations are encouraged to use a model such as:
- *
+ *
* <pre>
* new RepositoryBuilder() //
* .setGitDir(gitDirArgument) // --git-dir if supplied, no-op if null
@@ -63,7 +63,7 @@ import java.io.File;
* .findGitDir() // scan up the file system tree
* .build()
* </pre>
- *
+ *
* @see org.eclipse.jgit.storage.file.FileRepositoryBuilder
*/
public class RepositoryBuilder extends
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/SubmoduleConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/SubmoduleConfig.java
index 12f7b82343..12675061f1 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/SubmoduleConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/SubmoduleConfig.java
@@ -43,8 +43,6 @@
package org.eclipse.jgit.lib;
-import java.util.Locale;
-
import org.eclipse.jgit.util.StringUtils;
/**
@@ -79,7 +77,7 @@ public class SubmoduleConfig {
@Override
public String toConfigValue() {
- return name().toLowerCase(Locale.ROOT).replace('_', '-');
+ return configValue;
}
@Override
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/TypedConfigGetter.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/TypedConfigGetter.java
new file mode 100644
index 0000000000..594edef665
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/TypedConfigGetter.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.lib;
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import org.eclipse.jgit.annotations.NonNull;
+import org.eclipse.jgit.transport.RefSpec;
+
+/**
+ * Something that knows how to convert plain strings from a git {@link Config}
+ * to typed values.
+ *
+ * @since 4.9
+ */
+public interface TypedConfigGetter {
+
+ /**
+ * Get a boolean value from a git {@link Config}.
+ *
+ * @param config
+ * to get the value from
+ * @param section
+ * section the key is grouped within.
+ * @param subsection
+	 *            subsection name, such as a remote or branch name.
+ * @param name
+ * name of the key to get.
+ * @param defaultValue
+ * default value to return if no value was present.
+ * @return true if any value or defaultValue is true, false for missing or
+ * explicit false
+ */
+ boolean getBoolean(Config config, String section, String subsection,
+ String name, boolean defaultValue);
+
+ /**
+ * Parse an enumeration from a git {@link Config}.
+ *
+ * @param <T>
+ * type of the enumeration object.
+ * @param config
+ * to get the value from
+ * @param all
+ * all possible values in the enumeration which should be
+ * recognized. Typically {@code EnumType.values()}.
+ * @param section
+ * section the key is grouped within.
+ * @param subsection
+	 *            subsection name, such as a remote or branch name.
+ * @param name
+ * name of the key to get.
+ * @param defaultValue
+ * default value to return if no value was present.
+ * @return the selected enumeration value, or {@code defaultValue}.
+ */
+ <T extends Enum<?>> T getEnum(Config config, T[] all, String section,
+ String subsection, String name, T defaultValue);
+
+ /**
+ * Obtain an integer value from a git {@link Config}.
+ *
+ * @param config
+ * to get the value from
+ * @param section
+ * section the key is grouped within.
+ * @param subsection
+	 *            subsection name, such as a remote or branch name.
+ * @param name
+ * name of the key to get.
+ * @param defaultValue
+ * default value to return if no value was present.
+ * @return an integer value from the configuration, or defaultValue.
+ */
+ int getInt(Config config, String section, String subsection, String name,
+ int defaultValue);
+
+ /**
+ * Obtain a long value from a git {@link Config}.
+ *
+ * @param config
+ * to get the value from
+ * @param section
+ * section the key is grouped within.
+ * @param subsection
+	 *            subsection name, such as a remote or branch name.
+ * @param name
+ * name of the key to get.
+ * @param defaultValue
+ * default value to return if no value was present.
+ * @return a long value from the configuration, or defaultValue.
+ */
+ long getLong(Config config, String section, String subsection, String name,
+ long defaultValue);
+
+ /**
+ * Parse a numerical time unit, such as "1 minute", from a git
+ * {@link Config}.
+ *
+ * @param config
+ * to get the value from
+ * @param section
+ * section the key is in.
+ * @param subsection
+ * subsection the key is in, or null if not in a subsection.
+ * @param name
+ * the key name.
+ * @param defaultValue
+ * default value to return if no value was present.
+ * @param wantUnit
+ * the units of {@code defaultValue} and the return value, as
+ * well as the units to assume if the value does not contain an
+ * indication of the units.
+ * @return the value, or {@code defaultValue} if not set, expressed in
+	 *         {@code wantUnit}.
+ */
+ long getTimeUnit(Config config, String section, String subsection,
+ String name, long defaultValue, TimeUnit wantUnit);
+
+
+ /**
+ * Parse a list of {@link RefSpec}s from a git {@link Config}.
+ *
+ * @param config
+ * to get the list from
+ * @param section
+ * section the key is in.
+ * @param subsection
+ * subsection the key is in, or null if not in a subsection.
+ * @param name
+ * the key name.
+ * @return a possibly empty list of {@link RefSpec}s
+ */
+ @NonNull
+ List<RefSpec> getRefSpecs(Config config, String section, String subsection,
+ String name);
+}
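
A brief sketch of code consuming the new interface. The getter instance is whatever implementation the application supplies (JGit ships a default one backing Config's typed accessors), and the section/key names are examples only.

static List<RefSpec> fetchSpecs(TypedConfigGetter getter, Config config) {
	// "remote"/"origin"/"fetch" are example key names
	return getter.getRefSpecs(config, "remote", "origin", "fetch");
}

static long timeoutSeconds(TypedConfigGetter getter, Config config) {
	// falls back to 30 seconds when the key is absent
	return getter.getTimeUnit(config, "http", null, "timeout", 30,
			TimeUnit.SECONDS);
}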
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/UserConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/UserConfig.java
index bd393dd2ff..102a4518f6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/UserConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/UserConfig.java
@@ -51,12 +51,7 @@ import org.eclipse.jgit.util.SystemReader;
/** The standard "user" configuration parameters. */
public class UserConfig {
/** Key for {@link Config#get(SectionParser)}. */
- public static final Config.SectionParser<UserConfig> KEY = new SectionParser<UserConfig>() {
- @Override
- public UserConfig parse(final Config cfg) {
- return new UserConfig(cfg);
- }
- };
+ public static final Config.SectionParser<UserConfig> KEY = UserConfig::new;
private String authorName;
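
Caller-side view of the simplification above: SectionParser has a single parse(Config) method, so any constructor taking a Config can serve as the parser. MyConfig below is hypothetical.

UserConfig user = repository.getConfig().get(UserConfig.KEY);
String author = user.getAuthorName();

// The same pattern works for application-defined sections:
// MyConfig cfg = repository.getConfig().get(MyConfig::new);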
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeFormatterPass.java b/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeFormatterPass.java
index 0345921bdb..060f06884a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeFormatterPass.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeFormatterPass.java
@@ -143,4 +143,4 @@ class MergeFormatterPass {
if (out.isBeginln())
out.write('\n');
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/merge/ResolveMerger.java b/org.eclipse.jgit/src/org/eclipse/jgit/merge/ResolveMerger.java
index 86003e9243..246121bf3a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/merge/ResolveMerger.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/merge/ResolveMerger.java
@@ -2,6 +2,7 @@
* Copyright (C) 2010, Christian Halstrick <christian.halstrick@sap.com>,
* Copyright (C) 2010-2012, Matthias Sohn <matthias.sohn@sap.com>
* Copyright (C) 2012, Research In Motion Limited
+ * Copyright (C) 2017, Obeo (mathieu.cartaud@obeo.fr)
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -67,6 +68,7 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import org.eclipse.jgit.attributes.Attributes;
import org.eclipse.jgit.diff.DiffAlgorithm;
import org.eclipse.jgit.diff.DiffAlgorithm.SupportedAlgorithm;
import org.eclipse.jgit.diff.RawText;
@@ -83,6 +85,7 @@ import org.eclipse.jgit.errors.IndexWriteException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.errors.NoWorkTreeException;
import org.eclipse.jgit.lib.Config;
+import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
@@ -270,6 +273,12 @@ public class ResolveMerger extends ThreeWayMerger {
*/
protected MergeAlgorithm mergeAlgorithm;
+ /**
+	 * The size limit (in bytes) that determines whether merged content is held
+	 * in memory ({@code Heap}) or written to a {@code LocalFile} during the merge.
+ */
+ private int inCoreLimit;
+
private static MergeAlgorithm getMergeAlgorithm(Config config) {
SupportedAlgorithm diffAlg = config.getEnum(
CONFIG_DIFF_SECTION, null, CONFIG_KEY_ALGORITHM,
@@ -277,6 +286,11 @@ public class ResolveMerger extends ThreeWayMerger {
return new MergeAlgorithm(DiffAlgorithm.getAlgorithm(diffAlg));
}
+ private static int getInCoreLimit(Config config) {
+ return config.getInt(
+ ConfigConstants.CONFIG_MERGE_SECTION, ConfigConstants.CONFIG_KEY_IN_CORE_LIMIT, 10 << 20);
+ }
+
private static String[] defaultCommitNames() {
return new String[] { "BASE", "OURS", "THEIRS" }; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
}
@@ -287,7 +301,9 @@ public class ResolveMerger extends ThreeWayMerger {
*/
protected ResolveMerger(Repository local, boolean inCore) {
super(local);
- mergeAlgorithm = getMergeAlgorithm(local.getConfig());
+ Config config = local.getConfig();
+ mergeAlgorithm = getMergeAlgorithm(config);
+ inCoreLimit = getInCoreLimit(config);
commitNames = defaultCommitNames();
this.inCore = inCore;
@@ -429,9 +445,10 @@ public class ResolveMerger extends ThreeWayMerger {
}
/**
- * Processes one path and tries to merge. This method will do all do all
- * trivial (not content) merges and will also detect if a merge will fail.
- * The merge will fail when one of the following is true
+	 * Processes one path and tries to merge, taking git attributes into account.
+ * This method will do all trivial (not content) merges and will also detect
+ * if a merge will fail. The merge will fail when one of the following is
+ * true
* <ul>
* <li>the index entry does not match the entry in ours. When merging one
* branch into the current HEAD, ours will point to HEAD and theirs will
@@ -471,11 +488,69 @@ public class ResolveMerger extends ThreeWayMerger {
* @throws CorruptObjectException
* @throws IOException
* @since 3.5
+ * @deprecated
*/
+ @Deprecated
protected boolean processEntry(CanonicalTreeParser base,
CanonicalTreeParser ours, CanonicalTreeParser theirs,
DirCacheBuildIterator index, WorkingTreeIterator work,
- boolean ignoreConflicts)
+ boolean ignoreConflicts) throws MissingObjectException,
+ IncorrectObjectTypeException, CorruptObjectException, IOException {
+ return processEntry(base, ours, theirs, index, work, ignoreConflicts,
+ null);
+ }
+
+ /**
+	 * Processes one path and tries to merge, taking git attributes into account.
+ * This method will do all trivial (not content) merges and will also detect
+ * if a merge will fail. The merge will fail when one of the following is
+ * true
+ * <ul>
+ * <li>the index entry does not match the entry in ours. When merging one
+ * branch into the current HEAD, ours will point to HEAD and theirs will
+ * point to the other branch. It is assumed that the index matches the HEAD
+ * because it will only not match HEAD if it was populated before the merge
+ * operation. But the merge commit should not accidentally contain
+ * modifications done before the merge. Check the <a href=
+ * "http://www.kernel.org/pub/software/scm/git/docs/git-read-tree.html#_3_way_merge"
+ * >git read-tree</a> documentation for further explanations.</li>
+ * <li>A conflict was detected and the working-tree file is dirty. When a
+ * conflict is detected the content-merge algorithm will try to write a
+ * merged version into the working-tree. If the file is dirty we would
+ * override unsaved data.</li>
+ * </ul>
+ *
+ * @param base
+ * the common base for ours and theirs
+ * @param ours
+ * the ours side of the merge. When merging a branch into the
+ * HEAD ours will point to HEAD
+ * @param theirs
+ * the theirs side of the merge. When merging a branch into the
+ * current HEAD theirs will point to the branch which is merged
+ * into HEAD.
+ * @param index
+ * the index entry
+ * @param work
+ * the file in the working tree
+ * @param ignoreConflicts
+ * see
+ * {@link ResolveMerger#mergeTrees(AbstractTreeIterator, RevTree, RevTree, boolean)}
+ * @param attributes
+ * the attributes defined for this entry
+ * @return <code>false</code> if the merge will fail because the index entry
+ * didn't match ours or the working-dir file was dirty and a
+ * conflict occurred
+ * @throws MissingObjectException
+ * @throws IncorrectObjectTypeException
+ * @throws CorruptObjectException
+ * @throws IOException
+ * @since 4.9
+ */
+ protected boolean processEntry(CanonicalTreeParser base,
+ CanonicalTreeParser ours, CanonicalTreeParser theirs,
+ DirCacheBuildIterator index, WorkingTreeIterator work,
+ boolean ignoreConflicts, Attributes attributes)
throws MissingObjectException, IncorrectObjectTypeException,
CorruptObjectException, IOException {
enterSubtree = true;
@@ -627,7 +702,8 @@ public class ResolveMerger extends ThreeWayMerger {
return false;
// Don't attempt to resolve submodule link conflicts
- if (isGitLink(modeO) || isGitLink(modeT)) {
+ if (isGitLink(modeO) || isGitLink(modeT)
+ || !attributes.canBeContentMerged()) {
add(tw.getRawPath(), base, DirCacheEntry.STAGE_1, 0, 0);
add(tw.getRawPath(), ours, DirCacheEntry.STAGE_2, 0, 0);
add(tw.getRawPath(), theirs, DirCacheEntry.STAGE_3, 0, 0);
@@ -636,8 +712,9 @@ public class ResolveMerger extends ThreeWayMerger {
}
MergeResult<RawText> result = contentMerge(base, ours, theirs);
- if (ignoreConflicts)
+ if (ignoreConflicts) {
result.setContainsConflicts(false);
+ }
updateIndex(base, ours, theirs, result);
if (result.containsConflicts() && !ignoreConflicts)
unmergedPaths.add(tw.getPathString());
@@ -760,6 +837,7 @@ public class ResolveMerger extends ThreeWayMerger {
MergeResult<RawText> result) throws FileNotFoundException,
IOException {
File mergedFile = !inCore ? writeMergedFile(result) : null;
+
if (result.containsConflicts()) {
// A conflict occurred, the file will contain conflict markers
// the index will be populated with the three stages and the
@@ -827,7 +905,7 @@ public class ResolveMerger extends ThreeWayMerger {
private ObjectId insertMergeResult(MergeResult<RawText> result)
throws IOException {
TemporaryBuffer.LocalFile buf = new TemporaryBuffer.LocalFile(
- db != null ? nonNullRepo().getDirectory() : null, 10 << 20);
+ db != null ? nonNullRepo().getDirectory() : null, inCoreLimit);
try {
new MergeFormatter().formatMerge(buf, result,
Arrays.asList(commitNames), CHARACTER_ENCODING);
@@ -1091,6 +1169,8 @@ public class ResolveMerger extends ThreeWayMerger {
protected boolean mergeTreeWalk(TreeWalk treeWalk, boolean ignoreConflicts)
throws IOException {
boolean hasWorkingTreeIterator = tw.getTreeCount() > T_FILE;
+ boolean hasAttributeNodeProvider = treeWalk
+ .getAttributesNodeProvider() != null;
while (treeWalk.next()) {
if (!processEntry(
treeWalk.getTree(T_BASE, CanonicalTreeParser.class),
@@ -1098,7 +1178,9 @@ public class ResolveMerger extends ThreeWayMerger {
treeWalk.getTree(T_THEIRS, CanonicalTreeParser.class),
treeWalk.getTree(T_INDEX, DirCacheBuildIterator.class),
hasWorkingTreeIterator ? treeWalk.getTree(T_FILE,
- WorkingTreeIterator.class) : null, ignoreConflicts)) {
+ WorkingTreeIterator.class) : null,
+ ignoreConflicts, hasAttributeNodeProvider
+ ? treeWalk.getAttributes() : new Attributes())) {
cleanUp();
return false;
}
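
A small sketch of raising the new in-core merge buffer limit through the repository config before merging. The literal key name "inCoreLimit" under the "merge" section is an assumption; this hunk only shows the ConfigConstants reference. IOException handling is omitted.

StoredConfig config = repository.getConfig();
config.setInt("merge", null, "inCoreLimit", 32 * 1024 * 1024); // assumed key name
config.save();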
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/nls/TranslationBundle.java b/org.eclipse.jgit/src/org/eclipse/jgit/nls/TranslationBundle.java
index c85c179aa2..bde69c0eec 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/nls/TranslationBundle.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/nls/TranslationBundle.java
@@ -184,4 +184,4 @@ public abstract class TranslationBundle {
}
}
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/filter/SkipRevFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/filter/SkipRevFilter.java
index e230c9b6b1..51dd2ed812 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/filter/SkipRevFilter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/filter/SkipRevFilter.java
@@ -91,4 +91,4 @@ public class SkipRevFilter extends RevFilter {
public RevFilter clone() {
return new SkipRevFilter(skip);
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/storage/pack/PackConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/storage/pack/PackConfig.java
index c64aa2d782..4f2374ff7b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/storage/pack/PackConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/storage/pack/PackConfig.java
@@ -260,6 +260,8 @@ public class PackConfig {
private boolean cutDeltaChains;
+ private boolean singlePack;
+
/** Create a default configuration. */
public PackConfig() {
// Fields are initialized to defaults.
@@ -320,6 +322,7 @@ public class PackConfig {
this.bitmapExcessiveBranchCount = cfg.bitmapExcessiveBranchCount;
this.bitmapInactiveBranchAgeInDays = cfg.bitmapInactiveBranchAgeInDays;
this.cutDeltaChains = cfg.cutDeltaChains;
+ this.singlePack = cfg.singlePack;
}
/**
@@ -555,6 +558,30 @@ public class PackConfig {
}
/**
+ * @return true if all of refs/* should be packed in a single pack. Default
+ * is false, packing a separate GC_REST pack for references outside
+ * of refs/heads/* and refs/tags/*.
+ * @since 4.9
+ */
+ public boolean getSinglePack() {
+ return singlePack;
+ }
+
+ /**
+ * If {@code true}, packs a single GC pack for all objects reachable from
+ * refs/*. Otherwise packs the GC pack with objects reachable from
+ * refs/heads/* and refs/tags/*, and a GC_REST pack with the remaining
+ * reachable objects. Disabled by default, packing GC and GC_REST.
+ *
+ * @param single
+ * true to pack a single GC pack rather than GC and GC_REST packs
+ * @since 4.9
+ */
+ public void setSinglePack(boolean single) {
+ singlePack = single;
+ }
+
+ /**
* Get the number of objects to try when looking for a delta base.
*
* This limit is per thread, if 4 threads are used the actual memory used
@@ -1026,6 +1053,8 @@ public class PackConfig {
rc.getBoolean("pack", "deltacompression", isDeltaCompress())); //$NON-NLS-1$ //$NON-NLS-2$
setCutDeltaChains(
rc.getBoolean("pack", "cutdeltachains", getCutDeltaChains())); //$NON-NLS-1$ //$NON-NLS-2$
+ setSinglePack(
+ rc.getBoolean("pack", "singlepack", getSinglePack())); //$NON-NLS-1$ //$NON-NLS-2$
setBuildBitmaps(
rc.getBoolean("pack", "buildbitmaps", isBuildBitmaps())); //$NON-NLS-1$ //$NON-NLS-2$
setBitmapContiguousCommitCount(
@@ -1073,6 +1102,7 @@ public class PackConfig {
.append(getBitmapExcessiveBranchCount());
b.append(", bitmapInactiveBranchAge=") //$NON-NLS-1$
.append(getBitmapInactiveBranchAgeInDays());
+ b.append(", singlePack=").append(getSinglePack()); //$NON-NLS-1$
return b.toString();
}
}
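
A usage sketch for the new option, either through the API or the "pack.singlepack" key read in fromConfig() above.

PackConfig pc = new PackConfig(repository);
pc.setSinglePack(true);

// or equivalently in the repository config:
// [pack]
//     singlepack = true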
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/submodule/SubmoduleWalk.java b/org.eclipse.jgit/src/org/eclipse/jgit/submodule/SubmoduleWalk.java
index a10f3d7117..56784f7a76 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/submodule/SubmoduleWalk.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/submodule/SubmoduleWalk.java
@@ -45,7 +45,8 @@ package org.eclipse.jgit.submodule;
import java.io.File;
import java.io.IOException;
import java.text.MessageFormat;
-import java.util.Locale;
+import java.util.HashMap;
+import java.util.Map;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheIterator;
@@ -330,6 +331,8 @@ public class SubmoduleWalk implements AutoCloseable {
private String path;
+ private Map<String, String> pathToName;
+
/**
* Create submodule generator
*
@@ -355,6 +358,7 @@ public class SubmoduleWalk implements AutoCloseable {
*/
public SubmoduleWalk setModulesConfig(final Config config) {
modulesConfig = config;
+ loadPathNames();
return this;
}
@@ -374,6 +378,7 @@ public class SubmoduleWalk implements AutoCloseable {
public SubmoduleWalk setRootTree(final AbstractTreeIterator tree) {
rootTree = tree;
modulesConfig = null;
+ pathToName = null;
return this;
}
@@ -396,6 +401,7 @@ public class SubmoduleWalk implements AutoCloseable {
p.reset(walk.getObjectReader(), id);
rootTree = p;
modulesConfig = null;
+ pathToName = null;
return this;
}
@@ -419,6 +425,7 @@ public class SubmoduleWalk implements AutoCloseable {
repository.getFS());
config.load();
modulesConfig = config;
+ loadPathNames();
} else {
try (TreeWalk configWalk = new TreeWalk(repository)) {
configWalk.addTree(rootTree);
@@ -438,10 +445,12 @@ public class SubmoduleWalk implements AutoCloseable {
if (filter.isDone(configWalk)) {
modulesConfig = new BlobBasedConfig(null, repository,
configWalk.getObjectId(0));
+ loadPathNames();
return this;
}
}
modulesConfig = new Config();
+ pathToName = null;
} finally {
if (idx > 0)
rootTree.next(idx);
@@ -451,6 +460,20 @@ public class SubmoduleWalk implements AutoCloseable {
return this;
}
+ private void loadPathNames() {
+ pathToName = null;
+ if (modulesConfig != null) {
+ HashMap<String, String> pathNames = new HashMap<>();
+ for (String name : modulesConfig
+ .getSubsections(ConfigConstants.CONFIG_SUBMODULE_SECTION)) {
+ pathNames.put(modulesConfig.getString(
+ ConfigConstants.CONFIG_SUBMODULE_SECTION, name,
+ ConfigConstants.CONFIG_KEY_PATH), name);
+ }
+ pathToName = pathNames;
+ }
+ }
+
/**
* Checks whether the working tree contains a .gitmodules file. That's a
* hint that the repo contains submodules.
@@ -475,8 +498,14 @@ public class SubmoduleWalk implements AutoCloseable {
}
private void lazyLoadModulesConfig() throws IOException, ConfigInvalidException {
- if (modulesConfig == null)
+ if (modulesConfig == null) {
loadModulesConfig();
+ }
+ }
+
+ private String getModuleName(String modulePath) {
+ String name = pathToName != null ? pathToName.get(modulePath) : null;
+ return name != null ? name : modulePath;
}
/**
@@ -525,6 +554,7 @@ public class SubmoduleWalk implements AutoCloseable {
public SubmoduleWalk reset() {
repoConfig = repository.getConfig();
modulesConfig = null;
+ pathToName = null;
walk.reset();
return this;
}
@@ -586,9 +616,8 @@ public class SubmoduleWalk implements AutoCloseable {
*/
public String getModulesPath() throws IOException, ConfigInvalidException {
lazyLoadModulesConfig();
- return modulesConfig.getString(
- ConfigConstants.CONFIG_SUBMODULE_SECTION, path,
- ConfigConstants.CONFIG_KEY_PATH);
+ return modulesConfig.getString(ConfigConstants.CONFIG_SUBMODULE_SECTION,
+ getModuleName(path), ConfigConstants.CONFIG_KEY_PATH);
}
/**
@@ -600,6 +629,10 @@ public class SubmoduleWalk implements AutoCloseable {
* @throws IOException
*/
public String getConfigUrl() throws IOException, ConfigInvalidException {
+		// SubmoduleInitCommand copies the submodule.*.url and
+		// submodule.*.update values from .gitmodules to the config, and
+ // does so using the path defined in .gitmodules as the subsection
+ // name. So no path-to-name translation is necessary here.
return repoConfig.getString(ConfigConstants.CONFIG_SUBMODULE_SECTION,
path, ConfigConstants.CONFIG_KEY_URL);
}
@@ -614,9 +647,8 @@ public class SubmoduleWalk implements AutoCloseable {
*/
public String getModulesUrl() throws IOException, ConfigInvalidException {
lazyLoadModulesConfig();
- return modulesConfig.getString(
- ConfigConstants.CONFIG_SUBMODULE_SECTION, path,
- ConfigConstants.CONFIG_KEY_URL);
+ return modulesConfig.getString(ConfigConstants.CONFIG_SUBMODULE_SECTION,
+ getModuleName(path), ConfigConstants.CONFIG_KEY_URL);
}
/**
@@ -642,9 +674,8 @@ public class SubmoduleWalk implements AutoCloseable {
*/
public String getModulesUpdate() throws IOException, ConfigInvalidException {
lazyLoadModulesConfig();
- return modulesConfig.getString(
- ConfigConstants.CONFIG_SUBMODULE_SECTION, path,
- ConfigConstants.CONFIG_KEY_UPDATE);
+ return modulesConfig.getString(ConfigConstants.CONFIG_SUBMODULE_SECTION,
+ getModuleName(path), ConfigConstants.CONFIG_KEY_UPDATE);
}
/**
@@ -659,13 +690,9 @@ public class SubmoduleWalk implements AutoCloseable {
public IgnoreSubmoduleMode getModulesIgnore() throws IOException,
ConfigInvalidException {
lazyLoadModulesConfig();
- String name = modulesConfig.getString(
- ConfigConstants.CONFIG_SUBMODULE_SECTION, path,
- ConfigConstants.CONFIG_KEY_IGNORE);
- if (name == null)
- return null;
- return IgnoreSubmoduleMode
- .valueOf(name.trim().toUpperCase(Locale.ROOT));
+ return modulesConfig.getEnum(IgnoreSubmoduleMode.values(),
+ ConfigConstants.CONFIG_SUBMODULE_SECTION, getModuleName(path),
+ ConfigConstants.CONFIG_KEY_IGNORE, IgnoreSubmoduleMode.NONE);
}
/**
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java
index e8d18812f6..61c4c4b172 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java
@@ -63,7 +63,6 @@ import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.file.PackLock;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.MutableObjectId;
import org.eclipse.jgit.lib.NullProgressMonitor;
@@ -250,7 +249,7 @@ public abstract class BasePackFetchConnection extends BasePackConnection
super(packTransport);
if (local != null) {
- final FetchConfig cfg = local.getConfig().get(FetchConfig.KEY);
+ final FetchConfig cfg = local.getConfig().get(FetchConfig::new);
allowOfsDelta = cfg.allowOfsDelta;
} else {
allowOfsDelta = true;
@@ -279,13 +278,6 @@ public abstract class BasePackFetchConnection extends BasePackConnection
}
private static class FetchConfig {
- static final SectionParser<FetchConfig> KEY = new SectionParser<FetchConfig>() {
- @Override
- public FetchConfig parse(final Config cfg) {
- return new FetchConfig(cfg);
- }
- };
-
final boolean allowOfsDelta;
FetchConfig(final Config c) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BaseReceivePack.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BaseReceivePack.java
index 6f94dbbfec..44abcd598b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BaseReceivePack.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BaseReceivePack.java
@@ -78,7 +78,6 @@ import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.file.PackLock;
import org.eclipse.jgit.lib.BatchRefUpdate;
import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectChecker;
@@ -314,7 +313,7 @@ public abstract class BaseReceivePack {
TransferConfig tc = db.getConfig().get(TransferConfig.KEY);
objectChecker = tc.newReceiveObjectChecker();
- ReceiveConfig rc = db.getConfig().get(ReceiveConfig.KEY);
+ ReceiveConfig rc = db.getConfig().get(ReceiveConfig::new);
allowCreates = rc.allowCreates;
allowAnyDeletes = true;
allowBranchDeletes = rc.allowDeletes;
@@ -332,13 +331,6 @@ public abstract class BaseReceivePack {
/** Configuration for receive operations. */
protected static class ReceiveConfig {
- static final SectionParser<ReceiveConfig> KEY = new SectionParser<ReceiveConfig>() {
- @Override
- public ReceiveConfig parse(final Config cfg) {
- return new ReceiveConfig(cfg);
- }
- };
-
final boolean allowCreates;
final boolean allowDeletes;
final boolean allowNonFastForwards;
@@ -455,6 +447,7 @@ public abstract class BaseReceivePack {
public void setAdvertisedRefs(Map<String, Ref> allRefs, Set<ObjectId> additionalHaves) {
refs = allRefs != null ? allRefs : db.getAllRefs();
refs = refFilter.filter(refs);
+ advertisedHaves.clear();
Ref head = refs.get(Constants.HEAD);
if (head != null && head.isSymbolic())
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/Daemon.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/Daemon.java
index 40b2c47df0..896b10a4ec 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/Daemon.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/Daemon.java
@@ -45,13 +45,14 @@ package org.eclipse.jgit.transport;
import java.io.IOException;
import java.io.InputStream;
-import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
+import java.net.SocketException;
+import java.util.concurrent.atomic.AtomicBoolean;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.internal.JGitText;
@@ -77,9 +78,7 @@ public class Daemon {
private final ThreadGroup processors;
- private boolean run;
-
- Thread acceptThread;
+ private Acceptor acceptThread;
private int timeout;
@@ -281,6 +280,56 @@ public class Daemon {
receivePackFactory = (ReceivePackFactory<DaemonClient>) ReceivePackFactory.DISABLED;
}
+ private class Acceptor extends Thread {
+
+ private final ServerSocket listenSocket;
+
+ private final AtomicBoolean running = new AtomicBoolean(true);
+
+ public Acceptor(ThreadGroup group, String name, ServerSocket socket) {
+ super(group, name);
+ this.listenSocket = socket;
+ }
+
+ @Override
+ public void run() {
+ setUncaughtExceptionHandler((thread, throwable) -> terminate());
+ while (isRunning()) {
+ try {
+ startClient(listenSocket.accept());
+ } catch (SocketException e) {
+ // Test again to see if we should keep accepting.
+ } catch (IOException e) {
+ break;
+ }
+ }
+
+ terminate();
+ }
+
+ private void terminate() {
+ try {
+ shutDown();
+ } finally {
+ clearThread();
+ }
+ }
+
+ public boolean isRunning() {
+ return running.get();
+ }
+
+ public void shutDown() {
+ running.set(false);
+ try {
+ listenSocket.close();
+ } catch (IOException err) {
+ //
+ }
+ }
+
+ }
+
/**
* Start this daemon on a background thread.
*
@@ -290,52 +339,56 @@ public class Daemon {
* the daemon is already running.
*/
public synchronized void start() throws IOException {
- if (acceptThread != null)
+ if (acceptThread != null) {
throw new IllegalStateException(JGitText.get().daemonAlreadyRunning);
+ }
+ ServerSocket socket = new ServerSocket();
+ socket.setReuseAddress(true);
+ if (myAddress != null) {
+ socket.bind(myAddress, BACKLOG);
+ } else {
+ socket.bind(new InetSocketAddress((InetAddress) null, 0), BACKLOG);
+ }
+ myAddress = (InetSocketAddress) socket.getLocalSocketAddress();
- final ServerSocket listenSock = new ServerSocket(
- myAddress != null ? myAddress.getPort() : 0, BACKLOG,
- myAddress != null ? myAddress.getAddress() : null);
- myAddress = (InetSocketAddress) listenSock.getLocalSocketAddress();
-
- run = true;
- acceptThread = new Thread(processors, "Git-Daemon-Accept") { //$NON-NLS-1$
- @Override
- public void run() {
- while (isRunning()) {
- try {
- startClient(listenSock.accept());
- } catch (InterruptedIOException e) {
- // Test again to see if we should keep accepting.
- } catch (IOException e) {
- break;
- }
- }
-
- try {
- listenSock.close();
- } catch (IOException err) {
- //
- } finally {
- synchronized (Daemon.this) {
- acceptThread = null;
- }
- }
- }
- };
+ acceptThread = new Acceptor(processors, "Git-Daemon-Accept", socket); //$NON-NLS-1$
acceptThread.start();
}
+ private synchronized void clearThread() {
+ acceptThread = null;
+ }
+
/** @return true if this daemon is receiving connections. */
public synchronized boolean isRunning() {
- return run;
+ return acceptThread != null && acceptThread.isRunning();
}
- /** Stop this daemon. */
+ /**
+ * Stop this daemon.
+ */
public synchronized void stop() {
if (acceptThread != null) {
- run = false;
- acceptThread.interrupt();
+ acceptThread.shutDown();
+ }
+ }
+
+ /**
+	 * Stops this daemon and waits until its acceptor thread has finished.
+ *
+ * @throws InterruptedException
+ * if waiting for the acceptor thread is interrupted
+ *
+ * @since 4.9
+ */
+ public void stopAndWait() throws InterruptedException {
+ Thread acceptor = null;
+ synchronized (this) {
+ acceptor = acceptThread;
+ stop();
+ }
+ if (acceptor != null) {
+ acceptor.join();
}
}
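
A sketch of driving the reworked daemon lifecycle: bind to an ephemeral port, then use the new stopAndWait() to block until the acceptor thread has exited. Repository resolver setup and exception handling are omitted.

Daemon daemon = new Daemon(new InetSocketAddress((InetAddress) null, 0));
daemon.start();
InetSocketAddress bound = daemon.getAddress(); // port chosen by the OS
try {
	// ... serve clients ...
} finally {
	daemon.stopAndWait();
}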
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/DaemonService.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/DaemonService.java
index 80b2caebc4..566153a333 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/DaemonService.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/DaemonService.java
@@ -64,12 +64,7 @@ public abstract class DaemonService {
DaemonService(final String cmdName, final String cfgName) {
command = cmdName.startsWith("git-") ? cmdName : "git-" + cmdName; //$NON-NLS-1$ //$NON-NLS-2$
- configKey = new SectionParser<ServiceConfig>() {
- @Override
- public ServiceConfig parse(final Config cfg) {
- return new ServiceConfig(DaemonService.this, cfg, cfgName);
- }
- };
+ configKey = cfg -> new ServiceConfig(DaemonService.this, cfg, cfgName);
overridable = true;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/FetchProcess.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/FetchProcess.java
index 280e6d4df7..ed10f449df 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/FetchProcess.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/FetchProcess.java
@@ -74,6 +74,7 @@ import org.eclipse.jgit.lib.BatchRefUpdate;
import org.eclipse.jgit.lib.BatchingProgressMonitor;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdRef;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefDatabase;
@@ -202,12 +203,10 @@ class FetchProcess {
((BatchingProgressMonitor) monitor).setDelayStart(
250, TimeUnit.MILLISECONDS);
}
- if (transport.isRemoveDeletedRefs())
+ if (transport.isRemoveDeletedRefs()) {
deleteStaleTrackingRefs(result, batch);
- for (TrackingRefUpdate u : localUpdates) {
- result.add(u);
- batch.addCommand(u.asReceiveCommand());
}
+ addUpdateBatchCommands(result, batch);
for (ReceiveCommand cmd : batch.getCommands()) {
cmd.updateType(walk);
if (cmd.getType() == UPDATE_NONFASTFORWARD
@@ -220,8 +219,11 @@ class FetchProcess {
if (cmd.getResult() == NOT_ATTEMPTED)
cmd.setResult(OK);
}
- } else
+ } else {
batch.execute(walk, monitor);
+ }
+ } catch (TransportException e) {
+ throw e;
} catch (IOException err) {
throw new TransportException(MessageFormat.format(
JGitText.get().failureUpdatingTrackingRef,
@@ -238,6 +240,23 @@ class FetchProcess {
}
}
+ private void addUpdateBatchCommands(FetchResult result,
+ BatchRefUpdate batch) throws TransportException {
+ Map<String, ObjectId> refs = new HashMap<>();
+ for (TrackingRefUpdate u : localUpdates) {
+ // Try to skip duplicates if they'd update to the same object ID
+ ObjectId existing = refs.get(u.getLocalName());
+ if (existing == null) {
+ refs.put(u.getLocalName(), u.getNewObjectId());
+ result.add(u);
+ batch.addCommand(u.asReceiveCommand());
+ } else if (!existing.equals(u.getNewObjectId())) {
+ throw new TransportException(MessageFormat
+ .format(JGitText.get().duplicateRef, u.getLocalName()));
+ }
+ }
+ }
+
private void fetchObjects(final ProgressMonitor monitor)
throws TransportException {
try {
@@ -360,12 +379,19 @@ class FetchProcess {
private void expandSingle(final RefSpec spec, final Set<Ref> matched)
throws TransportException {
- final Ref src = conn.getRef(spec.getSource());
+ String want = spec.getSource();
+ if (ObjectId.isId(want)) {
+ want(ObjectId.fromString(want));
+ return;
+ }
+
+ Ref src = conn.getRef(want);
if (src == null) {
- throw new TransportException(MessageFormat.format(JGitText.get().remoteDoesNotHaveSpec, spec.getSource()));
+ throw new TransportException(MessageFormat.format(JGitText.get().remoteDoesNotHaveSpec, want));
}
- if (matched.add(src))
+ if (matched.add(src)) {
want(src, spec);
+ }
}
private Collection<Ref> expandAutoFollowTags() throws TransportException {
@@ -440,6 +466,11 @@ class FetchProcess {
fetchHeadUpdates.add(fhr);
}
+ private void want(ObjectId id) {
+ askFor.put(id,
+ new ObjectIdRef.Unpeeled(Ref.Storage.NETWORK, id.name(), id));
+ }
+
private TrackingRefUpdate createUpdate(RefSpec spec, ObjectId newId)
throws TransportException {
Ref ref = localRefs().get(spec.getDestination());
@@ -468,12 +499,14 @@ class FetchProcess {
private void deleteStaleTrackingRefs(FetchResult result,
BatchRefUpdate batch) throws IOException {
+ final Set<Ref> processed = new HashSet<>();
for (final Ref ref : localRefs().values()) {
final String refname = ref.getName();
for (final RefSpec spec : toFetch) {
if (spec.matchDestination(refname)) {
final RefSpec s = spec.expandFromDestination(refname);
- if (result.getAdvertisedRef(s.getSource()) == null) {
+ if (result.getAdvertisedRef(s.getSource()) == null
+ && processed.add(ref)) {
deleteTrackingRef(result, batch, s, ref);
}
}
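
A sketch of what the expandSingle() change enables at the porcelain level: a fetch refspec whose source is a full 40-hex object id instead of a ref name. The path, id and destination ref are placeholders, the remote must permit requesting unadvertised objects, and exception handling is omitted.

try (Git git = Git.open(new File("/path/to/repo"))) { // placeholder path
	git.fetch()
			.setRemote("origin")
			.setRefSpecs(new RefSpec(
					"0123456789012345678901234567890123456789" // placeholder id
							+ ":refs/heads/imported"))
			.call();
}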
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/HttpConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/HttpConfig.java
new file mode 100644
index 0000000000..db59a54a27
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/HttpConfig.java
@@ -0,0 +1,412 @@
+/*
+ * Copyright (C) 2008, 2010, Google Inc.
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.transport;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.text.MessageFormat;
+import java.util.Set;
+import java.util.function.Supplier;
+
+import org.eclipse.jgit.errors.ConfigInvalidException;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.lib.Config;
+import org.eclipse.jgit.storage.file.FileBasedConfig;
+import org.eclipse.jgit.util.FS;
+import org.eclipse.jgit.util.StringUtils;
+import org.eclipse.jgit.util.SystemReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A representation of the "http.*" config values in a git {@link Config}. git
+ * provides for setting values for specific URLs through "http.<url>.*"
+ * subsections. git always considers only the initial original URL for such
+ * settings, not any redirected URL.
+ *
+ * @since 4.9
+ */
+public class HttpConfig {
+
+ private static final Logger LOG = LoggerFactory.getLogger(HttpConfig.class);
+
+ private static final String FTP = "ftp"; //$NON-NLS-1$
+
+ /** git config section key for http settings. */
+ public static final String HTTP = "http"; //$NON-NLS-1$
+
+ /** git config key for the "followRedirects" setting. */
+ public static final String FOLLOW_REDIRECTS_KEY = "followRedirects"; //$NON-NLS-1$
+
+ /** git config key for the "maxRedirects" setting. */
+ public static final String MAX_REDIRECTS_KEY = "maxRedirects"; //$NON-NLS-1$
+
+ /** git config key for the "postBuffer" setting. */
+ public static final String POST_BUFFER_KEY = "postBuffer"; //$NON-NLS-1$
+
+ /** git config key for the "sslVerify" setting. */
+ public static final String SSL_VERIFY_KEY = "sslVerify"; //$NON-NLS-1$
+
+ private static final String MAX_REDIRECT_SYSTEM_PROPERTY = "http.maxRedirects"; //$NON-NLS-1$
+
+ private static final int DEFAULT_MAX_REDIRECTS = 5;
+
+ private static final int MAX_REDIRECTS = (new Supplier<Integer>() {
+
+ @Override
+ public Integer get() {
+ String rawValue = SystemReader.getInstance()
+ .getProperty(MAX_REDIRECT_SYSTEM_PROPERTY);
+ Integer value = Integer.valueOf(DEFAULT_MAX_REDIRECTS);
+ if (rawValue != null) {
+ try {
+ value = Integer.valueOf(Integer.parseUnsignedInt(rawValue));
+ } catch (NumberFormatException e) {
+ LOG.warn(MessageFormat.format(
+ JGitText.get().invalidSystemProperty,
+ MAX_REDIRECT_SYSTEM_PROPERTY, rawValue, value));
+ }
+ }
+ return value;
+ }
+ }).get().intValue();
+
+ /**
+ * Config values for the "http.followRedirects" setting.
+ */
+ public enum HttpRedirectMode implements Config.ConfigEnum {
+
+ /** Always follow redirects (up to the http.maxRedirects limit). */
+ TRUE("true"), //$NON-NLS-1$
+ /**
+ * Only follow redirects on the initial GET request. This is the
+ * default.
+ */
+ INITIAL("initial"), //$NON-NLS-1$
+ /** Never follow redirects. */
+ FALSE("false"); //$NON-NLS-1$
+
+ private final String configValue;
+
+ private HttpRedirectMode(String configValue) {
+ this.configValue = configValue;
+ }
+
+ @Override
+ public String toConfigValue() {
+ return configValue;
+ }
+
+ @Override
+ public boolean matchConfigValue(String s) {
+ return configValue.equals(s);
+ }
+ }
+
+ private int postBuffer;
+
+ private boolean sslVerify;
+
+ private HttpRedirectMode followRedirects;
+
+ private int maxRedirects;
+
+ /**
+ * @return the value of the "http.postBuffer" setting
+ */
+ public int getPostBuffer() {
+ return postBuffer;
+ }
+
+ /**
+ * @return the value of the "http.sslVerify" setting
+ */
+ public boolean isSslVerify() {
+ return sslVerify;
+ }
+
+ /**
+ * @return the value of the "http.followRedirects" setting
+ */
+ public HttpRedirectMode getFollowRedirects() {
+ return followRedirects;
+ }
+
+ /**
+ * @return the value of the "http.maxRedirects" setting
+ */
+ public int getMaxRedirects() {
+ return maxRedirects;
+ }
+
+ /**
+ * Creates a new {@link HttpConfig} tailored to the given {@link URIish}.
+ *
+ * @param config
+ * to read the {@link HttpConfig} from
+ * @param uri
+ * to get the configuration values for
+ */
+ public HttpConfig(Config config, URIish uri) {
+ init(config, uri);
+ }
+
+ /**
+ * Creates a {@link HttpConfig} that reads values solely from the user
+ * config.
+ *
+ * @param uri
+ * to get the configuration values for
+ */
+ public HttpConfig(URIish uri) {
+ FileBasedConfig userConfig = SystemReader.getInstance()
+ .openUserConfig(null, FS.DETECTED);
+ try {
+ userConfig.load();
+ } catch (IOException | ConfigInvalidException e) {
+ // Log it and then work with default values.
+ LOG.error(MessageFormat.format(JGitText.get().userConfigFileInvalid,
+ userConfig.getFile().getAbsolutePath(), e));
+ init(new Config(), uri);
+ return;
+ }
+ init(userConfig, uri);
+ }
+
+ private void init(Config config, URIish uri) {
+ // Set defaults from the section first
+ int postBufferSize = config.getInt(HTTP, POST_BUFFER_KEY,
+ 1 * 1024 * 1024);
+ boolean sslVerifyFlag = config.getBoolean(HTTP, SSL_VERIFY_KEY, true);
+ HttpRedirectMode followRedirectsMode = config.getEnum(
+ HttpRedirectMode.values(), HTTP, null,
+ FOLLOW_REDIRECTS_KEY, HttpRedirectMode.INITIAL);
+ int redirectLimit = config.getInt(HTTP, MAX_REDIRECTS_KEY,
+ MAX_REDIRECTS);
+ if (redirectLimit < 0) {
+ redirectLimit = MAX_REDIRECTS;
+ }
+ String match = findMatch(config.getSubsections(HTTP), uri);
+ if (match != null) {
+ // Override with more specific items
+ postBufferSize = config.getInt(HTTP, match, POST_BUFFER_KEY,
+ postBufferSize);
+ sslVerifyFlag = config.getBoolean(HTTP, match, SSL_VERIFY_KEY,
+ sslVerifyFlag);
+ followRedirectsMode = config.getEnum(HttpRedirectMode.values(),
+ HTTP, match, FOLLOW_REDIRECTS_KEY, followRedirectsMode);
+ int newMaxRedirects = config.getInt(HTTP, match, MAX_REDIRECTS_KEY,
+ redirectLimit);
+ if (newMaxRedirects >= 0) {
+ redirectLimit = newMaxRedirects;
+ }
+ }
+ postBuffer = postBufferSize;
+ sslVerify = sslVerifyFlag;
+ followRedirects = followRedirectsMode;
+ maxRedirects = redirectLimit;
+ }
+
+ /**
+ * Determines the best match from a set of subsection names (representing
+ * prefix URLs) for the given {@link URIish}.
+ *
+ * @param names
+ * to match against the {@code uri}
+ * @param uri
+ * to find a match for
+ * @return the best matching subsection name, or {@code null} if no
+ * subsection matches
+ */
+ private String findMatch(Set<String> names, URIish uri) {
+ String bestMatch = null;
+ int bestMatchLength = -1;
+ boolean withUser = false;
+ String uPath = uri.getPath();
+ boolean hasPath = !StringUtils.isEmptyOrNull(uPath);
+ if (hasPath) {
+ uPath = normalize(uPath);
+ if (uPath == null) {
+ // Normalization failed; warning was logged.
+ return null;
+ }
+ }
+ for (String s : names) {
+ try {
+ URIish candidate = new URIish(s);
+ // Scheme and host must match case-insensitively
+ if (!compare(uri.getScheme(), candidate.getScheme())
+ || !compare(uri.getHost(), candidate.getHost())) {
+ continue;
+ }
+ // Ports must match after default ports have been substituted
+ if (defaultedPort(uri.getPort(),
+ uri.getScheme()) != defaultedPort(candidate.getPort(),
+ candidate.getScheme())) {
+ continue;
+ }
+ // User: if present in candidate, must match
+ boolean hasUser = false;
+ if (candidate.getUser() != null) {
+ if (!candidate.getUser().equals(uri.getUser())) {
+ continue;
+ }
+ hasUser = true;
+ }
+ // Path: prefix match, longer is better
+ String cPath = candidate.getPath();
+ int matchLength = -1;
+ if (StringUtils.isEmptyOrNull(cPath)) {
+ matchLength = 0;
+ } else {
+ if (!hasPath) {
+ continue;
+ }
+ // Paths can match only on segments
+ matchLength = segmentCompare(uPath, cPath);
+ if (matchLength < 0) {
+ continue;
+ }
+ }
+ // A longer path match is always preferred even over a user
+ // match. If the path matches are equal, a match with user wins
+ // over a match without user.
+ if (matchLength > bestMatchLength || !withUser && hasUser
+ && matchLength >= 0 && matchLength == bestMatchLength) {
+ bestMatch = s;
+ bestMatchLength = matchLength;
+ withUser = hasUser;
+ }
+ } catch (URISyntaxException e) {
+ LOG.warn(MessageFormat
+ .format(JGitText.get().httpConfigInvalidURL, s));
+ }
+ }
+ return bestMatch;
+ }
+
+ private boolean compare(String a, String b) {
+ if (a == null) {
+ return b == null;
+ }
+ return a.equalsIgnoreCase(b);
+ }
+
+ private int defaultedPort(int port, String scheme) {
+ if (port >= 0) {
+ return port;
+ }
+ if (FTP.equalsIgnoreCase(scheme)) {
+ return 21;
+ } else if (HTTP.equalsIgnoreCase(scheme)) {
+ return 80;
+ } else {
+ return 443; // https
+ }
+ }
+
+ static int segmentCompare(String uriPath, String m) {
+ // Precondition: !uriPath.isEmpty() && !m.isEmpty(), and uriPath must
+ // already be normalized
+ String matchPath = normalize(m);
+ if (matchPath == null || !uriPath.startsWith(matchPath)) {
+ return -1;
+ }
+ // We can match only on a segment boundary: either both paths are
+ // equal, or matchPath ends in '/', or there is a '/' in uriPath right
+ // after the match.
+ int uLength = uriPath.length();
+ int mLength = matchPath.length();
+ if (mLength == uLength || matchPath.charAt(mLength - 1) == '/'
+ || mLength < uLength && uriPath.charAt(mLength) == '/') {
+ return mLength;
+ }
+ return -1;
+ }
+
+ static String normalize(String path) {
+ // C-git resolves . and .. segments
+ int i = 0;
+ int length = path.length();
+ StringBuilder builder = new StringBuilder(length);
+ builder.append('/');
+ if (length > 0 && path.charAt(0) == '/') {
+ i = 1;
+ }
+ while (i < length) {
+ int slash = path.indexOf('/', i);
+ if (slash < 0) {
+ slash = length;
+ }
+ if (slash == i || slash == i + 1 && path.charAt(i) == '.') {
+ // Skip "/." segments as well as double slashes
+ } else if (slash == i + 2 && path.charAt(i) == '.'
+ && path.charAt(i + 1) == '.') {
+ // Remove previous segment if we have "/.."
+ int l = builder.length() - 2; // Skip terminating slash.
+ while (l >= 0 && builder.charAt(l) != '/') {
+ l--;
+ }
+ if (l < 0) {
+ LOG.warn(MessageFormat.format(
+ JGitText.get().httpConfigCannotNormalizeURL, path));
+ return null;
+ }
+ builder.setLength(l + 1);
+ } else {
+ // Include the slash, if any
+ builder.append(path, i, Math.min(length, slash + 1));
+ }
+ i = slash + 1;
+ }
+ if (builder.length() > 1 && builder.charAt(builder.length() - 1) == '/'
+ && length > 0 && path.charAt(length - 1) != '/') {
+ // . or .. normalization left a trailing slash when the original
+ // path had none at the end
+ builder.setLength(builder.length() - 1);
+ }
+ return builder.toString();
+ }
+}
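
For readers of this patch, here is a minimal usage sketch of the new HttpConfig class. The config text and URLs are invented for illustration; the only HttpConfig API used is what the patch introduces (the constructors and the four getters), while Config.fromText() and URIish come from existing JGit classes.

    import org.eclipse.jgit.lib.Config;
    import org.eclipse.jgit.transport.HttpConfig;
    import org.eclipse.jgit.transport.URIish;

    public class HttpConfigExample {
        public static void main(String[] args) throws Exception {
            // Hypothetical config: generic [http] defaults plus a more
            // specific "http.<url>.*" subsection for one server.
            Config config = new Config();
            config.fromText("[http]\n"
                    + "\tpostBuffer = 524288\n"
                    + "[http \"https://example.org/repos\"]\n"
                    + "\tsslVerify = false\n"
                    + "\tfollowRedirects = true\n");
            // The subsection is chosen by prefix match against the original
            // (not redirected) URI; a longer path match wins, and only then
            // does a user-name match break ties.
            HttpConfig http = new HttpConfig(config,
                    new URIish("https://example.org/repos/project.git"));
            System.out.println(http.getPostBuffer());      // 524288, from [http]
            System.out.println(http.isSslVerify());        // false, from subsection
            System.out.println(http.getFollowRedirects()); // TRUE, from subsection
            System.out.println(http.getMaxRedirects());    // 5 unless configured
        }
    }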
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschConfigSessionFactory.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschConfigSessionFactory.java
index ce14183a56..0cc40f37d1 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschConfigSessionFactory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschConfigSessionFactory.java
@@ -53,15 +53,23 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
import java.net.ConnectException;
import java.net.UnknownHostException;
+import java.text.MessageFormat;
import java.util.HashMap;
+import java.util.Locale;
import java.util.Map;
+import java.util.concurrent.TimeUnit;
import org.eclipse.jgit.errors.TransportException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.util.FS;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import com.jcraft.jsch.ConfigRepository;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
@@ -80,6 +88,18 @@ import com.jcraft.jsch.UserInfo;
* to supply appropriate {@link UserInfo} to the session.
*/
public abstract class JschConfigSessionFactory extends SshSessionFactory {
+
+ private static final Logger LOG = LoggerFactory
+ .getLogger(JschConfigSessionFactory.class);
+
+ /**
+ * We use different Jsch instances for hosts that have an IdentityFile
+ * configured in ~/.ssh/config. Jsch by default would cache decrypted keys
+ * only per session, which results in repeated password prompts. Using
+ * different Jsch instances, we can cache the keys on these instances so
+ * that they will be re-used for successive sessions, and thus the user is
+ * prompted for a key password only once while Eclipse runs.
+ */
private final Map<String, JSch> byIdentityFile = new HashMap<>();
private JSch defaultJSch;
@@ -101,7 +121,6 @@ public abstract class JschConfigSessionFactory extends SshSessionFactory {
config = OpenSshConfig.get(fs);
final OpenSshConfig.Host hc = config.lookup(host);
- host = hc.getHostName();
if (port <= 0)
port = hc.getPort();
if (user == null)
@@ -153,10 +172,13 @@ public abstract class JschConfigSessionFactory extends SshSessionFactory {
} catch (JSchException je) {
final Throwable c = je.getCause();
- if (c instanceof UnknownHostException)
- throw new TransportException(uri, JGitText.get().unknownHost);
- if (c instanceof ConnectException)
- throw new TransportException(uri, c.getMessage());
+ if (c instanceof UnknownHostException) {
+ throw new TransportException(uri, JGitText.get().unknownHost,
+ je);
+ }
+ if (c instanceof ConnectException) {
+ throw new TransportException(uri, c.getMessage(), je);
+ }
throw new TransportException(uri, je.getMessage(), je);
}
@@ -170,10 +192,18 @@ public abstract class JschConfigSessionFactory extends SshSessionFactory {
return e.getCause() == null && e.getMessage().equals("Auth cancel"); //$NON-NLS-1$
}
- private Session createSession(CredentialsProvider credentialsProvider,
+ // Package visibility for tests
+ Session createSession(CredentialsProvider credentialsProvider,
FS fs, String user, final String pass, String host, int port,
final OpenSshConfig.Host hc) throws JSchException {
final Session session = createSession(hc, user, host, port, fs);
+ // Jsch will have overridden the explicit user with the one from the
+ // SSH config file...
+ setUserName(session, user);
+ // Jsch will also have overridden the port.
+ if (port > 0 && port != session.getPort()) {
+ session.setPort(port);
+ }
// We retry already in getSession() method. JSch must not retry
// on its own.
session.setConfig("MaxAuthTries", "1"); //$NON-NLS-1$ //$NON-NLS-2$
@@ -196,6 +226,28 @@ public abstract class JschConfigSessionFactory extends SshSessionFactory {
return session;
}
+ private void setUserName(Session session, String userName) {
+ // Jsch 0.1.54 picks up the user name from the ssh config, even if an
+ // explicit user name was given! We must correct that if ~/.ssh/config
+ // has a different user name.
+ if (userName == null || userName.isEmpty()
+ || userName.equals(session.getUserName())) {
+ return;
+ }
+ try {
+ Class<?>[] parameterTypes = { String.class };
+ Method method = Session.class.getDeclaredMethod("setUserName", //$NON-NLS-1$
+ parameterTypes);
+ method.setAccessible(true);
+ method.invoke(session, userName);
+ } catch (NullPointerException | IllegalAccessException
+ | IllegalArgumentException | InvocationTargetException
+ | NoSuchMethodException | SecurityException e) {
+ LOG.error(MessageFormat.format(JGitText.get().sshUserNameError,
+ userName, session.getUserName()), e);
+ }
+ }
+
/**
* Create a new remote session for the requested address.
*
@@ -259,6 +311,10 @@ public abstract class JschConfigSessionFactory extends SshSessionFactory {
protected JSch getJSch(final OpenSshConfig.Host hc, FS fs) throws JSchException {
if (defaultJSch == null) {
defaultJSch = createDefaultJSch(fs);
+ if (defaultJSch.getConfigRepository() == null) {
+ defaultJSch.setConfigRepository(
+ new JschBugFixingConfigRepository(config));
+ }
for (Object name : defaultJSch.getIdentityNames())
byIdentityFile.put((String) name, defaultJSch);
}
@@ -272,6 +328,9 @@ public abstract class JschConfigSessionFactory extends SshSessionFactory {
if (jsch == null) {
jsch = new JSch();
configureJSch(jsch);
+ if (jsch.getConfigRepository() == null) {
+ jsch.setConfigRepository(defaultJSch.getConfigRepository());
+ }
jsch.setHostKeyRepository(defaultJSch.getHostKeyRepository());
jsch.addIdentity(identityKey);
byIdentityFile.put(identityKey, jsch);
@@ -335,4 +394,101 @@ public abstract class JschConfigSessionFactory extends SshSessionFactory {
}
}
}
+
+ private static class JschBugFixingConfigRepository
+ implements ConfigRepository {
+
+ private final ConfigRepository base;
+
+ public JschBugFixingConfigRepository(ConfigRepository base) {
+ this.base = base;
+ }
+
+ @Override
+ public Config getConfig(String host) {
+ return new JschBugFixingConfig(base.getConfig(host));
+ }
+
+ /**
+ * A {@link com.jcraft.jsch.ConfigRepository.Config} that transforms
+ * some values from the config file into the format Jsch 0.1.54 expects.
+ * This is a work-around for bugs in Jsch.
+ * <p>
+ * Additionally, this config hides the IdentityFile config entries from
+ * Jsch; we manage those ourselves. Otherwise Jsch would cache passwords
+ * (or rather, decrypted keys) only for a single session, resulting in
+ * multiple password prompts for user operations that use several Jsch
+ * sessions.
+ */
+ private static class JschBugFixingConfig implements Config {
+
+ private static final String[] NO_IDENTITIES = {};
+
+ private final Config real;
+
+ public JschBugFixingConfig(Config delegate) {
+ real = delegate;
+ }
+
+ @Override
+ public String getHostname() {
+ return real.getHostname();
+ }
+
+ @Override
+ public String getUser() {
+ return real.getUser();
+ }
+
+ @Override
+ public int getPort() {
+ return real.getPort();
+ }
+
+ @Override
+ public String getValue(String key) {
+ String k = key.toUpperCase(Locale.ROOT);
+ if ("IDENTITYFILE".equals(k)) { //$NON-NLS-1$
+ return null;
+ }
+ String result = real.getValue(key);
+ if (result != null) {
+ if ("SERVERALIVEINTERVAL".equals(k) //$NON-NLS-1$
+ || "CONNECTTIMEOUT".equals(k)) { //$NON-NLS-1$
+ // These values are in seconds. Jsch 0.1.54 passes them
+ // on as is to java.net.Socket.setSoTimeout(), which
+ // expects milliseconds. So convert here to
+ // milliseconds.
+ try {
+ int timeout = Integer.parseInt(result);
+ result = Long.toString(
+ TimeUnit.SECONDS.toMillis(timeout));
+ } catch (NumberFormatException e) {
+ // Ignore
+ }
+ }
+ }
+ return result;
+ }
+
+ @Override
+ public String[] getValues(String key) {
+ String k = key.toUpperCase(Locale.ROOT);
+ if ("IDENTITYFILE".equals(k)) { //$NON-NLS-1$
+ return NO_IDENTITIES;
+ }
+ return real.getValues(key);
+ }
+ }
+ }
+
+ /**
+ * Set the {@link OpenSshConfig} to use. Intended for use in tests.
+ *
+ * @param config
+ * to use
+ */
+ void setConfig(OpenSshConfig config) {
+ this.config = config;
+ }
}
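
Not part of the change itself, but as a reminder of how this factory is typically wired up by applications: JschConfigSessionFactory is abstract, and a subclass only has to provide configure(). The sketch below is a hypothetical minimal subclass that relies entirely on ~/.ssh/config (whose handling this patch improves) and installs itself as the process-wide SSH session factory.

    import com.jcraft.jsch.Session;
    import org.eclipse.jgit.transport.JschConfigSessionFactory;
    import org.eclipse.jgit.transport.OpenSshConfig;
    import org.eclipse.jgit.transport.SshSessionFactory;

    public class SshSetup {
        public static void install() {
            SshSessionFactory.setInstance(new JschConfigSessionFactory() {
                @Override
                protected void configure(OpenSshConfig.Host hc, Session session) {
                    // Nothing to tweak: user, port, identity files and the
                    // fixed-up timeouts all come from ~/.ssh/config.
                }
            });
        }
    }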
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java
index f445bcbcfb..a8cc032006 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java
@@ -220,6 +220,7 @@ public class JschSession implements RemoteSession {
public void destroy() {
if (channel.isConnected())
channel.disconnect();
+ closeOutputStream();
}
@Override
@@ -229,4 +230,4 @@ public class JschSession implements RemoteSession {
return exitValue();
}
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/NetRC.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/NetRC.java
index bab5bf0354..5727b034b3 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/NetRC.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/NetRC.java
@@ -317,4 +317,4 @@ public class NetRC {
}
}
}
-} \ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/OpenSshConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/OpenSshConfig.java
index 8b7b60da37..b5d5099239 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/OpenSshConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/OpenSshConfig.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008, 2014, Google Inc.
+ * Copyright (C) 2008, 2017, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -46,32 +46,90 @@ package org.eclipse.jgit.transport;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
-import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
-import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
+import java.util.Set;
import org.eclipse.jgit.errors.InvalidPatternException;
import org.eclipse.jgit.fnmatch.FileNameMatcher;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.StringUtils;
+import org.eclipse.jgit.util.SystemReader;
+
+import com.jcraft.jsch.ConfigRepository;
/**
- * Simple configuration parser for the OpenSSH ~/.ssh/config file.
+ * Fairly complete configuration parser for the OpenSSH ~/.ssh/config file.
+ * <p>
+ * JSch does have its own config file parser
+ * {@link com.jcraft.jsch.OpenSSHConfig} since version 0.1.50, but it has a
+ * number of problems:
+ * <ul>
+ * <li>it splits lines of the format "keyword = value" wrongly: you'd end up
+ * with the value "= value".
+ * <li>its "Host" keyword is not case insensitive.
+ * <li>it doesn't handle quoted values.
+ * <li>JSch's OpenSSHConfig doesn't monitor for config file changes.
+ * </ul>
+ * <p>
+ * Therefore implement our own parser to read an OpenSSH configuration file. It
+ * makes the critical options available to {@link SshSessionFactory} via
+ * {@link Host} objects returned by {@link #lookup(String)}, and implements a
+ * fully conforming {@link ConfigRepository} providing
+ * {@link com.jcraft.jsch.ConfigRepository.Config}s via
+ * {@link #getConfig(String)}.
+ * </p>
+ * <p>
+ * Limitations compared to the full OpenSSH 7.5 parser:
+ * </p>
+ * <ul>
+ * <li>This parser does not handle Match or Include keywords.
+ * <li>This parser does not do host name canonicalization (Jsch ignores it
+ * anyway).
+ * </ul>
+ * <p>
+ * Note that OpenSSH's readconf.c is a validating parser; Jsch's
+ * ConfigRepository, on the other hand, treats all option values as plain
+ * strings, so any validation must happen in Jsch outside of the parser. Thus
+ * this parser does not validate option values, except for a few options when
+ * constructing a {@link Host} object.
+ * </p>
+ * <p>
+ * This config does %-substitutions for the following tokens:
+ * </p>
+ * <ul>
+ * <li>%% - single %
+ * <li>%C - short-hand for %l%h%p%r. See %p and %r below; the replacement may
+ * be done only partially and may leave %p or %r or both unreplaced.
+ * <li>%d - home directory path
+ * <li>%h - remote host name
+ * <li>%L - local host name without domain
+ * <li>%l - FQDN of the local host
+ * <li>%n - host name as specified in {@link #lookup(String)}
+ * <li>%p - port number; replaced only if set in the config
+ * <li>%r - remote user name; replaced only if set in the config
+ * <li>%u - local user name
+ * </ul>
* <p>
- * Since JSch does not (currently) have the ability to parse an OpenSSH
- * configuration file this is a simple parser to read that file and make the
- * critical options available to {@link SshSessionFactory}.
+ * If the config doesn't set the port or the remote user name, %p and %r remain
+ * un-substituted. It's the caller's responsibility to replace them with values
+ * obtained from the connection URI. %i is not handled; Java has no concept of a
+ * "user ID".
+ * </p>
*/
-public class OpenSshConfig {
+public class OpenSshConfig implements ConfigRepository {
+
/** IANA assigned port number for SSH. */
static final int SSH_PORT = 22;
@@ -105,16 +163,31 @@ public class OpenSshConfig {
/** The .ssh/config file we read and monitor for updates. */
private final File configFile;
- /** Modification time of {@link #configFile} when {@link #hosts} loaded. */
+ /** Modification time of {@link #configFile} when it was last loaded. */
private long lastModified;
- /** Cached entries read out of the configuration file. */
- private Map<String, Host> hosts;
+ /**
+ * Encapsulates entries read out of the configuration file, and
+ * {@link Host}s created from that.
+ */
+ private static class State {
+ Map<String, HostEntry> entries = new LinkedHashMap<>();
+ Map<String, Host> hosts = new HashMap<>();
+
+ @Override
+ @SuppressWarnings("nls")
+ public String toString() {
+ return "State [entries=" + entries + ", hosts=" + hosts + "]";
+ }
+ }
+
+ /** State read from the config file, plus {@link Host}s created from it. */
+ private State state;
OpenSshConfig(final File h, final File cfg) {
home = h;
configFile = cfg;
- hosts = Collections.emptyMap();
+ state = new State();
}
/**
@@ -127,75 +200,81 @@ public class OpenSshConfig {
* @return r configuration for the requested name. Never null.
*/
public Host lookup(final String hostName) {
- final Map<String, Host> cache = refresh();
- Host h = cache.get(hostName);
- if (h == null)
- h = new Host();
- if (h.patternsApplied)
+ final State cache = refresh();
+ Host h = cache.hosts.get(hostName);
+ if (h != null) {
return h;
-
- for (final Map.Entry<String, Host> e : cache.entrySet()) {
- if (!isHostPattern(e.getKey()))
- continue;
- if (!isHostMatch(e.getKey(), hostName))
- continue;
- h.copyFrom(e.getValue());
- }
-
- if (h.hostName == null)
- h.hostName = hostName;
- if (h.user == null)
- h.user = OpenSshConfig.userName();
- if (h.port == 0)
- h.port = OpenSshConfig.SSH_PORT;
- if (h.connectionAttempts == 0)
- h.connectionAttempts = 1;
- h.patternsApplied = true;
+ }
+ HostEntry fullConfig = new HostEntry();
+ // Initialize with default entries at the top of the file, before the
+ // first Host block.
+ fullConfig.merge(cache.entries.get(HostEntry.DEFAULT_NAME));
+ for (final Map.Entry<String, HostEntry> e : cache.entries.entrySet()) {
+ String key = e.getKey();
+ if (isHostMatch(key, hostName)) {
+ fullConfig.merge(e.getValue());
+ }
+ }
+ fullConfig.substitute(hostName, home);
+ h = new Host(fullConfig, hostName, home);
+ cache.hosts.put(hostName, h);
return h;
}
- private synchronized Map<String, Host> refresh() {
+ private synchronized State refresh() {
final long mtime = configFile.lastModified();
if (mtime != lastModified) {
- try {
- final FileInputStream in = new FileInputStream(configFile);
- try {
- hosts = parse(in);
- } finally {
- in.close();
- }
- } catch (FileNotFoundException none) {
- hosts = Collections.emptyMap();
- } catch (IOException err) {
- hosts = Collections.emptyMap();
+ State newState = new State();
+ try (FileInputStream in = new FileInputStream(configFile)) {
+ newState.entries = parse(in);
+ } catch (IOException none) {
+ // Ignore -- we'll set and return an empty state
}
lastModified = mtime;
+ state = newState;
}
- return hosts;
+ return state;
}
- private Map<String, Host> parse(final InputStream in) throws IOException {
- final Map<String, Host> m = new LinkedHashMap<>();
+ private Map<String, HostEntry> parse(final InputStream in)
+ throws IOException {
+ final Map<String, HostEntry> m = new LinkedHashMap<>();
final BufferedReader br = new BufferedReader(new InputStreamReader(in));
- final List<Host> current = new ArrayList<>(4);
+ final List<HostEntry> current = new ArrayList<>(4);
String line;
+ // The man page doesn't say so, but the OpenSSH parser (readconf.c)
+ // starts out in active mode and thus always applies any lines that
+ // occur before the first host block. We gather those options in a
+ // HostEntry for DEFAULT_NAME.
+ HostEntry defaults = new HostEntry();
+ current.add(defaults);
+ m.put(HostEntry.DEFAULT_NAME, defaults);
+
while ((line = br.readLine()) != null) {
line = line.trim();
- if (line.length() == 0 || line.startsWith("#")) //$NON-NLS-1$
+ if (line.isEmpty() || line.startsWith("#")) { //$NON-NLS-1$
continue;
-
- final String[] parts = line.split("[ \t]*[= \t]", 2); //$NON-NLS-1$
- final String keyword = parts[0].trim();
- final String argValue = parts[1].trim();
+ }
+ String[] parts = line.split("[ \t]*[= \t]", 2); //$NON-NLS-1$
+ // Although the ssh-config man page doesn't say so, the OpenSSH
+ // parser does allow quoted keywords.
+ String keyword = dequote(parts[0].trim());
+ // man 5 ssh-config says lines have the format "keyword arguments",
+ // with no indication that arguments were optional. However, let's
+ // not fail on missing arguments. See bug 444319.
+ String argValue = parts.length > 1 ? parts[1].trim() : ""; //$NON-NLS-1$
if (StringUtils.equalsIgnoreCase("Host", keyword)) { //$NON-NLS-1$
current.clear();
- for (final String pattern : argValue.split("[ \t]")) { //$NON-NLS-1$
- final String name = dequote(pattern);
- Host c = m.get(name);
+ for (String name : HostEntry.parseList(argValue)) {
+ if (name == null || name.isEmpty()) {
+ // null should not occur, but better be safe than sorry.
+ continue;
+ }
+ HostEntry c = m.get(name);
if (c == null) {
- c = new Host();
+ c = new HostEntry();
m.put(name, c);
}
current.add(c);
@@ -206,57 +285,18 @@ public class OpenSshConfig {
if (current.isEmpty()) {
// We received an option outside of a Host block. We
// don't know who this should match against, so skip.
- //
continue;
}
- if (StringUtils.equalsIgnoreCase("HostName", keyword)) { //$NON-NLS-1$
- for (final Host c : current)
- if (c.hostName == null)
- c.hostName = dequote(argValue);
- } else if (StringUtils.equalsIgnoreCase("User", keyword)) { //$NON-NLS-1$
- for (final Host c : current)
- if (c.user == null)
- c.user = dequote(argValue);
- } else if (StringUtils.equalsIgnoreCase("Port", keyword)) { //$NON-NLS-1$
- try {
- final int port = Integer.parseInt(dequote(argValue));
- for (final Host c : current)
- if (c.port == 0)
- c.port = port;
- } catch (NumberFormatException nfe) {
- // Bad port number. Don't set it.
- }
- } else if (StringUtils.equalsIgnoreCase("IdentityFile", keyword)) { //$NON-NLS-1$
- for (final Host c : current)
- if (c.identityFile == null)
- c.identityFile = toFile(dequote(argValue));
- } else if (StringUtils.equalsIgnoreCase(
- "PreferredAuthentications", keyword)) { //$NON-NLS-1$
- for (final Host c : current)
- if (c.preferredAuthentications == null)
- c.preferredAuthentications = nows(dequote(argValue));
- } else if (StringUtils.equalsIgnoreCase("BatchMode", keyword)) { //$NON-NLS-1$
- for (final Host c : current)
- if (c.batchMode == null)
- c.batchMode = yesno(dequote(argValue));
- } else if (StringUtils.equalsIgnoreCase(
- "StrictHostKeyChecking", keyword)) { //$NON-NLS-1$
- String value = dequote(argValue);
- for (final Host c : current)
- if (c.strictHostKeyChecking == null)
- c.strictHostKeyChecking = value;
- } else if (StringUtils.equalsIgnoreCase(
- "ConnectionAttempts", keyword)) { //$NON-NLS-1$
- try {
- final int connectionAttempts = Integer.parseInt(dequote(argValue));
- if (connectionAttempts > 0) {
- for (final Host c : current)
- if (c.connectionAttempts == 0)
- c.connectionAttempts = connectionAttempts;
- }
- } catch (NumberFormatException nfe) {
- // ignore bad values
+ if (HostEntry.isListKey(keyword)) {
+ List<String> args = HostEntry.parseList(argValue);
+ for (HostEntry entry : current) {
+ entry.setValue(keyword, args);
+ }
+ } else if (!argValue.isEmpty()) {
+ argValue = dequote(argValue);
+ for (HostEntry entry : current) {
+ entry.setValue(keyword, argValue);
}
}
}
@@ -264,23 +304,35 @@ public class OpenSshConfig {
return m;
}
- private static boolean isHostPattern(final String s) {
- return s.indexOf('*') >= 0 || s.indexOf('?') >= 0;
+ private static boolean isHostMatch(final String pattern,
+ final String name) {
+ if (pattern.startsWith("!")) { //$NON-NLS-1$
+ return !patternMatchesHost(pattern.substring(1), name);
+ } else {
+ return patternMatchesHost(pattern, name);
+ }
}
- private static boolean isHostMatch(final String pattern, final String name) {
- final FileNameMatcher fn;
- try {
- fn = new FileNameMatcher(pattern, null);
- } catch (InvalidPatternException e) {
- return false;
+ private static boolean patternMatchesHost(final String pattern,
+ final String name) {
+ if (pattern.indexOf('*') >= 0 || pattern.indexOf('?') >= 0) {
+ final FileNameMatcher fn;
+ try {
+ fn = new FileNameMatcher(pattern, null);
+ } catch (InvalidPatternException e) {
+ return false;
+ }
+ fn.append(name);
+ return fn.isMatch();
+ } else {
+ // Not a pattern but a full host name
+ return pattern.equals(name);
}
- fn.append(name);
- return fn.isMatch();
}
private static String dequote(final String value) {
- if (value.startsWith("\"") && value.endsWith("\"")) //$NON-NLS-1$ //$NON-NLS-2$
+ if (value.startsWith("\"") && value.endsWith("\"") //$NON-NLS-1$ //$NON-NLS-2$
+ && value.length() > 1)
return value.substring(1, value.length() - 1);
return value;
}
@@ -300,24 +352,453 @@ public class OpenSshConfig {
return Boolean.FALSE;
}
- private File toFile(final String path) {
- if (path.startsWith("~/")) //$NON-NLS-1$
+ private static File toFile(String path, File home) {
+ if (path.startsWith("~/")) { //$NON-NLS-1$
return new File(home, path.substring(2));
+ }
File ret = new File(path);
- if (ret.isAbsolute())
+ if (ret.isAbsolute()) {
return ret;
+ }
return new File(home, path);
}
+ private static int positive(final String value) {
+ if (value != null) {
+ try {
+ return Integer.parseUnsignedInt(value);
+ } catch (NumberFormatException e) {
+ // Ignore
+ }
+ }
+ return -1;
+ }
+
static String userName() {
return AccessController.doPrivileged(new PrivilegedAction<String>() {
@Override
public String run() {
- return System.getProperty("user.name"); //$NON-NLS-1$
+ return SystemReader.getInstance()
+ .getProperty(Constants.OS_USER_NAME_KEY);
}
});
}
+ private static class HostEntry implements ConfigRepository.Config {
+
+ /**
+ * "Host name" of the HostEntry for the default options before the first
+ * host block in a config file.
+ */
+ public static final String DEFAULT_NAME = ""; //$NON-NLS-1$
+
+ // See com.jcraft.jsch.OpenSSHConfig. Translates some command-line keys
+ // to ssh-config keys.
+ private static final Map<String, String> KEY_MAP = new HashMap<>();
+
+ static {
+ KEY_MAP.put("kex", "KexAlgorithms"); //$NON-NLS-1$//$NON-NLS-2$
+ KEY_MAP.put("server_host_key", "HostKeyAlgorithms"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("cipher.c2s", "Ciphers"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("cipher.s2c", "Ciphers"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("mac.c2s", "Macs"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("mac.s2c", "Macs"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("compression.s2c", "Compression"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("compression.c2s", "Compression"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("compression_level", "CompressionLevel"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("MaxAuthTries", "NumberOfPasswordPrompts"); //$NON-NLS-1$ //$NON-NLS-2$
+ }
+
+ /**
+ * Keys that can be specified multiple times, building up a list. (I.e.,
+ * those are the keys that do not follow the general rule of "first
+ * occurrence wins".)
+ */
+ private static final Set<String> MULTI_KEYS = new HashSet<>();
+
+ static {
+ MULTI_KEYS.add("CERTIFICATEFILE"); //$NON-NLS-1$
+ MULTI_KEYS.add("IDENTITYFILE"); //$NON-NLS-1$
+ MULTI_KEYS.add("LOCALFORWARD"); //$NON-NLS-1$
+ MULTI_KEYS.add("REMOTEFORWARD"); //$NON-NLS-1$
+ MULTI_KEYS.add("SENDENV"); //$NON-NLS-1$
+ }
+
+ /**
+ * Keys that take a whitespace-separated list of elements as argument.
+ * Because the dequote-handling is different, we must handle those in
+ * the parser. There are a few other keys that take comma-separated
+ * lists as arguments, but for the parser those are single arguments
+ * that must be quoted if they contain whitespace, and taking them apart
+ * is the responsibility of the user of those keys.
+ */
+ private static final Set<String> LIST_KEYS = new HashSet<>();
+
+ static {
+ LIST_KEYS.add("CANONICALDOMAINS"); //$NON-NLS-1$
+ LIST_KEYS.add("GLOBALKNOWNHOSTSFILE"); //$NON-NLS-1$
+ LIST_KEYS.add("SENDENV"); //$NON-NLS-1$
+ LIST_KEYS.add("USERKNOWNHOSTSFILE"); //$NON-NLS-1$
+ }
+
+ private Map<String, String> options;
+
+ private Map<String, List<String>> multiOptions;
+
+ private Map<String, List<String>> listOptions;
+
+ @Override
+ public String getHostname() {
+ return getValue("HOSTNAME"); //$NON-NLS-1$
+ }
+
+ @Override
+ public String getUser() {
+ return getValue("USER"); //$NON-NLS-1$
+ }
+
+ @Override
+ public int getPort() {
+ return positive(getValue("PORT")); //$NON-NLS-1$
+ }
+
+ private static String mapKey(String key) {
+ String k = KEY_MAP.get(key);
+ if (k == null) {
+ k = key;
+ }
+ return k.toUpperCase(Locale.ROOT);
+ }
+
+ private String findValue(String key) {
+ String k = mapKey(key);
+ String result = options != null ? options.get(k) : null;
+ if (result == null) {
+ // Also check the list and multi options. Modern OpenSSH treats
+ // UserKnownHostsFile and GlobalKnownHostsFile as list-valued,
+ // and so does this parser. Jsch 0.1.54 in general doesn't know
+ // about list-valued options (it _does_ know multi-valued
+ // options, though), and will ask for a single value for such
+ // options.
+ //
+ // Let's be lenient and return at least the first value from
+ // a list-valued or multi-valued key for which Jsch asks for a
+ // single value.
+ List<String> values = listOptions != null ? listOptions.get(k)
+ : null;
+ if (values == null) {
+ values = multiOptions != null ? multiOptions.get(k) : null;
+ }
+ if (values != null && !values.isEmpty()) {
+ result = values.get(0);
+ }
+ }
+ return result;
+ }
+
+ @Override
+ public String getValue(String key) {
+ // See com.jcraft.jsch.OpenSSHConfig.MyConfig.getValue() for this
+ // special case.
+ if (key.equals("compression.s2c") //$NON-NLS-1$
+ || key.equals("compression.c2s")) { //$NON-NLS-1$
+ String foo = findValue(key);
+ if (foo == null || foo.equals("no")) { //$NON-NLS-1$
+ return "none,zlib@openssh.com,zlib"; //$NON-NLS-1$
+ }
+ return "zlib@openssh.com,zlib,none"; //$NON-NLS-1$
+ }
+ return findValue(key);
+ }
+
+ @Override
+ public String[] getValues(String key) {
+ String k = mapKey(key);
+ List<String> values = listOptions != null ? listOptions.get(k)
+ : null;
+ if (values == null) {
+ values = multiOptions != null ? multiOptions.get(k) : null;
+ }
+ if (values == null || values.isEmpty()) {
+ return new String[0];
+ }
+ return values.toArray(new String[values.size()]);
+ }
+
+ public void setValue(String key, String value) {
+ String k = key.toUpperCase(Locale.ROOT);
+ if (MULTI_KEYS.contains(k)) {
+ if (multiOptions == null) {
+ multiOptions = new HashMap<>();
+ }
+ List<String> values = multiOptions.get(k);
+ if (values == null) {
+ values = new ArrayList<>(4);
+ multiOptions.put(k, values);
+ }
+ values.add(value);
+ } else {
+ if (options == null) {
+ options = new HashMap<>();
+ }
+ if (!options.containsKey(k)) {
+ options.put(k, value);
+ }
+ }
+ }
+
+ public void setValue(String key, List<String> values) {
+ if (values.isEmpty()) {
+ // Can occur only on a missing argument: ignore.
+ return;
+ }
+ String k = key.toUpperCase(Locale.ROOT);
+ // Check multi-valued keys first; because of the replacement
+ // strategy, they must take precedence over list-valued keys
+ // which always follow the "first occurrence wins" strategy.
+ //
+ // Note that SendEnv is a multi-valued list-valued key. (It's
+ // rather immaterial for JGit, though.)
+ if (MULTI_KEYS.contains(k)) {
+ if (multiOptions == null) {
+ multiOptions = new HashMap<>(2 * MULTI_KEYS.size());
+ }
+ List<String> items = multiOptions.get(k);
+ if (items == null) {
+ items = new ArrayList<>(values);
+ multiOptions.put(k, items);
+ } else {
+ items.addAll(values);
+ }
+ } else {
+ if (listOptions == null) {
+ listOptions = new HashMap<>(2 * LIST_KEYS.size());
+ }
+ if (!listOptions.containsKey(k)) {
+ listOptions.put(k, values);
+ }
+ }
+ }
+
+ public static boolean isListKey(String key) {
+ return LIST_KEYS.contains(key.toUpperCase(Locale.ROOT));
+ }
+
+ /**
+ * Splits the argument into a list of whitespace-separated elements.
+ * Elements containing whitespace must be quoted and will be de-quoted.
+ *
+ * @param argument
+ * argument part of the configuration line as read from the
+ * config file
+ * @return a {@link List} of elements, possibly empty and possibly
+ * containing empty elements
+ */
+ public static List<String> parseList(String argument) {
+ List<String> result = new ArrayList<>(4);
+ int start = 0;
+ int length = argument.length();
+ while (start < length) {
+ // Skip whitespace
+ if (Character.isSpaceChar(argument.charAt(start))) {
+ start++;
+ continue;
+ }
+ if (argument.charAt(start) == '"') {
+ int stop = argument.indexOf('"', ++start);
+ if (stop < start) {
+ // No closing double quote: skip
+ break;
+ }
+ result.add(argument.substring(start, stop));
+ start = stop + 1;
+ } else {
+ int stop = start + 1;
+ while (stop < length
+ && !Character.isSpaceChar(argument.charAt(stop))) {
+ stop++;
+ }
+ result.add(argument.substring(start, stop));
+ start = stop + 1;
+ }
+ }
+ return result;
+ }
+
+ protected void merge(HostEntry entry) {
+ if (entry == null) {
+ // Can occur if we could not read the config file
+ return;
+ }
+ if (entry.options != null) {
+ if (options == null) {
+ options = new HashMap<>();
+ }
+ for (Map.Entry<String, String> item : entry.options
+ .entrySet()) {
+ if (!options.containsKey(item.getKey())) {
+ options.put(item.getKey(), item.getValue());
+ }
+ }
+ }
+ if (entry.listOptions != null) {
+ if (listOptions == null) {
+ listOptions = new HashMap<>(2 * LIST_KEYS.size());
+ }
+ for (Map.Entry<String, List<String>> item : entry.listOptions
+ .entrySet()) {
+ if (!listOptions.containsKey(item.getKey())) {
+ listOptions.put(item.getKey(), item.getValue());
+ }
+ }
+
+ }
+ if (entry.multiOptions != null) {
+ if (multiOptions == null) {
+ multiOptions = new HashMap<>(2 * MULTI_KEYS.size());
+ }
+ for (Map.Entry<String, List<String>> item : entry.multiOptions
+ .entrySet()) {
+ List<String> values = multiOptions.get(item.getKey());
+ if (values == null) {
+ values = new ArrayList<>(item.getValue());
+ multiOptions.put(item.getKey(), values);
+ } else {
+ values.addAll(item.getValue());
+ }
+ }
+ }
+ }
+
+ private class Replacer {
+ private final Map<Character, String> replacements = new HashMap<>();
+
+ public Replacer(String originalHostName, File home) {
+ replacements.put(Character.valueOf('%'), "%"); //$NON-NLS-1$
+ replacements.put(Character.valueOf('d'), home.getPath());
+ // Needs special treatment...
+ String host = getValue("HOSTNAME"); //$NON-NLS-1$
+ replacements.put(Character.valueOf('h'), originalHostName);
+ if (host != null && host.indexOf('%') >= 0) {
+ host = substitute(host, "h"); //$NON-NLS-1$
+ options.put("HOSTNAME", host); //$NON-NLS-1$
+ }
+ if (host != null) {
+ replacements.put(Character.valueOf('h'), host);
+ }
+ String localhost = SystemReader.getInstance().getHostname();
+ replacements.put(Character.valueOf('l'), localhost);
+ int period = localhost.indexOf('.');
+ if (period > 0) {
+ localhost = localhost.substring(0, period);
+ }
+ replacements.put(Character.valueOf('L'), localhost);
+ replacements.put(Character.valueOf('n'), originalHostName);
+ replacements.put(Character.valueOf('p'), getValue("PORT")); //$NON-NLS-1$
+ replacements.put(Character.valueOf('r'), getValue("USER")); //$NON-NLS-1$
+ replacements.put(Character.valueOf('u'), userName());
+ replacements.put(Character.valueOf('C'),
+ substitute("%l%h%p%r", "hlpr")); //$NON-NLS-1$ //$NON-NLS-2$
+ }
+
+ public String substitute(String input, String allowed) {
+ if (input == null || input.length() <= 1
+ || input.indexOf('%') < 0) {
+ return input;
+ }
+ StringBuilder builder = new StringBuilder();
+ int start = 0;
+ int length = input.length();
+ while (start < length) {
+ int percent = input.indexOf('%', start);
+ if (percent < 0 || percent + 1 >= length) {
+ builder.append(input.substring(start));
+ break;
+ }
+ String replacement = null;
+ char ch = input.charAt(percent + 1);
+ if (ch == '%' || allowed.indexOf(ch) >= 0) {
+ replacement = replacements.get(Character.valueOf(ch));
+ }
+ if (replacement == null) {
+ builder.append(input.substring(start, percent + 2));
+ } else {
+ builder.append(input.substring(start, percent))
+ .append(replacement);
+ }
+ start = percent + 2;
+ }
+ return builder.toString();
+ }
+ }
+
+ private List<String> substitute(List<String> values, String allowed,
+ Replacer r) {
+ List<String> result = new ArrayList<>(values.size());
+ for (String value : values) {
+ result.add(r.substitute(value, allowed));
+ }
+ return result;
+ }
+
+ private List<String> replaceTilde(List<String> values, File home) {
+ List<String> result = new ArrayList<>(values.size());
+ for (String value : values) {
+ result.add(toFile(value, home).getPath());
+ }
+ return result;
+ }
+
+ protected void substitute(String originalHostName, File home) {
+ Replacer r = new Replacer(originalHostName, home);
+ if (multiOptions != null) {
+ List<String> values = multiOptions.get("IDENTITYFILE"); //$NON-NLS-1$
+ if (values != null) {
+ values = substitute(values, "dhlru", r); //$NON-NLS-1$
+ values = replaceTilde(values, home);
+ multiOptions.put("IDENTITYFILE", values); //$NON-NLS-1$
+ }
+ values = multiOptions.get("CERTIFICATEFILE"); //$NON-NLS-1$
+ if (values != null) {
+ values = substitute(values, "dhlru", r); //$NON-NLS-1$
+ values = replaceTilde(values, home);
+ multiOptions.put("CERTIFICATEFILE", values); //$NON-NLS-1$
+ }
+ }
+ if (listOptions != null) {
+ List<String> values = listOptions.get("GLOBALKNOWNHOSTSFILE"); //$NON-NLS-1$
+ if (values != null) {
+ values = replaceTilde(values, home);
+ listOptions.put("GLOBALKNOWNHOSTSFILE", values); //$NON-NLS-1$
+ }
+ values = listOptions.get("USERKNOWNHOSTSFILE"); //$NON-NLS-1$
+ if (values != null) {
+ values = replaceTilde(values, home);
+ listOptions.put("USERKNOWNHOSTSFILE", values); //$NON-NLS-1$
+ }
+ }
+ if (options != null) {
+ // HOSTNAME already done in Replacer constructor
+ String value = options.get("IDENTITYAGENT"); //$NON-NLS-1$
+ if (value != null) {
+ value = r.substitute(value, "dhlru"); //$NON-NLS-1$
+ value = toFile(value, home).getPath();
+ options.put("IDENTITYAGENT", value); //$NON-NLS-1$
+ }
+ }
+ // Match is not implemented and would need to be done elsewhere
+ // anyway. ControlPath, LocalCommand, ProxyCommand, and
+ // RemoteCommand are not used by Jsch.
+ }
+
+ @Override
+ @SuppressWarnings("nls")
+ public String toString() {
+ return "HostEntry [options=" + options + ", multiOptions="
+ + multiOptions + ", listOptions=" + listOptions + "]";
+ }
+ }
+
/**
* Configuration of one "Host" block in the configuration file.
* <p>
@@ -330,8 +811,6 @@ public class OpenSshConfig {
* already merged into this block.
*/
public static class Host {
- boolean patternsApplied;
-
String hostName;
int port;
@@ -348,23 +827,18 @@ public class OpenSshConfig {
int connectionAttempts;
- void copyFrom(final Host src) {
- if (hostName == null)
- hostName = src.hostName;
- if (port == 0)
- port = src.port;
- if (identityFile == null)
- identityFile = src.identityFile;
- if (user == null)
- user = src.user;
- if (preferredAuthentications == null)
- preferredAuthentications = src.preferredAuthentications;
- if (batchMode == null)
- batchMode = src.batchMode;
- if (strictHostKeyChecking == null)
- strictHostKeyChecking = src.strictHostKeyChecking;
- if (connectionAttempts == 0)
- connectionAttempts = src.connectionAttempts;
+ private Config config;
+
+ /**
+ * Creates a new uninitialized {@link Host}.
+ */
+ public Host() {
+ // For API backwards compatibility with pre-4.9 JGit
+ }
+
+ Host(Config config, String hostName, File homeDir) {
+ this.config = config;
+ complete(hostName, homeDir);
}
/**
@@ -432,5 +906,78 @@ public class OpenSshConfig {
public int getConnectionAttempts() {
return connectionAttempts;
}
+
+
+ private void complete(String initialHostName, File homeDir) {
+ // Try to set values from the options.
+ hostName = config.getHostname();
+ user = config.getUser();
+ port = config.getPort();
+ connectionAttempts = positive(
+ config.getValue("ConnectionAttempts")); //$NON-NLS-1$
+ strictHostKeyChecking = config.getValue("StrictHostKeyChecking"); //$NON-NLS-1$
+ String value = config.getValue("BatchMode"); //$NON-NLS-1$
+ if (value != null) {
+ batchMode = yesno(value);
+ }
+ value = config.getValue("PreferredAuthentications"); //$NON-NLS-1$
+ if (value != null) {
+ preferredAuthentications = nows(value);
+ }
+ // Fill in defaults if still not set
+ if (hostName == null) {
+ hostName = initialHostName;
+ }
+ if (user == null) {
+ user = OpenSshConfig.userName();
+ }
+ if (port <= 0) {
+ port = OpenSshConfig.SSH_PORT;
+ }
+ if (connectionAttempts <= 0) {
+ connectionAttempts = 1;
+ }
+ String[] identityFiles = config.getValues("IdentityFile"); //$NON-NLS-1$
+ if (identityFiles != null && identityFiles.length > 0) {
+ identityFile = toFile(identityFiles[0], homeDir);
+ }
+ }
+
+ Config getConfig() {
+ return config;
+ }
+
+ @Override
+ @SuppressWarnings("nls")
+ public String toString() {
+ return "Host [hostName=" + hostName + ", port=" + port
+ + ", identityFile=" + identityFile + ", user=" + user
+ + ", preferredAuthentications=" + preferredAuthentications
+ + ", batchMode=" + batchMode + ", strictHostKeyChecking="
+ + strictHostKeyChecking + ", connectionAttempts="
+ + connectionAttempts + ", config=" + config + "]";
+ }
+ }
+
+ /**
+ * Retrieves the full {@link com.jcraft.jsch.ConfigRepository.Config Config}
+ * for the given host name. Should be called only by Jsch and tests.
+ *
+ * @param hostName
+ * to get the config for
+ * @return the configuration for the host
+ * @since 4.9
+ */
+ @Override
+ public Config getConfig(String hostName) {
+ Host host = lookup(hostName);
+ return host.getConfig();
+ }
+
+ @Override
+ @SuppressWarnings("nls")
+ public String toString() {
+ return "OpenSshConfig [home=" + home + ", configFile=" + configFile
+ + ", lastModified=" + lastModified + ", state=" + state + "]";
}
}
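
To illustrate what the rewritten parser now resolves, here is a hypothetical ~/.ssh/config and a sketch of the lookup; the file contents, the host alias and the expected values are invented, and the getters used on Host and on the ConfigRepository view are the ones shown in this patch.

    // Hypothetical ~/.ssh/config:
    //
    //   Host work
    //       HostName git.example.org
    //       User builder
    //       Port 2222
    //       IdentityFile ~/.ssh/%h_rsa
    //
    OpenSshConfig config = OpenSshConfig.get(FS.DETECTED);
    OpenSshConfig.Host host = config.lookup("work");
    host.getHostName();      // "git.example.org" (HostName wins over the alias)
    host.getPort();          // 2222
    host.getUser();          // "builder"
    host.getIdentityFile();  // ~/.ssh/git.example.org_rsa: %h substituted, ~ expanded
    // The same data is exposed to Jsch through the new ConfigRepository view:
    config.getConfig("work").getValue("HostName");  // "git.example.org"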
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java
index c82b3891b5..833d2114cf 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java
@@ -66,6 +66,7 @@ import org.eclipse.jgit.internal.storage.file.PackLock;
import org.eclipse.jgit.internal.storage.pack.BinaryDelta;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.BatchingProgressMonitor;
+import org.eclipse.jgit.lib.BlobObjectChecker;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.InflaterCache;
import org.eclipse.jgit.lib.MutableObjectId;
@@ -82,6 +83,7 @@ import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.util.BlockList;
import org.eclipse.jgit.util.IO;
+import org.eclipse.jgit.util.LongMap;
import org.eclipse.jgit.util.NB;
import org.eclipse.jgit.util.sha1.SHA1;
@@ -143,7 +145,7 @@ public abstract class PackParser {
private boolean expectDataAfterPackFooter;
- private long objectCount;
+ private long expectedObjectCount;
private PackedObjectInfo[] entries;
@@ -173,8 +175,8 @@ public abstract class PackParser {
private LongMap<UnresolvedDelta> baseByPos;
- /** Blobs whose contents need to be double-checked after indexing. */
- private BlockList<PackedObjectInfo> deferredCheckBlobs;
+ /** Objects that need to be double-checked for collision after indexing. */
+ private BlockList<PackedObjectInfo> collisionCheckObjs;
private MessageDigest packDigest;
@@ -525,15 +527,15 @@ public abstract class PackParser {
try {
readPackHeader();
- entries = new PackedObjectInfo[(int) objectCount];
+ entries = new PackedObjectInfo[(int) expectedObjectCount];
baseById = new ObjectIdOwnerMap<>();
baseByPos = new LongMap<>();
- deferredCheckBlobs = new BlockList<>();
+ collisionCheckObjs = new BlockList<>();
receiving.beginTask(JGitText.get().receivingObjects,
- (int) objectCount);
+ (int) expectedObjectCount);
try {
- for (int done = 0; done < objectCount; done++) {
+ for (int done = 0; done < expectedObjectCount; done++) {
indexOneObject();
receiving.update(1);
if (receiving.isCancelled())
@@ -545,32 +547,12 @@ public abstract class PackParser {
receiving.endTask();
}
- if (!deferredCheckBlobs.isEmpty())
- doDeferredCheckBlobs();
- if (deltaCount > 0) {
- if (resolving instanceof BatchingProgressMonitor) {
- ((BatchingProgressMonitor) resolving).setDelayStart(
- 1000,
- TimeUnit.MILLISECONDS);
- }
- resolving.beginTask(JGitText.get().resolvingDeltas, deltaCount);
- resolveDeltas(resolving);
- if (entryCount < objectCount) {
- if (!isAllowThin()) {
- throw new IOException(MessageFormat.format(
- JGitText.get().packHasUnresolvedDeltas,
- Long.valueOf(objectCount - entryCount)));
- }
-
- resolveDeltasWithExternalBases(resolving);
+ if (!collisionCheckObjs.isEmpty()) {
+ checkObjectCollision();
+ }
- if (entryCount < objectCount) {
- throw new IOException(MessageFormat.format(
- JGitText.get().packHasUnresolvedDeltas,
- Long.valueOf(objectCount - entryCount)));
- }
- }
- resolving.endTask();
+ if (deltaCount > 0) {
+ processDeltas(resolving);
}
packDigest = null;
@@ -593,6 +575,31 @@ public abstract class PackParser {
return null; // By default there is no locking.
}
+ private void processDeltas(ProgressMonitor resolving) throws IOException {
+ if (resolving instanceof BatchingProgressMonitor) {
+ ((BatchingProgressMonitor) resolving).setDelayStart(1000,
+ TimeUnit.MILLISECONDS);
+ }
+ resolving.beginTask(JGitText.get().resolvingDeltas, deltaCount);
+ resolveDeltas(resolving);
+ if (entryCount < expectedObjectCount) {
+ if (!isAllowThin()) {
+ throw new IOException(MessageFormat.format(
+ JGitText.get().packHasUnresolvedDeltas,
+ Long.valueOf(expectedObjectCount - entryCount)));
+ }
+
+ resolveDeltasWithExternalBases(resolving);
+
+ if (entryCount < expectedObjectCount) {
+ throw new IOException(MessageFormat.format(
+ JGitText.get().packHasUnresolvedDeltas,
+ Long.valueOf(expectedObjectCount - entryCount)));
+ }
+ }
+ resolving.endTask();
+ }
+
private void resolveDeltas(final ProgressMonitor progress)
throws IOException {
final int last = entryCount;
@@ -675,10 +682,14 @@ public abstract class PackParser {
objectDigest.digest(tempObjectId);
verifySafeObject(tempObjectId, type, visit.data);
+ if (isCheckObjectCollisions() && readCurs.has(tempObjectId)) {
+ checkObjectCollision(tempObjectId, type, visit.data);
+ }
PackedObjectInfo oe;
oe = newInfo(tempObjectId, visit.delta, visit.parent.id);
oe.setOffset(visit.delta.position);
+ oe.setType(type);
onInflatedObjectData(oe, type, visit.data);
addObjectAndTrack(oe);
visit.id = oe;
@@ -849,10 +860,9 @@ public abstract class PackParser {
visit.id = baseId;
final int typeCode = ldr.getType();
final PackedObjectInfo oe = newInfo(baseId, null, null);
-
+ oe.setType(typeCode);
if (onAppendBase(typeCode, visit.data, oe))
entries[entryCount++] = oe;
-
visit.nextChild = firstChildOf(oe);
resolveDeltas(visit.next(), typeCode,
new ObjectTypeAndSize(), progress);
@@ -873,7 +883,7 @@ public abstract class PackParser {
private void growEntries(int extraObjects) {
final PackedObjectInfo[] ne;
- ne = new PackedObjectInfo[(int) objectCount + extraObjects];
+ ne = new PackedObjectInfo[(int) expectedObjectCount + extraObjects];
System.arraycopy(entries, 0, ne, 0, entryCount);
entries = ne;
}
@@ -896,9 +906,9 @@ public abstract class PackParser {
if (vers != 2 && vers != 3)
throw new IOException(MessageFormat.format(
JGitText.get().unsupportedPackVersion, Long.valueOf(vers)));
- objectCount = NB.decodeUInt32(buf, p + 8);
+ final long objectCount = NB.decodeUInt32(buf, p + 8);
use(hdrln);
-
+ setExpectedObjectCount(objectCount);
onPackHeader(objectCount);
}
@@ -1031,24 +1041,29 @@ public abstract class PackParser {
objectDigest.update((byte) 0);
final byte[] data;
- boolean checkContentLater = false;
if (type == Constants.OBJ_BLOB) {
byte[] readBuffer = buffer();
InputStream inf = inflate(Source.INPUT, sz);
+ BlobObjectChecker checker = null;
+ if (objCheck != null) {
+ checker = objCheck.newBlobObjectChecker();
+ }
+ if (checker == null) {
+ checker = BlobObjectChecker.NULL_CHECKER;
+ }
long cnt = 0;
while (cnt < sz) {
int r = inf.read(readBuffer);
if (r <= 0)
break;
objectDigest.update(readBuffer, 0, r);
+ checker.update(readBuffer, 0, r);
cnt += r;
}
inf.close();
objectDigest.digest(tempObjectId);
- checkContentLater = isCheckObjectCollisions()
- && readCurs.has(tempObjectId);
+ checker.endBlob(tempObjectId);
data = null;
-
} else {
data = inflateAndReturn(Source.INPUT, sz);
objectDigest.update(data);
@@ -1058,16 +1073,32 @@ public abstract class PackParser {
PackedObjectInfo obj = newInfo(tempObjectId, null, null);
obj.setOffset(pos);
+ obj.setType(type);
onEndWholeObject(obj);
if (data != null)
onInflatedObjectData(obj, type, data);
addObjectAndTrack(obj);
- if (checkContentLater)
- deferredCheckBlobs.add(obj);
+
+ if (isCheckObjectCollisions()) {
+ collisionCheckObjs.add(obj);
+ }
}
- private void verifySafeObject(final AnyObjectId id, final int type,
- final byte[] data) throws IOException {
+ /**
+ * Verify the integrity of the object.
+ *
+ * @param id
+ * identity of the object to be checked.
+ * @param type
+ * the type of the object.
+ * @param data
+ * raw content of the object.
+ * @throws CorruptObjectException
+ * if the object is corrupt
+ * @since 4.9
+ */
+ protected void verifySafeObject(final AnyObjectId id, final int type,
+ final byte[] data) throws CorruptObjectException {
if (objCheck != null) {
try {
objCheck.check(id, type, data);
@@ -1075,68 +1106,76 @@ public abstract class PackParser {
if (e.getErrorType() != null) {
throw e;
}
- throw new CorruptObjectException(MessageFormat.format(
- JGitText.get().invalidObject,
- Constants.typeString(type),
- readCurs.abbreviate(id, 10).name(),
- e.getMessage()), e);
+ throw new CorruptObjectException(
+ MessageFormat.format(JGitText.get().invalidObject,
+ Constants.typeString(type), id.name(),
+ e.getMessage()),
+ e);
}
}
+ }
- if (isCheckObjectCollisions()) {
- try {
- final ObjectLoader ldr = readCurs.open(id, type);
- final byte[] existingData = ldr.getCachedBytes(data.length);
- if (!Arrays.equals(data, existingData)) {
- throw new IOException(MessageFormat.format(
- JGitText.get().collisionOn, id.name()));
- }
- } catch (MissingObjectException notLocal) {
- // This is OK, we don't have a copy of the object locally
- // but the API throws when we try to read it as usually its
- // an error to read something that doesn't exist.
+ private void checkObjectCollision() throws IOException {
+ for (PackedObjectInfo obj : collisionCheckObjs) {
+ if (!readCurs.has(obj)) {
+ continue;
}
+ checkObjectCollision(obj);
}
}
- private void doDeferredCheckBlobs() throws IOException {
+ private void checkObjectCollision(PackedObjectInfo obj)
+ throws IOException {
+ ObjectTypeAndSize info = openDatabase(obj, new ObjectTypeAndSize());
final byte[] readBuffer = buffer();
final byte[] curBuffer = new byte[readBuffer.length];
- ObjectTypeAndSize info = new ObjectTypeAndSize();
-
- for (PackedObjectInfo obj : deferredCheckBlobs) {
- info = openDatabase(obj, info);
-
- if (info.type != Constants.OBJ_BLOB)
+ long sz = info.size;
+ InputStream pck = null;
+ try (ObjectStream cur = readCurs.open(obj, info.type).openStream()) {
+ if (cur.getSize() != sz) {
throw new IOException(MessageFormat.format(
- JGitText.get().unknownObjectType,
- Integer.valueOf(info.type)));
-
- ObjectStream cur = readCurs.open(obj, info.type).openStream();
- try {
- long sz = info.size;
- if (cur.getSize() != sz)
- throw new IOException(MessageFormat.format(
- JGitText.get().collisionOn, obj.name()));
- InputStream pck = inflate(Source.DATABASE, sz);
- while (0 < sz) {
- int n = (int) Math.min(readBuffer.length, sz);
- IO.readFully(cur, curBuffer, 0, n);
- IO.readFully(pck, readBuffer, 0, n);
- for (int i = 0; i < n; i++) {
- if (curBuffer[i] != readBuffer[i])
- throw new IOException(MessageFormat.format(JGitText
- .get().collisionOn, obj.name()));
+ JGitText.get().collisionOn, obj.name()));
+ }
+ pck = inflate(Source.DATABASE, sz);
+ while (0 < sz) {
+ int n = (int) Math.min(readBuffer.length, sz);
+ IO.readFully(cur, curBuffer, 0, n);
+ IO.readFully(pck, readBuffer, 0, n);
+ for (int i = 0; i < n; i++) {
+ if (curBuffer[i] != readBuffer[i]) {
+ throw new IOException(MessageFormat.format(JGitText
+ .get().collisionOn, obj.name()));
}
- sz -= n;
}
+ sz -= n;
+ }
+ } catch (MissingObjectException notLocal) {
+ // This is OK; we don't have a copy of the object locally,
+ // but the API throws when we try to read it, as it is usually
+ // an error to read something that doesn't exist.
+ } finally {
+ if (pck != null) {
pck.close();
- } finally {
- cur.close();
}
}
}
+ private void checkObjectCollision(AnyObjectId obj, int type, byte[] data)
+ throws IOException {
+ try {
+ final ObjectLoader ldr = readCurs.open(obj, type);
+ final byte[] existingData = ldr.getCachedBytes(data.length);
+ if (!Arrays.equals(data, existingData)) {
+ throw new IOException(MessageFormat.format(
+ JGitText.get().collisionOn, obj.name()));
+ }
+ } catch (MissingObjectException notLocal) {
+ // This is OK; we don't have a copy of the object locally,
+ // but the API throws when we try to read it, as it is usually
+ // an error to read something that doesn't exist.
+ }
+ }
+
/** @return current position of the input stream being parsed. */
private long streamPosition() {
return bBase + bOffset;
@@ -1250,6 +1289,23 @@ public abstract class PackParser {
}
/**
+ * Set the expected number of objects in the pack stream.
+ * <p>
+ * The object count in the pack header is not always correct for some Dfs
+ * pack files; e.g. an INSERT pack always assumes 1 object in the header,
+ * since the actual object count is unknown when the pack is written.
+ * <p>
+ * If an external implementation wants to override the expected object
+ * count, it should call this method during {@link #onPackHeader(long)}.
+ *
+ * @param expectedObjectCount
+ * the expected number of objects in the pack stream.
+ * @since 4.9
+ */
+ protected void setExpectedObjectCount(long expectedObjectCount) {
+ this.expectedObjectCount = expectedObjectCount;
+ }
+
+ /**
* Store bytes received from the raw stream.
* <p>
* This method is invoked during {@link #parse(ProgressMonitor)} as data is
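The blob handling above streams inflated blob bytes through a BlobObjectChecker obtained from ObjectChecker.newBlobObjectChecker(), falling back to BlobObjectChecker.NULL_CHECKER when none is supplied. A minimal sketch of a custom checker wired in through that hook (the MaxBlobSizeChecker class and its size limit are invented for illustration; this is not part of the patch):

    import org.eclipse.jgit.errors.CorruptObjectException;
    import org.eclipse.jgit.lib.AnyObjectId;
    import org.eclipse.jgit.lib.BlobObjectChecker;
    import org.eclipse.jgit.lib.ObjectChecker;

    class MaxBlobSizeChecker extends ObjectChecker {
        private final long maxBytes;

        MaxBlobSizeChecker(long maxBytes) {
            this.maxBytes = maxBytes;
        }

        @Override
        public BlobObjectChecker newBlobObjectChecker() {
            return new BlobObjectChecker() {
                private long seen;

                @Override
                public void update(byte[] in, int offset, int len) {
                    // Called by PackParser for each inflated chunk; the blob
                    // content itself is never kept in memory.
                    seen += len;
                }

                @Override
                public void endBlob(AnyObjectId id)
                        throws CorruptObjectException {
                    if (seen > maxBytes) {
                        throw new CorruptObjectException("blob " + id.name()
                                + " exceeds " + maxBytes + " bytes");
                    }
                }
            };
        }
    }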
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackedObjectInfo.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackedObjectInfo.java
index 6da1c578c2..381c22893b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackedObjectInfo.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackedObjectInfo.java
@@ -45,6 +45,7 @@
package org.eclipse.jgit.transport;
import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectIdOwnerMap;
/**
@@ -59,6 +60,8 @@ public class PackedObjectInfo extends ObjectIdOwnerMap.Entry {
private int crc;
+ private int type = Constants.OBJ_BAD;
+
PackedObjectInfo(final long headerOffset, final int packedCRC,
final AnyObjectId id) {
super(id);
@@ -112,4 +115,24 @@ public class PackedObjectInfo extends ObjectIdOwnerMap.Entry {
public void setCRC(final int crc) {
this.crc = crc;
}
+
+ /**
+ * @return the object type. The default type is OBJ_BAD, which is
+ * considered an unknown or invalid type.
+ * @since 4.9
+ */
+ public int getType() {
+ return type;
+ }
+
+ /**
+ * Record the object type if applicable.
+ *
+ * @param type
+ * the object type.
+ * @since 4.9
+ */
+ public void setType(int type) {
+ this.type = type;
+ }
}
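A tiny usage sketch of the new type accessor (the helper class below is hypothetical): OBJ_BAD is the sentinel meaning setType() was never called, so callers can tell recorded types apart from unknown ones.

    import org.eclipse.jgit.lib.Constants;
    import org.eclipse.jgit.transport.PackedObjectInfo;

    class PackedObjectInfoTypeDemo {
        static boolean hasRecordedType(PackedObjectInfo info) {
            // Constants.OBJ_BAD is the default; any other value was set
            // explicitly via setType().
            return info.getType() != Constants.OBJ_BAD;
        }
    }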
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PushConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PushConfig.java
new file mode 100644
index 0000000000..bff9c71e0f
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PushConfig.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2017, David Pursehouse <david.pursehouse@gmail.com>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.transport;
+
+import org.eclipse.jgit.lib.Config;
+import org.eclipse.jgit.util.StringUtils;
+
+/**
+ * Push section of a Git configuration file.
+ *
+ * @since 4.9
+ */
+public class PushConfig {
+ /**
+ * Config values for push.recurseSubmodules.
+ */
+ public enum PushRecurseSubmodulesMode implements Config.ConfigEnum {
+ /**
+ * Verify that all submodule commits that changed in the revisions to be
+ * pushed are available on at least one remote of the submodule.
+ */
+ CHECK("check"), //$NON-NLS-1$
+
+ /**
+ * All submodules that changed in the revisions to be pushed will be
+ * pushed.
+ */
+ ON_DEMAND("on-demand"), //$NON-NLS-1$
+
+ /** Default behavior of ignoring submodules when pushing is retained. */
+ NO("false"); //$NON-NLS-1$
+
+ private final String configValue;
+
+ private PushRecurseSubmodulesMode(String configValue) {
+ this.configValue = configValue;
+ }
+
+ @Override
+ public String toConfigValue() {
+ return configValue;
+ }
+
+ @Override
+ public boolean matchConfigValue(String s) {
+ if (StringUtils.isEmptyOrNull(s)) {
+ return false;
+ }
+ s = s.replace('-', '_');
+ return name().equalsIgnoreCase(s)
+ || configValue.equalsIgnoreCase(s);
+ }
+ }
+}
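Because PushRecurseSubmodulesMode implements Config.ConfigEnum, it can be read straight from a configuration with Config#getEnum; matchConfigValue() accepts "check", "on-demand" (or "on_demand") and "false". A small sketch, with the demo class and config text invented for illustration:

    import org.eclipse.jgit.errors.ConfigInvalidException;
    import org.eclipse.jgit.lib.Config;
    import org.eclipse.jgit.transport.PushConfig.PushRecurseSubmodulesMode;

    class PushConfigDemo {
        static PushRecurseSubmodulesMode read() throws ConfigInvalidException {
            Config cfg = new Config();
            cfg.fromText("[push]\n\trecurseSubmodules = on-demand\n");
            // Returns ON_DEMAND; falls back to NO when the key is absent.
            return cfg.getEnum(PushRecurseSubmodulesMode.values(), "push",
                    null, "recurseSubmodules", PushRecurseSubmodulesMode.NO);
        }
    }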
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceiveCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceiveCommand.java
index 2b21c4a8fe..e9681b34c7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceiveCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceiveCommand.java
@@ -52,6 +52,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
+import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
@@ -190,6 +191,20 @@ public class ReceiveCommand {
}
}
+ /**
+ * Check whether a command failed because the transaction was aborted.
+ *
+ * @param cmd
+ * command.
+ * @return whether the command failed because the transaction was aborted,
+ * as in {@link #abort(Iterable)}.
+ * @since 4.9
+ */
+ public static boolean isTransactionAborted(ReceiveCommand cmd) {
+ return cmd.getResult() == REJECTED_OTHER_REASON
+ && cmd.getMessage().equals(JGitText.get().transactionAborted);
+ }
+
private final ObjectId oldId;
private final ObjectId newId;
@@ -204,13 +219,21 @@ public class ReceiveCommand {
private String message;
+ private boolean customRefLog;
+
+ private String refLogMessage;
+
+ private boolean refLogIncludeResult;
+
+ private Boolean forceRefLog;
+
private boolean typeIsCorrect;
/**
* Create a new command for {@link BaseReceivePack}.
*
* @param oldId
- * the old object id; must not be null. Use
+ * the expected old object id; must not be null. Use
* {@link ObjectId#zeroId()} to indicate a ref creation.
* @param newId
* the new object id; must not be null. Use
@@ -220,15 +243,23 @@ public class ReceiveCommand {
*/
public ReceiveCommand(final ObjectId oldId, final ObjectId newId,
final String name) {
+ if (oldId == null) {
+ throw new IllegalArgumentException(JGitText.get().oldIdMustNotBeNull);
+ }
+ if (newId == null) {
+ throw new IllegalArgumentException(JGitText.get().newIdMustNotBeNull);
+ }
this.oldId = oldId;
this.newId = newId;
this.name = name;
type = Type.UPDATE;
- if (ObjectId.zeroId().equals(oldId))
+ if (ObjectId.zeroId().equals(oldId)) {
type = Type.CREATE;
- if (ObjectId.zeroId().equals(newId))
+ }
+ if (ObjectId.zeroId().equals(newId)) {
type = Type.DELETE;
+ }
}
/**
@@ -243,14 +274,45 @@ public class ReceiveCommand {
* @param name
* name of the ref being affected.
* @param type
- * type of the command.
+ * type of the command. Must be {@link Type#CREATE} if {@code
+ * oldId} is zero, or {@link Type#DELETE} if {@code newId} is zero.
* @since 2.0
*/
public ReceiveCommand(final ObjectId oldId, final ObjectId newId,
final String name, final Type type) {
+ if (oldId == null) {
+ throw new IllegalArgumentException(JGitText.get().oldIdMustNotBeNull);
+ }
+ if (newId == null) {
+ throw new IllegalArgumentException(JGitText.get().newIdMustNotBeNull);
+ }
this.oldId = oldId;
this.newId = newId;
this.name = name;
+ switch (type) {
+ case CREATE:
+ if (!ObjectId.zeroId().equals(oldId)) {
+ throw new IllegalArgumentException(
+ JGitText.get().createRequiresZeroOldId);
+ }
+ break;
+ case DELETE:
+ if (!ObjectId.zeroId().equals(newId)) {
+ throw new IllegalArgumentException(
+ JGitText.get().deleteRequiresZeroNewId);
+ }
+ break;
+ case UPDATE:
+ case UPDATE_NONFASTFORWARD:
+ if (ObjectId.zeroId().equals(newId)
+ || ObjectId.zeroId().equals(oldId)) {
+ throw new IllegalArgumentException(
+ JGitText.get().updateRequiresOldIdAndNewId);
+ }
+ break;
+ default:
+ throw new IllegalStateException(JGitText.get().enumValueNotSupported0);
+ }
this.type = type;
}
@@ -290,6 +352,116 @@ public class ReceiveCommand {
}
/**
+ * Set the message to include in the reflog.
+ * <p>
+ * Overrides the default set by {@code setRefLogMessage} on any containing
+ * {@link org.eclipse.jgit.lib.BatchRefUpdate}.
+ *
+ * @param msg
+ * the message to describe this change. If null and appendStatus is
+ * false, the reflog will not be updated.
+ * @param appendStatus
+ * true if the status of the ref change (fast-forward or
+ * forced-update) should be appended to the user supplied message.
+ * @since 4.9
+ */
+ public void setRefLogMessage(String msg, boolean appendStatus) {
+ customRefLog = true;
+ if (msg == null && !appendStatus) {
+ disableRefLog();
+ } else if (msg == null && appendStatus) {
+ refLogMessage = ""; //$NON-NLS-1$
+ refLogIncludeResult = true;
+ } else {
+ refLogMessage = msg;
+ refLogIncludeResult = appendStatus;
+ }
+ }
+
+ /**
+ * Don't record this update in the ref's associated reflog.
+ * <p>
+ * Equivalent to {@code setRefLogMessage(null, false)}.
+ *
+ * @since 4.9
+ */
+ public void disableRefLog() {
+ customRefLog = true;
+ refLogMessage = null;
+ refLogIncludeResult = false;
+ }
+
+ /**
+ * Force writing a reflog for the updated ref.
+ *
+ * @param force whether to force.
+ * @since 4.9
+ */
+ public void setForceRefLog(boolean force) {
+ forceRefLog = Boolean.valueOf(force);
+ }
+
+ /**
+ * Check whether this command has a custom reflog message setting that should
+ * override defaults in any containing
+ * {@link org.eclipse.jgit.lib.BatchRefUpdate}.
+ * <p>
+ * Does not take into account whether {@link #setForceRefLog(boolean)} has
+ * been called.
+ *
+ * @return whether a custom reflog is set.
+ * @since 4.9
+ */
+ public boolean hasCustomRefLog() {
+ return customRefLog;
+ }
+
+ /**
+ * Check whether log has been disabled by {@link #disableRefLog()}.
+ *
+ * @return true if disabled.
+ * @since 4.9
+ */
+ public boolean isRefLogDisabled() {
+ return refLogMessage == null;
+ }
+
+ /**
+ * Get the message to include in the reflog.
+ *
+ * @return message the caller wants to include in the reflog; null if the
+ * update should not be logged.
+ * @since 4.9
+ */
+ @Nullable
+ public String getRefLogMessage() {
+ return refLogMessage;
+ }
+
+ /**
+ * Check whether the reflog message should include the result of the update,
+ * such as fast-forward or force-update.
+ *
+ * @return true if the message should include the result.
+ * @since 4.9
+ */
+ public boolean isRefLogIncludingResult() {
+ return refLogIncludeResult;
+ }
+
+ /**
+ * Check whether the reflog should be written regardless of repo defaults.
+ *
+ * @return whether force writing is enabled; null if
+ * {@link #setForceRefLog(boolean)} was never called.
+ * @since 4.9
+ */
+ @Nullable
+ public Boolean isForceRefLog() {
+ return forceRefLog;
+ }
+
+ /**
* Set the status of this command.
*
* @param s
@@ -355,6 +527,7 @@ public class ReceiveCommand {
try {
final RefUpdate ru = rp.getRepository().updateRef(getRefName());
ru.setRefLogIdent(rp.getRefLogIdent());
+ ru.setRefLogMessage(refLogMessage, refLogIncludeResult);
switch (getType()) {
case DELETE:
if (!ObjectId.zeroId().equals(getOldId())) {
@@ -428,6 +601,14 @@ public class ReceiveCommand {
setResult(Result.REJECTED_CURRENT_BRANCH);
break;
+ case REJECTED_MISSING_OBJECT:
+ setResult(Result.REJECTED_MISSING_OBJECT);
+ break;
+
+ case REJECTED_OTHER_REASON:
+ setResult(Result.REJECTED_OTHER_REASON);
+ break;
+
default:
setResult(Result.REJECTED_OTHER_REASON, r.name());
break;
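A sketch of the new per-command reflog controls (the ref name, message and object id are placeholders): command-level settings override whatever the containing BatchRefUpdate would otherwise log.

    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.transport.ReceiveCommand;

    class RefLogDemo {
        static ReceiveCommand createTopicBranch(ObjectId tip) {
            ReceiveCommand cmd = new ReceiveCommand(ObjectId.zeroId(), tip,
                    "refs/heads/topic", ReceiveCommand.Type.CREATE);
            // Log a custom message and append the update result
            // (fast-forward / forced-update) to it.
            cmd.setRefLogMessage("import from bundle", true);
            // Write the reflog even if the repository default would skip it.
            cmd.setForceRefLog(true);
            return cmd;
        }
    }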
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteConfig.java
index d91684e2f5..c968ba37cf 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteConfig.java
@@ -170,38 +170,49 @@ public class RemoteConfig implements Serializable {
vlst = rc.getStringList(SECTION, name, KEY_URL);
Map<String, String> insteadOf = getReplacements(rc, KEY_INSTEADOF);
uris = new ArrayList<>(vlst.length);
- for (final String s : vlst)
+ for (final String s : vlst) {
uris.add(new URIish(replaceUri(s, insteadOf)));
-
- Map<String, String> pushInsteadOf = getReplacements(rc,
- KEY_PUSHINSTEADOF);
- vlst = rc.getStringList(SECTION, name, KEY_PUSHURL);
- pushURIs = new ArrayList<>(vlst.length);
- for (final String s : vlst)
- pushURIs.add(new URIish(replaceUri(s, pushInsteadOf)));
-
- vlst = rc.getStringList(SECTION, name, KEY_FETCH);
- fetch = new ArrayList<>(vlst.length);
- for (final String s : vlst)
- fetch.add(new RefSpec(s));
-
- vlst = rc.getStringList(SECTION, name, KEY_PUSH);
- push = new ArrayList<>(vlst.length);
- for (final String s : vlst)
- push.add(new RefSpec(s));
-
+ }
+ String[] plst = rc.getStringList(SECTION, name, KEY_PUSHURL);
+ pushURIs = new ArrayList<>(plst.length);
+ for (final String s : plst) {
+ pushURIs.add(new URIish(s));
+ }
+ if (pushURIs.isEmpty()) {
+ // The push URIs would default to the fetch URIs. If we have
+ // pushInsteadOf, we must supply the rewritten push URIs here.
+ Map<String, String> pushInsteadOf = getReplacements(rc,
+ KEY_PUSHINSTEADOF);
+ if (!pushInsteadOf.isEmpty()) {
+ for (String s : vlst) {
+ String replaced = replaceUri(s, pushInsteadOf);
+ if (!s.equals(replaced)) {
+ pushURIs.add(new URIish(replaced));
+ }
+ }
+ }
+ }
+ fetch = rc.getRefSpecs(SECTION, name, KEY_FETCH);
+ push = rc.getRefSpecs(SECTION, name, KEY_PUSH);
val = rc.getString(SECTION, name, KEY_UPLOADPACK);
- if (val == null)
+ if (val == null) {
val = DEFAULT_UPLOAD_PACK;
+ }
uploadpack = val;
val = rc.getString(SECTION, name, KEY_RECEIVEPACK);
- if (val == null)
+ if (val == null) {
val = DEFAULT_RECEIVE_PACK;
+ }
receivepack = val;
- val = rc.getString(SECTION, name, KEY_TAGOPT);
- tagopt = TagOpt.fromOption(val);
+ try {
+ val = rc.getString(SECTION, name, KEY_TAGOPT);
+ tagopt = TagOpt.fromOption(val);
+ } catch (IllegalArgumentException e) {
+ // C git silently ignores invalid tagopt values.
+ tagopt = TagOpt.AUTO_FOLLOW;
+ }
mirror = rc.getBoolean(SECTION, name, KEY_MIRROR, DEFAULT_MIRROR);
timeout = rc.getInt(SECTION, name, KEY_TIMEOUT, 0);
}
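A sketch of the pushInsteadOf behaviour implemented above (the remote name and URLs are made up): when no explicit pushurl is configured, only URLs actually rewritten by a pushInsteadOf rule end up in getPushURIs().

    import java.net.URISyntaxException;
    import org.eclipse.jgit.errors.ConfigInvalidException;
    import org.eclipse.jgit.lib.Config;
    import org.eclipse.jgit.transport.RemoteConfig;

    class PushInsteadOfDemo {
        static RemoteConfig load()
                throws ConfigInvalidException, URISyntaxException {
            Config cfg = new Config();
            cfg.fromText("[remote \"origin\"]\n"
                    + "\turl = https://example.org/repo.git\n"
                    + "[url \"ssh://git@example.org/\"]\n"
                    + "\tpushInsteadOf = https://example.org/\n");
            RemoteConfig rc = new RemoteConfig(cfg, "origin");
            // getURIs() still lists the https URL; getPushURIs() now contains
            // the rewritten ssh URL because no explicit pushurl is set.
            return rc;
        }
    }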
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteSession.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteSession.java
index 5a73cf5af4..d6a2fe6a80 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteSession.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteSession.java
@@ -83,4 +83,4 @@ public interface RemoteSession {
* Disconnect the remote session
*/
public void disconnect();
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/SignedPushConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/SignedPushConfig.java
index 83b4acaea2..1ecbed95c7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/SignedPushConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/SignedPushConfig.java
@@ -54,12 +54,7 @@ import org.eclipse.jgit.lib.Config.SectionParser;
public class SignedPushConfig {
/** Key for {@link Config#get(SectionParser)}. */
public static final SectionParser<SignedPushConfig> KEY =
- new SectionParser<SignedPushConfig>() {
- @Override
- public SignedPushConfig parse(Config cfg) {
- return new SignedPushConfig(cfg);
- }
- };
+ SignedPushConfig::new;
private String certNonceSeed;
private int certNonceSlopLimit;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshSessionFactory.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshSessionFactory.java
index a1aeceb2fd..2d5029a010 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshSessionFactory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshSessionFactory.java
@@ -66,7 +66,7 @@ public abstract class SshSessionFactory {
* <p>
* A factory is always available. By default the factory will read from the
* user's <code>$HOME/.ssh</code> and assume OpenSSH compatibility.
- *
+ *
* @return factory the current factory for this JVM.
*/
public static SshSessionFactory getInstance() {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshTransport.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshTransport.java
index 6f17ebf094..74865dc6dc 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshTransport.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/SshTransport.java
@@ -120,7 +120,7 @@ public abstract class SshTransport extends TcpTransport {
/**
* Get the default SSH session
- *
+ *
* @return a remote session
* @throws TransportException
* in case of error with opening SSH session
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java
index 2198b50f0d..099629c056 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java
@@ -69,15 +69,27 @@ public class TransferConfig {
private static final String FSCK = "fsck"; //$NON-NLS-1$
/** Key for {@link Config#get(SectionParser)}. */
- public static final Config.SectionParser<TransferConfig> KEY = new SectionParser<TransferConfig>() {
- @Override
- public TransferConfig parse(final Config cfg) {
- return new TransferConfig(cfg);
- }
- };
+ public static final Config.SectionParser<TransferConfig> KEY =
+ TransferConfig::new;
- enum FsckMode {
- ERROR, WARN, IGNORE;
+ /**
+ * A git configuration value for how to handle an fsck failure of a
+ * particular kind, e.g. fsck.missingEmail.
+ *
+ * @since 4.9
+ */
+ public enum FsckMode {
+ /**
+ * Treat it as an error (the default).
+ */
+ ERROR,
+ /**
+ * Issue a warning (in fact, jgit treats this like IGNORE, but git itself does warn).
+ */
+ WARN,
+ /**
+ * Ignore the error.
+ */
+ IGNORE;
}
private final boolean fetchFsck;
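With FsckMode now public, callers can set per-message fsck severities programmatically; the sketch below (the demo class is assumed, not part of the patch) is the programmatic equivalent of "git config fsck.missingEmail warn".

    import org.eclipse.jgit.lib.Config;
    import org.eclipse.jgit.transport.TransferConfig;

    class FsckModeDemo {
        static void relaxMissingEmail(Config cfg) {
            // Equivalent to "git config fsck.missingEmail warn".
            cfg.setEnum("fsck", null, "missingEmail",
                    TransferConfig.FsckMode.WARN);
        }
    }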
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportGitSsh.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportGitSsh.java
index 9a40f47cb7..b1b910ea22 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportGitSsh.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportGitSsh.java
@@ -242,14 +242,7 @@ public class TransportGitSsh extends SshTransport implements PackTransport {
args.add(getURI().getHost());
args.add(command);
- ProcessBuilder pb = new ProcessBuilder();
- pb.command(args);
-
- File directory = local.getDirectory();
- if (directory != null)
- pb.environment().put(Constants.GIT_DIR_KEY,
- directory.getPath());
-
+ ProcessBuilder pb = createProcess(args);
try {
return pb.start();
} catch (IOException err) {
@@ -257,6 +250,17 @@ public class TransportGitSsh extends SshTransport implements PackTransport {
}
}
+ private ProcessBuilder createProcess(List<String> args) {
+ ProcessBuilder pb = new ProcessBuilder();
+ pb.command(args);
+ File directory = local != null ? local.getDirectory() : null;
+ if (directory != null) {
+ pb.environment().put(Constants.GIT_DIR_KEY,
+ directory.getPath());
+ }
+ return pb;
+ }
+
@Override
public void disconnect() {
// Nothing to do
@@ -285,7 +289,7 @@ public class TransportGitSsh extends SshTransport implements PackTransport {
} catch (TransportException err) {
close();
throw err;
- } catch (IOException err) {
+ } catch (Throwable err) {
close();
throw new TransportException(uri,
JGitText.get().remoteHungUpUnexpectedly, err);
@@ -341,10 +345,18 @@ public class TransportGitSsh extends SshTransport implements PackTransport {
init(process.getInputStream(), process.getOutputStream());
} catch (TransportException err) {
- close();
+ try {
+ close();
+ } catch (Exception e) {
+ // ignore
+ }
throw err;
- } catch (IOException err) {
- close();
+ } catch (Throwable err) {
+ try {
+ close();
+ } catch (Exception e) {
+ // ignore
+ }
throw new TransportException(uri,
JGitText.get().remoteHungUpUnexpectedly, err);
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java
index 26a254d946..7c3f738d9c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java
@@ -2,6 +2,7 @@
* Copyright (C) 2008-2010, Google Inc.
* Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
* Copyright (C) 2013, Matthias Sohn <matthias.sohn@sap.com>
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -69,7 +70,11 @@ import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.Proxy;
import java.net.ProxySelector;
+import java.net.URISyntaxException;
import java.net.URL;
+import java.security.cert.CertPathBuilderException;
+import java.security.cert.CertPathValidatorException;
+import java.security.cert.CertificateException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
@@ -78,35 +83,44 @@ import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.LinkedHashSet;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
+import javax.net.ssl.SSLHandshakeException;
+
+import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.errors.NoRemoteRepositoryException;
import org.eclipse.jgit.errors.NotSupportedException;
import org.eclipse.jgit.errors.PackProtocolException;
import org.eclipse.jgit.errors.TransportException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.file.RefDirectory;
-import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdRef;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.lib.SymbolicRef;
+import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.transport.HttpAuthMethod.Type;
+import org.eclipse.jgit.transport.HttpConfig.HttpRedirectMode;
import org.eclipse.jgit.transport.http.HttpConnection;
+import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.HttpSupport;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
+import org.eclipse.jgit.util.SystemReader;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.io.DisabledOutputStream;
import org.eclipse.jgit.util.io.UnionInputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Transport over HTTP and FTP protocols.
@@ -127,6 +141,9 @@ import org.eclipse.jgit.util.io.UnionInputStream;
public class TransportHttp extends HttpTransport implements WalkTransport,
PackTransport {
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TransportHttp.class);
+
private static final String SVC_UPLOAD_PACK = "git-upload-pack"; //$NON-NLS-1$
private static final String SVC_RECEIVE_PACK = "git-receive-pack"; //$NON-NLS-1$
@@ -231,33 +248,18 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
}
};
- private static final Config.SectionParser<HttpConfig> HTTP_KEY = new SectionParser<HttpConfig>() {
- @Override
- public HttpConfig parse(final Config cfg) {
- return new HttpConfig(cfg);
- }
- };
-
- private static class HttpConfig {
- final int postBuffer;
-
- final boolean sslVerify;
-
- HttpConfig(final Config rc) {
- postBuffer = rc.getInt("http", "postbuffer", 1 * 1024 * 1024); //$NON-NLS-1$ //$NON-NLS-2$
- sslVerify = rc.getBoolean("http", "sslVerify", true); //$NON-NLS-1$ //$NON-NLS-2$
- }
-
- HttpConfig() {
- this(new Config());
- }
- }
+ /**
+ * The current URI we're talking to. The inherited (final) field
+ * {@link #uri} stores the original URI; {@code currentUri} may be different
+ * after redirects.
+ */
+ private URIish currentUri;
- final URL baseUrl;
+ private URL baseUrl;
- private final URL objectsUrl;
+ private URL objectsUrl;
- final HttpConfig http;
+ private final HttpConfig http;
private final ProxySelector proxySelector;
@@ -267,20 +269,40 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
private Map<String, String> headers;
+ private boolean sslVerify;
+
+ private boolean sslFailure = false;
+
TransportHttp(final Repository local, final URIish uri)
throws NotSupportedException {
super(local, uri);
+ setURI(uri);
+ http = new HttpConfig(local.getConfig(), uri);
+ proxySelector = ProxySelector.getDefault();
+ sslVerify = http.isSslVerify();
+ }
+
+ private URL toURL(URIish urish) throws MalformedURLException {
+ String uriString = urish.toString();
+ if (!uriString.endsWith("/")) { //$NON-NLS-1$
+ uriString += '/';
+ }
+ return new URL(uriString);
+ }
+
+ /**
+ * @param uri
+ * the URI to connect to.
+ * @throws NotSupportedException
+ * if the URI cannot be converted to a valid URL.
+ * @since 4.9
+ */
+ protected void setURI(final URIish uri) throws NotSupportedException {
try {
- String uriString = uri.toString();
- if (!uriString.endsWith("/")) //$NON-NLS-1$
- uriString += "/"; //$NON-NLS-1$
- baseUrl = new URL(uriString);
+ currentUri = uri;
+ baseUrl = toURL(uri);
objectsUrl = new URL(baseUrl, "objects/"); //$NON-NLS-1$
} catch (MalformedURLException e) {
throw new NotSupportedException(MessageFormat.format(JGitText.get().invalidURL, uri), e);
}
- http = local.getConfig().get(HTTP_KEY);
- proxySelector = ProxySelector.getDefault();
}
/**
@@ -291,17 +313,10 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
*/
TransportHttp(final URIish uri) throws NotSupportedException {
super(uri);
- try {
- String uriString = uri.toString();
- if (!uriString.endsWith("/")) //$NON-NLS-1$
- uriString += "/"; //$NON-NLS-1$
- baseUrl = new URL(uriString);
- objectsUrl = new URL(baseUrl, "objects/"); //$NON-NLS-1$
- } catch (MalformedURLException e) {
- throw new NotSupportedException(MessageFormat.format(JGitText.get().invalidURL, uri), e);
- }
- http = new HttpConfig();
+ setURI(uri);
+ http = new HttpConfig(uri);
proxySelector = ProxySelector.getDefault();
+ sslVerify = http.isSslVerify();
}
/**
@@ -469,28 +484,9 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
private HttpConnection connect(final String service)
throws TransportException, NotSupportedException {
- final URL u;
- try {
- final StringBuilder b = new StringBuilder();
- b.append(baseUrl);
-
- if (b.charAt(b.length() - 1) != '/')
- b.append('/');
- b.append(Constants.INFO_REFS);
-
- if (useSmartHttp) {
- b.append(b.indexOf("?") < 0 ? '?' : '&'); //$NON-NLS-1$
- b.append("service="); //$NON-NLS-1$
- b.append(service);
- }
-
- u = new URL(b.toString());
- } catch (MalformedURLException e) {
- throw new NotSupportedException(MessageFormat.format(JGitText.get().invalidURL, uri), e);
- }
-
-
+ URL u = getServiceURL(service);
int authAttempts = 1;
+ int redirects = 0;
Collection<Type> ignoreTypes = null;
for (;;) {
try {
@@ -527,9 +523,10 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
throw new TransportException(uri,
JGitText.get().noCredentialsProvider);
if (authAttempts > 1)
- credentialsProvider.reset(uri);
+ credentialsProvider.reset(currentUri);
if (3 < authAttempts
- || !authMethod.authorize(uri, credentialsProvider)) {
+ || !authMethod.authorize(currentUri,
+ credentialsProvider)) {
throw new TransportException(uri,
JGitText.get().notAuthorized);
}
@@ -538,8 +535,28 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
case HttpConnection.HTTP_FORBIDDEN:
throw new TransportException(uri, MessageFormat.format(
- JGitText.get().serviceNotPermitted, service));
-
+ JGitText.get().serviceNotPermitted, baseUrl,
+ service));
+
+ case HttpConnection.HTTP_MOVED_PERM:
+ case HttpConnection.HTTP_MOVED_TEMP:
+ case HttpConnection.HTTP_SEE_OTHER:
+ case HttpConnection.HTTP_11_MOVED_TEMP:
+ // SEE_OTHER should actually never be sent by a git server,
+ // and in general should occur only on POST requests. But it
+ // doesn't hurt to accept it here as a redirect.
+ if (http.getFollowRedirects() == HttpRedirectMode.FALSE) {
+ throw new TransportException(uri,
+ MessageFormat.format(
+ JGitText.get().redirectsOff,
+ Integer.valueOf(status)));
+ }
+ URIish newUri = redirect(conn.getHeaderField(HDR_LOCATION),
+ Constants.INFO_REFS, redirects++);
+ setURI(newUri);
+ u = getServiceURL(service);
+ authAttempts = 1;
+ break;
default:
String err = status + " " + conn.getResponseMessage(); //$NON-NLS-1$
throw new TransportException(uri, err);
@@ -548,6 +565,9 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
throw e;
} catch (TransportException e) {
throw e;
+ } catch (SSLHandshakeException e) {
+ handleSslFailure(e);
+ continue; // Re-try
} catch (IOException e) {
if (authMethod.getType() != HttpAuthMethod.Type.NONE) {
if (ignoreTypes == null) {
@@ -568,6 +588,215 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
}
}
+ private static class CredentialItems {
+ CredentialItem.InformationalMessage message;
+
+ /** Trust the server for this git operation */
+ CredentialItem.YesNoType now;
+
+ /**
+ * Trust the server for all git operations from this repository; may be
+ * {@code null} if the transport was created via
+ * {@link #TransportHttp(URIish)}.
+ */
+ CredentialItem.YesNoType forRepo;
+
+ /** Always trust the server from now on. */
+ CredentialItem.YesNoType always;
+
+ public CredentialItem[] items() {
+ if (forRepo == null) {
+ return new CredentialItem[] { message, now, always };
+ } else {
+ return new CredentialItem[] { message, now, forRepo, always };
+ }
+ }
+ }
+
+ private void handleSslFailure(Throwable e) throws TransportException {
+ if (sslFailure || !trustInsecureSslConnection(e.getCause())) {
+ throw new TransportException(uri,
+ MessageFormat.format(
+ JGitText.get().sslFailureExceptionMessage,
+ currentUri.setPass(null)),
+ e);
+ }
+ sslFailure = true;
+ }
+
+ private boolean trustInsecureSslConnection(Throwable cause) {
+ if (cause instanceof CertificateException
+ || cause instanceof CertPathBuilderException
+ || cause instanceof CertPathValidatorException) {
+ // Certificate expired or revoked, PKIX path building not
+ // possible, self-signed certificate, host does not match ...
+ CredentialsProvider provider = getCredentialsProvider();
+ if (provider != null) {
+ CredentialItems trust = constructSslTrustItems(cause);
+ CredentialItem[] items = trust.items();
+ if (provider.supports(items)) {
+ boolean answered = provider.get(uri, items);
+ if (answered) {
+ // Not canceled
+ boolean trustNow = trust.now.getValue();
+ boolean trustLocal = trust.forRepo != null
+ && trust.forRepo.getValue();
+ boolean trustAlways = trust.always.getValue();
+ if (trustNow || trustLocal || trustAlways) {
+ sslVerify = false;
+ if (trustAlways) {
+ updateSslVerifyUser(false);
+ } else if (trustLocal) {
+ updateSslVerify(local.getConfig(), false);
+ }
+ return true;
+ }
+ }
+ }
+ }
+ }
+ return false;
+ }
+
+ private CredentialItems constructSslTrustItems(Throwable cause) {
+ CredentialItems items = new CredentialItems();
+ String info = MessageFormat.format(JGitText.get().sslFailureInfo,
+ currentUri.setPass(null));
+ String sslMessage = cause.getLocalizedMessage();
+ if (sslMessage == null) {
+ sslMessage = cause.toString();
+ }
+ sslMessage = MessageFormat.format(JGitText.get().sslFailureCause,
+ sslMessage);
+ items.message = new CredentialItem.InformationalMessage(info + '\n'
+ + sslMessage + '\n'
+ + JGitText.get().sslFailureTrustExplanation);
+ items.now = new CredentialItem.YesNoType(JGitText.get().sslTrustNow);
+ if (local != null) {
+ items.forRepo = new CredentialItem.YesNoType(
+ MessageFormat.format(JGitText.get().sslTrustForRepo,
+ local.getDirectory()));
+ }
+ items.always = new CredentialItem.YesNoType(
+ JGitText.get().sslTrustAlways);
+ return items;
+ }
+
+ private void updateSslVerify(StoredConfig config, boolean value) {
+ // Since git uses the original URI for matching, we must also use the
+ // original URI and cannot use the current URI (which might be different
+ // after redirects).
+ String uriPattern = uri.getScheme() + "://" + uri.getHost(); //$NON-NLS-1$
+ int port = uri.getPort();
+ if (port > 0) {
+ uriPattern += ":" + port; //$NON-NLS-1$
+ }
+ config.setBoolean(HttpConfig.HTTP, uriPattern,
+ HttpConfig.SSL_VERIFY_KEY, value);
+ try {
+ config.save();
+ } catch (IOException e) {
+ LOG.error(JGitText.get().sslVerifyCannotSave, e);
+ }
+ }
+
+ private void updateSslVerifyUser(boolean value) {
+ FileBasedConfig userConfig = SystemReader.getInstance()
+ .openUserConfig(null, FS.DETECTED);
+ try {
+ userConfig.load();
+ updateSslVerify(userConfig, value);
+ } catch (IOException | ConfigInvalidException e) {
+ // Log it, but otherwise ignore here.
+ LOG.error(MessageFormat.format(JGitText.get().userConfigFileInvalid,
+ userConfig.getFile().getAbsolutePath(), e));
+ }
+ }
+
+ private URIish redirect(String location, String checkFor, int redirects)
+ throws TransportException {
+ if (location == null || location.isEmpty()) {
+ throw new TransportException(uri,
+ MessageFormat.format(JGitText.get().redirectLocationMissing,
+ baseUrl));
+ }
+ if (redirects >= http.getMaxRedirects()) {
+ throw new TransportException(uri,
+ MessageFormat.format(JGitText.get().redirectLimitExceeded,
+ Integer.valueOf(http.getMaxRedirects()), baseUrl,
+ location));
+ }
+ try {
+ if (!isValidRedirect(baseUrl, location, checkFor)) {
+ throw new TransportException(uri,
+ MessageFormat.format(JGitText.get().redirectBlocked,
+ baseUrl, location));
+ }
+ location = location.substring(0, location.indexOf(checkFor));
+ URIish result = new URIish(location);
+ if (LOG.isInfoEnabled()) {
+ LOG.info(MessageFormat.format(JGitText.get().redirectHttp,
+ uri.setPass(null),
+ Integer.valueOf(redirects), baseUrl, result));
+ }
+ return result;
+ } catch (URISyntaxException e) {
+ throw new TransportException(uri,
+ MessageFormat.format(JGitText.get().invalidRedirectLocation,
+ baseUrl, location),
+ e);
+ }
+ }
+
+ private boolean isValidRedirect(URL current, String next, String checkFor) {
+ // Protocols must be the same, or current is "http" and next "https". We
+ // do not follow redirects from https back to http.
+ String oldProtocol = current.getProtocol().toLowerCase(Locale.ROOT);
+ int schemeEnd = next.indexOf("://"); //$NON-NLS-1$
+ if (schemeEnd < 0) {
+ return false;
+ }
+ String newProtocol = next.substring(0, schemeEnd)
+ .toLowerCase(Locale.ROOT);
+ if (!oldProtocol.equals(newProtocol)) {
+ if (!"https".equals(newProtocol)) { //$NON-NLS-1$
+ return false;
+ }
+ }
+ // git allows only rewriting the root, i.e., everything before INFO_REFS
+ // or the service name
+ if (next.indexOf(checkFor) < 0) {
+ return false;
+ }
+ // Basically we should test here that whatever follows INFO_REFS is
+ // unchanged. But since we re-construct the query part
+ // anyway, it doesn't matter.
+ return true;
+ }
+
+ private URL getServiceURL(final String service)
+ throws NotSupportedException {
+ try {
+ final StringBuilder b = new StringBuilder();
+ b.append(baseUrl);
+
+ if (b.charAt(b.length() - 1) != '/') {
+ b.append('/');
+ }
+ b.append(Constants.INFO_REFS);
+
+ if (useSmartHttp) {
+ b.append(b.indexOf("?") < 0 ? '?' : '&'); //$NON-NLS-1$
+ b.append("service="); //$NON-NLS-1$
+ b.append(service);
+ }
+
+ return new URL(b.toString());
+ } catch (MalformedURLException e) {
+ throw new NotSupportedException(MessageFormat.format(JGitText.get().invalidURL, uri), e);
+ }
+ }
+
/**
* Open an HTTP connection, setting the accept-encoding request header to gzip.
*
@@ -602,10 +831,14 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
final Proxy proxy = HttpSupport.proxyFor(proxySelector, u);
HttpConnection conn = connectionFactory.create(u, proxy);
- if (!http.sslVerify && "https".equals(u.getProtocol())) { //$NON-NLS-1$
+ if (!sslVerify && "https".equals(u.getProtocol())) { //$NON-NLS-1$
HttpSupport.disableSslVerify(conn);
}
+ // We must do our own redirect handling to implement git rules and to
+ // handle http->https redirects
+ conn.setInstanceFollowRedirects(false);
+
conn.setRequestMethod(method);
conn.setUseCaches(false);
if (acceptEncoding == AcceptEncoding.GZIP) {
@@ -914,13 +1147,7 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
}
void openStream() throws IOException {
- openStream(null);
- }
-
- void openStream(final String redirectUrl) throws IOException {
- conn = httpOpen(
- METHOD_POST,
- redirectUrl == null ? new URL(baseUrl, serviceName) : new URL(redirectUrl),
+ conn = httpOpen(METHOD_POST, new URL(baseUrl, serviceName),
AcceptEncoding.GZIP);
conn.setInstanceFollowRedirects(false);
conn.setDoOutput(true);
@@ -929,12 +1156,9 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
}
void sendRequest() throws IOException {
- sendRequest(null);
- }
-
- void sendRequest(final String redirectUrl) throws IOException {
// Try to compress the content, but only if that is smaller.
- TemporaryBuffer buf = new TemporaryBuffer.Heap(http.postBuffer);
+ TemporaryBuffer buf = new TemporaryBuffer.Heap(
+ http.getPostBuffer());
try {
GZIPOutputStream gzip = new GZIPOutputStream(buf);
out.writeTo(gzip, null);
@@ -947,21 +1171,141 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
buf = out;
}
- openStream(redirectUrl);
- if (buf != out)
- conn.setRequestProperty(HDR_CONTENT_ENCODING, ENCODING_GZIP);
- conn.setFixedLengthStreamingMode((int) buf.length());
- final OutputStream httpOut = conn.getOutputStream();
- try {
- buf.writeTo(httpOut, null);
- } finally {
- httpOut.close();
- }
+ HttpAuthMethod authenticator = null;
+ Collection<Type> ignoreTypes = EnumSet.noneOf(Type.class);
+ // Counts number of repeated authentication attempts using the same
+ // authentication scheme
+ int authAttempts = 1;
+ int redirects = 0;
+ for (;;) {
+ try {
+ // The very first time we will try with the authentication
+ // method used on the initial GET request. This is a hint
+ // only; it may fail. If so, we'll then re-try with proper
+ // 401 handling, going through the available authentication
+ // schemes.
+ openStream();
+ if (buf != out) {
+ conn.setRequestProperty(HDR_CONTENT_ENCODING,
+ ENCODING_GZIP);
+ }
+ conn.setFixedLengthStreamingMode((int) buf.length());
+ try (OutputStream httpOut = conn.getOutputStream()) {
+ buf.writeTo(httpOut, null);
+ }
- final int status = HttpSupport.response(conn);
- if (status == HttpConnection.HTTP_MOVED_PERM) {
- String locationHeader = HttpSupport.responseHeader(conn, HDR_LOCATION);
- sendRequest(locationHeader);
+ final int status = HttpSupport.response(conn);
+ switch (status) {
+ case HttpConnection.HTTP_OK:
+ // We're done.
+ return;
+
+ case HttpConnection.HTTP_NOT_FOUND:
+ throw new NoRemoteRepositoryException(uri,
+ MessageFormat.format(JGitText.get().uriNotFound,
+ conn.getURL()));
+
+ case HttpConnection.HTTP_FORBIDDEN:
+ throw new TransportException(uri,
+ MessageFormat.format(
+ JGitText.get().serviceNotPermitted,
+ baseUrl, serviceName));
+
+ case HttpConnection.HTTP_MOVED_PERM:
+ case HttpConnection.HTTP_MOVED_TEMP:
+ case HttpConnection.HTTP_11_MOVED_TEMP:
+ // SEE_OTHER after a POST doesn't make sense for a git
+ // server, so we don't handle it here and thus we'll
+ // report an error in openResponse() later on.
+ if (http.getFollowRedirects() != HttpRedirectMode.TRUE) {
+ // Let openResponse() issue an error
+ return;
+ }
+ currentUri = redirect(conn.getHeaderField(HDR_LOCATION),
+ '/' + serviceName, redirects++);
+ try {
+ baseUrl = toURL(currentUri);
+ } catch (MalformedURLException e) {
+ throw new TransportException(uri,
+ MessageFormat.format(
+ JGitText.get().invalidRedirectLocation,
+ baseUrl, currentUri),
+ e);
+ }
+ continue;
+
+ case HttpConnection.HTTP_UNAUTHORIZED:
+ HttpAuthMethod nextMethod = HttpAuthMethod
+ .scanResponse(conn, ignoreTypes);
+ switch (nextMethod.getType()) {
+ case NONE:
+ throw new TransportException(uri,
+ MessageFormat.format(
+ JGitText.get().authenticationNotSupported,
+ conn.getURL()));
+ case NEGOTIATE:
+ // RFC 4559 states "When using the SPNEGO [...] with
+ // [...] POST, the authentication should be complete
+ // [...] before sending the user data." So in theory
+ // the initial GET should have been authenticated
+ // already. (Unless there was a redirect?)
+ //
+ // We try this only once:
+ ignoreTypes.add(HttpAuthMethod.Type.NEGOTIATE);
+ if (authenticator != null) {
+ ignoreTypes.add(authenticator.getType());
+ }
+ authAttempts = 1;
+ // We only do the Kerberos part of SPNEGO, which
+ // requires only one attempt. We do *not* do the
+ // NTLM part of SPNEGO; it's a multi-round
+ // negotiation and among other problems it would
+ // be unclear when to stop if no HTTP_OK is
+ // forthcoming. In theory a malicious server
+ // could keep sending requests for another NTLM
+ // round, keeping a client stuck here.
+ break;
+ default:
+ // DIGEST or BASIC. Let's be sure we ignore
+ // NEGOTIATE; if it was available, we have tried it
+ // before.
+ ignoreTypes.add(HttpAuthMethod.Type.NEGOTIATE);
+ if (authenticator == null || authenticator
+ .getType() != nextMethod.getType()) {
+ if (authenticator != null) {
+ ignoreTypes.add(authenticator.getType());
+ }
+ authAttempts = 1;
+ }
+ break;
+ }
+ authMethod = nextMethod;
+ authenticator = nextMethod;
+ CredentialsProvider credentialsProvider = getCredentialsProvider();
+ if (credentialsProvider == null) {
+ throw new TransportException(uri,
+ JGitText.get().noCredentialsProvider);
+ }
+ if (authAttempts > 1) {
+ credentialsProvider.reset(currentUri);
+ }
+ if (3 < authAttempts || !authMethod
+ .authorize(currentUri, credentialsProvider)) {
+ throw new TransportException(uri,
+ JGitText.get().notAuthorized);
+ }
+ authAttempts++;
+ continue;
+
+ default:
+ // Just return here; openResponse() will report an
+ // appropriate error.
+ return;
+ }
+ } catch (SSLHandshakeException e) {
+ handleSslFailure(e);
+ continue; // Re-try
+ }
}
}
@@ -1011,7 +1355,7 @@ public class TransportHttp extends HttpTransport implements WalkTransport,
class HttpOutputStream extends TemporaryBuffer {
HttpOutputStream() {
- super(http.postBuffer);
+ super(http.getPostBuffer());
}
@Override
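The SSL failure handling above asks the configured CredentialsProvider whether to proceed over an untrusted connection. A rough sketch of a non-interactive provider that answers only the "trust now" prompt (the class name and answer policy are assumptions; a real provider would ask the user):

    import org.eclipse.jgit.transport.CredentialItem;
    import org.eclipse.jgit.transport.CredentialsProvider;
    import org.eclipse.jgit.transport.URIish;

    class TrustOnceProvider extends CredentialsProvider {
        @Override
        public boolean isInteractive() {
            return false;
        }

        @Override
        public boolean supports(CredentialItem... items) {
            for (CredentialItem i : items) {
                if (!(i instanceof CredentialItem.InformationalMessage)
                        && !(i instanceof CredentialItem.YesNoType)) {
                    return false;
                }
            }
            return true;
        }

        @Override
        public boolean get(URIish uri, CredentialItem... items) {
            boolean first = true;
            for (CredentialItem i : items) {
                if (i instanceof CredentialItem.YesNoType) {
                    // constructSslTrustItems() hands out "trust now" as the
                    // first yes/no item; leave "for repo" and "always" false.
                    ((CredentialItem.YesNoType) i).setValue(first);
                    first = false;
                }
            }
            return true;
        }
    }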
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java
index 17af0b9838..cf070c6348 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java
@@ -71,7 +71,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
-
import org.eclipse.jgit.errors.CorruptObjectException;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
@@ -719,7 +718,7 @@ public class UploadPack {
}
private void service() throws IOException {
- boolean sendPack;
+ boolean sendPack = false;
// If it's a non-bidi request, we need to read the entire request before
// writing a response. Buffer the response until then.
try {
@@ -752,6 +751,17 @@ public class UploadPack {
if (!clientShallowCommits.isEmpty())
walk.assumeShallow(clientShallowCommits);
sendPack = negotiate();
+ if (sendPack && !biDirectionalPipe) {
+ // Ensure the request was fully consumed. Any remaining input must
+ // be a protocol error. If we aren't at EOF the implementation is broken.
+ int eof = rawIn.read();
+ if (0 <= eof) {
+ sendPack = false;
+ throw new CorruptObjectException(MessageFormat.format(
+ JGitText.get().expectedEOFReceived,
+ "\\x" + Integer.toHexString(eof))); //$NON-NLS-1$
+ }
+ }
} catch (ServiceMayNotContinueException err) {
if (!err.isOutput() && err.getMessage() != null) {
try {
@@ -778,6 +788,11 @@ public class UploadPack {
}
throw err;
} finally {
+ if (!sendPack && !biDirectionalPipe) {
+ while (0 < rawIn.skip(2048) || 0 <= rawIn.read()) {
+ // Discard until EOF.
+ }
+ }
rawOut.stopBuffering();
}
@@ -1245,7 +1260,7 @@ public class UploadPack {
@Override
public void checkWants(UploadPack up, List<ObjectId> wants)
throws PackProtocolException, IOException {
- checkNotAdvertisedWants(up.getRevWalk(), wants,
+ checkNotAdvertisedWants(up, wants,
refIdSet(up.getAdvertisedRefs().values()));
}
}
@@ -1282,7 +1297,7 @@ public class UploadPack {
@Override
public void checkWants(UploadPack up, List<ObjectId> wants)
throws PackProtocolException, IOException {
- checkNotAdvertisedWants(up.getRevWalk(), wants,
+ checkNotAdvertisedWants(up, wants,
refIdSet(up.getRepository().getRefDatabase().getRefs(ALL).values()));
}
}
@@ -1300,7 +1315,7 @@ public class UploadPack {
}
}
- private static void checkNotAdvertisedWants(RevWalk walk,
+ private static void checkNotAdvertisedWants(UploadPack up,
List<ObjectId> notAdvertisedWants, Set<ObjectId> reachableFrom)
throws MissingObjectException, IncorrectObjectTypeException, IOException {
// Walk the requested commits back to the provided set of commits. If any
@@ -1309,32 +1324,34 @@ public class UploadPack {
// into an advertised branch it will be marked UNINTERESTING and no commits
// return.
- AsyncRevObjectQueue q = walk.parseAny(notAdvertisedWants, true);
- try {
- RevObject obj;
- while ((obj = q.next()) != null) {
- if (!(obj instanceof RevCommit))
- throw new WantNotValidException(obj);
- walk.markStart((RevCommit) obj);
- }
- } catch (MissingObjectException notFound) {
- throw new WantNotValidException(notFound.getObjectId(), notFound);
- } finally {
- q.release();
- }
- for (ObjectId id : reachableFrom) {
+ try (RevWalk walk = new RevWalk(up.getRevWalk().getObjectReader())) {
+ AsyncRevObjectQueue q = walk.parseAny(notAdvertisedWants, true);
try {
- walk.markUninteresting(walk.parseCommit(id));
- } catch (IncorrectObjectTypeException notCommit) {
- continue;
+ RevObject obj;
+ while ((obj = q.next()) != null) {
+ if (!(obj instanceof RevCommit))
+ throw new WantNotValidException(obj);
+ walk.markStart((RevCommit) obj);
+ }
+ } catch (MissingObjectException notFound) {
+ throw new WantNotValidException(notFound.getObjectId(),
+ notFound);
+ } finally {
+ q.release();
+ }
+ for (ObjectId id : reachableFrom) {
+ try {
+ walk.markUninteresting(walk.parseCommit(id));
+ } catch (IncorrectObjectTypeException notCommit) {
+ continue;
+ }
}
- }
- RevCommit bad = walk.next();
- if (bad != null) {
- throw new WantNotValidException(bad);
+ RevCommit bad = walk.next();
+ if (bad != null) {
+ throw new WantNotValidException(bad);
+ }
}
- walk.reset();
}
private void addCommonBase(final RevObject o) {
@@ -1390,17 +1407,6 @@ public class UploadPack {
private void sendPack() throws IOException {
final boolean sideband = options.contains(OPTION_SIDE_BAND)
|| options.contains(OPTION_SIDE_BAND_64K);
-
- if (!biDirectionalPipe) {
- // Ensure the request was fully consumed. Any remaining input must
- // be a protocol error. If we aren't at EOF the implementation is broken.
- int eof = rawIn.read();
- if (0 <= eof)
- throw new CorruptObjectException(MessageFormat.format(
- JGitText.get().expectedEOFReceived,
- "\\x" + Integer.toHexString(eof))); //$NON-NLS-1$
- }
-
if (sideband) {
try {
sendPack(true);
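For context, a minimal serving sketch that exercises the non-bidirectional path and the "not advertised wants" checks rewritten above (the repository and streams are placeholders supplied by the caller):

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import org.eclipse.jgit.lib.Repository;
    import org.eclipse.jgit.transport.UploadPack;

    class UploadPackDemo {
        static void serve(Repository repo, InputStream in, OutputStream out)
                throws IOException {
            UploadPack up = new UploadPack(repo);
            // TIP / REACHABLE_COMMIT_TIP requests go through the
            // checkNotAdvertisedWants() path shown in this patch.
            up.setRequestPolicy(UploadPack.RequestPolicy.REACHABLE_COMMIT_TIP);
            // Non-bidirectional: the request must be fully consumed before
            // the response is written, as enforced in service() above.
            up.setBiDirectionalPipe(false);
            up.upload(in, out, null);
        }
    }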
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/HttpConnection.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/HttpConnection.java
index 58081c1c90..35a1ee15ed 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/HttpConnection.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/HttpConnection.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2013 Christian Halstrick <christian.halstrick@sap.com>
+ * Copyright (C) 2013, 2017 Christian Halstrick <christian.halstrick@sap.com>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -79,6 +79,26 @@ public interface HttpConnection {
public static final int HTTP_MOVED_PERM = java.net.HttpURLConnection.HTTP_MOVED_PERM;
/**
+ * @see HttpURLConnection#HTTP_MOVED_TEMP
+ * @since 4.9
+ */
+ public static final int HTTP_MOVED_TEMP = java.net.HttpURLConnection.HTTP_MOVED_TEMP;
+
+ /**
+ * @see HttpURLConnection#HTTP_SEE_OTHER
+ * @since 4.9
+ */
+ public static final int HTTP_SEE_OTHER = java.net.HttpURLConnection.HTTP_SEE_OTHER;
+
+ /**
+ * HTTP 1.1 additional MOVED_TEMP status code; value = 307.
+ *
+ * @see #HTTP_MOVED_TEMP
+ * @since 4.9
+ */
+ public static final int HTTP_11_MOVED_TEMP = 307;
+
+ /**
* @see HttpURLConnection#HTTP_NOT_FOUND
*/
public static final int HTTP_NOT_FOUND = java.net.HttpURLConnection.HTTP_NOT_FOUND;
@@ -253,7 +273,7 @@ public interface HttpConnection {
/**
* Configure the connection so that it can be used for https communication.
- *
+ *
* @param km
* the keymanager managing the key material used to authenticate
* the local SSLSocket to its peer
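A small sketch of how the new constants can be grouped when classifying responses (the helper class is assumed); HTTP_11_MOVED_TEMP covers status 307, for which java.net.HttpURLConnection provides no constant:

    import org.eclipse.jgit.transport.http.HttpConnection;

    class RedirectStatus {
        static boolean isRedirect(int status) {
            return status == HttpConnection.HTTP_MOVED_PERM
                    || status == HttpConnection.HTTP_MOVED_TEMP
                    || status == HttpConnection.HTTP_SEE_OTHER
                    || status == HttpConnection.HTTP_11_MOVED_TEMP;
        }
    }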
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/resolver/FileResolver.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/resolver/FileResolver.java
index 7654d462eb..8ab112e5f9 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/resolver/FileResolver.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/resolver/FileResolver.java
@@ -244,11 +244,11 @@ public class FileResolver<C> implements RepositoryResolver<C> {
return true; // no absolute paths
if (name.startsWith("../")) //$NON-NLS-1$
- return true; // no "l../etc/passwd"
+ return true; // no "../etc/passwd"
if (name.contains("/../")) //$NON-NLS-1$
- return true; // no "foo/../etc/passwd"
+ return true; // no "foo/../etc/passwd"
if (name.contains("/./")) //$NON-NLS-1$
- return true; // "foo/./foo" is insane to ask
+ return true; // "foo/./foo" is insane to ask
if (name.contains("//")) //$NON-NLS-1$
return true; // double slashes is sloppy, don't use it
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeOptions.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeOptions.java
index 7d2b33f43d..2b18904a5f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeOptions.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeOptions.java
@@ -55,12 +55,8 @@ import org.eclipse.jgit.lib.CoreConfig.SymLinks;
/** Options used by the {@link WorkingTreeIterator}. */
public class WorkingTreeOptions {
/** Key for {@link Config#get(SectionParser)}. */
- public static final Config.SectionParser<WorkingTreeOptions> KEY = new SectionParser<WorkingTreeOptions>() {
- @Override
- public WorkingTreeOptions parse(final Config cfg) {
- return new WorkingTreeOptions(cfg);
- }
- };
+ public static final Config.SectionParser<WorkingTreeOptions> KEY =
+ WorkingTreeOptions::new;
private final boolean fileMode;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/InterIndexDiffFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/InterIndexDiffFilter.java
index 17194162c3..2ea8228b9a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/InterIndexDiffFilter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/InterIndexDiffFilter.java
@@ -102,4 +102,4 @@ public final class InterIndexDiffFilter extends TreeFilter {
public String toString() {
return "INTERINDEX_DIFF"; //$NON-NLS-1$
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java
index ed5b87d5ca..1f4215eed0 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java
@@ -568,6 +568,10 @@ public abstract class FS {
}
private static class GobblerThread extends Thread {
+
+ /* The process has 5 seconds to exit after closing stderr */
+ private static final int PROCESS_EXIT_TIMEOUT = 5;
+
private final Process p;
private final String desc;
private final String dir;
@@ -590,15 +594,16 @@ public abstract class FS {
err.append((char) ch);
}
} catch (IOException e) {
- if (p.exitValue() != 0) {
- setError(e, e.getMessage());
+ if (waitForProcessCompletion(e) && p.exitValue() != 0) {
+ setError(e, e.getMessage(), p.exitValue());
fail.set(true);
} else {
// ignore. command terminated faster and stream was just closed
+ // or the process didn't terminate within timeout
}
} finally {
- if (err.length() > 0) {
- setError(null, err.toString());
+ if (waitForProcessCompletion(null) && err.length() > 0) {
+ setError(null, err.toString(), p.exitValue());
if (p.exitValue() != 0) {
fail.set(true);
}
@@ -606,11 +611,27 @@ public abstract class FS {
}
}
- private void setError(IOException e, String message) {
+ @SuppressWarnings("boxing")
+ private boolean waitForProcessCompletion(IOException originalError) {
+ try {
+ if (!p.waitFor(PROCESS_EXIT_TIMEOUT, TimeUnit.SECONDS)) {
+ setError(originalError, MessageFormat.format(
+ JGitText.get().commandClosedStderrButDidntExit,
+ desc, PROCESS_EXIT_TIMEOUT), -1);
+ fail.set(true);
+ }
+ } catch (InterruptedException e) {
+ LOG.error(MessageFormat.format(
+ JGitText.get().threadInterruptedWhileRunning, desc), e);
+ }
+ return false;
+ }
+
+ private void setError(IOException e, String message, int exitCode) {
exception.set(e);
errorMessage.set(MessageFormat.format(
- JGitText.get().exceptionCaughtDuringExcecutionOfCommand,
- desc, dir, Integer.valueOf(p.exitValue()), message));
+ JGitText.get().exceptionCaughtDuringExecutionOfCommand,
+ desc, dir, Integer.valueOf(exitCode), message));
}
}
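The new waitForProcessCompletion() is built on Process.waitFor(long, TimeUnit), a JDK method (Java 8 and later) that blocks until the child exits or the timeout elapses and returns whether it exited in time. A minimal sketch of that pattern outside JGit; the shell command and the 5-second timeout below are illustrative only:

import java.util.concurrent.TimeUnit;

public class WaitForExit {
    public static void main(String[] args) throws Exception {
        Process p = new ProcessBuilder("sh", "-c", "exit 3").start();
        if (p.waitFor(5, TimeUnit.SECONDS)) {
            // exitValue() is only safe to call once the process has terminated.
            System.out.println("exit code: " + p.exitValue());
        } else {
            System.out.println("process did not exit within the timeout");
        }
    }
}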
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS_POSIX.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS_POSIX.java
index 607e078604..d2200309c0 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS_POSIX.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS_POSIX.java
@@ -229,7 +229,7 @@ public class FS_POSIX extends FS {
if (!isFile(f))
return false;
if (!canExecute)
- return f.setExecutable(false);
+ return f.setExecutable(false, false);
try {
Path path = f.toPath();
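The one-argument java.io.File.setExecutable(false) is equivalent to setExecutable(false, true) and therefore only clears the owner's execute bit; passing ownerOnly=false, as the hunk above now does, clears the bit for owner, group and others. A brief illustration with a temporary file:

import java.io.File;
import java.io.IOException;

public class ExecBitDemo {
    public static void main(String[] args) throws IOException {
        File f = File.createTempFile("execbit", ".sh");
        f.setExecutable(true, false);  // +x for owner, group and others
        f.setExecutable(false, false); // -x for everybody; setExecutable(false) would only affect the owner
        System.out.println("canExecute: " + f.canExecute());
        f.delete();
    }
}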
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java
index 658dd06d46..0a3c846a0e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java
@@ -71,6 +71,21 @@ public class IntList {
}
/**
+ * Check if an entry appears in this collection.
+ *
+ * @param value
+ * the value to search for.
+ * @return true if {@code value} appears in this list.
+ * @since 4.9
+ */
+ public boolean contains(int value) {
+ for (int i = 0; i < count; i++)
+ if (entries[i] == value)
+ return true;
+ return false;
+ }
+
+ /**
* @param i
* index to read, must be in the range [0, {@link #size()}).
* @return the number at the specified index
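Basic usage of the contains(int) method added to org.eclipse.jgit.util.IntList above (available since 4.9):

import org.eclipse.jgit.util.IntList;

public class IntListDemo {
    public static void main(String[] args) {
        IntList list = new IntList();
        list.add(2);
        list.add(5);
        System.out.println(list.contains(5)); // true
        System.out.println(list.contains(7)); // false
    }
}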
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/LongMap.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/LongMap.java
index 4d60202a6f..7b0b0c728a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/LongMap.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/LongMap.java
@@ -41,15 +41,16 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-package org.eclipse.jgit.transport;
+package org.eclipse.jgit.util;
/**
- * Simple Map<long,Object> helper for {@link PackParser}.
+ * Simple Map<long,Object>.
*
* @param <V>
* type of the value instance.
+ * @since 4.9
*/
-final class LongMap<V> {
+public class LongMap<V> {
private static final float LOAD_FACTOR = 0.75f;
private Node<V>[] table;
@@ -60,16 +61,27 @@ final class LongMap<V> {
/** Next {@link #size} to trigger a {@link #grow()}. */
private int growAt;
- LongMap() {
+ /** Initialize an empty LongMap. */
+ public LongMap() {
table = createArray(64);
growAt = (int) (table.length * LOAD_FACTOR);
}
- boolean containsKey(final long key) {
+ /**
+ * @param key
+ * the key to find.
+ * @return {@code true} if {@code key} is present in the map.
+ */
+ public boolean containsKey(long key) {
return get(key) != null;
}
- V get(final long key) {
+ /**
+ * @param key
+ * the key to find.
+ * @return stored value of the key, or {@code null}.
+ */
+ public V get(long key) {
for (Node<V> n = table[index(key)]; n != null; n = n.next) {
if (n.key == key)
return n.value;
@@ -77,7 +89,12 @@ final class LongMap<V> {
return null;
}
- V remove(final long key) {
+ /**
+ * @param key
+ * key to remove from the map.
+ * @return old value of the key, or {@code null}.
+ */
+ public V remove(long key) {
Node<V> n = table[index(key)];
Node<V> prior = null;
while (n != null) {
@@ -95,7 +112,14 @@ final class LongMap<V> {
return null;
}
- V put(final long key, final V value) {
+ /**
+ * @param key
+ * key to store {@code value} under.
+ * @param value
+ * new value.
+ * @return prior value, or null.
+ */
+ public V put(long key, V value) {
for (Node<V> n = table[index(key)]; n != null; n = n.next) {
if (n.key == key) {
final V o = n.value;
@@ -145,9 +169,7 @@ final class LongMap<V> {
private static class Node<V> {
final long key;
-
V value;
-
Node<V> next;
Node(final long k, final V v) {
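With the move to org.eclipse.jgit.util and the widened visibility above, LongMap can be used outside the transport package since 4.9. A short example exercising the newly public methods:

import org.eclipse.jgit.util.LongMap;

public class LongMapDemo {
    public static void main(String[] args) {
        LongMap<String> map = new LongMap<>();
        map.put(42L, "answer");
        System.out.println(map.containsKey(42L)); // true
        System.out.println(map.get(42L));         // answer
        System.out.println(map.remove(42L));      // answer
        System.out.println(map.get(42L));         // null
    }
}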
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/NB.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/NB.java
index 8536f1dc25..471a4998d7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/NB.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/NB.java
@@ -113,6 +113,24 @@ public final class NB {
}
/**
+ * Convert sequence of 3 bytes (network byte order) into unsigned value.
+ *
+ * @param intbuf
+ * buffer to acquire the 3 bytes of data from.
+ * @param offset
+ * position within the buffer to begin reading from. This
+ * position and the next 2 bytes after it (for a total of 3
+ * bytes) will be read.
+ * @return signed integer value that matches the 24 bits read.
+ * @since 4.9
+ */
+ public static int decodeUInt24(byte[] intbuf, int offset) {
+ int r = (intbuf[offset] & 0xff) << 8;
+ r |= intbuf[offset + 1] & 0xff;
+ return (r << 8) | (intbuf[offset + 2] & 0xff);
+ }
+
+ /**
* Convert sequence of 4 bytes (network byte order) into signed value.
*
* @param intbuf
@@ -223,6 +241,29 @@ public final class NB {
}
/**
+ * Write a 24 bit integer as a sequence of 3 bytes (network byte order).
+ *
+ * @param intbuf
+ * buffer to write the 3 bytes of data into.
+ * @param offset
+ * position within the buffer to begin writing to. This position
+ * and the next 2 bytes after it (for a total of 3 bytes) will be
+ * replaced.
+ * @param v
+ * the value to write.
+ * @since 4.9
+ */
+ public static void encodeInt24(byte[] intbuf, int offset, int v) {
+ intbuf[offset + 2] = (byte) v;
+ v >>>= 8;
+
+ intbuf[offset + 1] = (byte) v;
+ v >>>= 8;
+
+ intbuf[offset] = (byte) v;
+ }
+
+ /**
* Write a 32 bit integer as a sequence of 4 bytes (network byte order).
*
* @param intbuf
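A quick round-trip of the new 24-bit helpers in org.eclipse.jgit.util.NB shown above; only the low 24 bits of the value are written, anything above bit 23 is dropped on encode:

import org.eclipse.jgit.util.NB;

public class Int24Demo {
    public static void main(String[] args) {
        byte[] buf = new byte[3];
        NB.encodeInt24(buf, 0, 0xABCDEF);
        System.out.println(Integer.toHexString(NB.decodeUInt24(buf, 0))); // abcdef
    }
}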
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java
index 86777b9cdc..ad138bbf18 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java
@@ -618,6 +618,10 @@ public final class RawParseUtils {
* <p>
* The last element (index <code>map.size()-1</code>) always contains
* <code>end</code>.
+ * <p>
+ * If the data contains a '\0' anywhere, the whole region is considered binary
+ * and a LineMap corresponding to a single line is returned.
+ * </p>
*
* @param buf
* buffer to scan.
@@ -629,14 +633,29 @@ public final class RawParseUtils {
* @return a line map indexing the start position of each line.
*/
public static final IntList lineMap(final byte[] buf, int ptr, int end) {
+ int start = ptr;
+
// Experimentally derived from multiple source repositories
// the average number of bytes/line is 36. Its a rough guess
// to initially size our map close to the target.
- //
- final IntList map = new IntList((end - ptr) / 36);
- map.fillTo(1, Integer.MIN_VALUE);
- for (; ptr < end; ptr = nextLF(buf, ptr))
- map.add(ptr);
+ IntList map = new IntList((end - ptr) / 36);
+ map.add(Integer.MIN_VALUE);
+ boolean foundLF = true;
+ for (; ptr < end; ptr++) {
+ if (foundLF) {
+ map.add(ptr);
+ }
+
+ if (buf[ptr] == '\0') {
+ // binary data.
+ map = new IntList(3);
+ map.add(Integer.MIN_VALUE);
+ map.add(start);
+ break;
+ }
+
+ foundLF = (buf[ptr] == '\n');
+ }
map.add(end);
return map;
}
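The effect of the lineMap() change above in a small example: a NUL byte anywhere in the scanned region makes the whole region map as a single line, while text still gets one entry per line, plus the Integer.MIN_VALUE padding at index 0 and the end offset as the last element:

import org.eclipse.jgit.util.IntList;
import org.eclipse.jgit.util.RawParseUtils;

public class LineMapDemo {
    public static void main(String[] args) {
        byte[] text = { 'a', '\n', 'b', '\n' };
        byte[] binary = { 'a', 0, 'b', '\n' };
        IntList textMap = RawParseUtils.lineMap(text, 0, text.length);
        IntList binMap = RawParseUtils.lineMap(binary, 0, binary.length);
        System.out.println(textMap.size()); // 4: [MIN_VALUE, 0, 2, 4]
        System.out.println(binMap.size());  // 3: [MIN_VALUE, 0, 4]
    }
}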
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/RefList.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/RefList.java
index 159781795e..ce4b7c7507 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/RefList.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/RefList.java
@@ -338,10 +338,11 @@ public class RefList<T extends Ref> implements Iterable<Ref> {
* Create an empty list with at least the specified capacity.
*
* @param capacity
- * the new capacity.
+ * the new capacity; if zero or negative, behavior is the same as
+ * {@link #Builder()}.
*/
public Builder(int capacity) {
- list = new Ref[capacity];
+ list = new Ref[Math.max(capacity, 16)];
}
/** @return number of items in this builder's internal collection. */
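With the clamp above, a zero or negative capacity behaves like the no-argument Builder() instead of allocating an empty backing array or failing on a negative size. A brief example; the ref name is arbitrary:

import org.eclipse.jgit.lib.ObjectIdRef;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.util.RefList;

public class RefListBuilderDemo {
    public static void main(String[] args) {
        RefList.Builder<Ref> b = new RefList.Builder<>(-1); // now same as new Builder()
        b.add(new ObjectIdRef.Unpeeled(Ref.Storage.LOOSE, "refs/heads/master", null));
        System.out.println(b.size()); // 1
    }
}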
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/RelativeDateFormatter.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/RelativeDateFormatter.java
index 3cb3749cff..a5df66e99c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/RelativeDateFormatter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/RelativeDateFormatter.java
@@ -114,10 +114,11 @@ public class RelativeDateFormatter {
// up to 5 years use "year, months" rounded to months
if (ageMillis < 5 * YEAR_IN_MILLIS) {
- long years = ageMillis / YEAR_IN_MILLIS;
+ long years = round(ageMillis, MONTH_IN_MILLIS) / 12;
String yearLabel = (years > 1) ? JGitText.get().years : //
JGitText.get().year;
- long months = round(ageMillis % YEAR_IN_MILLIS, MONTH_IN_MILLIS);
+ long months = round(ageMillis - years * YEAR_IN_MILLIS,
+ MONTH_IN_MILLIS);
String monthLabel = (months > 1) ? JGitText.get().months : //
(months == 1 ? JGitText.get().month : ""); //$NON-NLS-1$
return MessageFormat.format(
@@ -140,4 +141,4 @@ public class RelativeDateFormatter {
long rounded = (n + unit / 2) / unit;
return rounded;
}
-}
\ No newline at end of file
+}
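A worked example of the rounding change above, using RelativeDateFormatter's own constants (a month is 30 days, a year 365 days). For an age of roughly 2 years and 11.8 months, truncating the years first yielded "2 years, 12 months"; rounding the whole age to months first gives 36 months, i.e. 3 years and 0 months:

public class RelativeDateMath {
    static final long DAY = 24L * 60 * 60 * 1000;
    static final long MONTH = 30 * DAY; // MONTH_IN_MILLIS
    static final long YEAR = 365 * DAY; // YEAR_IN_MILLIS

    // Same rounding helper as RelativeDateFormatter.round(long, long).
    static long round(long n, long unit) {
        return (n + unit / 2) / unit;
    }

    public static void main(String[] args) {
        long age = 2 * YEAR + 11 * MONTH + 25 * DAY;

        long oldYears = age / YEAR;                // truncates to 2
        long oldMonths = round(age % YEAR, MONTH); // 12 -> "2 years, 12 months"

        long newYears = round(age, MONTH) / 12;               // 3
        long newMonths = round(age - newYears * YEAR, MONTH); // 0 -> "3 years"

        System.out.println(oldYears + "y " + oldMonths + "m vs "
                + newYears + "y " + newMonths + "m");
    }
}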
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/EolStreamTypeUtil.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/EolStreamTypeUtil.java
index c95992fbc2..727c1f4ad6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/EolStreamTypeUtil.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/EolStreamTypeUtil.java
@@ -144,6 +144,11 @@ public final class EolStreamTypeUtil {
private static EolStreamType checkInStreamType(WorkingTreeOptions options,
Attributes attrs) {
+ if (attrs.isUnset("text")) {//$NON-NLS-1$
+ // "binary" or "-text" (which is included in the binary expansion)
+ return EolStreamType.DIRECT;
+ }
+
// old git system
if (attrs.isSet("crlf")) {//$NON-NLS-1$
return EolStreamType.TEXT_LF;
@@ -154,9 +159,6 @@ public final class EolStreamTypeUtil {
}
// new git system
- if (attrs.isUnset("text")) {//$NON-NLS-1$
- return EolStreamType.DIRECT;
- }
String eol = attrs.getValue("eol"); //$NON-NLS-1$
if (eol != null)
// check-in is always normalized to LF
@@ -183,6 +185,11 @@ public final class EolStreamTypeUtil {
private static EolStreamType checkOutStreamType(WorkingTreeOptions options,
Attributes attrs) {
+ if (attrs.isUnset("text")) {//$NON-NLS-1$
+ // "binary" or "-text" (which is included in the binary expansion)
+ return EolStreamType.DIRECT;
+ }
+
// old git system
if (attrs.isSet("crlf")) {//$NON-NLS-1$
return FORCE_EOL_LF_ON_CHECKOUT ? EolStreamType.TEXT_LF
@@ -194,9 +201,6 @@ public final class EolStreamTypeUtil {
}
// new git system
- if (attrs.isUnset("text")) {//$NON-NLS-1$
- return EolStreamType.DIRECT;
- }
String eol = attrs.getValue("eol"); //$NON-NLS-1$
if (eol != null && "crlf".equals(eol)) //$NON-NLS-1$
return EolStreamType.TEXT_CRLF;
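The reordering above makes an explicit "binary" / "-text" attribute take precedence over the legacy "crlf" attribute and over "eol" in both the check-in and check-out paths. A standalone sketch of the check-in precedence only, with plain strings standing in for JGit's Attributes states (this is not the JGit method itself):

import java.util.Map;

public class EolPrecedenceSketch {
    enum StreamType { DIRECT, TEXT_LF }

    static StreamType checkIn(Map<String, String> attrs) {
        if ("unset".equals(attrs.get("text")))
            return StreamType.DIRECT;  // "binary" / "-text": now checked first
        if ("set".equals(attrs.get("crlf")))
            return StreamType.TEXT_LF; // legacy attribute
        if (attrs.get("eol") != null)
            return StreamType.TEXT_LF; // check-in always normalizes to LF
        return StreamType.DIRECT;
    }

    public static void main(String[] args) {
        // "crlf -text": previously the legacy crlf attribute won (TEXT_LF);
        // with the binary check moved first, DIRECT is returned instead.
        System.out.println(checkIn(Map.of("crlf", "set", "text", "unset")));
    }
}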