
Merge branch 'stable-5.3' into stable-5.4

* stable-5.3:
  Prepare 5.3.3-SNAPSHOT builds
  JGit v5.3.2.201906051522-r
  Prepare 5.1.9-SNAPSHOT builds
  JGit v5.1.8.201906050907-r
  Test detecting modified packfiles
  Enhance fsTick() to use filesystem timer resolution
  Add debug trace to measure time needed to open pack index
  Extend FileSnapshot for packfiles to also use checksum to detect changes
  Wait opening new packfile until it can't be racy anymore
  Avoid null PackConfig in GC
  Add FileSnapshot test testing recognition of file size changes
  Capture reason for result of FileSnapshot#isModified
  Skip FileSnapshotTest#testSimulatePackfileReplacement on Windows
  Tune max heap size for tests
  Fix FileSnapshotTest.testNewFileNoWait() to match its javadoc
  ObjectDirectory: fix closing of obsolete packs
  Update API filters for methods added to fix bugs
  Bazel: Increase severity of most error-prone checks to ERROR
  Enable error-prone checks by default
  Add bazel options to align with gerrit's
  Include filekey file attribute when comparing FileSnapshots
  Measure file timestamp resolution used in FileSnapshot
  Fix FileSnapshot's consideration of file size
  Fix API problem filters
  Fix API problem filters

Change-Id: Ie1c7e4752661aec9e1754660934921224e2408eb
Signed-off-by: Matthias Sohn <matthias.sohn@sap.com>
tags/v5.4.0.201906121030-r
Matthias Sohn
commit d9c569b747
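Several of the changes listed above revolve around one idea: a file read too close to its last modification may be "racily clean", because the filesystem timestamp cannot yet distinguish a later change, so the snapshot logic measures the timestamp resolution and adds a 10% safety margin. A minimal standalone sketch of that window, with made-up numbers and not part of this change:

import java.time.Duration;

class RacyWindowSketch {
    public static void main(String[] args) {
        // Assume a measured timestamp resolution of 2 s (the FAT worst case used as fallback).
        Duration resolution = Duration.ofMillis(2000);
        long racyNanos = (resolution.toNanos() + 1) * 11 / 10; // resolution plus 10% safety margin
        long read = System.currentTimeMillis();  // time the snapshot was taken
        long lastModified = read - 1500;         // file was last modified 1.5 s before the read
        boolean maybeRacy = (read - lastModified) * 1_000_000 <= racyNanos;
        System.out.println(maybeRacy); // true: lastModified alone cannot prove the file is unchanged
    }
}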

.bazelrc (+9, -0)

@@ -0,0 +1,9 @@
build --repository_cache=~/.gerritcodereview/bazel-cache/repository
build --experimental_strict_action_env
build --action_env=PATH
build --disk_cache=~/.gerritcodereview/bazel-cache/cas
build --java_toolchain=//tools:error_prone_warnings_toolchain

test --build_tests_only
test --test_output=errors


org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/RepositoryTestCase.java (+17, -11)

@@ -349,7 +349,8 @@ public abstract class RepositoryTestCase extends LocalDiskRepositoryTestCase {
* younger modification timestamp than the modification timestamp of the
* given file. This is done by touching a temporary file, reading the
* lastmodified attribute and, if needed, sleeping. After sleeping this loop
* starts again until the filesystem timer has advanced enough.
* starts again until the filesystem timer has advanced enough. The
* temporary file will be created as a sibling of lastFile.
*
* @param lastFile
* the file on which we want to wait until the filesystem timer
@@ -362,21 +363,26 @@ public abstract class RepositoryTestCase extends LocalDiskRepositoryTestCase {
*/
public static long fsTick(File lastFile) throws InterruptedException,
IOException {
long sleepTime = 64;
File tmp;
FS fs = FS.DETECTED;
if (lastFile != null && !fs.exists(lastFile))
throw new FileNotFoundException(lastFile.getPath());
File tmp = File.createTempFile("FileTreeIteratorWithTimeControl", null);
if (lastFile == null) {
lastFile = tmp = File
.createTempFile("fsTickTmpFile", null);
} else {
if (!fs.exists(lastFile)) {
throw new FileNotFoundException(lastFile.getPath());
}
tmp = File.createTempFile("fsTickTmpFile", null,
lastFile.getParentFile());
}
long res = FS.getFsTimerResolution(tmp.toPath()).toMillis();
long sleepTime = res / 10;
try {
long startTime = (lastFile == null) ? fs.lastModified(tmp) : fs
.lastModified(lastFile);
long startTime = fs.lastModified(lastFile);
long actTime = fs.lastModified(tmp);
while (actTime <= startTime) {
Thread.sleep(sleepTime);
sleepTime *= 2;
try (FileOutputStream fos = new FileOutputStream(tmp)) {
// Do nothing
}
FileUtils.touch(tmp.toPath());
actTime = fs.lastModified(tmp);
}
return actTime;
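As a usage note (not part of the diff): inside a test that extends RepositoryTestCase, the enhanced fsTick() is typically combined with FileSnapshot roughly as follows; the file name and contents are made up.

File f = writeTrashFile("example.txt", "v1");   // helper from RepositoryTestCase
FileSnapshot snapshot = FileSnapshot.save(f);   // org.eclipse.jgit.internal.storage.file.FileSnapshot
fsTick(f);                                      // wait until the filesystem timer has ticked past f's mtime
writeTrashFile("example.txt", "v2");            // this write now gets a strictly newer timestamp
assertTrue(snapshot.isModified(f));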

org.eclipse.jgit.lfs.test/META-INF/MANIFEST.MF (+0, -1)

@@ -22,4 +22,3 @@ Import-Package: org.eclipse.jgit.internal.storage.dfs;version="[5.4.0,5.5.0)",
org.junit.runner;version="[4.12,5.0.0)",
org.junit.runners;version="[4.12,5.0.0)"
Export-Package: org.eclipse.jgit.lfs.test;version="5.4.0";x-friends:="org.eclipse.jgit.lfs.server.test"


org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/FileSnapshotTest.java (+55, -2)

@@ -42,16 +42,22 @@
*/
package org.eclipse.jgit.internal.storage.file;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.List;

import org.eclipse.jgit.util.FileUtils;
import org.eclipse.jgit.util.SystemReader;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;

@@ -121,12 +127,59 @@ public class FileSnapshotTest {
@Test
public void testNewFileNoWait() throws Exception {
File f1 = createFile("newfile");
waitNextSec(f1);
FileSnapshot save = FileSnapshot.save(f1);
Thread.sleep(1500);
assertTrue(save.isModified(f1));
}

/**
* Simulate packfile replacement in same file which may occur if set of
* objects in the pack is the same but pack config was different. On Posix
* filesystems this should change the inode (filekey in java.nio
* terminology).
*
* @throws Exception
*/
@Test
public void testSimulatePackfileReplacement() throws Exception {
Assume.assumeFalse(SystemReader.getInstance().isWindows());
File f1 = createFile("file"); // inode y
File f2 = createFile("fool"); // Guarantees new inode x
// wait on f2 since this method resets lastModified of the file
// and leaves lastModified of f1 untouched
waitNextSec(f2);
waitNextSec(f2);
FileTime timestamp = Files.getLastModifiedTime(f1.toPath());
FileSnapshot save = FileSnapshot.save(f1);
Files.move(f2.toPath(), f1.toPath(), // Now "file" is inode x
StandardCopyOption.REPLACE_EXISTING,
StandardCopyOption.ATOMIC_MOVE);
Files.setLastModifiedTime(f1.toPath(), timestamp);
assertTrue(save.isModified(f1));
assertTrue("unexpected change of fileKey", save.wasFileKeyChanged());
assertFalse("unexpected size change", save.wasSizeChanged());
assertFalse("unexpected lastModified change",
save.wasLastModifiedChanged());
assertFalse("lastModified was unexpectedly racily clean",
save.wasLastModifiedRacilyClean());
}

/**
* Append a character to a file to change its size and set original
* lastModified
*
* @throws Exception
*/
@Test
public void testFileSizeChanged() throws Exception {
File f = createFile("file");
FileTime timestamp = Files.getLastModifiedTime(f.toPath());
FileSnapshot save = FileSnapshot.save(f);
append(f, (byte) 'x');
Files.setLastModifiedTime(f.toPath(), timestamp);
assertTrue(save.isModified(f));
assertTrue(save.wasSizeChanged());
}

private File createFile(String string) throws IOException {
trash.mkdirs();
File f = File.createTempFile(string, "tdat", trash);

org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackFileSnapshotTest.java (+354, -0)

@@ -0,0 +1,354 @@
/*
* Copyright (C) 2019, Matthias Sohn <matthias.sohn@sap.com>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.internal.storage.file;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
//import java.nio.file.attribute.BasicFileAttributes;
import java.text.ParseException;
import java.util.Collection;
import java.util.Iterator;
import java.util.Random;
import java.util.zip.Deflater;

import org.eclipse.jgit.api.GarbageCollectCommand;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.AbortedByHookException;
import org.eclipse.jgit.api.errors.ConcurrentRefUpdateException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.NoFilepatternException;
import org.eclipse.jgit.api.errors.NoHeadException;
import org.eclipse.jgit.api.errors.NoMessageException;
import org.eclipse.jgit.api.errors.UnmergedPathsException;
import org.eclipse.jgit.api.errors.WrongRepositoryStateException;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.storage.pack.PackConfig;
import org.junit.Test;

public class PackFileSnapshotTest extends RepositoryTestCase {

private static ObjectId unknownID = ObjectId
.fromString("1234567890123456789012345678901234567890");

@Test
public void testSamePackDifferentCompressionDetectChecksumChanged()
throws Exception {
Git git = Git.wrap(db);
File f = writeTrashFile("file", "foobar ");
for (int i = 0; i < 10; i++) {
appendRandomLine(f);
git.add().addFilepattern("file").call();
git.commit().setMessage("message" + i).call();
}

FileBasedConfig c = db.getConfig();
c.setInt(ConfigConstants.CONFIG_GC_SECTION, null,
ConfigConstants.CONFIG_KEY_AUTOPACKLIMIT, 1);
c.save();
Collection<PackFile> packs = gc(Deflater.NO_COMPRESSION);
assertEquals("expected 1 packfile after gc", 1, packs.size());
PackFile p1 = packs.iterator().next();
PackFileSnapshot snapshot = p1.getFileSnapshot();

packs = gc(Deflater.BEST_COMPRESSION);
assertEquals("expected 1 packfile after gc", 1, packs.size());
PackFile p2 = packs.iterator().next();
File pf = p2.getPackFile();

// changing compression level with aggressive gc may change size,
// fileKey (on *nix) and checksum. Hence FileSnapshot.isModified can
// return true already based on size or fileKey.
// So the only thing we can test here is that we ensure that checksum
// also changed when we read it here in this test
assertTrue("expected snapshot to detect modified pack",
snapshot.isModified(pf));
assertTrue("expected checksum changed", snapshot.isChecksumChanged(pf));
}

private void appendRandomLine(File f, int length, Random r)
throws IOException {
try (Writer w = Files.newBufferedWriter(f.toPath(),
StandardOpenOption.APPEND)) {
appendRandomLine(w, length, r);
}
}

private void appendRandomLine(File f) throws IOException {
appendRandomLine(f, 5, new Random());
}

private void appendRandomLine(Writer w, int len, Random r)
throws IOException {
final int c1 = 32; // ' '
int c2 = 126; // '~'
for (int i = 0; i < len; i++) {
w.append((char) (c1 + r.nextInt(1 + c2 - c1)));
}
}

private ObjectId createTestRepo(int testDataSeed, int testDataLength)
throws IOException, GitAPIException, NoFilepatternException,
NoHeadException, NoMessageException, UnmergedPathsException,
ConcurrentRefUpdateException, WrongRepositoryStateException,
AbortedByHookException {
// Create a repo with two commits and one file. Each commit adds
// testDataLength number of bytes. Data are random bytes. Since the
// seed for the random number generator is specified we will get
// the same set of bytes for every run and for every platform
Random r = new Random(testDataSeed);
Git git = Git.wrap(db);
File f = writeTrashFile("file", "foobar ");
appendRandomLine(f, testDataLength, r);
git.add().addFilepattern("file").call();
git.commit().setMessage("message1").call();
appendRandomLine(f, testDataLength, r);
git.add().addFilepattern("file").call();
return git.commit().setMessage("message2").call().getId();
}

// Try repacking so fast that you get two new packs which differ only in
// content/chksum but have same name, size and lastmodified.
// Since this is done with standard gc (which creates new tmp files and
// renames them) the filekeys of the new packfiles differ helping jgit
// to detect the fast modification
@Test
public void testDetectModificationAlthoughSameSizeAndModificationtime()
throws Exception {
int testDataSeed = 1;
int testDataLength = 100;
FileBasedConfig config = db.getConfig();
// don't use mtime of the parent folder to detect pack file
// modification.
config.setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_TRUSTFOLDERSTAT, false);
config.save();

createTestRepo(testDataSeed, testDataLength);

// repack to create initial packfile
PackFile pf = repackAndCheck(5, null, null, null);
Path packFilePath = pf.getPackFile().toPath();
AnyObjectId chk1 = pf.getPackChecksum();
String name = pf.getPackName();
Long length = Long.valueOf(pf.getPackFile().length());
long m1 = packFilePath.toFile().lastModified();

// Wait for a filesystem timer tick to enhance probability the rest of
// this test is done before the filesystem timer ticks again.
fsTick(packFilePath.toFile());

// Repack to create packfile with same name, length. Lastmodified and
// content and checksum are different since compression level differs
AnyObjectId chk2 = repackAndCheck(6, name, length, chk1)
.getPackChecksum();
long m2 = packFilePath.toFile().lastModified();
assumeFalse(m2 == m1);

// Repack to create packfile with same name, length. Lastmodified is
// equal to the previous one because we are in the same filesystem timer
// slot. Content and its checksum are different
AnyObjectId chk3 = repackAndCheck(7, name, length, chk2)
.getPackChecksum();
long m3 = packFilePath.toFile().lastModified();

// ask for an unknown git object to force jgit to rescan the list of
// available packs. If we would ask for a known objectid then JGit would
// skip searching for new/modified packfiles
db.getObjectDatabase().has(unknownID);
assertEquals(chk3, getSinglePack(db.getObjectDatabase().getPacks())
.getPackChecksum());
assumeTrue(m3 == m2);
}

// Try repacking so fast that we get two new packs which differ only in
// content and checksum but have same name, size and lastmodified.
// To avoid that JGit detects modification by checking the filekey create
// two new packfiles upfront and create copies of them. Then modify the
// packfiles in-place by opening them for write and then copying the
// content.
@Test
public void testDetectModificationAlthoughSameSizeAndModificationtimeAndFileKey()
throws Exception {
int testDataSeed = 1;
int testDataLength = 100;
FileBasedConfig config = db.getConfig();
config.setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
ConfigConstants.CONFIG_KEY_TRUSTFOLDERSTAT, false);
config.save();

createTestRepo(testDataSeed, testDataLength);

// Repack to create initial packfile. Make a copy of it
PackFile pf = repackAndCheck(5, null, null, null);
Path packFilePath = pf.getPackFile().toPath();
Path packFileBasePath = packFilePath.resolveSibling(
packFilePath.getFileName().toString().replaceAll(".pack", ""));
AnyObjectId chk1 = pf.getPackChecksum();
String name = pf.getPackName();
Long length = Long.valueOf(pf.getPackFile().length());
copyPack(packFileBasePath, "", ".copy1");

// Repack to create second packfile. Make a copy of it
AnyObjectId chk2 = repackAndCheck(6, name, length, chk1)
.getPackChecksum();
copyPack(packFileBasePath, "", ".copy2");

// Repack to create third packfile
AnyObjectId chk3 = repackAndCheck(7, name, length, chk2)
.getPackChecksum();
long m3 = packFilePath.toFile().lastModified();
db.getObjectDatabase().has(unknownID);
assertEquals(chk3, getSinglePack(db.getObjectDatabase().getPacks())
.getPackChecksum());

// Wait for a filesystem timer tick to enhance probability the rest of
// this test is done before the filesystem timer ticks.
fsTick(packFilePath.toFile());

// Copy copy2 to packfile data to force modification of packfile without
// changing the packfile's filekey.
copyPack(packFileBasePath, ".copy2", "");
long m2 = packFilePath.toFile().lastModified();
assumeFalse(m3 == m2);

db.getObjectDatabase().has(unknownID);
assertEquals(chk2, getSinglePack(db.getObjectDatabase().getPacks())
.getPackChecksum());

// Copy copy2 to packfile data to force modification of packfile without
// changing the packfile's filekey.
copyPack(packFileBasePath, ".copy1", "");
long m1 = packFilePath.toFile().lastModified();
assumeTrue(m2 == m1);
db.getObjectDatabase().has(unknownID);
assertEquals(chk1, getSinglePack(db.getObjectDatabase().getPacks())
.getPackChecksum());
}

// Copy file from src to dst but avoid creating a new File (with new
// FileKey) if dst already exists
private Path copyFile(Path src, Path dst) throws IOException {
if (Files.exists(dst)) {
dst.toFile().setWritable(true);
try (OutputStream dstOut = Files.newOutputStream(dst)) {
Files.copy(src, dstOut);
return dst;
}
} else {
return Files.copy(src, dst, StandardCopyOption.REPLACE_EXISTING);
}
}

private Path copyPack(Path base, String srcSuffix, String dstSuffix)
throws IOException {
copyFile(Paths.get(base + ".idx" + srcSuffix),
Paths.get(base + ".idx" + dstSuffix));
copyFile(Paths.get(base + ".bitmap" + srcSuffix),
Paths.get(base + ".bitmap" + dstSuffix));
return copyFile(Paths.get(base + ".pack" + srcSuffix),
Paths.get(base + ".pack" + dstSuffix));
}

private PackFile repackAndCheck(int compressionLevel, String oldName,
Long oldLength, AnyObjectId oldChkSum)
throws IOException, ParseException {
PackFile p = getSinglePack(gc(compressionLevel));
File pf = p.getPackFile();
// The following two assumptions should not cause the test to fail. If
// on a certain platform we get packfiles (containing the same git
// objects) where the lengths differ or the checksums don't differ we
// just skip this test. A reason for that could be that compression
// works differently or random number generator works differently. Then
// we have to search for more consistent test data or checkin these
// packfiles as test resources
assumeTrue(oldLength == null || pf.length() == oldLength.longValue());
assumeTrue(oldChkSum == null || !p.getPackChecksum().equals(oldChkSum));
assertTrue(oldName == null || p.getPackName().equals(oldName));
return p;
}

private PackFile getSinglePack(Collection<PackFile> packs) {
Iterator<PackFile> pIt = packs.iterator();
PackFile p = pIt.next();
assertFalse(pIt.hasNext());
return p;
}

private Collection<PackFile> gc(int compressionLevel)
throws IOException, ParseException {
GC gc = new GC(db);
PackConfig pc = new PackConfig(db.getConfig());
pc.setCompressionLevel(compressionLevel);

pc.setSinglePack(true);

// --aggressive
pc.setDeltaSearchWindowSize(
GarbageCollectCommand.DEFAULT_GC_AGGRESSIVE_WINDOW);
pc.setMaxDeltaDepth(GarbageCollectCommand.DEFAULT_GC_AGGRESSIVE_DEPTH);
pc.setReuseObjects(false);

gc.setPackConfig(pc);
gc.setExpireAgeMillis(0);
gc.setPackExpireAgeMillis(0);
return gc.gc();
}

}

org.eclipse.jgit.test/tst/org/eclipse/jgit/util/FSTest.java (+38, -1)

@@ -46,17 +46,24 @@ package org.eclipse.jgit.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import static org.junit.Assume.assumeNoException;
import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.nio.file.attribute.PosixFileAttributeView;
import java.nio.file.attribute.PosixFilePermission;
import java.time.Duration;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.eclipse.jgit.errors.CommandFailedException;
import org.eclipse.jgit.junit.RepositoryTestCase;
@@ -215,4 +222,34 @@ public class FSTest {
new String[] { "this-command-does-not-exist" },
Charset.defaultCharset().name());
}

@Test
public void testFsTimestampResolution() throws Exception {
DateTimeFormatter formatter = DateTimeFormatter
.ofPattern("uuuu-MMM-dd HH:mm:ss.nnnnnnnnn", Locale.ENGLISH)
.withZone(ZoneId.systemDefault());
Path dir = Files.createTempDirectory("probe-filesystem");
Duration resolution = FS.getFsTimerResolution(dir);
long resolutionNs = resolution.toNanos();
assertTrue(resolutionNs > 0);
for (int i = 0; i < 10; i++) {
Path f = null;
try {
f = dir.resolve("testTimestampResolution" + i);
Files.createFile(f);
FileUtils.touch(f);
FileTime t1 = Files.getLastModifiedTime(f);
TimeUnit.NANOSECONDS.sleep(resolutionNs);
FileUtils.touch(f);
FileTime t2 = Files.getLastModifiedTime(f);
assertTrue(String.format(
"expected t2=%s to be larger than t1=%s\nsince file timestamp resolution was measured to be %,d ns",
formatter.format(t2.toInstant()),
formatter.format(t1.toInstant()),
Long.valueOf(resolutionNs)), t2.compareTo(t1) > 0);
} finally {
Files.delete(f);
}
}
}
}

org.eclipse.jgit/.settings/.api_filters (+72, -0)

@@ -30,6 +30,62 @@
</message_arguments>
</filter>
</resource>
<resource path="src/org/eclipse/jgit/storage/pack/PackConfig.java" type="org.eclipse.jgit.storage.pack.PackConfig">
<filter id="336658481">
<message_arguments>
<message_argument value="org.eclipse.jgit.storage.pack.PackConfig"/>
<message_argument value="DEFAULT_MINSIZE_PREVENT_RACY_PACK"/>
</message_arguments>
</filter>
<filter id="336658481">
<message_arguments>
<message_argument value="org.eclipse.jgit.storage.pack.PackConfig"/>
<message_argument value="DEFAULT_WAIT_PREVENT_RACY_PACK"/>
</message_arguments>
</filter>
<filter id="1142947843">
<message_arguments>
<message_argument value="5.1.8"/>
<message_argument value="DEFAULT_MINSIZE_PREVENT_RACY_PACK"/>
</message_arguments>
</filter>
<filter id="1142947843">
<message_arguments>
<message_argument value="5.1.8"/>
<message_argument value="DEFAULT_WAIT_PREVENT_RACY_PACK"/>
</message_arguments>
</filter>
<filter id="1142947843">
<message_arguments>
<message_argument value="5.1.8"/>
<message_argument value="doWaitPreventRacyPack(long)"/>
</message_arguments>
</filter>
<filter id="1142947843">
<message_arguments>
<message_argument value="5.1.8"/>
<message_argument value="getMinSizePreventRacyPack()"/>
</message_arguments>
</filter>
<filter id="1142947843">
<message_arguments>
<message_argument value="5.1.8"/>
<message_argument value="isWaitPreventRacyPack()"/>
</message_arguments>
</filter>
<filter id="1142947843">
<message_arguments>
<message_argument value="5.1.8"/>
<message_argument value="setMinSizePreventRacyPack(long)"/>
</message_arguments>
</filter>
<filter id="1142947843">
<message_arguments>
<message_argument value="5.1.8"/>
<message_argument value="setWaitPreventRacyPack(boolean)"/>
</message_arguments>
</filter>
</resource>
<resource path="src/org/eclipse/jgit/storage/pack/PackStatistics.java" type="org.eclipse.jgit.storage.pack.PackStatistics$Accumulator">
<filter comment="ignore the risk subclasses could define the same field and cause a name clash" id="336658481">
<message_arguments>
@@ -60,4 +116,20 @@
</message_arguments>
</filter>
</resource>
<resource path="src/org/eclipse/jgit/util/FS.java" type="org.eclipse.jgit.util.FS">
<filter id="1142947843">
<message_arguments>
<message_argument value="5.2.3"/>
<message_argument value="getFsTimerResolution(Path)"/>
</message_arguments>
</filter>
</resource>
<resource path="src/org/eclipse/jgit/util/FileUtils.java" type="org.eclipse.jgit.util.FileUtils">
<filter id="1142947843">
<message_arguments>
<message_argument value="5.2.3"/>
<message_argument value="touch(Path)"/>
</message_arguments>
</filter>
</resource>
</component>

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileSnapshot.java (+180, -49)

@@ -48,9 +48,13 @@ import java.io.IOException;
import java.nio.file.attribute.BasicFileAttributes;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.Date;
import java.util.Locale;
import java.util.Objects;
import java.util.concurrent.TimeUnit;

import org.eclipse.jgit.annotations.NonNull;
import org.eclipse.jgit.util.FS;

/**
@@ -77,6 +81,8 @@ public class FileSnapshot {
*/
public static final long UNKNOWN_SIZE = -1;

private static final Object MISSING_FILEKEY = new Object();

/**
* A FileSnapshot that is considered to always be modified.
* <p>
@@ -84,7 +90,8 @@ public class FileSnapshot {
* file, but only after {@link #isModified(File)} gets invoked. The returned
* snapshot contains only invalid status information.
*/
public static final FileSnapshot DIRTY = new FileSnapshot(-1, -1, UNKNOWN_SIZE);
public static final FileSnapshot DIRTY = new FileSnapshot(-1, -1,
UNKNOWN_SIZE, Duration.ZERO, MISSING_FILEKEY);

/**
* A FileSnapshot that is clean if the file does not exist.
@@ -93,7 +100,8 @@ public class FileSnapshot {
* file to be clean. {@link #isModified(File)} will return false if the file
* path does not exist.
*/
public static final FileSnapshot MISSING_FILE = new FileSnapshot(0, 0, 0) {
public static final FileSnapshot MISSING_FILE = new FileSnapshot(0, 0, 0,
Duration.ZERO, MISSING_FILEKEY) {
@Override
public boolean isModified(File path) {
return FS.DETECTED.exists(path);
@@ -111,18 +119,12 @@ public class FileSnapshot {
* @return the snapshot.
*/
public static FileSnapshot save(File path) {
long read = System.currentTimeMillis();
long modified;
long size;
try {
BasicFileAttributes fileAttributes = FS.DETECTED.fileAttributes(path);
modified = fileAttributes.lastModifiedTime().toMillis();
size = fileAttributes.size();
} catch (IOException e) {
modified = path.lastModified();
size = path.length();
}
return new FileSnapshot(read, modified, size);
return new FileSnapshot(path);
}

private static Object getFileKey(BasicFileAttributes fileAttributes) {
Object fileKey = fileAttributes.fileKey();
return fileKey == null ? MISSING_FILEKEY : fileKey;
}

/**
@@ -130,6 +132,11 @@ public class FileSnapshot {
* already known.
* <p>
* This method should be invoked before the file is accessed.
* <p>
* Note that this method cannot rely on measuring file timestamp resolution
* to avoid racy git issues caused by finite file timestamp resolution since
* it's unknown in which filesystem the file is located. Hence the worst
* case fallback for timestamp resolution is used.
*
* @param modified
* the last modification time of the file
@@ -137,7 +144,8 @@ public class FileSnapshot {
*/
public static FileSnapshot save(long modified) {
final long read = System.currentTimeMillis();
return new FileSnapshot(read, modified, -1);
return new FileSnapshot(read, modified, -1, Duration.ZERO,
MISSING_FILEKEY);
}

/** Last observed modification time of the path. */
@@ -154,11 +162,57 @@ public class FileSnapshot {
* When set to {@link #UNKNOWN_SIZE} the size is not considered for modification checks. */
private final long size;

private FileSnapshot(long read, long modified, long size) {
/** measured filesystem timestamp resolution */
private Duration fsTimestampResolution;

/**
* Object that uniquely identifies the given file, or {@code
* null} if a file key is not available
*/
private final Object fileKey;

/**
* Record a snapshot for a specific file path.
* <p>
* This method should be invoked before the file is accessed.
*
* @param path
* the path to later remember. The path's current status
* information is saved.
*/
protected FileSnapshot(File path) {
this.lastRead = System.currentTimeMillis();
this.fsTimestampResolution = FS
.getFsTimerResolution(path.toPath().getParent());
BasicFileAttributes fileAttributes = null;
try {
fileAttributes = FS.DETECTED.fileAttributes(path);
} catch (IOException e) {
this.lastModified = path.lastModified();
this.size = path.length();
this.fileKey = MISSING_FILEKEY;
return;
}
this.lastModified = fileAttributes.lastModifiedTime().toMillis();
this.size = fileAttributes.size();
this.fileKey = getFileKey(fileAttributes);
}

private boolean sizeChanged;

private boolean fileKeyChanged;

private boolean lastModifiedChanged;

private boolean wasRacyClean;

private FileSnapshot(long read, long modified, long size,
@NonNull Duration fsTimestampResolution, @NonNull Object fileKey) {
this.lastRead = read;
this.lastModified = modified;
this.cannotBeRacilyClean = notRacyClean(read);
this.fsTimestampResolution = fsTimestampResolution;
this.size = size;
this.fileKey = fileKey;
}

/**
@@ -187,15 +241,30 @@ public class FileSnapshot {
public boolean isModified(File path) {
long currLastModified;
long currSize;
Object currFileKey;
try {
BasicFileAttributes fileAttributes = FS.DETECTED.fileAttributes(path);
currLastModified = fileAttributes.lastModifiedTime().toMillis();
currSize = fileAttributes.size();
currFileKey = getFileKey(fileAttributes);
} catch (IOException e) {
currLastModified = path.lastModified();
currSize = path.length();
currFileKey = MISSING_FILEKEY;
}
return (currSize != UNKNOWN_SIZE && currSize != size) || isModified(currLastModified);
sizeChanged = isSizeChanged(currSize);
if (sizeChanged) {
return true;
}
fileKeyChanged = isFileKeyChanged(currFileKey);
if (fileKeyChanged) {
return true;
}
lastModifiedChanged = isModified(currLastModified);
if (lastModifiedChanged) {
return true;
}
return false;
}

/**
@@ -222,11 +291,25 @@ public class FileSnapshot {
*/
public void setClean(FileSnapshot other) {
final long now = other.lastRead;
if (notRacyClean(now))
if (!isRacyClean(now)) {
cannotBeRacilyClean = true;
}
lastRead = now;
}

/**
* Wait until this snapshot's file can't be racy anymore
*
* @throws InterruptedException
* if sleep was interrupted
*/
public void waitUntilNotRacy() throws InterruptedException {
while (isRacyClean(System.currentTimeMillis())) {
TimeUnit.NANOSECONDS
.sleep((fsTimestampResolution.toNanos() + 1) * 11 / 10);
}
}

/**
* Compare two snapshots to see if they cache the same information.
*
@@ -235,72 +318,120 @@ public class FileSnapshot {
* @return true if the two snapshots share the same information.
*/
public boolean equals(FileSnapshot other) {
return lastModified == other.lastModified;
return lastModified == other.lastModified && size == other.size
&& Objects.equals(fileKey, other.fileKey);
}

/** {@inheritDoc} */
@Override
public boolean equals(Object other) {
if (other instanceof FileSnapshot)
return equals((FileSnapshot) other);
return false;
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof FileSnapshot)) {
return false;
}
FileSnapshot other = (FileSnapshot) obj;
return equals(other);
}

/** {@inheritDoc} */
@Override
public int hashCode() {
// This is pretty pointless, but override hashCode to ensure that
// x.hashCode() == y.hashCode() when x.equals(y) is true.
//
return (int) lastModified;
return Objects.hash(Long.valueOf(lastModified), Long.valueOf(size),
fileKey);
}

/**
* @return {@code true} if FileSnapshot.isModified(File) found the file size
* changed
*/
boolean wasSizeChanged() {
return sizeChanged;
}

/**
* @return {@code true} if FileSnapshot.isModified(File) found the file key
* changed
*/
boolean wasFileKeyChanged() {
return fileKeyChanged;
}

/**
* @return {@code true} if FileSnapshot.isModified(File) found the file's
* lastModified changed
*/
boolean wasLastModifiedChanged() {
return lastModifiedChanged;
}

/**
* @return {@code true} if FileSnapshot.isModified(File) detected that
* lastModified is racily clean
*/
boolean wasLastModifiedRacilyClean() {
return wasRacyClean;
}

/** {@inheritDoc} */
@SuppressWarnings("nls")
@Override
public String toString() {
if (this == DIRTY)
return "DIRTY"; //$NON-NLS-1$
if (this == MISSING_FILE)
return "MISSING_FILE"; //$NON-NLS-1$
DateFormat f = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", //$NON-NLS-1$
if (this == DIRTY) {
return "DIRTY";
}
if (this == MISSING_FILE) {
return "MISSING_FILE";
}
DateFormat f = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS",
Locale.US);
return "FileSnapshot[modified: " + f.format(new Date(lastModified)) //$NON-NLS-1$
+ ", read: " + f.format(new Date(lastRead)) + "]"; //$NON-NLS-1$ //$NON-NLS-2$
return "FileSnapshot[modified: " + f.format(new Date(lastModified))
+ ", read: " + f.format(new Date(lastRead)) + ", size:" + size
+ ", fileKey: " + fileKey + "]";
}

private boolean notRacyClean(long read) {
// The last modified time granularity of FAT filesystems is 2 seconds.
// Using 2.5 seconds here provides a reasonably high assurance that
// a modification was not missed.
//
return read - lastModified > 2500;
private boolean isRacyClean(long read) {
// add a 10% safety margin
long racyNanos = (fsTimestampResolution.toNanos() + 1) * 11 / 10;
return wasRacyClean = (read - lastModified) * 1_000_000 <= racyNanos;
}

private boolean isModified(long currLastModified) {
// Any difference indicates the path was modified.
//
if (lastModified != currLastModified)

lastModifiedChanged = lastModified != currLastModified;
if (lastModifiedChanged) {
return true;
}

// We have already determined the last read was far enough
// after the last modification that any new modifications
// are certain to change the last modified time.
//
if (cannotBeRacilyClean)
if (cannotBeRacilyClean) {
return false;
if (notRacyClean(lastRead)) {
}
if (!isRacyClean(lastRead)) {
// Our last read should have marked cannotBeRacilyClean,
// but this thread may not have seen the change. The read
// of the volatile field lastRead should have fixed that.
//
return false;
}

// We last read this path too close to its last observed
// modification time. We may have missed a modification.
// Scan again, to ensure we still see the same state.
//
return true;
}

private boolean isFileKeyChanged(Object currFileKey) {
return currFileKey != MISSING_FILEKEY && !currFileKey.equals(fileKey);
}

private boolean isSizeChanged(long currSize) {
return currSize != UNKNOWN_SIZE && currSize != size;
}
}
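From a caller's perspective the extended check can be summarized as follows; this is a hedged sketch, the pack path is hypothetical, and FileSnapshot lives in an internal package.

File pack = new File("objects/pack/pack-1234.pack"); // hypothetical path
FileSnapshot snapshot = FileSnapshot.save(pack);
// ... the pack is later rewritten in place ...
if (snapshot.isModified(pack)) {
    // reported when the size, the file key (e.g. inode) or lastModified changed,
    // or when lastModified is still within the racy-clean window
}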

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java (+24, -8)

@@ -177,7 +177,7 @@ public class GC {

private Date packExpire;

private PackConfig pconfig = null;
private PackConfig pconfig;

/**
* the refs which existed during the last call to {@link #repack()}. This is
@@ -213,6 +213,7 @@ public class GC {
*/
public GC(FileRepository repo) {
this.repo = repo;
this.pconfig = new PackConfig(repo);
this.pm = NullProgressMonitor.INSTANCE;
}

@@ -397,7 +398,7 @@ public class GC {
*/
private void removeOldPack(File packFile, String packName, PackExt ext,
int deleteOptions) throws IOException {
if (pconfig != null && pconfig.isPreserveOldPacks()) {
if (pconfig.isPreserveOldPacks()) {
File oldPackDir = repo.getObjectDatabase().getPreservedDirectory();
FileUtils.mkdir(oldPackDir, true);

@@ -413,7 +414,7 @@ public class GC {
* Delete the preserved directory including all pack files within
*/
private void prunePreserved() {
if (pconfig != null && pconfig.isPrunePreserved()) {
if (pconfig.isPrunePreserved()) {
try {
FileUtils.delete(repo.getObjectDatabase().getPreservedDirectory(),
FileUtils.RECURSIVE | FileUtils.RETRY | FileUtils.SKIP_MISSING);
@@ -855,7 +856,7 @@ public class GC {
nonHeads.addAll(indexObjects);

// Combine the GC_REST objects into the GC pack if requested
if (pconfig != null && pconfig.getSinglePack()) {
if (pconfig.getSinglePack()) {
allHeadsAndTags.addAll(nonHeads);
nonHeads.clear();
}
@@ -1159,7 +1160,7 @@ public class GC {
return Integer.signum(o1.hashCode() - o2.hashCode());
});
try (PackWriter pw = new PackWriter(
(pconfig == null) ? new PackConfig(repo) : pconfig,
pconfig,
repo.newObjectReader())) {
// prepare the PackWriter
pw.setDeltaBaseAsOffset(true);
@@ -1255,8 +1256,23 @@ public class GC {
realExt), e);
}
}

return repo.getObjectDatabase().openPack(realPack);
boolean interrupted = false;
try {
FileSnapshot snapshot = FileSnapshot.save(realPack);
if (pconfig.doWaitPreventRacyPack(snapshot.size())) {
snapshot.waitUntilNotRacy();
}
} catch (InterruptedException e) {
interrupted = true;
}
try {
return repo.getObjectDatabase().openPack(realPack);
} finally {
if (interrupted) {
// Re-set interrupted flag
Thread.currentThread().interrupt();
}
}
} finally {
if (tmpPack != null && tmpPack.exists())
tmpPack.delete();
@@ -1434,7 +1450,7 @@ public class GC {
* the {@link org.eclipse.jgit.storage.pack.PackConfig} used when
* writing packs
*/
public void setPackConfig(PackConfig pconfig) {
public void setPackConfig(@NonNull PackConfig pconfig) {
this.pconfig = pconfig;
}


org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectory.java (+2, -1)

@@ -910,9 +910,10 @@ public class ObjectDirectory extends FileObjectDatabase {

final String packName = base + PACK.getExtension();
final File packFile = new File(packDirectory, packName);
final PackFile oldPack = forReuse.remove(packName);
final PackFile oldPack = forReuse.get(packName);
if (oldPack != null
&& !oldPack.getFileSnapshot().isModified(packFile)) {
forReuse.remove(packName);
list.add(oldPack);
continue;
}

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ObjectDirectoryPackParser.java (+18, -0)

@@ -65,6 +65,7 @@ import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.storage.pack.PackConfig;
import org.eclipse.jgit.transport.PackParser;
import org.eclipse.jgit.transport.PackedObjectInfo;
import org.eclipse.jgit.util.FileUtils;
@@ -122,9 +123,12 @@ public class ObjectDirectoryPackParser extends PackParser {
/** The pack that was created, if parsing was successful. */
private PackFile newPack;

private PackConfig pconfig;

ObjectDirectoryPackParser(FileObjectDatabase odb, InputStream src) {
super(odb, src);
this.db = odb;
this.pconfig = new PackConfig(odb.getConfig());
this.crc = new CRC32();
this.tailDigest = Constants.newMessageDigest();

@@ -514,6 +518,15 @@ public class ObjectDirectoryPackParser extends PackParser {
JGitText.get().cannotMoveIndexTo, finalIdx), e);
}

boolean interrupted = false;
try {
FileSnapshot snapshot = FileSnapshot.save(finalPack);
if (pconfig.doWaitPreventRacyPack(snapshot.size())) {
snapshot.waitUntilNotRacy();
}
} catch (InterruptedException e) {
interrupted = true;
}
try {
newPack = db.openPack(finalPack);
} catch (IOException err) {
@@ -523,6 +536,11 @@ public class ObjectDirectoryPackParser extends PackParser {
if (finalIdx.exists())
FileUtils.delete(finalIdx);
throw err;
} finally {
if (interrupted) {
// Re-set interrupted flag
Thread.currentThread().interrupt();
}
}

return lockMessage != null ? keep : null;

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java (+30, -3)

@@ -93,6 +93,8 @@ import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.util.LongList;
import org.eclipse.jgit.util.NB;
import org.eclipse.jgit.util.RawParseUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* A Git version 2 pack file representation. A pack file contains Git objects in
@@ -100,6 +102,7 @@ import org.eclipse.jgit.util.RawParseUtils;
* objects are similar.
*/
public class PackFile implements Iterable<PackIndex.MutableEntry> {
private final static Logger LOG = LoggerFactory.getLogger(PackFile.class);
/** Sorts PackFiles to be most recently created to least recently created. */
public static final Comparator<PackFile> SORT = (PackFile a,
PackFile b) -> b.packLastModified - a.packLastModified;
@@ -127,7 +130,7 @@ public class PackFile implements Iterable<PackIndex.MutableEntry> {

int packLastModified;

private FileSnapshot fileSnapshot;
private PackFileSnapshot fileSnapshot;

private volatile boolean invalid;

@@ -164,7 +167,7 @@ public class PackFile implements Iterable<PackIndex.MutableEntry> {
*/
public PackFile(File packFile, int extensions) {
this.packFile = packFile;
this.fileSnapshot = FileSnapshot.save(packFile);
this.fileSnapshot = PackFileSnapshot.save(packFile);
this.packLastModified = (int) (fileSnapshot.lastModified() >> 10);
this.extensions = extensions;

@@ -185,10 +188,22 @@ public class PackFile implements Iterable<PackIndex.MutableEntry> {
throw new PackInvalidException(packFile, invalidatingCause);
}
try {
long start = System.currentTimeMillis();
idx = PackIndex.open(extFile(INDEX));
if (LOG.isDebugEnabled()) {
LOG.debug(String.format(
"Opening pack index %s, size %.3f MB took %d ms", //$NON-NLS-1$
extFile(INDEX).getAbsolutePath(),
Float.valueOf(extFile(INDEX).length()
/ (1024f * 1024)),
Long.valueOf(System.currentTimeMillis()
- start)));
}

if (packChecksum == null) {
packChecksum = idx.packChecksum;
fileSnapshot.setChecksum(
ObjectId.fromRaw(packChecksum));
} else if (!Arrays.equals(packChecksum,
idx.packChecksum)) {
throw new PackMismatchException(MessageFormat
@@ -367,10 +382,14 @@ public class PackFile implements Iterable<PackIndex.MutableEntry> {
*
* @return the packfile @{@link FileSnapshot} that the object is loaded from.
*/
FileSnapshot getFileSnapshot() {
PackFileSnapshot getFileSnapshot() {
return fileSnapshot;
}

AnyObjectId getPackChecksum() {
return ObjectId.fromRaw(packChecksum);
}

private final byte[] decompress(final long position, final int sz,
final WindowCursor curs) throws IOException, DataFormatException {
byte[] dstbuf;
@@ -1205,4 +1224,12 @@ public class PackFile implements Iterable<PackIndex.MutableEntry> {
private boolean hasExt(PackExt ext) {
return (extensions & ext.getBit()) != 0;
}

@SuppressWarnings("nls")
@Override
public String toString() {
return "PackFile [packFileName=" + packFile.getName() + ", length="
+ packFile.length() + ", packChecksum="
+ ObjectId.fromRaw(packChecksum).name() + "]";
}
}

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFileSnapshot.java (+123, -0)

@@ -0,0 +1,123 @@
/*
* Copyright (C) 2019, Matthias Sohn <matthias.sohn@sap.com>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.internal.storage.file;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;

class PackFileSnapshot extends FileSnapshot {

private static final ObjectId MISSING_CHECKSUM = ObjectId.zeroId();

/**
* Record a snapshot for a specific packfile path.
* <p>
* This method should be invoked before the packfile is accessed.
*
* @param path
* the path to later remember. The path's current status
* information is saved.
* @return the snapshot.
*/
public static PackFileSnapshot save(File path) {
return new PackFileSnapshot(path);
}

private AnyObjectId checksum = MISSING_CHECKSUM;

private boolean wasChecksumChanged;


PackFileSnapshot(File packFile) {
super(packFile);
}

void setChecksum(AnyObjectId checksum) {
this.checksum = checksum;
}

/** {@inheritDoc} */
@Override
public boolean isModified(File packFile) {
if (!super.isModified(packFile)) {
return false;
}
if (wasSizeChanged() || wasFileKeyChanged()
|| !wasLastModifiedRacilyClean()) {
return true;
}
return isChecksumChanged(packFile);
}

boolean isChecksumChanged(File packFile) {
return wasChecksumChanged = checksum != MISSING_CHECKSUM
&& !checksum.equals(readChecksum(packFile));
}

private AnyObjectId readChecksum(File packFile) {
try (RandomAccessFile fd = new RandomAccessFile(packFile, "r")) { //$NON-NLS-1$
fd.seek(fd.length() - 20);
final byte[] buf = new byte[20];
fd.readFully(buf, 0, 20);
return ObjectId.fromRaw(buf);
} catch (IOException e) {
return MISSING_CHECKSUM;
}
}

boolean wasChecksumChanged() {
return wasChecksumChanged;
}

@SuppressWarnings("nls")
@Override
public String toString() {
return "PackFileSnapshot [checksum=" + checksum + ", "
+ super.toString() + "]";
}

}

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInserter.java (+23, -3)

@@ -86,6 +86,7 @@ import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.ObjectStream;
import org.eclipse.jgit.storage.pack.PackConfig;
import org.eclipse.jgit.transport.PackParser;
import org.eclipse.jgit.transport.PackedObjectInfo;
import org.eclipse.jgit.util.BlockList;
@@ -115,8 +116,11 @@ public class PackInserter extends ObjectInserter {
private PackStream packOut;
private Inflater cachedInflater;

private PackConfig pconfig;

PackInserter(ObjectDirectory db) {
this.db = db;
this.pconfig = new PackConfig(db.getConfig());
}

/**
@@ -296,9 +300,25 @@ public class PackInserter extends ObjectInserter {
realIdx), e);
}

db.openPack(realPack);
rollback = false;
clear();
boolean interrupted = false;
try {
FileSnapshot snapshot = FileSnapshot.save(realPack);
if (pconfig.doWaitPreventRacyPack(snapshot.size())) {
snapshot.waitUntilNotRacy();
}
} catch (InterruptedException e) {
interrupted = true;
}
try {
db.openPack(realPack);
rollback = false;
} finally {
clear();
if (interrupted) {
// Re-set interrupted flag
Thread.currentThread().interrupt();
}
}
}

private static void writePackIndex(File idx, byte[] packHash,

org.eclipse.jgit/src/org/eclipse/jgit/storage/pack/PackConfig.java (+99, -1)

@@ -116,12 +116,30 @@ public class PackConfig {
*/
public static final int DEFAULT_DELTA_SEARCH_WINDOW_SIZE = 10;

private static final int MB = 1 << 20;

/**
* Default big file threshold: {@value}
*
* @see #setBigFileThreshold(int)
*/
public static final int DEFAULT_BIG_FILE_THRESHOLD = 50 * 1024 * 1024;
public static final int DEFAULT_BIG_FILE_THRESHOLD = 50 * MB;

/**
* Default if we wait before opening a newly written pack to prevent its
* lastModified timestamp could be racy
*
* @since 5.1.8
*/
public static final boolean DEFAULT_WAIT_PREVENT_RACY_PACK = false;

/**
* Default if we wait before opening a newly written pack to prevent its
* lastModified timestamp could be racy
*
* @since 5.1.8
*/
public static final long DEFAULT_MINSIZE_PREVENT_RACY_PACK = 100 * MB;

/**
* Default delta cache size: {@value}
@@ -238,6 +256,10 @@ public class PackConfig {

private int bigFileThreshold = DEFAULT_BIG_FILE_THRESHOLD;

private boolean waitPreventRacyPack = DEFAULT_WAIT_PREVENT_RACY_PACK;

private long minSizePreventRacyPack = DEFAULT_MINSIZE_PREVENT_RACY_PACK;

private int threads;

private Executor executor;
@@ -314,6 +336,8 @@ public class PackConfig {
this.deltaCacheSize = cfg.deltaCacheSize;
this.deltaCacheLimit = cfg.deltaCacheLimit;
this.bigFileThreshold = cfg.bigFileThreshold;
this.waitPreventRacyPack = cfg.waitPreventRacyPack;
this.minSizePreventRacyPack = cfg.minSizePreventRacyPack;
this.threads = cfg.threads;
this.executor = cfg.executor;
this.indexVersion = cfg.indexVersion;
@@ -736,6 +760,76 @@ public class PackConfig {
this.bigFileThreshold = bigFileThreshold;
}

/**
* Get whether we wait before opening a newly written pack to prevent its
* lastModified timestamp could be racy
*
* @return whether we wait before opening a newly written pack to prevent
* its lastModified timestamp could be racy
* @since 5.1.8
*/
public boolean isWaitPreventRacyPack() {
return waitPreventRacyPack;
}

/**
* Get whether we wait before opening a newly written pack to prevent its
* lastModified timestamp could be racy. Returns {@code true} if
* {@code waitToPreventRacyPack = true} and
* {@code packSize > minSizePreventRacyPack}, {@code false} otherwise.
*
* @param packSize
* size of the pack file
*
* @return whether we wait before opening a newly written pack to prevent
* its lastModified timestamp could be racy
* @since 5.1.8
*/
public boolean doWaitPreventRacyPack(long packSize) {
return isWaitPreventRacyPack()
&& packSize > getMinSizePreventRacyPack();
}

/**
* Set whether we wait before opening a newly written pack to prevent its
* lastModified timestamp could be racy
*
* @param waitPreventRacyPack
* whether we wait before opening a newly written pack to prevent
* its lastModified timestamp could be racy
* @since 5.1.8
*/
public void setWaitPreventRacyPack(boolean waitPreventRacyPack) {
this.waitPreventRacyPack = waitPreventRacyPack;
}

/**
* Get minimum packfile size for which we wait before opening a newly
* written pack to prevent its lastModified timestamp could be racy if
* {@code isWaitToPreventRacyPack} is {@code true}.
*
* @return minimum packfile size, default is 100 MiB
*
* @since 5.1.8
*/
public long getMinSizePreventRacyPack() {
return minSizePreventRacyPack;
}

/**
* Set minimum packfile size for which we wait before opening a newly
* written pack to prevent its lastModified timestamp could be racy if
* {@code isWaitToPreventRacyPack} is {@code true}.
*
* @param minSizePreventRacyPack
* minimum packfile size, default is 100 MiB
*
* @since 5.1.8
*/
public void setMinSizePreventRacyPack(long minSizePreventRacyPack) {
this.minSizePreventRacyPack = minSizePreventRacyPack;
}

/**
* Get the compression level applied to objects in the pack.
*
@@ -1083,6 +1177,10 @@ public class PackConfig {
setBitmapInactiveBranchAgeInDays(
rc.getInt("pack", "bitmapinactivebranchageindays", //$NON-NLS-1$ //$NON-NLS-2$
getBitmapInactiveBranchAgeInDays()));
setWaitPreventRacyPack(rc.getBoolean("pack", "waitpreventracypack", //$NON-NLS-1$ //$NON-NLS-2$
isWaitPreventRacyPack()));
setMinSizePreventRacyPack(rc.getLong("pack", "minsizepreventracypack", //$NON-NLS-1$//$NON-NLS-2$
getMinSizePreventRacyPack()));
}

/** {@inheritDoc} */
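A hedged example of enabling the new behaviour programmatically; the same settings can come from the git config keys pack.waitpreventracypack and pack.minsizepreventracypack read above. The repo and gc variables and the 10 MiB threshold are assumptions, not part of the change.

PackConfig pc = new PackConfig(repo);              // repo: an existing Repository
pc.setWaitPreventRacyPack(true);                   // wait before opening freshly written packs
pc.setMinSizePreventRacyPack(10L * 1024 * 1024);   // only wait for packs larger than 10 MiB
gc.setPackConfig(pc);                              // e.g. hand it to GC (see GC.java above)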

org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java (+100, -0)

@@ -55,23 +55,31 @@ import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.nio.charset.Charset;
import java.nio.file.AccessDeniedException;
import java.nio.file.FileStore;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.text.MessageFormat;
import java.time.Duration;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

import org.eclipse.jgit.annotations.NonNull;
import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.api.errors.JGitInternalException;
import org.eclipse.jgit.errors.CommandFailedException;
@@ -180,6 +188,83 @@ public abstract class FS {
}
}

private static final class FileStoreAttributeCache {
/**
* The last modified time granularity of FAT filesystems is 2 seconds.
*/
private static final Duration FALLBACK_TIMESTAMP_RESOLUTION = Duration
.ofMillis(2000);

private static final Map<FileStore, FileStoreAttributeCache> attributeCache = new ConcurrentHashMap<>();

static Duration getFsTimestampResolution(Path file) {
try {
Path dir = Files.isDirectory(file) ? file : file.getParent();
if (!dir.toFile().canWrite()) {
// can not determine FileStore of an unborn directory or in
// a read-only directory
return FALLBACK_TIMESTAMP_RESOLUTION;
}
FileStore s = Files.getFileStore(dir);
FileStoreAttributeCache c = attributeCache.get(s);
if (c == null) {
c = new FileStoreAttributeCache(dir);
attributeCache.put(s, c);
if (LOG.isDebugEnabled()) {
LOG.debug(c.toString());
}
}
return c.getFsTimestampResolution();

} catch (IOException | InterruptedException e) {
LOG.warn(e.getMessage(), e);
return FALLBACK_TIMESTAMP_RESOLUTION;
}
}

private Duration fsTimestampResolution;

Duration getFsTimestampResolution() {
return fsTimestampResolution;
}

private FileStoreAttributeCache(Path dir)
throws IOException, InterruptedException {
Path probe = dir.resolve(".probe-" + UUID.randomUUID()); //$NON-NLS-1$
Files.createFile(probe);
try {
FileTime startTime = Files.getLastModifiedTime(probe);
FileTime actTime = startTime;
long sleepTime = 512;
while (actTime.compareTo(startTime) <= 0) {
TimeUnit.NANOSECONDS.sleep(sleepTime);
FileUtils.touch(probe);
actTime = Files.getLastModifiedTime(probe);
// limit sleep time to max. 100ms
if (sleepTime < 100_000_000L) {
sleepTime = sleepTime * 2;
}
}
fsTimestampResolution = Duration.between(startTime.toInstant(),
actTime.toInstant());
} catch (AccessDeniedException e) {
LOG.error(e.getLocalizedMessage(), e);
} finally {
Files.delete(probe);
}
}

@SuppressWarnings("nls")
@Override
public String toString() {
return "FileStoreAttributeCache[" + attributeCache.keySet()
.stream()
.map(key -> "FileStore[" + key + "]: fsTimestampResolution="
+ attributeCache.get(key).getFsTimestampResolution())
.collect(Collectors.joining(",\n")) + "]";
}
}

/** The auto-detected implementation selected for this operating system and JRE. */
public static final FS DETECTED = detect();

@@ -221,6 +306,21 @@ public abstract class FS {
return factory.detect(cygwinUsed);
}

/**
* Get an estimate for the filesystem timestamp resolution from a cache of
* timestamp resolution per FileStore, if not yet available it is measured
* for a probe file under the given directory.
*
* @param dir
* the directory under which the probe file will be created to
* measure the timer resolution.
* @return measured filesystem timestamp resolution
* @since 5.2.3
*/
public static Duration getFsTimerResolution(@NonNull Path dir) {
return FileStoreAttributeCache.getFsTimestampResolution(dir);
}

private volatile Holder<File> userHome;

private volatile Holder<File> gitSystemConfig;
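A hedged usage sketch of the new probing helper; the temporary directory is arbitrary.

Path dir = Files.createTempDirectory("fs-probe");
Duration resolution = FS.getFsTimerResolution(dir);  // measured once per FileStore, then cached
System.out.println("timestamp resolution: " + resolution.toMillis() + " ms");
FileUtils.touch(dir.resolve("probe-file"));          // the new FileUtils.touch (see next file)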

org.eclipse.jgit/src/org/eclipse/jgit/util/FileUtils.java (+15, -0)

@@ -50,6 +50,7 @@ import static java.nio.charset.StandardCharsets.UTF_8;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.AtomicMoveNotSupportedException;
import java.nio.file.CopyOption;
import java.nio.file.Files;
@@ -928,4 +929,18 @@ public class FileUtils {
}
return path;
}

/**
* Touch the given file
*
* @param f
* the file to touch
* @throws IOException
* @since 5.2.3
*/
public static void touch(Path f) throws IOException {
try (OutputStream fos = Files.newOutputStream(f)) {
// touch the file
}
}
}

tools/BUILD (+42, -40)

@@ -14,8 +14,10 @@ default_java_toolchain(
visibility = ["//visibility:public"],
)

# This EP warnings list borrowed from here:
# Error Prone errors enabled by default; see ../.bazelrc for how this is
# enabled. This warnings list is originally based on:
# https://github.com/bazelbuild/BUILD_file_generator/blob/master/tools/bazel_defs/java.bzl
# However, feel free to add any additional errors. Thus far they have all been pretty useful.
java_package_configuration(
name = "error_prone",
javacopts = [
@@ -23,61 +25,61 @@ java_package_configuration(
"-Xep:MissingCasesInEnumSwitch:ERROR",
"-Xep:ReferenceEquality:WARN",
"-Xep:StringEquality:WARN",
"-Xep:WildcardImport:WARN",
"-Xep:WildcardImport:ERROR",
"-Xep:AmbiguousMethodReference:WARN",
"-Xep:BadAnnotationImplementation:WARN",
"-Xep:BadAnnotationImplementation:ERROR",
"-Xep:BadComparable:WARN",
"-Xep:BoxedPrimitiveConstructor:ERROR",
"-Xep:CannotMockFinalClass:WARN",
"-Xep:CannotMockFinalClass:ERROR",
"-Xep:ClassCanBeStatic:ERROR",
"-Xep:ClassNewInstance:WARN",
"-Xep:DefaultCharset:ERROR",
"-Xep:DoubleCheckedLocking:WARN",
"-Xep:ElementsCountedInLoop:WARN",
"-Xep:EqualsHashCode:WARN",
"-Xep:EqualsIncompatibleType:WARN",
"-Xep:DoubleCheckedLocking:ERROR",
"-Xep:ElementsCountedInLoop:ERROR",
"-Xep:EqualsHashCode:ERROR",
"-Xep:EqualsIncompatibleType:ERROR",
"-Xep:ExpectedExceptionChecker:ERROR",
"-Xep:Finally:WARN",
"-Xep:FloatingPointLiteralPrecision:WARN",
"-Xep:FragmentInjection:WARN",
"-Xep:FragmentNotInstantiable:WARN",
"-Xep:FunctionalInterfaceClash:WARN",
"-Xep:FloatingPointLiteralPrecision:ERROR",
"-Xep:FragmentInjection:ERROR",
"-Xep:FragmentNotInstantiable:ERROR",
"-Xep:FunctionalInterfaceClash:ERROR",
"-Xep:FutureReturnValueIgnored:WARN",
"-Xep:GetClassOnEnum:WARN",
"-Xep:ImmutableAnnotationChecker:WARN",
"-Xep:GetClassOnEnum:ERROR",
"-Xep:ImmutableAnnotationChecker:ERROR",
"-Xep:ImmutableEnumChecker:WARN",
"-Xep:IncompatibleModifiers:WARN",
"-Xep:InjectOnConstructorOfAbstractClass:WARN",
"-Xep:InputStreamSlowMultibyteRead:WARN",
"-Xep:IterableAndIterator:WARN",
"-Xep:JUnit3FloatingPointComparisonWithoutDelta:WARN",
"-Xep:JUnitAmbiguousTestClass:WARN",
"-Xep:LiteralClassName:WARN",
"-Xep:IncompatibleModifiers:ERROR",
"-Xep:InjectOnConstructorOfAbstractClass:ERROR",
"-Xep:InputStreamSlowMultibyteRead:ERROR",
"-Xep:IterableAndIterator:ERROR",
"-Xep:JUnit3FloatingPointComparisonWithoutDelta:ERROR",
"-Xep:JUnitAmbiguousTestClass:ERROR",
"-Xep:LiteralClassName:ERROR",
"-Xep:MissingFail:ERROR",
"-Xep:MissingOverride:WARN",
"-Xep:MutableConstantField:WARN",
"-Xep:MissingOverride:ERROR",
"-Xep:MutableConstantField:ERROR",
"-Xep:NarrowingCompoundAssignment:WARN",
"-Xep:NonAtomicVolatileUpdate:WARN",
"-Xep:NonAtomicVolatileUpdate:ERROR",
"-Xep:NonOverridingEquals:WARN",
"-Xep:NullableConstructor:WARN",
"-Xep:NullablePrimitive:WARN",
"-Xep:NullableVoid:WARN",
"-Xep:NullableConstructor:ERROR",
"-Xep:NullablePrimitive:ERROR",
"-Xep:NullableVoid:ERROR",
"-Xep:OperatorPrecedence:WARN",
"-Xep:OverridesGuiceInjectableMethod:WARN",
"-Xep:PreconditionsInvalidPlaceholder:WARN",
"-Xep:ProtoFieldPreconditionsCheckNotNull:WARN",
"-Xep:ProtocolBufferOrdinal:WARN",
"-Xep:RequiredModifiers:WARN",
"-Xep:OverridesGuiceInjectableMethod:ERROR",
"-Xep:PreconditionsInvalidPlaceholder:ERROR",
"-Xep:ProtoFieldPreconditionsCheckNotNull:ERROR",
"-Xep:ProtocolBufferOrdinal:ERROR",
"-Xep:RequiredModifiers:ERROR",
"-Xep:ShortCircuitBoolean:WARN",
"-Xep:SimpleDateFormatConstant:WARN",
"-Xep:StaticGuardedByInstance:WARN",
"-Xep:SynchronizeOnNonFinalField:WARN",
"-Xep:TruthConstantAsserts:WARN",
"-Xep:TypeParameterShadowing:WARN",
"-Xep:SimpleDateFormatConstant:ERROR",
"-Xep:StaticGuardedByInstance:ERROR",
"-Xep:SynchronizeOnNonFinalField:ERROR",
"-Xep:TruthConstantAsserts:ERROR",
"-Xep:TypeParameterShadowing:ERROR",
"-Xep:TypeParameterUnusedInFormals:WARN",
"-Xep:URLEqualsHashCode:WARN",
"-Xep:UnsynchronizedOverridesSynchronized:WARN",
"-Xep:WaitNotInLoop:WARN",
"-Xep:URLEqualsHashCode:ERROR",
"-Xep:UnsynchronizedOverridesSynchronized:ERROR",
"-Xep:WaitNotInLoop:ERROR",
],
packages = ["error_prone_packages"],
)
