
BatchRefUpdate: repro racy atomic update, and fix it

PackedBatchRefUpdate was creating a new packed-refs list that was
potentially unsorted. This would be papered over when the list was
read back from disk in parsePackedRefs, which detects unsorted ref
lists on reading, and sorts them. However, the BatchRefUpdate also
installed the new (unsorted) list in-memory in
RefDirectory#packedRefs.
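
The reason unsorted data breaks visibility: RefList assumes sorted
order and resolves names by binary search, which can report a present
ref as missing when the list is out of order. A minimal illustration
with plain arrays (not JGit's RefList API):

	import java.util.Arrays;

	public class UnsortedLookup {
		public static void main(String[] args) {
			// Order as produced by two batch updates before the fix:
			// the second batch's refs were appended after the first's.
			String[] unsorted = { "refs/heads/a0", "refs/heads/b0",
					"refs/heads/a1", "refs/heads/b1" };
			// Binary search requires sorted input; here it prints -2
			// ("not found") even though refs/heads/a1 is present.
			System.out.println(
					Arrays.binarySearch(unsorted, "refs/heads/a1"));
		}
	}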

With the timestamp granularity code committed to stable-5.1, we can
more often accurately decide that the packed-refs file is clean, and
will return the erroneous unsorted data more often. Unluckily timed
delays can also make the file appear clean, so the problem was
exacerbated under load.
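
A self-contained analogue of the caching pattern at play (hypothetical
code, not JGit's RefDirectory or FileSnapshot): the file is re-read
only when its timestamp looks different, so an unsorted list installed
in memory keeps being served for as long as the file is judged clean.

	import java.io.File;
	import java.io.IOException;
	import java.nio.file.Files;
	import java.util.List;

	class PackedRefsCache {
		private long seenLastModified = -1;
		private List<String> cached;

		List<String> get(File packedRefs) throws IOException {
			long lm = packedRefs.lastModified();
			if (cached == null || lm != seenLastModified) {
				// Re-reading sorts, like parsePackedRefs does; the
				// in-memory shortcut taken by the batch update did not.
				cached = Files.readAllLines(packedRefs.toPath());
				cached.sort(null);
				seenLastModified = lm;
			}
			return cached; // before the fix: possibly the unsorted list
		}
	}

The more accurate the clean check, the more often the cached
(unsorted) copy is returned instead of the self-healing re-read.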

The symptom is that refs added by a BatchRefUpdate would not be
visible directly after they were added. In particular, the Gerrit
integration tests use BatchRefUpdate in their setup for creating the
Admin group, and then try to read it back directly afterward.

The test recreates one failure case. A better approach would be to
revise RefList.Builder so that it detects out-of-order lists and
sorts them automatically.
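
A sketch of that suggested hardening (hypothetical, not the actual
RefList.Builder API): detect out-of-order adds cheaply, and sort once
at build time only when needed.

	import java.util.ArrayList;
	import java.util.List;

	class SortedRefNameBuilder {
		private final List<String> names = new ArrayList<>();
		private boolean sorted = true;

		void add(String name) {
			if (sorted && !names.isEmpty()
					&& names.get(names.size() - 1).compareTo(name) > 0) {
				sorted = false; // out-of-order add detected
			}
			names.add(name);
		}

		List<String> build() {
			if (!sorted) {
				names.sort(null); // repair instead of emitting a broken list
			}
			return names;
		}
	}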

Fixes https://bugs.eclipse.org/bugs/show_bug.cgi?id=548716 and
https://bugs.chromium.org/p/gerrit/issues/detail?id=11373.

Bug: 548716
Change-Id: I613c8059964513ce2370543620725b540b3cb6d1
Signed-off-by: Han-Wen Nienhuys <hanwen@google.com>
Signed-off-by: Matthias Sohn <matthias.sohn@sap.com>
commit 0e3d4a273f (tags/v4.11.9.201909030838-r)

org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java (+29 -0)

@@ -43,6 +43,7 @@

 package org.eclipse.jgit.internal.storage.file;

+import static java.nio.charset.StandardCharsets.UTF_8;
 import static java.util.concurrent.TimeUnit.NANOSECONDS;
 import static java.util.concurrent.TimeUnit.SECONDS;
 import static org.eclipse.jgit.internal.storage.file.BatchRefUpdateTest.Result.LOCK_FAILURE;
@@ -64,6 +65,7 @@ import static org.junit.Assume.assumeTrue;

 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Files;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -161,6 +163,33 @@ public class BatchRefUpdateTest extends LocalDiskRepositoryTestCase {
 		refsChangedEvents = 0;
 	}

+	@Test
+	public void packedRefsFileIsSorted() throws IOException {
+		assumeTrue(atomic);
+
+		for (int i = 0; i < 2; i++) {
+			BatchRefUpdate bu = diskRepo.getRefDatabase().newBatchUpdate();
+			String b1 = String.format("refs/heads/a%d", i);
+			String b2 = String.format("refs/heads/b%d", i);
+			bu.setAtomic(atomic);
+			ReceiveCommand c1 = new ReceiveCommand(ObjectId.zeroId(), A, b1);
+			ReceiveCommand c2 = new ReceiveCommand(ObjectId.zeroId(), B, b2);
+			bu.addCommand(c1, c2);
+			try (RevWalk rw = new RevWalk(diskRepo)) {
+				bu.execute(rw, NullProgressMonitor.INSTANCE);
+			}
+			assertEquals(c1.getResult(), ReceiveCommand.Result.OK);
+			assertEquals(c2.getResult(), ReceiveCommand.Result.OK);
+		}
+
+		File packed = new File(diskRepo.getDirectory(), "packed-refs");
+		String packedStr = new String(Files.readAllBytes(packed.toPath()), UTF_8);
+
+		int a2 = packedStr.indexOf("refs/heads/a1");
+		int b1 = packedStr.indexOf("refs/heads/b0");
+		assertTrue(a2 < b1);
+	}
+
 	@Test
 	public void simpleNoForce() throws IOException {
 		writeLooseRef("refs/heads/master", A);
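
For orientation, the sorted packed-refs content the test expects after
both batches would look like this (header line and object ids elided;
before the fix, the second batch's refs landed after refs/heads/b0
instead):

	<objectId> refs/heads/a0
	<objectId> refs/heads/a1
	<objectId> refs/heads/b0
	<objectId> refs/heads/b1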

org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java (+58 -60)

@@ -50,10 +50,10 @@ import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_NONFASTFORWARD;

 import java.io.IOException;
 import java.text.MessageFormat;
-import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -347,65 +347,72 @@ class PackedBatchRefUpdate extends BatchRefUpdate {

 	private static RefList<Ref> applyUpdates(RevWalk walk, RefList<Ref> refs,
 			List<ReceiveCommand> commands) throws IOException {
-		int nDeletes = 0;
-		List<ReceiveCommand> adds = new ArrayList<>(commands.size());
+		// Construct a new RefList by merging the old list with the updates.
+		// This assumes that each ref occurs at most once as a ReceiveCommand.
+		Collections.sort(commands, new Comparator<ReceiveCommand>() {
+			@Override
+			public int compare(ReceiveCommand a, ReceiveCommand b) {
+				return a.getRefName().compareTo(b.getRefName());
+			}
+		});
+
+		int delta = 0;
 		for (ReceiveCommand c : commands) {
-			if (c.getType() == ReceiveCommand.Type.CREATE) {
-				adds.add(c);
-			} else if (c.getType() == ReceiveCommand.Type.DELETE) {
-				nDeletes++;
+			switch (c.getType()) {
+			case DELETE:
+				delta--;
+				break;
+			case CREATE:
+				delta++;
+				break;
+			default:
 			}
 		}
-		int addIdx = 0;
-
-		// Construct a new RefList by linearly scanning the old list, and merging in
-		// any updates.
-		Map<String, ReceiveCommand> byName = byName(commands);
-		RefList.Builder<Ref> b =
-				new RefList.Builder<>(refs.size() - nDeletes + adds.size());
-		for (Ref ref : refs) {
-			String name = ref.getName();
-			ReceiveCommand cmd = byName.remove(name);
-			if (cmd == null) {
-				b.add(ref);
-				continue;
-			}
-			if (!cmd.getOldId().equals(ref.getObjectId())) {
-				lockFailure(cmd, commands);
-				return null;
+
+		RefList.Builder<Ref> b = new RefList.Builder<>(refs.size() + delta);
+		int refIdx = 0;
+		int cmdIdx = 0;
+		while (refIdx < refs.size() || cmdIdx < commands.size()) {
+			Ref ref = (refIdx < refs.size()) ? refs.get(refIdx) : null;
+			ReceiveCommand cmd = (cmdIdx < commands.size())
+					? commands.get(cmdIdx)
+					: null;
+			int cmp = 0;
+			if (ref != null && cmd != null) {
+				cmp = ref.getName().compareTo(cmd.getRefName());
+			} else if (ref == null) {
+				cmp = 1;
+			} else if (cmd == null) {
+				cmp = -1;
 			}

-			// Consume any adds between the last and current ref.
-			while (addIdx < adds.size()) {
-				ReceiveCommand currAdd = adds.get(addIdx);
-				if (currAdd.getRefName().compareTo(name) < 0) {
-					b.add(peeledRef(walk, currAdd));
-					byName.remove(currAdd.getRefName());
-				} else {
-					break;
+			if (cmp < 0) {
+				b.add(ref);
+				refIdx++;
+			} else if (cmp > 0) {
+				assert cmd != null;
+				if (cmd.getType() != ReceiveCommand.Type.CREATE) {
+					lockFailure(cmd, commands);
+					return null;
 				}
-				addIdx++;
-			}

-			if (cmd.getType() != ReceiveCommand.Type.DELETE) {
 				b.add(peeledRef(walk, cmd));
-			}
-		}
-
-		// All remaining adds are valid, since the refs didn't exist.
-		while (addIdx < adds.size()) {
-			ReceiveCommand cmd = adds.get(addIdx++);
-			byName.remove(cmd.getRefName());
-			b.add(peeledRef(walk, cmd));
-		}
+				cmdIdx++;
+			} else {
+				assert cmd != null;
+				assert ref != null;
+				if (!cmd.getOldId().equals(ref.getObjectId())) {
+					lockFailure(cmd, commands);
+					return null;
+				}

-		// Any remaining updates/deletes do not correspond to any existing refs, so
-		// they are lock failures.
-		if (!byName.isEmpty()) {
-			lockFailure(byName.values().iterator().next(), commands);
-			return null;
+				if (cmd.getType() != ReceiveCommand.Type.DELETE) {
+					b.add(peeledRef(walk, cmd));
+				}
+				cmdIdx++;
+				refIdx++;
+			}
 		}

 		return b.toRefList();
 	}

@@ -484,15 +491,6 @@ class PackedBatchRefUpdate extends BatchRefUpdate {
 		}
 	}

-	private static Map<String, ReceiveCommand> byName(
-			List<ReceiveCommand> commands) {
-		Map<String, ReceiveCommand> ret = new LinkedHashMap<>();
-		for (ReceiveCommand cmd : commands) {
-			ret.put(cmd.getRefName(), cmd);
-		}
-		return ret;
-	}
-
 	private static Ref peeledRef(RevWalk walk, ReceiveCommand cmd)
 			throws IOException {
 		ObjectId newId = cmd.getNewId().copy();

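The core of the fix is a classic merge-join: sort the commands once,
then walk the existing refs and the commands in lockstep, so the
output is sorted by construction. A self-contained demonstration of
that shape (plain strings and illustrative names only; the real code
also handles updates, deletes, and lock failures):

	import java.util.Arrays;
	import java.util.List;

	public class MergeJoinDemo {
		public static void main(String[] args) {
			List<String> refs = Arrays.asList("refs/heads/a0", "refs/heads/b0");
			List<String> creates = Arrays.asList("refs/heads/a1", "refs/heads/b1");
			int r = 0;
			int c = 0;
			while (r < refs.size() || c < creates.size()) {
				int cmp;
				if (r == refs.size()) {
					cmp = 1; // only creates remain
				} else if (c == creates.size()) {
					cmp = -1; // only existing refs remain
				} else {
					cmp = refs.get(r).compareTo(creates.get(c));
				}
				if (cmp < 0) {
					System.out.println(refs.get(r++)); // keep existing ref
				} else {
					System.out.println(creates.get(c++)); // splice in new ref
				}
			}
			// Prints a0, a1, b0, b1: sorted by construction.
		}
	}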