
InMemoryRepository.java

package org.eclipse.jgit.internal.storage.dfs;

import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;

import org.eclipse.jgit.internal.storage.pack.PackExt;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdRef;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Ref.Storage;
import org.eclipse.jgit.lib.SymbolicRef;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.util.RefList;
/**
 * Git repository stored entirely in the local process memory.
 * <p>
 * This implementation builds on the DFS repository by storing all reference
 * and object data in the local process. It is not very efficient and exists
 * only for unit testing and small experiments.
 * <p>
 * The repository is thread-safe. Memory used is released only when this object
 * is garbage collected. Closing the repository has no impact on its memory.
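 * <p>
 * A minimal usage sketch for a unit test; the blob content and the
 * {@code ObjectInserter} flow shown here are illustrative assumptions, not
 * part of this class:
 *
 * <pre>
 * InMemoryRepository repo = new InMemoryRepository(
 * 		new DfsRepositoryDescription("test"));
 * try (ObjectInserter ins = repo.newObjectInserter()) {
 * 	// Insert a blob; flush() makes it visible to readers of the repository.
 * 	ObjectId blob = ins.insert(Constants.OBJ_BLOB,
 * 			"hello".getBytes(StandardCharsets.UTF_8));
 * 	ins.flush();
 * }
 * </pre>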
 */
public class InMemoryRepository extends DfsRepository {
	private static final AtomicInteger packId = new AtomicInteger();

	private final DfsObjDatabase objdb;

	private final DfsRefDatabase refdb;

	/**
	 * Initialize a new in-memory repository.
	 *
	 * @param repoDesc
	 *            description of the repository.
	 * @since 2.0
	 */
	public InMemoryRepository(DfsRepositoryDescription repoDesc) {
		super(new DfsRepositoryBuilder<DfsRepositoryBuilder, InMemoryRepository>() {
			@Override
			public InMemoryRepository build() throws IOException {
				throw new UnsupportedOperationException();
			}
		}.setRepositoryDescription(repoDesc));
		objdb = new MemObjDatabase(this);
		refdb = new MemRefDatabase();
	}

	@Override
	public DfsObjDatabase getObjectDatabase() {
		return objdb;
	}

	@Override
	public DfsRefDatabase getRefDatabase() {
		return refdb;
	}
	private class MemObjDatabase extends DfsObjDatabase {
		private List<DfsPackDescription> packs = new ArrayList<DfsPackDescription>();

		MemObjDatabase(DfsRepository repo) {
			super(repo, new DfsReaderOptions());
		}

		@Override
		protected synchronized List<DfsPackDescription> listPacks() {
			return packs;
		}

		@Override
		protected DfsPackDescription newPack(PackSource source) {
			int id = packId.incrementAndGet();
			DfsPackDescription desc = new MemPack(
					"pack-" + id + "-" + source.name(), //$NON-NLS-1$ //$NON-NLS-2$
					getRepository().getDescription());
			return desc.setPackSource(source);
		}

		@Override
		protected synchronized void commitPackImpl(
				Collection<DfsPackDescription> desc,
				Collection<DfsPackDescription> replace) {
			List<DfsPackDescription> n;
			n = new ArrayList<DfsPackDescription>(desc.size() + packs.size());
			n.addAll(desc);
			n.addAll(packs);
			if (replace != null)
				n.removeAll(replace);
			packs = n;
		}

		@Override
		protected void rollbackPack(Collection<DfsPackDescription> desc) {
			// Do nothing. Pack is not recorded until commitPack.
		}

		@Override
		protected ReadableChannel openFile(DfsPackDescription desc, PackExt ext)
				throws FileNotFoundException, IOException {
			MemPack memPack = (MemPack) desc;
			byte[] file = memPack.fileMap.get(ext);
			if (file == null)
				throw new FileNotFoundException(desc.getFileName(ext));
			return new ByteArrayReadableChannel(file);
		}

		@Override
		protected DfsOutputStream writeFile(
				DfsPackDescription desc, final PackExt ext) throws IOException {
			final MemPack memPack = (MemPack) desc;
			return new Out() {
				@Override
				public void flush() {
					memPack.fileMap.put(ext, getData());
				}
			};
		}
	}
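
	/** Pack description whose files are held in an in-memory map keyed by pack extension. */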
	private static class MemPack extends DfsPackDescription {
		private final Map<PackExt, byte[]> fileMap = new HashMap<PackExt, byte[]>();

		MemPack(String name, DfsRepositoryDescription repoDesc) {
			super(repoDesc, name);
		}
	}
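
	/**
	 * Output stream that buffers written bytes in memory; subclasses implement
	 * {@link #flush()} to publish the buffered bytes.
	 */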
	private abstract static class Out extends DfsOutputStream {
		private final ByteArrayOutputStream dst = new ByteArrayOutputStream();

		private byte[] data;

		@Override
		public void write(byte[] buf, int off, int len) {
			data = null;
			dst.write(buf, off, len);
		}

		@Override
		public int read(long position, ByteBuffer buf) {
			byte[] d = getData();
			int n = Math.min(buf.remaining(), d.length - (int) position);
			if (n == 0)
				return -1;
			buf.put(d, (int) position, n);
			return n;
		}

		byte[] getData() {
			if (data == null)
				data = dst.toByteArray();
			return data;
		}

		@Override
		public abstract void flush();

		@Override
		public void close() {
			flush();
		}
	}
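
	/** Readable channel over a fixed byte array held in memory. */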
	private static class ByteArrayReadableChannel implements ReadableChannel {
		private final byte[] data;

		private int position;

		private boolean open = true;

		ByteArrayReadableChannel(byte[] buf) {
			data = buf;
		}

		public int read(ByteBuffer dst) {
			int n = Math.min(dst.remaining(), data.length - position);
			if (n == 0)
				return -1;
			dst.put(data, position, n);
			position += n;
			return n;
		}

		public void close() {
			open = false;
		}

		public boolean isOpen() {
			return open;
		}

		public long position() {
			return position;
		}

		public void position(long newPosition) {
			position = (int) newPosition;
		}

		public long size() {
			return data.length;
		}

		public int blockSize() {
			return 0;
		}
	}
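
	/**
	 * Reference database backed by a {@link ConcurrentHashMap}; updates and
	 * removals are applied with compare-and-swap semantics.
	 */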
	private class MemRefDatabase extends DfsRefDatabase {
		private final ConcurrentMap<String, Ref> refs = new ConcurrentHashMap<String, Ref>();

		MemRefDatabase() {
			super(InMemoryRepository.this);
		}

		@Override
		protected RefCache scanAllRefs() throws IOException {
			RefList.Builder<Ref> ids = new RefList.Builder<Ref>();
			RefList.Builder<Ref> sym = new RefList.Builder<Ref>();
			for (Ref ref : refs.values()) {
				if (ref.isSymbolic())
					sym.add(ref);
				ids.add(ref);
			}
			ids.sort();
			sym.sort();
			return new RefCache(ids.toRefList(), sym.toRefList());
		}

		@Override
		protected boolean compareAndPut(Ref oldRef, Ref newRef)
				throws IOException {
			ObjectId id = newRef.getObjectId();
			if (id != null) {
				try (RevWalk rw = new RevWalk(getRepository())) {
					// Validate that the target exists in a new RevWalk, as the RevWalk
					// from the RefUpdate might be reading back unflushed objects.
					rw.parseAny(id);
				}
			}
			String name = newRef.getName();
			if (oldRef == null)
				return refs.putIfAbsent(name, newRef) == null;
			synchronized (refs) {
				Ref cur = refs.get(name);
				Ref toCompare = cur;
				if (toCompare != null) {
					if (toCompare.isSymbolic()) {
						// Arm's-length dereference symrefs before the compare, since
						// DfsRefUpdate#doLink(String) stores them undereferenced.
						Ref leaf = toCompare.getLeaf();
						if (leaf.getObjectId() == null) {
							leaf = refs.get(leaf.getName());
							if (leaf.isSymbolic())
								// Not supported at the moment.
								throw new IllegalArgumentException();
							toCompare = new SymbolicRef(
									name,
									new ObjectIdRef.Unpeeled(
											Storage.NEW,
											leaf.getName(),
											leaf.getObjectId()));
						} else
							toCompare = toCompare.getLeaf();
					}
					if (eq(toCompare, oldRef))
						return refs.replace(name, cur, newRef);
				}
			}
			if (oldRef.getStorage() == Storage.NEW)
				return refs.putIfAbsent(name, newRef) == null;
			return false;
		}

		@Override
		protected boolean compareAndRemove(Ref oldRef) throws IOException {
			String name = oldRef.getName();
			Ref cur = refs.get(name);
			if (cur != null && eq(cur, oldRef))
				return refs.remove(name, cur);
			else
				return false;
		}

		private boolean eq(Ref a, Ref b) {
			if (!Objects.equals(a.getName(), b.getName()))
				return false;
			// Compare leaf object IDs, since the oldRef passed into compareAndPut
			// when detaching a symref is an ObjectIdRef.
			return Objects.equals(a.getLeaf().getObjectId(),
					b.getLeaf().getObjectId());
		}
	}
}