SONAR-6993 Search duplication candidates by hashes
author     Julien Lancelot <julien.lancelot@sonarsource.com>
           Tue, 10 Nov 2015 16:59:22 +0000 (17:59 +0100)
committer  Julien Lancelot <julien.lancelot@sonarsource.com>
           Thu, 12 Nov 2015 10:01:30 +0000 (11:01 +0100)
sonar-db/src/main/java/org/sonar/db/duplication/DuplicationDao.java
sonar-db/src/main/java/org/sonar/db/duplication/DuplicationMapper.java
sonar-db/src/main/java/org/sonar/db/duplication/DuplicationUnitDto.java
sonar-db/src/main/resources/org/sonar/db/duplication/DuplicationMapper.xml
sonar-db/src/test/java/org/sonar/db/duplication/DuplicationDaoTest.java
sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/insert-result.xml [new file with mode: 0644]
sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/insert.xml [new file with mode: 0644]
sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/select_candidates.xml [new file with mode: 0644]
sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/shouldGetByHash.xml [deleted file]
sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/shouldInsert-result.xml [deleted file]
sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/shouldInsert.xml [deleted file]

diff --git a/sonar-db/src/main/java/org/sonar/db/duplication/DuplicationDao.java b/sonar-db/src/main/java/org/sonar/db/duplication/DuplicationDao.java
index 94d7ef81c731a746ed01fd6c844e8a0a98d15b2f..a53017ea7b10fb44751d3b7998e0fa6f0ecd308b 100644
  */
 package org.sonar.db.duplication;
 
+import com.google.common.base.Function;
 import java.util.Collection;
 import java.util.List;
-import org.apache.ibatis.session.SqlSession;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import org.sonar.db.Dao;
+import org.sonar.db.DatabaseUtils;
 import org.sonar.db.DbSession;
-import org.sonar.db.MyBatis;
 
 public class DuplicationDao implements Dao {
 
-  private final MyBatis mybatis;
-
-  public DuplicationDao(MyBatis mybatis) {
-    this.mybatis = mybatis;
-  }
-
-  public List<DuplicationUnitDto> selectCandidates(int resourceSnapshotId, Integer lastSnapshotId, String language) {
-    SqlSession session = mybatis.openSession(false);
-    try {
-      DuplicationMapper mapper = session.getMapper(DuplicationMapper.class);
-      return mapper.selectCandidates(resourceSnapshotId, lastSnapshotId, language);
-    } finally {
-      MyBatis.closeQuietly(session);
-    }
+  /**
+   * @param projectSnapshotId snapshot id of the project from the previous analysis (islast=true)
+   */
+  public List<DuplicationUnitDto> selectCandidates(final DbSession session, @Nullable final Long projectSnapshotId, final String language, Collection<String> hashes) {
+    return DatabaseUtils.executeLargeInputs(hashes, new Function<List<String>, List<DuplicationUnitDto>>() {
+      @Override
+      public List<DuplicationUnitDto> apply(@Nonnull List<String> partition) {
+        return session.getMapper(DuplicationMapper.class).selectCandidates(projectSnapshotId, language, partition);
+      }
+    });
   }
 
   /**
    * Insert rows in the table DUPLICATIONS_INDEX.
    * Note that generated ids are not returned.
    */
-  public void insert(Collection<DuplicationUnitDto> units) {
-    DbSession session = mybatis.openSession(true);
-    try {
-      DuplicationMapper mapper = session.getMapper(DuplicationMapper.class);
-      for (DuplicationUnitDto unit : units) {
-        mapper.batchInsert(unit);
-      }
-      session.commit();
-
-    } finally {
-      MyBatis.closeQuietly(session);
+  public void insert(DbSession session, Collection<DuplicationUnitDto> units) {
+    DuplicationMapper mapper = session.getMapper(DuplicationMapper.class);
+    for (DuplicationUnitDto unit : units) {
+      mapper.batchInsert(unit);
     }
   }
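
With this change the DAO no longer opens, commits or closes its own session: the caller passes an open DbSession and owns the transaction, and the hash lookup is chunked through DatabaseUtils.executeLargeInputs so the hashes collection may exceed the database IN-clause limit. A minimal caller sketch, not part of this commit; it assumes a DbClient exposing openSession(boolean) and duplicationDao() as used in DuplicationDaoTest below, and the variable names are illustrative:

  import java.util.Collection;
  import java.util.List;
  import org.sonar.db.DbClient;
  import org.sonar.db.DbSession;
  import org.sonar.db.duplication.DuplicationDao;
  import org.sonar.db.duplication.DuplicationUnitDto;

  public class DuplicationCandidateLookup {

    // Persists the blocks of the analysed file, then fetches blocks of other files
    // (same language, last snapshot) that share one of the given hashes.
    List<DuplicationUnitDto> persistAndSelect(DbClient dbClient, Long lastProjectSnapshotId,
      Collection<DuplicationUnitDto> units, Collection<String> hashes) {
      DbSession dbSession = dbClient.openSession(false);
      try {
        DuplicationDao dao = dbClient.duplicationDao();
        dao.insert(dbSession, units);
        // insert() no longer commits: the caller owns the transaction
        dbSession.commit();
        // hashes are partitioned by DatabaseUtils.executeLargeInputs, so the
        // collection can safely be larger than the database IN-clause limit
        return dao.selectCandidates(dbSession, lastProjectSnapshotId, "java", hashes);
      } finally {
        dbSession.close();
      }
    }
  }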
 
diff --git a/sonar-db/src/main/java/org/sonar/db/duplication/DuplicationMapper.java b/sonar-db/src/main/java/org/sonar/db/duplication/DuplicationMapper.java
index fe366d9ffc098caa6527eac225102f1548e34489..284a08487bc1464ce450e4c7ef15322c6e294242 100644
  */
 package org.sonar.db.duplication;
 
+import java.util.Collection;
 import java.util.List;
+import javax.annotation.Nullable;
 import org.apache.ibatis.annotations.Param;
 
 public interface DuplicationMapper {
 
   List<DuplicationUnitDto> selectCandidates(
-    @Param("resource_snapshot_id") int resourceSnapshotId,
-    @Param("last_project_snapshot_id") Integer lastSnapshotId,
-    @Param("language") String language);
+    @Nullable @Param("projectSnapshotId") Long projectSnapshotId,
+    @Param("language") String language,
+    @Param("hashes") Collection<String> hashes);
 
   void batchInsert(DuplicationUnitDto unit);
 
diff --git a/sonar-db/src/main/java/org/sonar/db/duplication/DuplicationUnitDto.java b/sonar-db/src/main/java/org/sonar/db/duplication/DuplicationUnitDto.java
index 443782a63bda791030329fee078c7551cd53cad7..d45e5b7b291344590a0dddc5fa4c190d5cb15c8f 100644
  */
 package org.sonar.db.duplication;
 
-/**
- * A simple DTO (Data Transfer Object) class that provides the mapping of data to a table.
- */
 public final class DuplicationUnitDto {
 
-  private Long id;
-  private Integer snapshotId;
-  private Integer projectSnapshotId;
+  private long id;
+  private long snapshotId;
+  private long projectSnapshotId;
 
   private String hash;
   private int indexInFile;
   private int startLine;
   private int endLine;
 
-  private String resourceKey;
-
-  public DuplicationUnitDto() {
-  }
+  // Returned by the join in selectCandidates
+  private String componentKey;
 
-  public DuplicationUnitDto(Integer projectSnapshotId, Integer snapshotId, String hash, Integer indexInFile, Integer startLine, Integer endLine) {
-    this.projectSnapshotId = projectSnapshotId;
-    this.snapshotId = snapshotId;
-    this.hash = hash;
-    this.indexInFile = indexInFile;
-    this.startLine = startLine;
-    this.endLine = endLine;
-  }
-
-  public Long getId() {
+  public long getId() {
     return id;
   }
 
-  public DuplicationUnitDto setId(Long id) {
+  public DuplicationUnitDto setId(long id) {
     this.id = id;
     return this;
   }
 
-  public Integer getSnapshotId() {
+  public long getSnapshotId() {
     return snapshotId;
   }
 
-  public void setSnapshotId(Integer snapshotId) {
+  public DuplicationUnitDto setSnapshotId(long snapshotId) {
     this.snapshotId = snapshotId;
+    return this;
   }
 
-  public Integer getProjectSnapshotId() {
+  public long getProjectSnapshotId() {
     return projectSnapshotId;
   }
 
-  public void setProjectSnapshotId(Integer projectSnapshotId) {
+  public DuplicationUnitDto setProjectSnapshotId(long projectSnapshotId) {
     this.projectSnapshotId = projectSnapshotId;
+    return this;
   }
 
   public String getHash() {
     return hash;
   }
 
-  public void setHash(String hash) {
+  public DuplicationUnitDto setHash(String hash) {
     this.hash = hash;
+    return this;
   }
 
   public int getIndexInFile() {
     return indexInFile;
   }
 
-  public void setIndexInFile(int indexInFile) {
+  public DuplicationUnitDto setIndexInFile(int indexInFile) {
     this.indexInFile = indexInFile;
+    return this;
   }
 
   public int getStartLine() {
     return startLine;
   }
 
-  public void setStartLine(int startLine) {
+  public DuplicationUnitDto setStartLine(int startLine) {
     this.startLine = startLine;
+    return this;
   }
 
   public int getEndLine() {
     return endLine;
   }
 
-  public void setEndLine(int endLine) {
+  public DuplicationUnitDto setEndLine(int endLine) {
     this.endLine = endLine;
+    return this;
   }
 
-  public String getResourceKey() {
-    return resourceKey;
-  }
-
-  public void setResourceKey(String resourceKey) {
-    this.resourceKey = resourceKey;
+  public String getComponentKey() {
+    return componentKey;
   }
 
 }
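
The DTO drops the boxed Integer ids and the multi-argument constructor in favour of primitive long ids and fluent setters that return this, so instances are built by chaining, as in the insert() test below. A short usage sketch with illustrative values:

  DuplicationUnitDto unit = new DuplicationUnitDto()
    .setProjectSnapshotId(1L)
    .setSnapshotId(2L)
    .setHash("bb")
    .setIndexInFile(0)
    .setStartLine(1)
    .setEndLine(2);
  // componentKey has no setter: it is only filled by the join in selectCandidates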
diff --git a/sonar-db/src/main/resources/org/sonar/db/duplication/DuplicationMapper.xml b/sonar-db/src/main/resources/org/sonar/db/duplication/DuplicationMapper.xml
index 66212b3b20545da2798cfdcc056deb3622a7780e..916eba5ad9682b4909dba17407d385d34d21405a 100644
@@ -4,22 +4,30 @@
 <mapper namespace="org.sonar.db.duplication.DuplicationMapper">
 
   <select id="selectCandidates" parameterType="map" resultType="DuplicationUnit">
-    SELECT DISTINCT to_blocks.hash as hash, res.kee as resourceKey, to_blocks.index_in_file as indexInFile,
-    to_blocks.start_line as startLine, to_blocks.end_line as endLine
-    FROM duplications_index to_blocks, duplications_index from_blocks, snapshots snapshot, projects res
-    WHERE from_blocks.snapshot_id = #{resource_snapshot_id}
-    AND to_blocks.hash = from_blocks.hash
-    AND to_blocks.snapshot_id = snapshot.id
-    AND snapshot.islast = ${_true}
-    AND snapshot.project_id = res.id
-    AND res.language = #{language}
-    <if test="last_project_snapshot_id != null">
-      AND to_blocks.project_snapshot_id != #{last_project_snapshot_id}
-    </if>
+    SELECT DISTINCT
+    duplication_block.id as id,
+    duplication_block.snapshot_id as snapshotId,
+    duplication_block.project_snapshot_id as projectSnapshotId,
+    duplication_block.hash as hash,
+    duplication_block.index_in_file as indexInFile,
+    duplication_block.start_line as startLine,
+    duplication_block.end_line as endLine,
+    file.kee as componentKey
+    FROM duplications_index duplication_block
+    INNER JOIN snapshots snapshot ON duplication_block.snapshot_id=snapshot.id AND snapshot.islast=${_true}
+    INNER JOIN projects file ON file.id=snapshot.project_id AND file.language=#{language} AND file.enabled=${_true}
+    <where>
+      AND duplication_block.hash in
+      <foreach collection="hashes" open="(" close=")" item="hash" separator=",">#{hash}</foreach>
+      <if test="projectSnapshotId != null">
+        AND duplication_block.project_snapshot_id &lt;&gt; #{projectSnapshotId}
+      </if>
+    </where>
   </select>
 
   <insert id="batchInsert" parameterType="DuplicationUnit" useGeneratedKeys="false">
     INSERT INTO duplications_index (snapshot_id, project_snapshot_id, hash, index_in_file, start_line, end_line)
     VALUES (#{snapshotId}, #{projectSnapshotId}, #{hash}, #{indexInFile}, #{startLine}, #{endLine})
   </insert>
+
 </mapper>
diff --git a/sonar-db/src/test/java/org/sonar/db/duplication/DuplicationDaoTest.java b/sonar-db/src/test/java/org/sonar/db/duplication/DuplicationDaoTest.java
index 9d4180ad97688e1b0ddf7f904619f5a9d34fc33f..7936524d89872dacde7e5a9f861203c4ca65ea2d 100644
  */
 package org.sonar.db.duplication;
 
-import java.util.Arrays;
 import java.util.List;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.sonar.api.utils.System2;
+import org.sonar.db.DbSession;
 import org.sonar.db.DbTester;
 import org.sonar.test.DbTests;
 
-import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertThat;
+import static java.util.Collections.singletonList;
+import static org.assertj.core.api.Assertions.assertThat;
 
 @Category(DbTests.class)
 public class DuplicationDaoTest {
@@ -37,34 +37,45 @@ public class DuplicationDaoTest {
   @Rule
   public DbTester db = DbTester.create(System2.INSTANCE);
 
+  DbSession dbSession = db.getSession();
+
   DuplicationDao dao = db.getDbClient().duplicationDao();
 
   @Test
-  public void shouldGetByHash() {
-    db.prepareDbUnit(getClass(), "shouldGetByHash.xml");
+  public void select_candidates() {
+    db.prepareDbUnit(getClass(), "select_candidates.xml");
+    dbSession.commit();
 
-    List<DuplicationUnitDto> blocks = dao.selectCandidates(10, 7, "java");
-    assertThat(blocks.size(), is(1));
+    List<DuplicationUnitDto> blocks = dao.selectCandidates(dbSession, 7L, "java", singletonList("aa"));
+    assertThat(blocks).hasSize(1);
 
     DuplicationUnitDto block = blocks.get(0);
-    assertThat("block resourceId", block.getResourceKey(), is("bar-last"));
-    assertThat("block hash", block.getHash(), is("aa"));
-    assertThat("block index in file", block.getIndexInFile(), is(0));
-    assertThat("block start line", block.getStartLine(), is(1));
-    assertThat("block end line", block.getEndLine(), is(2));
+    assertThat(block.getComponentKey()).isEqualTo("bar-last");
+    assertThat(block.getHash()).isEqualTo("aa");
+    assertThat(block.getIndexInFile()).isEqualTo(0);
+    assertThat(block.getStartLine()).isEqualTo(1);
+    assertThat(block.getEndLine()).isEqualTo(2);
 
     // check null for lastSnapshotId
-    blocks = dao.selectCandidates(10, null, "java");
-    assertThat(blocks.size(), is(2));
+    blocks = dao.selectCandidates(dbSession, null, "java", singletonList("aa"));
+    assertThat(blocks).hasSize(2);
   }
 
   @Test
-  public void shouldInsert() {
-    db.prepareDbUnit(getClass(), "shouldInsert.xml");
+  public void insert() {
+    db.prepareDbUnit(getClass(), "insert.xml");
+    dbSession.commit();
 
-    dao.insert(Arrays.asList(new DuplicationUnitDto(1, 2, "bb", 0, 1, 2)));
+    dao.insert(dbSession, singletonList(new DuplicationUnitDto()
+      .setProjectSnapshotId(1)
+      .setSnapshotId(2)
+      .setHash("bb")
+      .setIndexInFile(0)
+      .setStartLine(1)
+      .setEndLine(2)));
+    dbSession.commit();
 
-    db.assertDbUnit(getClass(), "shouldInsert-result.xml", "duplications_index");
+    db.assertDbUnit(getClass(), "insert-result.xml", "duplications_index");
   }
 
 }
diff --git a/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/insert-result.xml b/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/insert-result.xml
new file mode 100644
index 0000000..797be8d
--- /dev/null
@@ -0,0 +1,10 @@
+<dataset>
+
+  <snapshots purge_status="[null]" id="1" status="U" islast="0" project_id="0"/>
+  <snapshots purge_status="[null]" id="2" status="U" islast="0" project_id="1"/>
+  <projects id="1" uuid="1" kee="foo" enabled="1" scope="FIL" qualifier="CLA"/>
+
+  <duplications_index id="1" project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="1"
+                      end_line="2"/>
+
+</dataset>
diff --git a/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/insert.xml b/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/insert.xml
new file mode 100644
index 0000000..fd4fdc7
--- /dev/null
@@ -0,0 +1,7 @@
+<dataset>
+
+  <snapshots purge_status="[null]" id="1" status="U" islast="0" project_id="0"/>
+  <snapshots purge_status="[null]" id="2" status="U" islast="0" project_id="1"/>
+  <projects id="1" uuid="1" kee="foo" enabled="1" scope="FIL" qualifier="CLA"/>
+
+</dataset>
diff --git a/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/select_candidates.xml b/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/select_candidates.xml
new file mode 100644
index 0000000..7b4ddce
--- /dev/null
@@ -0,0 +1,61 @@
+<dataset>
+
+  <snapshots id="1" project_id="1" status="P" islast="[false]" purge_status="[null]"/>
+  <snapshots id="2" project_id="1" status="P" islast="[false]" purge_status="[null]"/>
+  <projects id="1" uuid="1" kee="bar-old" enabled="[true]" scope="FIL" qualifier="CLA" language="java"/>
+
+  <snapshots id="3" project_id="2" status="P" islast="[true]" purge_status="[null]"/>
+  <snapshots id="4" project_id="2" status="P" islast="[true]" purge_status="[null]"/>
+  <projects id="2" uuid="2" kee="bar-last" enabled="[true]" scope="FIL" qualifier="CLA" language="java"/>
+
+  <snapshots id="5" project_id="3" status="P" islast="[false]" purge_status="[null]"/>
+  <snapshots id="6" project_id="3" status="P" islast="[false]" purge_status="[null]"/>
+  <projects id="3" uuid="3" kee="foo-old" enabled="[true]" scope="FIL" qualifier="CLA" language="java"/>
+
+  <snapshots id="7" project_id="4" status="P" islast="[true]" purge_status="[null]"/>
+  <snapshots id="8" project_id="4" status="P" islast="[true]" purge_status="[null]"/>
+  <projects id="4" uuid="4" kee="foo-last" enabled="[true]" scope="FIL" qualifier="CLA" language="java"/>
+
+  <snapshots id="9" project_id="5" status="U" islast="[false]" purge_status="[null]"/>
+  <snapshots id="10" project_id="5" status="U" islast="[false]" purge_status="[null]"/>
+  <projects id="5" uuid="5" kee="foo" enabled="[true]" scope="FIL" qualifier="CLA" language="java"/>
+
+  <snapshots id="11" project_id="6" purge_status="[null]" status="P" islast="1"/>
+  <projects id="6" uuid="6" kee="baz" enabled="[true]" scope="FIL" qualifier="CLA" language="grvy"/>
+
+  <!-- Old snapshot of another project -->
+  <!-- bar-old -->
+  <duplications_index id="1" project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="0"
+                      end_line="0"/>
+
+  <!-- Last snapshot of another project -->
+  <!-- bar-last -->
+  <duplications_index id="2" project_snapshot_id="3" snapshot_id="4" hash="aa" index_in_file="0" start_line="1"
+                      end_line="2"/>
+
+  <!-- Old snapshot of current project -->
+  <!-- foo-old -->
+  <duplications_index id="3" project_snapshot_id="5" snapshot_id="6" hash="bb" index_in_file="0" start_line="0"
+                      end_line="0"/>
+
+  <!-- Last snapshot of current project -->
+  <!-- foo-last -->
+  <duplications_index id="4" project_snapshot_id="7" snapshot_id="8" hash="aa" index_in_file="0" start_line="0"
+                      end_line="0"/>
+
+  <!-- New snapshot of current project -->
+  <!-- foo -->
+  <duplications_index id="5" project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="0" start_line="0"
+                      end_line="0"/>
+
+  <!-- Note that there are two blocks with the same hash for the current analysis, to verify that "SELECT DISTINCT" is used: -->
+  <!-- without "DISTINCT", the block from "bar-last" would be selected twice. -->
+  <duplications_index id="6" project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="1" start_line="1"
+                      end_line="1"/>
+
+  <!-- Last snapshot of project with another language -->
+  <!-- baz -->
+  <duplications_index id="7" project_snapshot_id="1" snapshot_id="11" hash="aa" index_in_file="0" start_line="0"
+                      end_line="0"/>
+
+</dataset>
diff --git a/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/shouldGetByHash.xml b/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/shouldGetByHash.xml
deleted file mode 100644
index 37efb61..0000000
+++ /dev/null
@@ -1,61 +0,0 @@
-<dataset>
-
-  <snapshots id="1" project_id="1" status="P" islast="0" purge_status="[null]"/>
-  <snapshots id="2" project_id="1" status="P" islast="0" purge_status="[null]"/>
-  <projects id="1" uuid="1" kee="bar-old" enabled="1" scope="FIL" qualifier="CLA" language="java"/>
-
-  <snapshots id="3" project_id="2" status="P" islast="1" purge_status="[null]"/>
-  <snapshots id="4" project_id="2" status="P" islast="1" purge_status="[null]"/>
-  <projects id="2" uuid="2" kee="bar-last" enabled="1" scope="FIL" qualifier="CLA" language="java"/>
-
-  <snapshots id="5" project_id="3" status="P" islast="0" purge_status="[null]"/>
-  <snapshots id="6" project_id="3" status="P" islast="0" purge_status="[null]"/>
-  <projects id="3" uuid="3" kee="foo-old" enabled="1" scope="FIL" qualifier="CLA" language="java"/>
-
-  <snapshots id="7" project_id="4" status="P" islast="1" purge_status="[null]"/>
-  <snapshots id="8" project_id="4" status="P" islast="1" purge_status="[null]"/>
-  <projects id="4" uuid="4" kee="foo-last" enabled="1" scope="FIL" qualifier="CLA" language="java"/>
-
-  <snapshots id="9" project_id="5" status="U" islast="0" purge_status="[null]"/>
-  <snapshots id="10" project_id="5" status="U" islast="0" purge_status="[null]"/>
-  <projects id="5" uuid="5" kee="foo" enabled="1" scope="FIL" qualifier="CLA" language="java"/>
-
-  <snapshots id="11" project_id="6" purge_status="[null]" status="P" islast="1"/>
-  <projects id="6" uuid="6" kee="baz" enabled="1" scope="FIL" qualifier="CLA" language="grvy"/>
-
-  <!-- Old snapshot of another project -->
-  <!-- bar-old -->
-  <duplications_index id="1" project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="0"
-                      end_line="0"/>
-
-  <!-- Last snapshot of another project -->
-  <!-- bar-last -->
-  <duplications_index id="2" project_snapshot_id="3" snapshot_id="4" hash="aa" index_in_file="0" start_line="1"
-                      end_line="2"/>
-
-  <!-- Old snapshot of current project -->
-  <!-- foo-old -->
-  <duplications_index id="3" project_snapshot_id="5" snapshot_id="6" hash="bb" index_in_file="0" start_line="0"
-                      end_line="0"/>
-
-  <!-- Last snapshot of current project -->
-  <!-- foo-last -->
-  <duplications_index id="4" project_snapshot_id="7" snapshot_id="8" hash="aa" index_in_file="0" start_line="0"
-                      end_line="0"/>
-
-  <!-- New snapshot of current project -->
-  <!-- foo -->
-  <duplications_index id="5" project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="0" start_line="0"
-                      end_line="0"/>
-
-  <!-- Note that there is two blocks with same hash for current analysis to verify that we use "SELECT DISTINCT", -->
-  <!-- without "DISTINCT" we will select block from "bar-last" two times. -->
-  <duplications_index id="6" project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="1" start_line="1"
-                      end_line="1"/>
-
-  <!-- Last snapshot of project with another language -->
-  <!-- baz -->
-  <duplications_index id="7" project_snapshot_id="1" snapshot_id="11" hash="aa" index_in_file="0" start_line="0"
-                      end_line="0"/>
-
-</dataset>
diff --git a/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/shouldInsert-result.xml b/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/shouldInsert-result.xml
deleted file mode 100644
index 797be8d..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-<dataset>
-
-  <snapshots purge_status="[null]" id="1" status="U" islast="0" project_id="0"/>
-  <snapshots purge_status="[null]" id="2" status="U" islast="0" project_id="1"/>
-  <projects id="1" uuid="1" kee="foo" enabled="1" scope="FIL" qualifier="CLA"/>
-
-  <duplications_index id="1" project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="1"
-                      end_line="2"/>
-
-</dataset>
diff --git a/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/shouldInsert.xml b/sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/shouldInsert.xml
deleted file mode 100644
index fd4fdc7..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-<dataset>
-
-  <snapshots purge_status="[null]" id="1" status="U" islast="0" project_id="0"/>
-  <snapshots purge_status="[null]" id="2" status="U" islast="0" project_id="1"/>
-  <projects id="1" uuid="1" kee="foo" enabled="1" scope="FIL" qualifier="CLA"/>
-
-</dataset>