source.dussan.org Git - sonarqube.git/commitdiff
SONAR-1091 CPD over different projects
author: Evgeny Mandrikov <mandrikov@gmail.com>
Thu, 1 Sep 2011 15:06:46 +0000 (19:06 +0400)
committer: Evgeny Mandrikov <mandrikov@gmail.com>
Thu, 1 Sep 2011 20:16:21 +0000 (00:16 +0400)
* Improve the clone_blocks table and, accordingly, DbCloneIndex.

* Add purge of old clone blocks.

15 files changed:
plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/SonarEngine.java
plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/CombinedCloneIndex.java [deleted file]
plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/DbCloneIndex.java
plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/SonarCloneIndex.java [new file with mode: 0644]
plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/index/DbCloneIndexTest.java
plugins/sonar-cpd-plugin/src/test/resources/org/sonar/plugins/cpd/index/DbCloneIndexTest/fixture.xml [deleted file]
plugins/sonar-cpd-plugin/src/test/resources/org/sonar/plugins/cpd/index/DbCloneIndexTest/shouldGetByHash.xml [new file with mode: 0644]
plugins/sonar-cpd-plugin/src/test/resources/org/sonar/plugins/cpd/index/DbCloneIndexTest/shouldInsert-result.xml
plugins/sonar-cpd-plugin/src/test/resources/org/sonar/plugins/cpd/index/DbCloneIndexTest/shouldInsert.xml [new file with mode: 0644]
plugins/sonar-dbcleaner-plugin/src/main/java/org/sonar/plugins/dbcleaner/api/PurgeUtils.java
plugins/sonar-dbcleaner-plugin/src/test/resources/org/sonar/plugins/dbcleaner/api/PurgeUtilsTest/purgeSnapshots-result.xml
plugins/sonar-dbcleaner-plugin/src/test/resources/org/sonar/plugins/dbcleaner/api/PurgeUtilsTest/purgeSnapshots.xml
sonar-core/src/main/java/org/sonar/jpa/entity/CloneBlock.java
sonar-server/src/main/webapp/WEB-INF/db/migrate/217_create_clone_blocks.rb
sonar-testing-harness/src/main/resources/org/sonar/test/persistence/sonar-test.ddl

index b3cd865cfc2135eea661603a78c034cf864fc629..e99cd441e9faaf06db2a633923f6ca1da787ede4 100644 (file)
@@ -21,6 +21,7 @@ package org.sonar.plugins.cpd;
 
 import java.io.File;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashSet;
@@ -30,7 +31,6 @@ import java.util.Set;
 import org.sonar.api.batch.SensorContext;
 import org.sonar.api.database.DatabaseSession;
 import org.sonar.api.database.model.ResourceModel;
-import org.sonar.api.database.model.Snapshot;
 import org.sonar.api.measures.CoreMetrics;
 import org.sonar.api.measures.Measure;
 import org.sonar.api.resources.InputFile;
@@ -45,19 +45,15 @@ import org.sonar.duplications.block.Block;
 import org.sonar.duplications.block.BlockChunker;
 import org.sonar.duplications.detector.original.OriginalCloneDetectionAlgorithm;
 import org.sonar.duplications.index.CloneGroup;
-import org.sonar.duplications.index.CloneIndex;
 import org.sonar.duplications.index.ClonePart;
-import org.sonar.duplications.index.PackedMemoryCloneIndex;
 import org.sonar.duplications.java.JavaStatementBuilder;
 import org.sonar.duplications.java.JavaTokenProducer;
 import org.sonar.duplications.statement.Statement;
 import org.sonar.duplications.statement.StatementChunker;
 import org.sonar.duplications.token.TokenChunker;
 import org.sonar.duplications.token.TokenQueue;
-import org.sonar.plugins.cpd.index.CombinedCloneIndex;
 import org.sonar.plugins.cpd.index.DbCloneIndex;
-
-import com.google.common.collect.Lists;
+import org.sonar.plugins.cpd.index.SonarCloneIndex;
 
 public class SonarEngine implements CpdEngine {
 
@@ -94,13 +90,12 @@ public class SonarEngine implements CpdEngine {
     }
 
     // Create index
-    CloneIndex index = new PackedMemoryCloneIndex();
+    final SonarCloneIndex index;
     if (isCrossProject(project)) {
       Logs.INFO.info("Enabled cross-project analysis");
-      Snapshot currentSnapshot = resourcePersister.getSnapshot(project);
-      Snapshot lastSnapshot = resourcePersister.getLastSnapshot(currentSnapshot, false);
-      DbCloneIndex db = new DbCloneIndex(dbSession, currentSnapshot.getId(), lastSnapshot == null ? null : lastSnapshot.getId());
-      index = new CombinedCloneIndex(index, db);
+      index = new SonarCloneIndex(new DbCloneIndex(dbSession, resourcePersister, project));
+    } else {
+      index = new SonarCloneIndex();
     }
 
     TokenChunker tokenChunker = JavaTokenProducer.build();
@@ -108,21 +103,22 @@ public class SonarEngine implements CpdEngine {
     BlockChunker blockChunker = new BlockChunker(BLOCK_SIZE);
 
     for (InputFile inputFile : inputFiles) {
+      Resource resource = getResource(inputFile);
+      String resourceKey = getFullKey(project, resource);
+
       File file = inputFile.getFile();
       TokenQueue tokenQueue = tokenChunker.chunk(file);
       List<Statement> statements = statementChunker.chunk(tokenQueue);
-      Resource resource = getResource(inputFile);
-      List<Block> blocks = blockChunker.chunk(getFullKey(project, resource), statements);
-      for (Block block : blocks) {
-        index.insert(block);
-      }
+      List<Block> blocks = blockChunker.chunk(resourceKey, statements);
+      index.insert(resource, blocks);
     }
 
     // Detect
     for (InputFile inputFile : inputFiles) {
       Resource resource = getResource(inputFile);
+      String resourceKey = getFullKey(project, resource);
 
-      List<Block> fileBlocks = Lists.newArrayList(index.getByResourceId(getFullKey(project, resource)));
+      Collection<Block> fileBlocks = index.getByResource(resource, resourceKey);
       List<CloneGroup> clones = OriginalCloneDetectionAlgorithm.detect(index, fileBlocks);
       if (!clones.isEmpty()) {
         // Save
diff --git a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/CombinedCloneIndex.java b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/CombinedCloneIndex.java
deleted file mode 100644 (file)
index 0b2f4e4..0000000
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Sonar, open source software quality management tool.
- * Copyright (C) 2008-2011 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * Sonar is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * Sonar is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with Sonar; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02
- */
-package org.sonar.plugins.cpd.index;
-
-import java.util.Collection;
-import java.util.List;
-
-import org.sonar.duplications.block.Block;
-import org.sonar.duplications.block.ByteArray;
-import org.sonar.duplications.index.AbstractCloneIndex;
-import org.sonar.duplications.index.CloneIndex;
-
-import com.google.common.collect.Lists;
-
-public class CombinedCloneIndex extends AbstractCloneIndex {
-
-  private final CloneIndex mem;
-  private final DbCloneIndex db;
-
-  public CombinedCloneIndex(CloneIndex mem, DbCloneIndex db) {
-    this.mem = mem;
-    this.db = db;
-  }
-
-  public Collection<Block> getByResourceId(String resourceId) {
-    db.prepareCache(resourceId);
-    return mem.getByResourceId(resourceId);
-  }
-
-  public Collection<Block> getBySequenceHash(ByteArray hash) {
-    List<Block> result = Lists.newArrayList();
-    result.addAll(mem.getBySequenceHash(hash));
-    result.addAll(db.getBySequenceHash(hash));
-    return result;
-  }
-
-  public void insert(Block block) {
-    mem.insert(block);
-    db.insert(block);
-  }
-
-}
index 06f6c481cad76838219b401830426d372404fa3d..571dc43d3ecb52374640b7544590f874ced2268c 100644 (file)
@@ -24,48 +24,88 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
+import javax.persistence.Query;
+
+import org.hibernate.ejb.HibernateQuery;
+import org.hibernate.transform.Transformers;
 import org.sonar.api.database.DatabaseSession;
+import org.sonar.api.database.model.Snapshot;
+import org.sonar.api.resources.Project;
+import org.sonar.api.resources.Resource;
+import org.sonar.batch.index.ResourcePersister;
 import org.sonar.duplications.block.Block;
 import org.sonar.duplications.block.ByteArray;
-import org.sonar.duplications.index.AbstractCloneIndex;
 import org.sonar.jpa.entity.CloneBlock;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
-public class DbCloneIndex extends AbstractCloneIndex {
+public class DbCloneIndex {
+
+  private final Map<ByteArray, Collection<Block>> cache = Maps.newHashMap();
 
-  private final Map<ByteArray, List<Block>> cache = Maps.newHashMap();
+  private final DatabaseSession session;
+  private final ResourcePersister resourcePersister;
+  private final int currentProjectSnapshotId;
+  private final Integer lastSnapshotId;
 
-  private DatabaseSession session;
-  private int currentSnapshotId;
-  private Integer lastSnapshotId;
+  public DbCloneIndex(DatabaseSession session, ResourcePersister resourcePersister, Project currentProject) {
+    this.session = session;
+    this.resourcePersister = resourcePersister;
+    Snapshot currentSnapshot = resourcePersister.getSnapshotOrFail(currentProject);
+    Snapshot lastSnapshot = resourcePersister.getLastSnapshot(currentSnapshot, false);
+    this.currentProjectSnapshotId = currentSnapshot.getId();
+    this.lastSnapshotId = lastSnapshot == null ? null : lastSnapshot.getId();
+  }
 
-  public DbCloneIndex(DatabaseSession session, Integer currentSnapshotId, Integer lastSnapshotId) {
+  /**
+   * For tests.
+   */
+  DbCloneIndex(DatabaseSession session, ResourcePersister resourcePersister, Integer currentProjectSnapshotId, Integer prevSnapshotId) {
     this.session = session;
-    this.currentSnapshotId = currentSnapshotId;
-    this.lastSnapshotId = lastSnapshotId;
+    this.resourcePersister = resourcePersister;
+    this.currentProjectSnapshotId = currentProjectSnapshotId;
+    this.lastSnapshotId = prevSnapshotId;
   }
 
-  public void prepareCache(String resourceKey) {
-    String sql = "SELECT block.id, hash, block.snapshot_id, resource_key, index_in_file, start_line, end_line FROM clone_blocks AS block, snapshots AS snapshot" +
-        " WHERE block.snapshot_id=snapshot.id AND snapshot.islast=true" +
-        " AND hash IN ( SELECT hash FROM clone_blocks WHERE resource_key = :resource_key AND snapshot_id = :current_snapshot_id )";
+  int getSnapshotIdFor(Resource resource) {
+    return resourcePersister.getSnapshotOrFail(resource).getId();
+  }
+
+  public void prepareCache(Resource resource) {
+    int resourceSnapshotId = getSnapshotIdFor(resource);
+
+    // Order of columns is important - see code below!
+    String sql = "SELECT hash, resource.kee, index_in_file, start_line, end_line" +
+        " FROM clone_blocks AS block, snapshots AS snapshot, projects AS resource" +
+        " WHERE block.snapshot_id=snapshot.id AND snapshot.islast=true AND snapshot.project_id=resource.id" +
+        " AND hash IN ( SELECT hash FROM clone_blocks WHERE snapshot_id = :resource_snapshot_id AND project_snapshot_id = :current_project_snapshot_id )";
     if (lastSnapshotId != null) {
       // Filter for blocks from previous snapshot of current project
-      sql += " AND snapshot.id != " + lastSnapshotId;
+      sql += " AND block.project_snapshot_id != :last_project_snapshot_id";
     }
-    List<CloneBlock> blocks = session.getEntityManager()
-        .createNativeQuery(sql, CloneBlock.class)
-        .setParameter("resource_key", resourceKey)
-        .setParameter("current_snapshot_id", currentSnapshotId)
-        .getResultList();
+    Query query = session.getEntityManager().createNativeQuery(sql)
+        .setParameter("resource_snapshot_id", resourceSnapshotId)
+        .setParameter("current_project_snapshot_id", currentProjectSnapshotId);
+    if (lastSnapshotId != null) {
+      query.setParameter("last_project_snapshot_id", lastSnapshotId);
+    }
+    // Ugly hack for mapping results of custom SQL query into plain list (MyBatis is coming soon)
+    ((HibernateQuery) query).getHibernateQuery().setResultTransformer(Transformers.TO_LIST);
+    List<List<Object>> blocks = query.getResultList();
 
     cache.clear();
-    for (CloneBlock dbBlock : blocks) {
-      Block block = new Block(dbBlock.getResourceKey(), new ByteArray(dbBlock.getHash()), dbBlock.getIndexInFile(), dbBlock.getStartLine(), dbBlock.getEndLine());
+    for (List<Object> dbBlock : blocks) {
+      String hash = (String) dbBlock.get(0);
+      String resourceKey = (String) dbBlock.get(1);
+      int indexInFile = (Integer) dbBlock.get(2);
+      int startLine = (Integer) dbBlock.get(3);
+      int endLine = (Integer) dbBlock.get(4);
+
+      Block block = new Block(resourceKey, new ByteArray(hash), indexInFile, startLine, endLine);
 
-      List<Block> sameHash = cache.get(block.getBlockHash());
+      // Group blocks by hash
+      Collection<Block> sameHash = cache.get(block.getBlockHash());
       if (sameHash == null) {
         sameHash = Lists.newArrayList();
         cache.put(block.getBlockHash(), sameHash);
@@ -74,28 +114,28 @@ public class DbCloneIndex extends AbstractCloneIndex {
     }
   }
 
-  public Collection<Block> getByResourceId(String resourceId) {
-    throw new UnsupportedOperationException();
-  }
-
-  public Collection<Block> getBySequenceHash(ByteArray sequenceHash) {
-    List<Block> result = cache.get(sequenceHash);
+  public Collection<Block> getByHash(ByteArray hash) {
+    Collection<Block> result = cache.get(hash);
     if (result != null) {
       return result;
     } else {
-      // not in cache
       return Collections.emptyList();
     }
   }
 
-  public void insert(Block block) {
-    CloneBlock dbBlock = new CloneBlock(currentSnapshotId,
-        block.getBlockHash().toString(),
-        block.getResourceId(),
-        block.getIndexInFile(),
-        block.getFirstLineNumber(),
-        block.getLastLineNumber());
-    session.save(dbBlock);
+  public void insert(Resource resource, Collection<Block> blocks) {
+    int resourceSnapshotId = getSnapshotIdFor(resource);
+    for (Block block : blocks) {
+      CloneBlock dbBlock = new CloneBlock(
+          currentProjectSnapshotId,
+          resourceSnapshotId,
+          block.getBlockHash().toString(),
+          block.getIndexInFile(),
+          block.getFirstLineNumber(),
+          block.getLastLineNumber());
+      session.save(dbBlock);
+    }
+    session.commit();
   }
 
 }
diff --git a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/SonarCloneIndex.java b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/SonarCloneIndex.java
new file mode 100644 (file)
index 0000000..f5bd395
--- /dev/null
@@ -0,0 +1,81 @@
+/*
+ * Sonar, open source software quality management tool.
+ * Copyright (C) 2008-2011 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * Sonar is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Sonar is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with Sonar; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02
+ */
+package org.sonar.plugins.cpd.index;
+
+import java.util.Collection;
+import java.util.List;
+
+import org.sonar.api.resources.Resource;
+import org.sonar.duplications.block.Block;
+import org.sonar.duplications.block.ByteArray;
+import org.sonar.duplications.index.AbstractCloneIndex;
+import org.sonar.duplications.index.CloneIndex;
+import org.sonar.duplications.index.PackedMemoryCloneIndex;
+
+import com.google.common.collect.Lists;
+
+public class SonarCloneIndex extends AbstractCloneIndex {
+
+  private final CloneIndex mem = new PackedMemoryCloneIndex();
+  private final DbCloneIndex db;
+
+  public SonarCloneIndex() {
+    this(null);
+  }
+
+  public SonarCloneIndex(DbCloneIndex db) {
+    this.db = db;
+  }
+
+  public void insert(Resource resource, Collection<Block> blocks) {
+    for (Block block : blocks) {
+      mem.insert(block);
+    }
+    if (db != null) {
+      db.insert(resource, blocks);
+    }
+  }
+
+  public Collection<Block> getByResource(Resource resource, String resourceKey) {
+    if (db != null) {
+      db.prepareCache(resource);
+    }
+    return mem.getByResourceId(resourceKey);
+  }
+
+  public Collection<Block> getBySequenceHash(ByteArray hash) {
+    if (db == null) {
+      return mem.getBySequenceHash(hash);
+    } else {
+      List<Block> result = Lists.newArrayList(mem.getBySequenceHash(hash));
+      result.addAll(db.getByHash(hash));
+      return result;
+    }
+  }
+
+  public Collection<Block> getByResourceId(String resourceId) {
+    throw new UnsupportedOperationException();
+  }
+
+  public void insert(Block block) {
+    throw new UnsupportedOperationException();
+  }
+
+}
index 86698e89d641694c15d033cba87b9aef0c302d49..5823c9f60448a691d73d8e74a17bd97337a1e4a8 100644 (file)
@@ -21,12 +21,16 @@ package org.sonar.plugins.cpd.index;
 
 import static org.hamcrest.Matchers.is;
 import static org.junit.Assert.assertThat;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.spy;
 
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Iterator;
 
-import org.junit.Before;
 import org.junit.Test;
+import org.sonar.api.resources.JavaFile;
+import org.sonar.api.resources.Resource;
 import org.sonar.duplications.block.Block;
 import org.sonar.duplications.block.ByteArray;
 import org.sonar.jpa.test.AbstractDbUnitTestCase;
@@ -35,35 +39,35 @@ public class DbCloneIndexTest extends AbstractDbUnitTestCase {
 
   private DbCloneIndex index;
 
-  @Before
-  public void setUp() {
-    index = new DbCloneIndex(getSession(), 5, 4);
-  }
-
-  @Test(expected = UnsupportedOperationException.class)
-  public void shouldNotGetByResource() {
-    index.getByResourceId("foo");
-  }
-
   @Test
   public void shouldGetByHash() {
-    setupData("fixture");
+    Resource resource = new JavaFile("foo");
+    index = spy(new DbCloneIndex(getSession(), null, 9, 7));
+    doReturn(10).when(index).getSnapshotIdFor(resource);
+    setupData("shouldGetByHash");
 
-    index.prepareCache("foo");
-    Collection<Block> blocks = index.getBySequenceHash(new ByteArray("aa"));
+    index.prepareCache(resource);
+    Collection<Block> blocks = index.getByHash(new ByteArray("aa"));
     Iterator<Block> blocksIterator = blocks.iterator();
 
     assertThat(blocks.size(), is(1));
 
     Block block = blocksIterator.next();
-    assertThat(block.getResourceId(), is("bar-last"));
+    assertThat("block resourceId", block.getResourceId(), is("bar-last"));
+    assertThat("block hash", block.getBlockHash(), is(new ByteArray("aa")));
+    assertThat("block index in file", block.getIndexInFile(), is(0));
+    assertThat("block start line", block.getFirstLineNumber(), is(1));
+    assertThat("block end line", block.getLastLineNumber(), is(2));
   }
 
   @Test
   public void shouldInsert() {
-    setupData("fixture");
+    Resource resource = new JavaFile("foo");
+    index = spy(new DbCloneIndex(getSession(), null, 1, null));
+    doReturn(2).when(index).getSnapshotIdFor(resource);
+    setupData("shouldInsert");
 
-    index.insert(new Block("baz", new ByteArray("bb"), 0, 0, 1));
+    index.insert(resource, Arrays.asList(new Block("foo", new ByteArray("bb"), 0, 1, 2)));
 
     checkTables("shouldInsert", "clone_blocks");
   }
diff --git a/plugins/sonar-cpd-plugin/src/test/resources/org/sonar/plugins/cpd/index/DbCloneIndexTest/fixture.xml b/plugins/sonar-cpd-plugin/src/test/resources/org/sonar/plugins/cpd/index/DbCloneIndexTest/fixture.xml
deleted file mode 100644 (file)
index 9559989..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-<dataset>
-
-  <snapshots id="1" status="P" islast="false" />
-  <snapshots id="2" status="P" islast="true" />
-
-  <snapshots id="3" status="P" islast="false" />
-  <snapshots id="4" status="P" islast="true" />
-  <snapshots id="5" status="U" islast="false" />
-
-  <!-- Old snapshot of another project -->
-  <clone_blocks id="1" snapshot_id="1" hash="aa" resource_key="bar-old" index_in_file="0" start_line="0" end_line="1" />
-
-  <!-- Last snapshot of another project -->
-  <clone_blocks id="2" snapshot_id="2" hash="aa" resource_key="bar-last" index_in_file="0" start_line="0" end_line="1" />
-
-  <!-- Old snapshot of current project -->
-  <clone_blocks id="3" snapshot_id="3" hash="aa" resource_key="foo-old" index_in_file="0" start_line="0" end_line="1" />
-
-  <!-- Last snapshot of current project -->
-  <clone_blocks id="4" snapshot_id="4" hash="aa" resource_key="foo-last" index_in_file="0" start_line="0" end_line="1" />
-
-  <!-- New snapshot of current project -->
-  <clone_blocks id="5" snapshot_id="5" hash="aa" resource_key="foo" index_in_file="0" start_line="0" end_line="1" />
-
-</dataset>
diff --git a/plugins/sonar-cpd-plugin/src/test/resources/org/sonar/plugins/cpd/index/DbCloneIndexTest/shouldGetByHash.xml b/plugins/sonar-cpd-plugin/src/test/resources/org/sonar/plugins/cpd/index/DbCloneIndexTest/shouldGetByHash.xml
new file mode 100644 (file)
index 0000000..1dab2d4
--- /dev/null
@@ -0,0 +1,43 @@
+<dataset>
+
+  <snapshots id="1" status="P" islast="false" project_id="0" />
+  <snapshots id="2" status="P" islast="false" project_id="1" />
+  <projects id="1" kee="bar-old" enabled="true" scope="FIL" qualifier="CLA" />
+
+  <snapshots id="3" status="P" islast="true" />
+  <snapshots id="4" status="P" islast="true" project_id="2" />
+  <projects id="2" kee="bar-last" enabled="true" scope="FIL" qualifier="CLA" />
+
+  <snapshots id="5" status="P" islast="false" />
+  <snapshots id="6" status="P" islast="false" project_id="3" />
+  <projects id="3" kee="foo-old" enabled="true" scope="FIL" qualifier="CLA" />
+
+  <snapshots id="7" status="P" islast="true" />
+  <snapshots id="8" status="P" islast="true" project_id="4" />
+  <projects id="4" kee="foo-last" enabled="true" scope="FIL" qualifier="CLA" />
+
+  <snapshots id="9" status="U" islast="false" />
+  <snapshots id="10" status="U" islast="false" project_id="5" />
+  <projects id="5" kee="foo" enabled="true" scope="FIL" qualifier="CLA" />
+
+  <!-- Old snapshot of another project -->
+  <!-- bar-old -->
+  <clone_blocks id="1" project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="0" end_line="0" />
+
+  <!-- Last snapshot of another project -->
+  <!-- bar-last -->
+  <clone_blocks id="2" project_snapshot_id="3" snapshot_id="4" hash="aa" index_in_file="0" start_line="1" end_line="2" />
+
+  <!-- Old snapshot of current project -->
+  <!-- foo-old -->
+  <clone_blocks id="3" project_snapshot_id="5" snapshot_id="6" hash="bb" index_in_file="0" start_line="0" end_line="0" />
+
+  <!-- Last snapshot of current project -->
+  <!-- foo-last -->
+  <clone_blocks id="4" project_snapshot_id="7" snapshot_id="8" hash="bb" index_in_file="0" start_line="0" end_line="0" />
+
+  <!-- New snapshot of current project -->
+  <!-- foo -->
+  <clone_blocks id="5" project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="0" start_line="0" end_line="0" />
+
+</dataset>
index ae2767dfedb4784105506a781b16ac53effb8e50..e3e709ffc45b82b7e3674f7425e3ead9e9d4be13 100644 (file)
@@ -1,27 +1,9 @@
 <dataset>
 
-  <snapshots id="1" status="P" islast="false" />
-  <snapshots id="2" status="P" islast="true" />
+  <snapshots id="1" status="U" islast="false" project_id="0" />
+  <snapshots id="2" status="U" islast="false" project_id="1" />
+  <projects id="1" kee="foo" enabled="true" scope="FIL" qualifier="CLA" />
 
-  <snapshots id="3" status="P" islast="false" />
-  <snapshots id="4" status="P" islast="true" />
-  <snapshots id="5" status="U" islast="false" />
-
-  <!-- Old snapshot of another project -->
-  <clone_blocks id="1" snapshot_id="1" hash="aa" resource_key="bar-old" index_in_file="0" start_line="0" end_line="1" />
-
-  <!-- Last snapshot of another project -->
-  <clone_blocks id="2" snapshot_id="2" hash="aa" resource_key="bar-last" index_in_file="0" start_line="0" end_line="1" />
-
-  <!-- Old snapshot of current project -->
-  <clone_blocks id="3" snapshot_id="3" hash="aa" resource_key="foo-old" index_in_file="0" start_line="0" end_line="1" />
-
-  <!-- Last snapshot of current project -->
-  <clone_blocks id="4" snapshot_id="4" hash="aa" resource_key="foo-last" index_in_file="0" start_line="0" end_line="1" />
-
-  <!-- New snapshot of current project -->
-  <clone_blocks id="5" snapshot_id="5" hash="aa" resource_key="foo" index_in_file="0" start_line="0" end_line="1" />
-
-  <clone_blocks id="6" snapshot_id="5" hash="bb" resource_key="baz" index_in_file="0" start_line="0" end_line="1" />
+  <clone_blocks id="1" project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="1" end_line="2" />
 
 </dataset>
diff --git a/plugins/sonar-cpd-plugin/src/test/resources/org/sonar/plugins/cpd/index/DbCloneIndexTest/shouldInsert.xml b/plugins/sonar-cpd-plugin/src/test/resources/org/sonar/plugins/cpd/index/DbCloneIndexTest/shouldInsert.xml
new file mode 100644 (file)
index 0000000..940281a
--- /dev/null
@@ -0,0 +1,7 @@
+<dataset>
+
+  <snapshots id="1" status="U" islast="false" project_id="0" />
+  <snapshots id="2" status="U" islast="false" project_id="1" />
+  <projects id="1" kee="foo" enabled="true" scope="FIL" qualifier="CLA" />
+
+</dataset>
index c417befec6721761acef730245b5d46b13fd5d38..8a7451b9d809de35ff96f6a3724320014d6758d0 100644 (file)
@@ -24,6 +24,7 @@ import org.sonar.api.database.DatabaseSession;
 import org.sonar.api.database.model.*;
 import org.sonar.api.design.DependencyDto;
 import org.sonar.api.utils.TimeProfiler;
+import org.sonar.jpa.entity.CloneBlock;
 
 import javax.persistence.Query;
 import java.util.List;
@@ -58,6 +59,7 @@ public final class PurgeUtils {
     deleteSources(session, snapshotIds);
     deleteViolations(session, snapshotIds);
     deleteDependencies(session, snapshotIds);
+    deleteCloneBlocks(session, snapshotIds);
     deleteSnapshots(session, snapshotIds);
   }
 
@@ -96,6 +98,13 @@ public final class PurgeUtils {
     executeQuery(session, "delete violations", snapshotIds, "delete from " + RuleFailureModel.class.getSimpleName() + " e where e.snapshotId in (:ids)");
   }
 
+  /**
+   * @since 2.11
+   */
+  private static void deleteCloneBlocks(DatabaseSession session, List<Integer> snapshotIds) {
+    executeQuery(session, "delete clone blocks", snapshotIds, "delete from " + CloneBlock.class.getSimpleName() + " e where e.snapshotId in (:ids)");
+  }
+
   /**
    * Delete SNAPSHOTS table
    */
index 912541667d80c5cf937b65b070d2f906c9afe4ca..2384797283621dac1d67241f6ad2837234c80b41 100644 (file)
                 <!--parent_dependency_id="[null]" project_snapshot_id="1"-->
                 <!--dep_usage="INHERITS" dep_weight="1" from_scope="FIL" to_scope="FIL"/>-->
 
-</dataset>
\ No newline at end of file
+  <!--<clone_blocks id="1" project_snapshot_id="1" snapshot_id="3" hash="bb" index_in_file="0" start_line="0" end_line="0" />-->
+  <!--<clone_blocks id="2" project_snapshot_id="1" snapshot_id="4" hash="bb" index_in_file="0" start_line="0" end_line="0" />-->
+
+</dataset>
index 2b92c63361a9dd1a6d4e21f787e7ece7999c8cb4..6f2a149c5138a37d1c639b39879b7d85ff963900 100644 (file)
                 parent_dependency_id="[null]" project_snapshot_id="1"
                 dep_usage="INHERITS" dep_weight="1" from_scope="FIL" to_scope="FIL" />
 
-</dataset>
\ No newline at end of file
+  <clone_blocks id="1" project_snapshot_id="1" snapshot_id="3" hash="bb" index_in_file="0" start_line="0" end_line="0" />
+  <clone_blocks id="2" project_snapshot_id="1" snapshot_id="4" hash="bb" index_in_file="0" start_line="0" end_line="0" />
+
+</dataset>
index b4de0db290ae0db63f9f0c1594c44768a913485a..7589a9947dd26e9f72f5a62104dc5038483b0878 100644 (file)
@@ -25,8 +25,6 @@ import javax.persistence.GeneratedValue;
 import javax.persistence.Id;
 import javax.persistence.Table;
 
-import org.sonar.api.database.model.ResourceModel;
-
 /**
  * @since 2.11
  */
@@ -44,12 +42,12 @@ public class CloneBlock {
   @Column(name = "snapshot_id", updatable = false, nullable = false)
   private Integer snapshotId;
 
+  @Column(name = "project_snapshot_id", updatable = false, nullable = false)
+  private Integer projectSnapshotId;
+
   @Column(name = "hash", updatable = false, nullable = false, length = BLOCK_HASH_SIZE)
   private String hash;
 
-  @Column(name = "resource_key", updatable = false, nullable = false, length = ResourceModel.KEY_SIZE)
-  private String resourceKey;
-
   @Column(name = "index_in_file", updatable = false, nullable = false)
   private Integer indexInFile;
 
@@ -62,11 +60,11 @@ public class CloneBlock {
   public CloneBlock() {
   }
 
-  public CloneBlock(Integer snapshotId, String hash, String resourceKey, Integer indexInFile, Integer startLine, Integer endLine) {
+  public CloneBlock(Integer projectSnapshotId, Integer snapshotId, String hash, Integer indexInFile, Integer startLine, Integer endLine) {
+    this.projectSnapshotId = projectSnapshotId;
     this.snapshotId = snapshotId;
     this.hash = hash;
     this.indexInFile = indexInFile;
-    this.resourceKey = resourceKey;
     this.startLine = startLine;
     this.endLine = endLine;
   }
@@ -79,8 +77,8 @@ public class CloneBlock {
     return snapshotId;
   }
 
-  public String getResourceKey() {
-    return resourceKey;
+  public Integer getProjectSnapshotId() {
+    return projectSnapshotId;
   }
 
   public String getHash() {
index abd611c748aa4fc7bd273c2121c11e22ecc994cd..4ea9a8343d157fffa2b87a718b5f7e11479c8dfa 100644 (file)
@@ -25,16 +25,17 @@ class CreateCloneBlocks < ActiveRecord::Migration
 
   def self.up
     create_table :clone_blocks do |t|
+      t.column :project_snapshot_id, :integer, :null => false
       t.column :snapshot_id, :integer, :null => false
       t.column :hash, :string, :null => false, :limit => 50
-      t.column :resource_key, :string, :null => false, :limit => 400
       t.column :index_in_file, :integer, :null => false
       t.column :start_line, :integer, :null => false
       t.column :end_line, :integer, :null => false
     end
 
+    add_index :clone_blocks, :project_snapshot_id, :name => 'clone_blocks_project_snapshot'
+    add_index :clone_blocks, :snapshot_id, :name => 'clone_blocks_snapshot'
     add_index :clone_blocks, :hash, :name => 'clone_blocks_hash'
-    add_index :clone_blocks, [:snapshot_id, :resource_key], :name => 'clone_blocks_resource'
   end
 
 end
index 1c8c3eb23fe0144bb0abc41bc2549c282cfc8bf9..7ebfd7cdd4eb1a58384d5be7bfe3a4a5ec47af71 100644 (file)
@@ -489,12 +489,13 @@ CREATE TABLE REVIEW_COMMENTS (
 );
 
 CREATE TABLE CLONE_BLOCKS (
+  PROJECT_SNAPSHOT_ID INTEGER,
   SNAPSHOT_ID INTEGER,
   HASH VARCHAR(50),
-  RESOURCE_KEY VARCHAR(400),
   INDEX_IN_FILE INTEGER NOT NULL,
   START_LINE INTEGER NOT NULL,
   END_LINE INTEGER NOT NULL
 );
+CREATE INDEX CLONE_BLOCKS_PROJECT_SNAPSHOT ON CLONE_BLOCKS (PROJECT_SNAPSHOT_ID);
+CREATE INDEX CLONE_BLOCKS_SNAPSHOT ON CLONE_BLOCKS (SNAPSHOT_ID);
 CREATE INDEX CLONE_BLOCKS_HASH ON CLONE_BLOCKS (HASH);
-CREATE INDEX CLONE_BLOCKS_RESOURCE ON CLONE_BLOCKS (SNAPSHOT_ID, RESOURCE_KEY);