source.dussan.org Git - sonarqube.git/commitdiff
SONAR-7778 support addition of DUPLICATIONS_INDEX.COMPONENT_UUID 1048/head
author: Sébastien Lesaint <sebastien.lesaint@sonarsource.com>
Fri, 17 Jun 2016 09:36:49 +0000 (11:36 +0200)
committer: Sébastien Lesaint <sebastien.lesaint@sonarsource.com>
Tue, 21 Jun 2016 10:08:41 +0000 (12:08 +0200)
12 files changed:
server/sonar-server/src/main/java/org/sonar/server/computation/step/PersistCrossProjectDuplicationIndexStep.java
server/sonar-server/src/test/java/org/sonar/server/computation/step/LoadCrossProjectDuplicationsRepositoryStepTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/step/PersistCrossProjectDuplicationIndexStepTest.java
sonar-db/src/main/java/org/sonar/db/duplication/DuplicationUnitDto.java
sonar-db/src/main/resources/org/sonar/db/duplication/DuplicationMapper.xml
sonar-db/src/test/java/org/sonar/db/duplication/DuplicationDaoTest.java
sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/insert-result.xml
sonar-db/src/test/resources/org/sonar/db/duplication/DuplicationDaoTest/select_candidates.xml
sonar-db/src/test/resources/org/sonar/db/purge/PurgeCommandsTest/shouldDeleteSnapshot-result.xml
sonar-db/src/test/resources/org/sonar/db/purge/PurgeCommandsTest/shouldDeleteSnapshot.xml
sonar-db/src/test/resources/org/sonar/db/purge/PurgeCommandsTest/shouldPurgeSnapshot-result.xml
sonar-db/src/test/resources/org/sonar/db/purge/PurgeCommandsTest/shouldPurgeSnapshot.xml

index 8a2410363febf0a52a78069a3a8e025b843d5fbc..a59c2af1306a6b12d4ce8bf6430ae1593ec38589 100644 (file)
@@ -100,8 +100,8 @@ public class PersistCrossProjectDuplicationIndexStep implements ComputationStep
               .setEndLine(block.getEndLine())
               .setIndexInFile(indexInFile)
               .setSnapshotId(dbIdsRepository.getSnapshotId(component))
-              .setProjectSnapshotId(projectSnapshotId)
-            );
+              .setComponentUuid(component.getUuid())
+              .setProjectSnapshotId(projectSnapshotId));
           indexInFile++;
         }
       } finally {
index 356e33c0b5ab1a04f708b3ef658d85653702640f..1b84b4591a9a698e2219257e56ff812a5389858e 100644 (file)
  */
 package org.sonar.server.computation.step;
 
-import java.util.*;
-
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -50,7 +54,11 @@ import org.sonar.server.computation.snapshot.Snapshot;
 
 import static java.util.Arrays.asList;
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.when;
 import static org.sonar.server.computation.component.Component.Type.FILE;
 import static org.sonar.server.computation.component.Component.Type.PROJECT;
 
@@ -73,8 +81,7 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
   @Rule
   public TreeRootHolderRule treeRootHolder = new TreeRootHolderRule().setRoot(
     ReportComponent.builder(PROJECT, PROJECT_REF)
-      .addChildren(CURRENT_FILE
-      ).build());
+      .addChildren(CURRENT_FILE).build());
 
   @Rule
   public BatchReportReaderRule batchReportReader = new BatchReportReaderRule();
@@ -130,7 +137,8 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
       .setEndLine(55)
       .setIndexInFile(0)
       .setProjectSnapshotId(otherProjectSnapshot.getId())
-      .setSnapshotId(otherFileSnapshot.getId());
+      .setSnapshotId(otherFileSnapshot.getId())
+      .setComponentUuid(otherFileSnapshot.getComponentUuid());
     dbClient.duplicationDao().insert(dbSession, duplicate);
     dbSession.commit();
 
@@ -153,17 +161,14 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
           .setIndexInFile(0)
           .setLines(originBlock.getStartLine(), originBlock.getEndLine())
           .setUnit(originBlock.getStartTokenIndex(), originBlock.getEndTokenIndex())
-          .build()
-        ),
+          .build()),
       Arrays.asList(
         new Block.Builder()
           .setResourceId(otherFIle.getKey())
           .setBlockHash(new ByteArray(hash))
           .setIndexInFile(duplicate.getIndexInFile())
           .setLines(duplicate.getStartLine(), duplicate.getEndLine())
-          .build()
-        )
-      );
+          .build()));
   }
 
   @Test
@@ -199,7 +204,8 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
       .setEndLine(55)
       .setIndexInFile(0)
       .setProjectSnapshotId(otherProjectSnapshot.getId())
-      .setSnapshotId(otherFileSnapshot.getId());
+      .setSnapshotId(otherFileSnapshot.getId())
+      .setComponentUuid(otherFileSnapshot.getComponentUuid());
 
     DuplicationUnitDto duplicate2 = new DuplicationUnitDto()
       .setHash(originBlock2.getHash())
@@ -207,7 +213,8 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
       .setEndLine(35)
       .setIndexInFile(1)
       .setProjectSnapshotId(otherProjectSnapshot.getId())
-      .setSnapshotId(otherFileSnapshot.getId());
+      .setSnapshotId(otherFileSnapshot.getId())
+      .setComponentUuid(otherFileSnapshot.getComponentUuid());
     dbClient.duplicationDao().insert(dbSession, duplicate1);
     dbClient.duplicationDao().insert(dbSession, duplicate2);
     dbSession.commit();
@@ -228,8 +235,7 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
         .setIndexInFile(0)
         .setLines(originBlock1.getStartLine(), originBlock1.getEndLine())
         .setUnit(originBlock1.getStartTokenIndex(), originBlock1.getEndTokenIndex())
-        .build()
-      );
+        .build());
     assertThat(originBlocksByIndex.get(1)).isEqualTo(
       new Block.Builder()
         .setResourceId(CURRENT_FILE_KEY)
@@ -237,8 +243,7 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
         .setIndexInFile(1)
         .setLines(originBlock2.getStartLine(), originBlock2.getEndLine())
         .setUnit(originBlock2.getStartTokenIndex(), originBlock2.getEndTokenIndex())
-        .build()
-      );
+        .build());
 
     Map<Integer, Block> duplicationBlocksByIndex = blocksByIndexInFile(duplicationBlocks.getValue());
     assertThat(duplicationBlocksByIndex.get(0)).isEqualTo(
@@ -247,16 +252,14 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
         .setBlockHash(new ByteArray(originBlock1.getHash()))
         .setIndexInFile(duplicate1.getIndexInFile())
         .setLines(duplicate1.getStartLine(), duplicate1.getEndLine())
-        .build()
-      );
+        .build());
     assertThat(duplicationBlocksByIndex.get(1)).isEqualTo(
       new Block.Builder()
         .setResourceId(otherFIle.getKey())
         .setBlockHash(new ByteArray(originBlock2.getHash()))
         .setIndexInFile(duplicate2.getIndexInFile())
         .setLines(duplicate2.getStartLine(), duplicate2.getEndLine())
-        .build()
-      );
+        .build());
   }
 
   @Test
@@ -277,7 +280,8 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
       .setEndLine(55)
       .setIndexInFile(0)
       .setProjectSnapshotId(otherProjectSnapshot.getId())
-      .setSnapshotId(otherFileSnapshot.getId());
+      .setSnapshotId(otherFileSnapshot.getId())
+      .setComponentUuid(otherFileSnapshot.getComponentUuid());
     dbClient.duplicationDao().insert(dbSession, duplicate);
     dbSession.commit();
 
index cfee41ef09a6e98c700f189c4fb4dc765f774518..6fcabab19e8a469e16ee499b1ef69976e3d251cf 100644 (file)
@@ -23,7 +23,6 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -91,12 +90,13 @@ public class PersistCrossProjectDuplicationIndexStepTest {
     underTest.execute();
 
     Map<String, Object> dto = dbTester.selectFirst("select hash as \"hash\", start_line as \"startLine\", end_line as \"endLine\", index_in_file as \"indexInFile\", " +
-      "snapshot_id as \"snapshotId\", project_snapshot_id as \"projectSnapshotId\" from duplications_index");
+      "snapshot_id as \"snapshotId\", component_uuid as \"componentUuid\", project_snapshot_id as \"projectSnapshotId\" from duplications_index");
     assertThat(dto.get("hash")).isEqualTo(CPD_TEXT_BLOCK.getHash());
     assertThat(dto.get("startLine")).isEqualTo(30L);
     assertThat(dto.get("endLine")).isEqualTo(45L);
     assertThat(dto.get("indexInFile")).isEqualTo(0L);
     assertThat(dto.get("snapshotId")).isEqualTo(FILE_SNAPSHOT_ID);
+    assertThat(dto.get("componentUuid")).isEqualTo(FILE.getUuid());
     assertThat(dto.get("projectSnapshotId")).isEqualTo(PROJECT_SNAPSHOT_ID);
   }
 
@@ -114,12 +114,13 @@ public class PersistCrossProjectDuplicationIndexStepTest {
     underTest.execute();
 
     List<Map<String, Object>> dtos = dbTester.select("select hash as \"hash\", start_line as \"startLine\", end_line as \"endLine\", index_in_file as \"indexInFile\", " +
-      "snapshot_id as \"snapshotId\", project_snapshot_id as \"projectSnapshotId\" from duplications_index");
+      "snapshot_id as \"snapshotId\", component_uuid as \"componentUuid\", project_snapshot_id as \"projectSnapshotId\" from duplications_index");
     assertThat(dtos).extracting("hash").containsOnly(CPD_TEXT_BLOCK.getHash(), "b1234353e96320ff");
     assertThat(dtos).extracting("startLine").containsOnly(30L, 20L);
     assertThat(dtos).extracting("endLine").containsOnly(45L, 15L);
     assertThat(dtos).extracting("indexInFile").containsOnly(0L, 1L);
     assertThat(dtos).extracting("snapshotId").containsOnly(FILE_SNAPSHOT_ID);
+    assertThat(dtos).extracting("componentUuid").containsOnly(FILE.getUuid());
     assertThat(dtos).extracting("projectSnapshotId").containsOnly(PROJECT_SNAPSHOT_ID);
   }
 
index 6cccfd94872eec07921e83f76ade5485469cadc0..d915fb4466a134ef70395324938aed6f2452934c 100644 (file)
@@ -23,6 +23,7 @@ public final class DuplicationUnitDto {
 
   private long id;
   private long snapshotId;
+  private String componentUuid;
   private long projectSnapshotId;
 
   private String hash;
@@ -51,6 +52,15 @@ public final class DuplicationUnitDto {
     return this;
   }
 
+  public String getComponentUuid() {
+    return componentUuid;
+  }
+
+  public DuplicationUnitDto setComponentUuid(String componentUuid) {
+    this.componentUuid = componentUuid;
+    return this;
+  }
+
   public long getProjectSnapshotId() {
     return projectSnapshotId;
   }
index b1ce10e4069a5c6bdb5240b896a9668c8bba15a2..ec02d2e7ef8b0c376e587b4dc7da680af81eef24 100644 (file)
@@ -7,6 +7,7 @@
     SELECT DISTINCT
     duplication_block.id as id,
     duplication_block.snapshot_id as snapshotId,
+    duplication_block.component_uuid as componentUuid,
     duplication_block.project_snapshot_id as projectSnapshotId,
     duplication_block.hash as hash,
     duplication_block.index_in_file as indexInFile,
@@ -15,7 +16,7 @@
     file_component.kee as componentKey
     FROM duplications_index duplication_block
     INNER JOIN snapshots snapshot ON duplication_block.snapshot_id=snapshot.id AND snapshot.islast=${_true}
-    INNER JOIN projects file_component ON file_component.uuid=snapshot.component_uuid AND file_component.language=#{language}
+    INNER JOIN projects file_component ON file_component.uuid=duplication_block.component_uuid AND file_component.language=#{language}
     AND file_component.enabled=${_true}
     <where>
       AND duplication_block.hash in
@@ -27,8 +28,8 @@
   </select>
 
   <insert id="batchInsert" parameterType="DuplicationUnit" useGeneratedKeys="false">
-    INSERT INTO duplications_index (snapshot_id, project_snapshot_id, hash, index_in_file, start_line, end_line)
-    VALUES (#{snapshotId}, #{projectSnapshotId}, #{hash}, #{indexInFile}, #{startLine}, #{endLine})
+    INSERT INTO duplications_index (snapshot_id, component_uuid, project_snapshot_id, hash, index_in_file, start_line, end_line)
+    VALUES (#{snapshotId}, #{componentUuid}, #{projectSnapshotId}, #{hash}, #{indexInFile}, #{startLine}, #{endLine})
   </insert>
 
 </mapper>
index eeaf8e5e07aa1396155e31e8988c02f1c6d55cf5..4374de333075377ca8993318dc414bf8de106e52 100644 (file)
@@ -49,6 +49,7 @@ public class DuplicationDaoTest {
 
     DuplicationUnitDto block = blocks.get(0);
     assertThat(block.getComponentKey()).isEqualTo("bar-last");
+    assertThat(block.getComponentUuid()).isEqualTo("uuid_2");
     assertThat(block.getHash()).isEqualTo("aa");
     assertThat(block.getIndexInFile()).isEqualTo(0);
     assertThat(block.getStartLine()).isEqualTo(1);
@@ -67,6 +68,7 @@ public class DuplicationDaoTest {
     dao.insert(dbSession, new DuplicationUnitDto()
       .setProjectSnapshotId(1)
       .setSnapshotId(2)
+      .setComponentUuid("uuid_1")
       .setHash("bb")
       .setIndexInFile(0)
       .setStartLine(1)
index 61362f40909f1a0c4a700c540134ded1b09bf7a2..072ce7fd58672308fc317055987eb1c6e5c639ac 100644 (file)
              project_id="1"/>
   <projects id="1" uuid="1" kee="foo" enabled="1" scope="FIL" qualifier="CLA"/>
 
-  <duplications_index id="1" project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="1"
+  <duplications_index id="1"
+                      project_snapshot_id="1"
+                      snapshot_id="2"
+                      component_uuid="uuid_1"
+                      hash="bb"
+                      index_in_file="0"
+                      start_line="1"
                       end_line="2"/>
 
 </dataset>
index 32b73f94ea439bc084412a87131100ef38d8ed80..ec60a919101282eca91c804f8c49fefffc157992 100644 (file)
 
   <!-- Old snapshot of another project -->
   <!-- bar-old -->
-  <duplications_index id="1" project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="0"
+  <duplications_index id="1"
+                      project_snapshot_id="1"
+                      snapshot_id="2"
+                      component_uuid="uuid_1"
+                      hash="bb"
+                      index_in_file="0"
+                      start_line="0"
                       end_line="0"/>
 
   <!-- Last snapshot of another project -->
   <!-- bar-last -->
-  <duplications_index id="2" project_snapshot_id="3" snapshot_id="4" hash="aa" index_in_file="0" start_line="1"
+  <duplications_index id="2"
+                      project_snapshot_id="3"
+                      snapshot_id="4"
+                      component_uuid="uuid_2"
+                      hash="aa"
+                      index_in_file="0"
+                      start_line="1"
                       end_line="2"/>
 
   <!-- Old snapshot of current project -->
   <!-- foo-old -->
-  <duplications_index id="3" project_snapshot_id="5" snapshot_id="6" hash="bb" index_in_file="0" start_line="0"
+  <duplications_index id="3"
+                      project_snapshot_id="5"
+                      snapshot_id="6"
+                      component_uuid="uuid_3"
+                      hash="bb"
+                      index_in_file="0"
+                      start_line="0"
                       end_line="0"/>
 
   <!-- Last snapshot of current project -->
   <!-- foo-last -->
-  <duplications_index id="4" project_snapshot_id="7" snapshot_id="8" hash="aa" index_in_file="0" start_line="0"
+  <duplications_index id="4"
+                      project_snapshot_id="7"
+                      snapshot_id="8"
+                      component_uuid="uuid_4"
+                      hash="aa"
+                      index_in_file="0"
+                      start_line="0"
                       end_line="0"/>
 
   <!-- New snapshot of current project -->
   <!-- foo -->
-  <duplications_index id="5" project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="0" start_line="0"
+  <duplications_index id="5"
+                      project_snapshot_id="9"
+                      snapshot_id="10"
+                      component_uuid="uuid_5"
+                      hash="aa"
+                      index_in_file="0"
+                      start_line="0"
                       end_line="0"/>
 
   <!-- Note that there is two blocks with same hash for current analysis to verify that we use "SELECT DISTINCT", -->
   <!-- without "DISTINCT" we will select block from "bar-last" two times. -->
-  <duplications_index id="6" project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="1" start_line="1"
+  <duplications_index id="6"
+                      project_snapshot_id="9"
+                      snapshot_id="10"
+                      component_uuid="uuid_5"
+                      hash="aa"
+                      index_in_file="1"
+                      start_line="1"
                       end_line="1"/>
 
   <!-- Last snapshot of project with another language -->
   <!-- baz -->
-  <duplications_index id="7" project_snapshot_id="1" snapshot_id="11" hash="aa" index_in_file="0" start_line="0"
+  <duplications_index id="7"
+                      project_snapshot_id="1"
+                      snapshot_id="11"
+                      component_uuid="uuid_6"
+                      hash="aa"
+                      index_in_file="0"
+                      start_line="0"
                       end_line="0"/>
 
 </dataset>
index d3ca84c438da44d0c62611671a639a318a1ba3a8..1c9ba52e8f7fbe3eb5915680c5a83df90e2c31d9 100644 (file)
                     alert_status="[null]" description="[null]" measure_data="[null]"/>
   <events id="1" name="Version 1.0" component_uuid="1" snapshot_id="1" category="VERSION" description="[null]"
           event_date="1228222680000" created_at="1228222680000" event_data="[null]"/>
-  <duplications_index id="1" project_snapshot_id="1" snapshot_id="1" hash="bb" index_in_file="0" start_line="0"
+  <duplications_index id="1"
+                      project_snapshot_id="1"
+                      component_uuid="uuid_1"
+                      snapshot_id="1"
+                      hash="bb"
+                      index_in_file="0"
+                      start_line="0"
                       end_line="0"/>
 </dataset>
index a4330d00591c6577b108d44d32c316ddc1ffb0ea..9263dc9015e5a77c0178e4b9c81a9c0e4eba25b8 100644 (file)
                     alert_status="[null]" description="[null]" measure_data="[null]"/>
   <events id="1" name="Version 1.0" component_uuid="1" snapshot_id="1" category="VERSION" description="[null]"
           event_date="1228222680000" created_at="1228222680000" event_data="[null]"/>
-  <duplications_index id="1" project_snapshot_id="1" snapshot_id="1" hash="bb" index_in_file="0" start_line="0"
+  <duplications_index id="1"
+                      project_snapshot_id="1"
+                      snapshot_id="1"
+                      component_uuid="uuid_1"
+                      hash="bb"
+                      index_in_file="0"
+                      start_line="0"
                       end_line="0"/>
 
 
                     alert_status="[null]" description="[null]" measure_data="[null]"/>
   <events id="2" name="Version 1.0" component_uuid="5" snapshot_id="5" category="VERSION" description="[null]"
           event_date="1228222680000" created_at="1228222680000" event_data="[null]"/>
-  <duplications_index id="2" project_snapshot_id="5" snapshot_id="5" hash="bb" index_in_file="0" start_line="0"
+  <duplications_index id="2"
+                      project_snapshot_id="5"
+                      snapshot_id="5"
+                      component_uuid="uuid_5"
+                      hash="bb"
+                      index_in_file="0"
+                      start_line="0"
                       end_line="0"/>
 
 
index 4ac3b5f57074ffe94363b031f5895fe9fa7b12aa..a6cd4cf7a528e2ca9c1a80c184e8810d1c634c4e 100644 (file)
@@ -73,7 +73,13 @@ Note that measures, events and reviews are not deleted.
           created_at="1228222680000"
           event_data="[null]"/>
 
-  <duplications_index id="2" project_snapshot_id="2" snapshot_id="2"
-                      hash="bb" index_in_file="0" start_line="0" end_line="0"/>
+  <duplications_index id="2"
+                      project_snapshot_id="2"
+                      snapshot_id="2"
+                      component_uuid="uuid_2"
+                      hash="bb"
+                      index_in_file="0"
+                      start_line="0"
+                      end_line="0"/>
 
 </dataset>
index 54feba39511e081ab9d52b5d879aa1157e149466..af33f74ab3775e1657067dbab7b56d36517f1c40 100644 (file)
           created_at="1228222680000"
           event_data="[null]"/>
 
-  <duplications_index id="1" project_snapshot_id="1" snapshot_id="1"
-                      hash="bb" index_in_file="0" start_line="0" end_line="0"/>
+  <duplications_index id="1"
+                      project_snapshot_id="1"
+                      snapshot_id="1"
+                      component_uuid="uuid_1"
+                      hash="bb"
+                      index_in_file="0"
+                      start_line="0"
+                      end_line="0"/>
 
 
   <!-- The following is not purged but is kept for DBUnit -->
           created_at="1228222680000"
           event_data="[null]"/>
 
-  <duplications_index id="2" project_snapshot_id="2" snapshot_id="2"
-                      hash="bb" index_in_file="0" start_line="0" end_line="0"/>
+  <duplications_index id="2"
+                      project_snapshot_id="2"
+                      snapshot_id="2"
+                      component_uuid="uuid_2"
+                      hash="bb"
+                      index_in_file="0"
+                      start_line="0"
+                      end_line="0"/>
 
 </dataset>