source.dussan.org Git - sonarqube.git/commitdiff
SONAR-5871 Add duplications to file_sources
authorJulien HENRY <julien.henry@sonarsource.com>
Mon, 1 Dec 2014 11:04:28 +0000 (12:04 +0100)
committerJulien HENRY <julien.henry@sonarsource.com>
Mon, 1 Dec 2014 11:04:58 +0000 (12:04 +0100)
plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/DefaultCpdEngine.java
plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/medium/CpdMediumTest.java
server/sonar-server/src/main/java/org/sonar/server/db/migrations/v50/FeedFileSources.java
server/sonar-server/src/main/java/org/sonar/server/db/migrations/v50/FileSourceDto.java
server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v50/FeedFileSourcesTest/after-with-scm.xml
server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v50/FeedFileSourcesTest/after.xml
sonar-batch/src/main/java/org/sonar/batch/index/SourcePersister.java
sonar-batch/src/test/java/org/sonar/batch/index/SourcePersisterTest.java
sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml
sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistDontTouchUnchanged-result.xml
sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistEmptyFile-result.xml

index ba472cbc2b0f6e64b14ee09a2f7955dfcbc82bfb..deeb060b98f9c98b8fd33132e725f87bb0eca5b9 100644 (file)
@@ -67,7 +67,7 @@ public class DefaultCpdEngine extends CpdEngine {
   private final CpdMappings mappings;
   private final FileSystem fs;
   private final Settings settings;
-  private final BlockCache duplicationCache;
+  private final BlockCache blockCache;
   private final Project project;
 
   public DefaultCpdEngine(@Nullable Project project, IndexFactory indexFactory, CpdMappings mappings, FileSystem fs, Settings settings, BlockCache duplicationCache) {
@@ -76,7 +76,7 @@ public class DefaultCpdEngine extends CpdEngine {
     this.mappings = mappings;
     this.fs = fs;
     this.settings = settings;
-    this.duplicationCache = duplicationCache;
+    this.blockCache = duplicationCache;
   }
 
   public DefaultCpdEngine(IndexFactory indexFactory, CpdMappings mappings, FileSystem fs, Settings settings, BlockCache duplicationCache) {
@@ -146,7 +146,7 @@ public class DefaultCpdEngine extends CpdEngine {
     for (InputFile inputFile : sourceFiles) {
       LOG.debug("Populating index from {}", inputFile);
       String resourceEffectiveKey = ((DeprecatedDefaultInputFile) inputFile).key();
-      FileBlocks fileBlocks = duplicationCache.byComponent(resourceEffectiveKey);
+      FileBlocks fileBlocks = blockCache.byComponent(resourceEffectiveKey);
       if (fileBlocks != null) {
         index.insert(inputFile, fileBlocks.blocks());
       } else if (bridge != null) {
index 56805e5dd7a42da116df68c6b9ba9232e3ce4882..24ff033d557f1782329bb882534e70095d1bb5b7 100644 (file)
@@ -28,7 +28,10 @@ import org.junit.Test;
 import org.junit.rules.ExpectedException;
 import org.junit.rules.TemporaryFolder;
 import org.sonar.api.batch.fs.InputFile;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
 import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
+import org.sonar.api.batch.sensor.measure.internal.DefaultMeasure;
+import org.sonar.api.measures.CoreMetrics;
 import org.sonar.batch.mediumtest.BatchMediumTester;
 import org.sonar.batch.mediumtest.BatchMediumTester.TaskResult;
 import org.sonar.plugins.cpd.CpdPlugin;
@@ -80,7 +83,7 @@ public class CpdMediumTest {
   }
 
   @Test
-  public void testDuplications() throws IOException {
+  public void testCrossFileDuplications() throws IOException {
     File srcDir = new File(baseDir, "src");
     srcDir.mkdir();
 
@@ -105,7 +108,55 @@ public class CpdMediumTest {
     // 4 measures per file
     assertThat(result.measures()).hasSize(8);
 
-    InputFile inputFile = result.inputFile("src/sample1.xoo");
+    InputFile inputFile1 = result.inputFile("src/sample1.xoo");
+    InputFile inputFile2 = result.inputFile("src/sample2.xoo");
+    // One clone group on each file
+    List<DuplicationGroup> duplicationGroupsFile1 = result.duplicationsFor(inputFile1);
+    assertThat(duplicationGroupsFile1).hasSize(1);
+
+    DuplicationGroup cloneGroupFile1 = duplicationGroupsFile1.get(0);
+    assertThat(cloneGroupFile1.duplicates()).hasSize(1);
+    assertThat(cloneGroupFile1.originBlock().startLine()).isEqualTo(1);
+    assertThat(cloneGroupFile1.originBlock().length()).isEqualTo(17);
+    assertThat(cloneGroupFile1.originBlock().resourceKey()).isEqualTo(((DefaultInputFile) inputFile1).key());
+    assertThat(cloneGroupFile1.duplicates()).hasSize(1);
+    assertThat(cloneGroupFile1.duplicates().get(0).resourceKey()).isEqualTo(((DefaultInputFile) inputFile2).key());
+
+    List<DuplicationGroup> duplicationGroupsFile2 = result.duplicationsFor(inputFile2);
+    assertThat(duplicationGroupsFile2).hasSize(1);
+
+    DuplicationGroup cloneGroupFile2 = duplicationGroupsFile2.get(0);
+    assertThat(cloneGroupFile2.duplicates()).hasSize(1);
+    assertThat(cloneGroupFile2.originBlock().startLine()).isEqualTo(1);
+    assertThat(cloneGroupFile2.originBlock().length()).isEqualTo(17);
+    assertThat(cloneGroupFile2.originBlock().resourceKey()).isEqualTo(((DefaultInputFile) inputFile2).key());
+    assertThat(cloneGroupFile2.duplicates()).hasSize(1);
+    assertThat(cloneGroupFile2.duplicates().get(0).resourceKey()).isEqualTo(((DefaultInputFile) inputFile1).key());
+  }
+
+  @Test
+  public void testIntraFileDuplications() throws IOException {
+    File srcDir = new File(baseDir, "src");
+    srcDir.mkdir();
+
+    String content = "Sample xoo\ncontent\nfoo\nbar\nSample xoo\ncontent\n";
+
+    File xooFile = new File(srcDir, "sample.xoo");
+    FileUtils.write(xooFile, content);
+
+    TaskResult result = tester.newTask()
+      .properties(builder
+        .put("sonar.sources", "src")
+        .put("sonar.cpd.xoo.minimumTokens", "2")
+        .put("sonar.cpd.xoo.minimumLines", "2")
+        .put("sonar.verbose", "true")
+        .build())
+      .start();
+
+    // 4 measures per file
+    assertThat(result.measures()).hasSize(4);
+
+    InputFile inputFile = result.inputFile("src/sample.xoo");
     // One clone group
     List<DuplicationGroup> duplicationGroups = result.duplicationsFor(inputFile);
     assertThat(duplicationGroups).hasSize(1);
@@ -113,7 +164,15 @@ public class CpdMediumTest {
     DuplicationGroup cloneGroup = duplicationGroups.get(0);
     assertThat(cloneGroup.duplicates()).hasSize(1);
     assertThat(cloneGroup.originBlock().startLine()).isEqualTo(1);
-    assertThat(cloneGroup.originBlock().length()).isEqualTo(17);
+    assertThat(cloneGroup.originBlock().length()).isEqualTo(2);
+    assertThat(cloneGroup.duplicates()).hasSize(1);
+    assertThat(cloneGroup.duplicates().get(0).startLine()).isEqualTo(5);
+    assertThat(cloneGroup.duplicates().get(0).length()).isEqualTo(2);
+
+    assertThat(result.measures()).contains(new DefaultMeasure<String>()
+      .forMetric(CoreMetrics.DUPLICATION_LINES_DATA)
+      .onFile(inputFile)
+      .withValue("1=1;2=1;3=0;4=0;5=1;6=1;7=0"));
   }
 
 }
index db5eda2f4c81617f535f26941c04cc1bb7df50a1..d7e77d53c6ae1b178fb3b32c5614780a564f48ba 100644 (file)
@@ -22,9 +22,11 @@ package org.sonar.server.db.migrations.v50;
 import org.apache.commons.lang.StringUtils;
 import org.sonar.api.utils.System2;
 import org.sonar.core.persistence.Database;
-import org.sonar.server.db.migrations.*;
+import org.sonar.server.db.migrations.BaseDataChange;
+import org.sonar.server.db.migrations.MassUpdate;
 import org.sonar.server.db.migrations.Select.Row;
 import org.sonar.server.db.migrations.Select.RowReader;
+import org.sonar.server.db.migrations.SqlStatement;
 
 import javax.annotation.Nullable;
 
@@ -147,6 +149,7 @@ public class FeedFileSources extends BaseDataChange {
     Long overallCoverageHitsByLineMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'overall_coverage_line_hits_data'").get(simpleLongReader);
     Long overallConditionsByLineMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'overall_conditions_by_line'").get(simpleLongReader);
     Long overallCoveredConditionsByLineMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'overall_covered_conditions_by_line'").get(simpleLongReader);
+    Long duplicationDataMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'duplications_data'").get(simpleLongReader);
 
     MassUpdate massUpdate = context.prepareMassUpdate();
     massUpdate.select("SELECT " +
index 176efa0be44671fff9be3a3c5482ab8f63b5d337..4114d2f1ea6e19073d80f2b6b5942ed59946b7b9 100644 (file)
@@ -73,6 +73,7 @@ class FileSourceDto {
   String[] getSourceData() {
     String highlighting = "";
     String symbolRefs = "";
+    String duplications = "";
     ByteArrayOutputStream output = new ByteArrayOutputStream();
     int line = 0;
     String sourceLine = null;
@@ -86,7 +87,7 @@ class FileSourceDto {
         utHits.get(line), utConditions.get(line), utCoveredConditions.get(line),
         itHits.get(line), itConditions.get(line), itCoveredConditions.get(line),
         overallHits.get(line), overallConditions.get(line), overallCoveredConditions.get(line),
-        highlighting, symbolRefs, sourceLine);
+        highlighting, symbolRefs, duplications, sourceLine);
     }
     csv.close();
     return new String[] {new String(output.toByteArray(), UTF_8), lineHashes.toString()};
index e68c3d0ef9289073a55df69757e551273ab7448d..e4ae2ea1c28be2d9ce1804032ac9e285e0b78c69 100644 (file)
@@ -1,7 +1,7 @@
 <dataset>
 
   <file_sources id="1" project_uuid="uuid-MyProject" file_uuid="uuid-MyFile.xoo" created_at="1416238020000" updated_at="1414770242000"
-    data="aef12a,alice,2014-04-25T12:34:56+0100,1,4,2,2,5,3,3,6,4,,,class Foo {&#13;&#10;abe465,bob,2014-07-25T12:34:56+0100,,,,,,,,,,,,  // Empty&#13;&#10;afb789,carol,2014-03-23T12:34:56+0100,0,,,0,,,0,,,,,}&#13;&#10;afb789,carol,2014-03-23T12:34:56+0100,,,,,,,,,,,,&#13;&#10;"
+    data="aef12a,alice,2014-04-25T12:34:56+0100,1,4,2,2,5,3,3,6,4,,,,class Foo {&#13;&#10;abe465,bob,2014-07-25T12:34:56+0100,,,,,,,,,,,,,  // Empty&#13;&#10;afb789,carol,2014-03-23T12:34:56+0100,0,,,0,,,0,,,,,,}&#13;&#10;afb789,carol,2014-03-23T12:34:56+0100,,,,,,,,,,,,,&#13;&#10;"
     line_hashes="6a19ce786467960a3a9b0d26383a464a&#10;aab2dbc5fdeaa80b050b1d049ede357c&#10;cbb184dd8e05c9709e5dcaedaa0495cf&#10;&#10;"
     data_hash="" />
 
index b8058e0390fa288592405ef37284ef556f737ad8..266e06d8a5391203b95102f479d9dd07f2231db8 100644 (file)
@@ -1,7 +1,7 @@
 <dataset>
 
   <file_sources id="1" project_uuid="uuid-MyProject" file_uuid="uuid-MyFile.xoo" created_at="1416238020000" updated_at="1414770242000"
-    data=",,,,,,,,,,,,,,class Foo {&#13;&#10;,,,,,,,,,,,,,,  // Empty&#13;&#10;,,,,,,,,,,,,,,}&#13;&#10;,,,,,,,,,,,,,,&#13;&#10;"
+    data=",,,,,,,,,,,,,,,class Foo {&#13;&#10;,,,,,,,,,,,,,,,  // Empty&#13;&#10;,,,,,,,,,,,,,,,}&#13;&#10;,,,,,,,,,,,,,,,&#13;&#10;"
     line_hashes="6a19ce786467960a3a9b0d26383a464a&#10;aab2dbc5fdeaa80b050b1d049ede357c&#10;cbb184dd8e05c9709e5dcaedaa0495cf&#10;&#10;"
     data_hash="" />
 
index cfbcd33800e36222fe50018ad7177b224a5766e9..dfdee4980b21cc4b031f332f8d75867e89a7f57e 100644 (file)
@@ -29,6 +29,8 @@ import org.apache.ibatis.session.ResultHandler;
 import org.sonar.api.batch.fs.InputFile;
 import org.sonar.api.batch.fs.InputPath;
 import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
+import org.sonar.api.batch.sensor.duplication.DuplicationGroup.Block;
 import org.sonar.api.batch.sensor.symbol.Symbol;
 import org.sonar.api.database.model.Snapshot;
 import org.sonar.api.measures.CoreMetrics;
@@ -38,6 +40,7 @@ import org.sonar.api.utils.KeyValueFormat;
 import org.sonar.api.utils.System2;
 import org.sonar.api.utils.text.CsvWriter;
 import org.sonar.batch.ProjectTree;
+import org.sonar.batch.duplication.DuplicationCache;
 import org.sonar.batch.highlighting.SyntaxHighlightingData;
 import org.sonar.batch.highlighting.SyntaxHighlightingRule;
 import org.sonar.batch.scan.filesystem.InputPathCache;
@@ -80,10 +83,11 @@ public class SourcePersister implements ScanPersister {
   private final ProjectTree projectTree;
   private final ResourceCache resourceCache;
   private CodeColorizers codeColorizers;
+  private DuplicationCache duplicationCache;
 
   public SourcePersister(ResourcePersister resourcePersister, SnapshotSourceDao sourceDao, InputPathCache inputPathCache,
-    MyBatis mybatis, MeasureCache measureCache, ComponentDataCache componentDataCache, ProjectTree projectTree, System2 system2, ResourceCache resourceCache,
-    CodeColorizers codeColorizers) {
+    MyBatis mybatis, MeasureCache measureCache, ComponentDataCache componentDataCache, ProjectTree projectTree, System2 system2,
+    ResourceCache resourceCache, CodeColorizers codeColorizers, DuplicationCache duplicationCache) {
     this.resourcePersister = resourcePersister;
     this.sourceDao = sourceDao;
     this.inputPathCache = inputPathCache;
@@ -94,6 +98,7 @@ public class SourcePersister implements ScanPersister {
     this.system2 = system2;
     this.resourceCache = resourceCache;
     this.codeColorizers = codeColorizers;
+    this.duplicationCache = duplicationCache;
   }
 
   public void saveSource(Resource resource, String source, Date updatedAt) {
@@ -223,6 +228,7 @@ public class SourcePersister implements ScanPersister {
     SyntaxHighlightingData highlighting = loadHighlighting(file);
     String[] highlightingPerLine = computeHighlightingPerLine(file, highlighting);
     String[] symbolReferencesPerLine = computeSymbolReferencesPerLine(file, loadSymbolReferences(file));
+    String[] duplicationsPerLine = computeDuplicationsPerLine(file, duplicationCache.byComponent(file.key()));
 
     ByteArrayOutputStream output = new ByteArrayOutputStream();
     CsvWriter csv = CsvWriter.of(new OutputStreamWriter(output, UTF_8));
@@ -231,13 +237,51 @@ public class SourcePersister implements ScanPersister {
         utHitsByLine.get(lineIdx), utCondByLine.get(lineIdx), utCoveredCondByLine.get(lineIdx),
         itHitsByLine.get(lineIdx), itCondByLine.get(lineIdx), itCoveredCondByLine.get(lineIdx),
         overallHitsByLine.get(lineIdx), overallCondByLine.get(lineIdx), overallCoveredCondByLine.get(lineIdx),
-        highlightingPerLine[lineIdx - 1], symbolReferencesPerLine[lineIdx - 1],
+        highlightingPerLine[lineIdx - 1], symbolReferencesPerLine[lineIdx - 1], duplicationsPerLine[lineIdx - 1],
         CharMatcher.anyOf(BOM).removeFrom(lines.get(lineIdx - 1)));
     }
     csv.close();
     return StringUtils.defaultIfEmpty(new String(output.toByteArray(), UTF_8), null);
   }
 
+  private String[] computeDuplicationsPerLine(DefaultInputFile file, List<DuplicationGroup> duplicationGroups) {
+    String[] result = new String[file.lines()];
+    if (duplicationGroups == null) {
+      return result;
+    }
+    StringBuilder[] dupPerLine = new StringBuilder[file.lines()];
+    int blockId = 1;
+    for (DuplicationGroup group : duplicationGroups) {
+      addBlock(blockId, group.originBlock(), dupPerLine);
+      blockId++;
+      for (Block dups : group.duplicates()) {
+        if (dups.resourceKey().equals(file.key())) {
+          addBlock(blockId, dups, dupPerLine);
+          blockId++;
+        }
+      }
+    }
+    for (int i = 0; i < file.lines(); i++) {
+      result[i] = dupPerLine[i] != null ? dupPerLine[i].toString() : null;
+    }
+    return result;
+  }
+
+  private void addBlock(int blockId, Block block, StringBuilder[] dupPerLine) {
+    int currentLine = block.startLine();
+    for (int i = 0; i < block.length(); i++) {
+      if (dupPerLine[currentLine - 1] == null) {
+        dupPerLine[currentLine - 1] = new StringBuilder();
+      }
+      if (dupPerLine[currentLine - 1].length() > 0) {
+        dupPerLine[currentLine - 1].append(',');
+      }
+      dupPerLine[currentLine - 1].append(blockId);
+      currentLine++;
+    }
+
+  }
+
   @CheckForNull
   private SyntaxHighlightingData loadHighlighting(DefaultInputFile file) {
     SyntaxHighlightingData highlighting = componentDataCache.getData(file.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING);
index 9117015cca47456e21aa36a9c6b9ae9e5e899b35..0633c2ee5e81eb6270e5cff66bc5dc91245a7917 100644 (file)
@@ -27,6 +27,7 @@ import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 import org.sonar.api.batch.fs.InputPath;
 import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
 import org.sonar.api.batch.sensor.highlighting.TypeOfText;
 import org.sonar.api.database.model.Snapshot;
 import org.sonar.api.measures.CoreMetrics;
@@ -37,6 +38,7 @@ import org.sonar.api.resources.Resource;
 import org.sonar.api.utils.DateUtils;
 import org.sonar.api.utils.System2;
 import org.sonar.batch.ProjectTree;
+import org.sonar.batch.duplication.DuplicationCache;
 import org.sonar.batch.highlighting.SyntaxHighlightingData;
 import org.sonar.batch.highlighting.SyntaxHighlightingDataBuilder;
 import org.sonar.batch.scan.filesystem.InputPathCache;
@@ -72,6 +74,7 @@ public class SourcePersisterTest extends AbstractDaoTestCase {
   private System2 system2;
   private MeasureCache measureCache;
   private ComponentDataCache componentDataCache;
+  private DuplicationCache duplicationCache;
 
   private static final String PROJECT_KEY = "foo";
 
@@ -90,9 +93,10 @@ public class SourcePersisterTest extends AbstractDaoTestCase {
     measureCache = mock(MeasureCache.class);
     when(measureCache.byMetric(anyString(), anyString())).thenReturn(Collections.<org.sonar.api.measures.Measure>emptyList());
     componentDataCache = mock(ComponentDataCache.class);
+    duplicationCache = mock(DuplicationCache.class);
     sourcePersister = new SourcePersister(resourcePersister, new SnapshotSourceDao(getMyBatis()), inputPathCache,
       getMyBatis(), measureCache, componentDataCache, projectTree, system2,
-      resourceCache, mock(CodeColorizers.class));
+      resourceCache, mock(CodeColorizers.class), duplicationCache);
     Project project = new Project(PROJECT_KEY);
     project.setUuid("projectUuid");
     when(projectTree.getRootProject()).thenReturn(project);
@@ -146,9 +150,9 @@ public class SourcePersisterTest extends AbstractDaoTestCase {
     assertThat(fileSourceDto.getCreatedAt()).isEqualTo(DateUtils.parseDateTime("2014-10-10T16:44:02+0200").getTime());
     assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
     assertThat(fileSourceDto.getData()).isEqualTo(
-      ",,,,,,,,,,,,,,changed\r\n,,,,,,,,,,,,,,content\r\n");
+      ",,,,,,,,,,,,,,,changed\r\n,,,,,,,,,,,,,,,content\r\n");
     assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("changed") + "\n" + md5Hex("content"));
-    assertThat(fileSourceDto.getDataHash()).isEqualTo("bd582d7001cfca180c3dacab10043292");
+    assertThat(fileSourceDto.getDataHash()).isEqualTo("d1a4dd62422639f665a8d80b37c59f8d");
   }
 
   @Test
@@ -190,9 +194,9 @@ public class SourcePersisterTest extends AbstractDaoTestCase {
     assertThat(fileSourceDto.getCreatedAt()).isEqualTo(now.getTime());
     assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
     assertThat(fileSourceDto.getData()).isEqualTo(
-      ",,,,,,,,,,,,,,foo\r\n,,,,,,,,,,,,,,bar\r\n,,,,,,,,,,,,,,biz\r\n");
+      ",,,,,,,,,,,,,,,foo\r\n,,,,,,,,,,,,,,,bar\r\n,,,,,,,,,,,,,,,biz\r\n");
     assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("foo") + "\n" + md5Hex("bar") + "\n" + md5Hex("biz"));
-    assertThat(fileSourceDto.getDataHash()).isEqualTo("e1827ac156bb76144486e6570a591cfb");
+    assertThat(fileSourceDto.getDataHash()).isEqualTo("a34ed99cc7d27150c82f5cba2b22b665");
 
   }
 
@@ -214,29 +218,30 @@ public class SourcePersisterTest extends AbstractDaoTestCase {
 
     mockResourceCache(relativePathNew, PROJECT_KEY, "uuidnew");
 
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.SCM_AUTHORS_BY_LINE_KEY))
+    String fileKey = PROJECT_KEY + ":" + relativePathNew;
+    when(measureCache.byMetric(fileKey, CoreMetrics.SCM_AUTHORS_BY_LINE_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_AUTHORS_BY_LINE, "1=julien;2=simon;3=julien")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE, "1=2014-10-11T16:44:02+0100;2=2014-10-12T16:44:02+0100;3=2014-10-13T16:44:02+0100")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.SCM_REVISIONS_BY_LINE_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.SCM_REVISIONS_BY_LINE_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_REVISIONS_BY_LINE, "1=123;2=234;3=345")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.COVERAGE_LINE_HITS_DATA, "1=1;3=0")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.CONDITIONS_BY_LINE_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.CONDITIONS_BY_LINE_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.CONDITIONS_BY_LINE, "1=4")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.COVERED_CONDITIONS_BY_LINE, "1=2")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.IT_COVERAGE_LINE_HITS_DATA_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.IT_COVERAGE_LINE_HITS_DATA_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_COVERAGE_LINE_HITS_DATA, "1=2;3=0")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.IT_CONDITIONS_BY_LINE_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.IT_CONDITIONS_BY_LINE_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_CONDITIONS_BY_LINE, "1=5")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE, "1=3")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA, "1=3;3=0")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.OVERALL_CONDITIONS_BY_LINE_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_CONDITIONS_BY_LINE_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_CONDITIONS_BY_LINE, "1=6")));
-    when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE_KEY))
+    when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE_KEY))
       .thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE, "1=4")));
 
     SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
@@ -244,19 +249,29 @@ public class SourcePersisterTest extends AbstractDaoTestCase {
       .registerHighlightingRule(4, 5, TypeOfText.COMMENT)
       .registerHighlightingRule(7, 16, TypeOfText.CONSTANT)
       .build();
-    when(componentDataCache.getData(PROJECT_KEY + ":" + relativePathNew, SnapshotDataTypes.SYNTAX_HIGHLIGHTING))
+    when(componentDataCache.getData(fileKey, SnapshotDataTypes.SYNTAX_HIGHLIGHTING))
       .thenReturn(highlighting);
 
-    DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(PROJECT_KEY + ":" + relativePathNew, null);
+    DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(fileKey, null);
     org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2);
     symbolBuilder.newReference(s1, 4);
     symbolBuilder.newReference(s1, 11);
     org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6);
     symbolBuilder.newReference(s2, 0);
     symbolBuilder.newReference(s2, 7);
-    when(componentDataCache.getData(PROJECT_KEY + ":" + relativePathNew, SnapshotDataTypes.SYMBOL_HIGHLIGHTING))
+    when(componentDataCache.getData(fileKey, SnapshotDataTypes.SYMBOL_HIGHLIGHTING))
       .thenReturn(symbolBuilder.build());
 
+    DuplicationGroup group1 = new DuplicationGroup(new DuplicationGroup.Block(fileKey, 1, 1))
+      .addDuplicate(new DuplicationGroup.Block(fileKey, 3, 1))
+      .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 1))
+      .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 1));
+
+    DuplicationGroup group2 = new DuplicationGroup(new DuplicationGroup.Block(fileKey, 1, 2))
+      .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 2))
+      .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 2));
+    when(duplicationCache.byComponent(fileKey)).thenReturn(Arrays.asList(group1, group2));
+
     sourcePersister.persist();
 
     FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidnew");
@@ -264,10 +279,10 @@ public class SourcePersisterTest extends AbstractDaoTestCase {
     assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
     assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("foo") + "\n" + md5Hex("bar") + "\n" + md5Hex("biz"));
     assertThat(fileSourceDto.getData()).isEqualTo(
-      "123,julien,2014-10-11T16:44:02+0100,1,4,2,2,5,3,3,6,4,\"0,3,a\",\"1,2,1;0,2,2\",foo\r\n"
-        + "234,simon,2014-10-12T16:44:02+0100,,,,,,,,,,\"0,1,cd\",\"0,1,1;0,2,2\",bar\r\n"
-        + "345,julien,2014-10-13T16:44:02+0100,0,,,0,,,0,,,\"0,9,c\",\"4,5,1;0,2,2\",biz\r\n");
-    assertThat(fileSourceDto.getDataHash()).isEqualTo("594752666dd282f4a3bb985829c790fa");
+      "123,julien,2014-10-11T16:44:02+0100,1,4,2,2,5,3,3,6,4,\"0,3,a\",\"1,2,1;0,2,2\",\"1,3\",foo\r\n"
+        + "234,simon,2014-10-12T16:44:02+0100,,,,,,,,,,\"0,1,cd\",\"0,1,1;0,2,2\",3,bar\r\n"
+        + "345,julien,2014-10-13T16:44:02+0100,0,,,0,,,0,,,\"0,9,c\",\"4,5,1;0,2,2\",2,biz\r\n");
+    assertThat(fileSourceDto.getDataHash()).isEqualTo("26930cf0250d525b04083185ff24a046");
   }
 
   @Test
index 8c17f16c4534d7569211c1a03b8f1ca246d8e3be..2448332fb2e91994a096f9da0f421fe605c986bb 100644 (file)
@@ -1,8 +1,8 @@
 <dataset>
   <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" 
-      data=",,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,content&#13;&#10;" 
+      data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;" 
       line_hashes="8d7b3d6b83c0a517eac07e1aac94b773&#10;9a0364b9e99bb480dd25e1f0284c8555" 
-      data_hash="21a2d025d55b25d6412b1565afb516a5"
+      data_hash="0263047cd758c68c27683625f072f010"
       created_at="1412952242000" updated_at="1412952242000" />
       
 </dataset>
index c4a070d9adefaf7b8bfbc137c6e56be4f076ff71..e17dd5542305bc385d7f63a0a53eb01eb94def8f 100644 (file)
@@ -1,9 +1,9 @@
 <dataset>
 
   <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" 
-      data=",,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,content&#13;&#10;" 
+      data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;" 
       line_hashes="8d7b3d6b83c0a517eac07e1aac94b773&#10;9a0364b9e99bb480dd25e1f0284c8555" 
-      data_hash="21a2d025d55b25d6412b1565afb516a5"
+      data_hash="0263047cd758c68c27683625f072f010"
       created_at="1412952242000" updated_at="1412952242000" />
 
 </dataset>
index 130cede5b5d4d1a6acef04af0b5dc857ee146900..6733f34a10038b6e252cca6498d71fc03ac56bdc 100644 (file)
@@ -1,8 +1,8 @@
 <dataset>
     <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" 
-      data=",,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,content&#13;&#10;" 
+      data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;" 
       line_hashes="8d7b3d6b83c0a517eac07e1aac94b773&#10;9a0364b9e99bb480dd25e1f0284c8555" 
-      data_hash="21a2d025d55b25d6412b1565afb516a5"
+      data_hash="0263047cd758c68c27683625f072f010"
       created_at="1412952242000" updated_at="1412952242000" />
       
     <file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" data="[null]"