SONAR-6000 Truncate duplication data when there are too many items
author    Julien HENRY <julien.henry@sonarsource.com>
          Tue, 17 Feb 2015 13:00:27 +0000 (14:00 +0100)
committer Julien HENRY <julien.henry@sonarsource.com>
          Tue, 17 Feb 2015 13:00:27 +0000 (14:00 +0100)
plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/JavaCpdEngine.java
plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/medium/CpdMediumTest.java

index a5770634444170fd9249829603a468c7c2ae2748..f876cd9932ba2aeae4956463b7d3c4d651ba7844 100644 (file)
@@ -81,6 +81,9 @@ public class JavaCpdEngine extends CpdEngine {
    */
   private static final int TIMEOUT = 5 * 60;
 
+  private static final int MAX_CLONE_GROUP_PER_FILE = 100;
+  private static final int MAX_CLONE_PART_PER_GROUP = 100;
+
   private final IndexFactory indexFactory;
   private final FileSystem fs;
   private final Settings settings;
@@ -223,10 +226,24 @@ public class JavaCpdEngine extends CpdEngine {
       .build());
 
     DuplicationBuilder builder = context.duplicationBuilder(inputFile);
+    int cloneGroupCount = 0;
     for (CloneGroup duplication : duplications) {
-      builder.originBlock(duplication.getOriginPart().getStartLine(), duplication.getOriginPart().getEndLine());
+      cloneGroupCount++;
+      if (cloneGroupCount > MAX_CLONE_GROUP_PER_FILE) {
+        LOG.warn("Too many duplication groups on file " + inputFile.relativePath() + ". Keep only the first " + MAX_CLONE_GROUP_PER_FILE + " groups.");
+        break;
+      }
+      ClonePart originPart = duplication.getOriginPart();
+      builder.originBlock(originPart.getStartLine(), originPart.getEndLine());
+      int clonePartCount = 0;
       for (ClonePart part : duplication.getCloneParts()) {
-        if (!part.equals(duplication.getOriginPart())) {
+        if (!part.equals(originPart)) {
+          clonePartCount++;
+          if (clonePartCount > MAX_CLONE_PART_PER_GROUP) {
+            LOG.warn("Too many duplication references on file " + inputFile.relativePath() + " for block at line " + originPart.getStartLine() + ". Keep only the first "
+              + MAX_CLONE_PART_PER_GROUP + " references.");
+            break;
+          }
           ((DefaultDuplicationBuilder) builder).isDuplicatedBy(part.getResourceId(), part.getStartLine(), part.getEndLine());
         }
       }
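
The change applies a straightforward capping pattern: count entries as they are emitted and stop with a single warning once a threshold is crossed, so oversized duplication data is dropped instead of persisted. A minimal standalone sketch of that pattern, using hypothetical Group/Part records in place of the real CloneGroup/ClonePart types from sonar-duplications:

import java.util.List;

public class TruncationSketch {

  private static final int MAX_GROUPS = 100;
  private static final int MAX_PARTS = 100;

  // Hypothetical stand-ins for CloneGroup/ClonePart (records need Java 16+).
  record Part(String resourceId, int startLine, int endLine) {}
  record Group(Part origin, List<Part> parts) {}

  static void report(String file, List<Group> groups) {
    int groupCount = 0;
    for (Group group : groups) {
      // Increment first, then compare, matching the commit's semantics.
      if (++groupCount > MAX_GROUPS) {
        System.out.println("Too many duplication groups on file " + file
          + ". Keeping only the first " + MAX_GROUPS + " groups.");
        break; // warn once, drop the rest
      }
      int partCount = 0;
      for (Part part : group.parts()) {
        if (part.equals(group.origin())) {
          continue; // the origin block is reported separately, not as a reference
        }
        if (++partCount > MAX_PARTS) {
          System.out.println("Too many duplication references on file " + file
            + ". Keeping only the first " + MAX_PARTS + " references.");
          break;
        }
        // Here the real code records the reference via
        // DefaultDuplicationBuilder#isDuplicatedBy.
      }
    }
  }
}

Because each loop breaks immediately after warning, a pathological file logs at most one message per cap. Truncation happens only at report time, so the duplicated_blocks measure is unaffected, which is exactly what the medium test below asserts: 10000 duplicated blocks are still measured, but the stored clone group keeps only the first 100 references.
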
index 1344179bfb21b7692e822233b4c95cf018dace30..99da5425bd0d1c4c66c63763311fe8acd7bed8a4 100644 (file)
@@ -29,6 +29,7 @@ import org.junit.rules.ExpectedException;
 import org.junit.rules.TemporaryFolder;
 import org.sonar.api.batch.fs.InputFile;
 import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
+import org.sonar.api.batch.sensor.measure.Measure;
 import org.sonar.batch.mediumtest.BatchMediumTester;
 import org.sonar.batch.mediumtest.BatchMediumTester.TaskResult;
 import org.sonar.plugins.cpd.CpdPlugin;
@@ -116,4 +117,36 @@ public class CpdMediumTest {
     assertThat(cloneGroup.originBlock().length()).isEqualTo(17);
   }
 
+  // SONAR-6000
+  @Test
+  public void truncateDuplication() throws IOException {
+    File srcDir = new File(baseDir, "src");
+    srcDir.mkdir();
+    String duplicatedStuff = "Sample xoo\nfoo\n";
+    int blockCount = 10000;
+    File xooFile1 = new File(srcDir, "sample.xoo");
+    for (int i = 0; i < blockCount; i++) {
+      FileUtils.write(xooFile1, duplicatedStuff, true);
+      FileUtils.write(xooFile1, "" + i, true);
+    }
+    TaskResult result = tester.newTask()
+      .properties(builder
+        .put("sonar.sources", "src")
+        .put("sonar.cpd.xoo.minimumTokens", "1")
+        .put("sonar.cpd.xoo.minimumLines", "1")
+        .build())
+      .start();
+    Measure duplicatedBlocks = null;
+    for (Measure m : result.measures()) {
+      if (m.metric().key().equals("duplicated_blocks")) {
+        duplicatedBlocks = m;
+      }
+    }
+    assertThat(duplicatedBlocks.value()).isEqualTo(blockCount);
+    List<DuplicationGroup> duplicationGroups = result.duplicationsFor(result.inputFiles().get(0));
+    assertThat(duplicationGroups).hasSize(1);
+    DuplicationGroup cloneGroup = duplicationGroups.get(0);
+    assertThat(cloneGroup.duplicates()).hasSize(100);
+  }
+
 }