source.dussan.org Git - sonarqube.git/commitdiff
fix SONAR-2267 : The "duplicated blocks" measure on each file is not correct when...
author: Freddy Mallet <freddy.mallet@gmail.com>
Fri, 11 Mar 2011 10:55:25 +0000 (11:55 +0100)
committer: Freddy Mallet <freddy.mallet@gmail.com>
Fri, 11 Mar 2011 11:26:02 +0000 (12:26 +0100)
plugins/sonar-cpd-plugin/.gitignore [new file with mode: 0644]
plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/CpdAnalyser.java
plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdAnalyserTest.java

diff --git a/plugins/sonar-cpd-plugin/.gitignore b/plugins/sonar-cpd-plugin/.gitignore
new file mode 100644 (file)
index 0000000..ea8c4bf
--- /dev/null
@@ -0,0 +1 @@
+/target
index 7693706f382eeb28e8ebd8e976b005c7ac858105..aef9524bd938eddb82d70ad0760d836e537b1927 100644 (file)
@@ -81,7 +81,7 @@ public class CpdAnalyser {
             continue;
           }
 
-          processClassMeasure(duplicationsData, firstFile, firstLine, secondFile, secondLine, match.getLineCount());
+          processClassMeasure(duplicationsData, firstFile, firstLine, secondFile, secondLine, match.getLineCount(), match);
         }
       }
     }
@@ -92,21 +92,21 @@ public class CpdAnalyser {
   }
 
   private void processClassMeasure(Map<Resource, DuplicationsData> fileContainer, Resource file, int duplicationStartLine,
-      Resource targetFile, int targetDuplicationStartLine, int duplicatedLines) {
+      Resource targetFile, int targetDuplicationStartLine, int duplicatedLines, Match match) {
     if (file != null && targetFile != null) {
       DuplicationsData data = fileContainer.get(file);
       if (data == null) {
         data = new DuplicationsData(file, context);
         fileContainer.put(file, data);
       }
-      data.cumulate(targetFile, targetDuplicationStartLine, duplicationStartLine, duplicatedLines);
+      data.cumulate(targetFile, targetDuplicationStartLine, duplicationStartLine, duplicatedLines, match);
     }
   }
 
   private static final class DuplicationsData {
 
     protected Set<Integer> duplicatedLines = new HashSet<Integer>();
-    protected double duplicatedBlocks;
+    protected Set<Match> duplicatedBlocks = new HashSet<Match>();
     protected Resource resource;
     private SensorContext context;
     private List<StringBuilder> duplicationXMLEntries = new ArrayList<StringBuilder>();
@@ -116,11 +116,12 @@ public class CpdAnalyser {
       this.resource = resource;
     }
 
-    protected void cumulate(Resource targetResource, int targetDuplicationStartLine, int duplicationStartLine, int duplicatedLines) {
+    protected void cumulate(Resource targetResource, int targetDuplicationStartLine, int duplicationStartLine, int duplicatedLines,
+        Match match) {
       StringBuilder xml = new StringBuilder();
-      xml.append("<duplication lines=\"").append(duplicatedLines).append("\" start=\"").append(duplicationStartLine).append(
-          "\" target-start=\"").append(targetDuplicationStartLine).append("\" target-resource=\"").append(
-          context.saveResource(targetResource)).append("\"/>");
+      xml.append("<duplication lines=\"").append(duplicatedLines).append("\" start=\"").append(duplicationStartLine)
+          .append("\" target-start=\"").append(targetDuplicationStartLine).append("\" target-resource=\"")
+          .append(context.saveResource(targetResource)).append("\"/>");
 
       duplicationXMLEntries.add(xml);
 
@@ -128,13 +129,13 @@ public class CpdAnalyser {
       for (int duplicatedLine = duplicationStartLine; duplicatedLine < duplicationStartLine + duplicatedLines; duplicatedLine++) {
         this.duplicatedLines.add(duplicatedLine);
       }
-      this.duplicatedBlocks++;
+      duplicatedBlocks.add(match);
     }
 
     protected void saveUsing(SensorContext context) {
       context.saveMeasure(resource, CoreMetrics.DUPLICATED_FILES, 1d);
       context.saveMeasure(resource, CoreMetrics.DUPLICATED_LINES, (double) duplicatedLines.size());
-      context.saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, duplicatedBlocks);
+      context.saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, (double) duplicatedBlocks.size());
       context.saveMeasure(resource, new Measure(CoreMetrics.DUPLICATIONS_DATA, getDuplicationXMLData()));
     }
 
index 1e3872068230cfbc505ee1a997371d663dc62927..a31b826a483933a456d8d3e53a4f593e675ef2eb 100644 (file)
@@ -194,7 +194,7 @@ public class CpdAnalyserTest {
     cpdAnalyser.analyse(Arrays.asList(match).iterator());
 
     verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 1d);
     verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_LINES, 200d);
     verify(context).saveMeasure(
         eq(resource1),
@@ -205,7 +205,7 @@ public class CpdAnalyserTest {
 
     verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_FILES, 1d);
     verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_LINES, 200d);
-    verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_BLOCKS, 1d);
     verify(context).saveMeasure(
         eq(resource2),
         argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications>"
@@ -215,7 +215,7 @@ public class CpdAnalyserTest {
 
     verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_FILES, 1d);
     verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_LINES, 200d);
-    verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_BLOCKS, 1d);
     verify(context).saveMeasure(
         eq(resource3),
         argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications>"
@@ -225,7 +225,7 @@ public class CpdAnalyserTest {
 
     verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_LINES, 200d);
     verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_BLOCKS, 1d);
     verify(context).saveMeasure(
         eq(resource4),
         argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications>"
@@ -261,18 +261,17 @@ public class CpdAnalyserTest {
     cpdAnalyser.analyse(Arrays.asList(match1).iterator());
 
     verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 2d);
     verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_LINES, 400d);
+    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 1d);
     verify(context).saveMeasure(
         eq(resource1),
         argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications>"
-            + "<duplication lines=\"200\" start=\"5\" target-start=\"215\" target-resource=\"key1\"/>" 
-            + "<duplication lines=\"200\" start=\"215\" target-start=\"5\" target-resource=\"key1\"/>"
-            + "</duplications>")));
-    
+            + "<duplication lines=\"200\" start=\"5\" target-start=\"215\" target-resource=\"key1\"/>"
+            + "<duplication lines=\"200\" start=\"215\" target-start=\"5\" target-resource=\"key1\"/>" + "</duplications>")));
+
     verify(context, atLeastOnce()).saveResource(resource1);
   }
-  
+
   private static TokenEntry createTokenEntry(String sourceId, int line) {
     TokenEntry entry = new TokenEntry(null, sourceId, line);
     entry.setHashCode(sourceId.hashCode() + line);