author    | Freddy Mallet <freddy.mallet@gmail.com> | 2011-03-11 11:55:25 +0100
committer | Freddy Mallet <freddy.mallet@gmail.com> | 2011-03-11 12:26:02 +0100
commit    | 26803e55544df9e5c40998224c3b9f55247bc0d8 (patch)
tree      | b00ab7c4ef712f488e5f3063830100493a55c1d5 /plugins
parent    | e93f4cb61c1777fcb8d4befa99a6810a5e8930d6 (diff)
download  | sonarqube-26803e55544df9e5c40998224c3b9f55247bc0d8.tar.gz
          | sonarqube-26803e55544df9e5c40998224c3b9f55247bc0d8.zip
fix SONAR-2267 : The "duplicated blocks" measure on each file is not correct when a duplication involved more than two blocks
Diffstat (limited to 'plugins')
3 files changed, 21 insertions, 20 deletions
diff --git a/plugins/sonar-cpd-plugin/.gitignore b/plugins/sonar-cpd-plugin/.gitignore
new file mode 100644
index 00000000000..ea8c4bf7f35
--- /dev/null
+++ b/plugins/sonar-cpd-plugin/.gitignore
@@ -0,0 +1 @@
+/target
diff --git a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/CpdAnalyser.java b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/CpdAnalyser.java
index 7693706f382..aef9524bd93 100644
--- a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/CpdAnalyser.java
+++ b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/CpdAnalyser.java
@@ -81,7 +81,7 @@ public class CpdAnalyser {
          continue;
        }

-        processClassMeasure(duplicationsData, firstFile, firstLine, secondFile, secondLine, match.getLineCount());
+        processClassMeasure(duplicationsData, firstFile, firstLine, secondFile, secondLine, match.getLineCount(), match);
      }
    }
  }
@@ -92,21 +92,21 @@ public class CpdAnalyser {
  }

  private void processClassMeasure(Map<Resource, DuplicationsData> fileContainer, Resource file, int duplicationStartLine,
-      Resource targetFile, int targetDuplicationStartLine, int duplicatedLines) {
+      Resource targetFile, int targetDuplicationStartLine, int duplicatedLines, Match match) {
    if (file != null && targetFile != null) {
      DuplicationsData data = fileContainer.get(file);
      if (data == null) {
        data = new DuplicationsData(file, context);
        fileContainer.put(file, data);
      }
-      data.cumulate(targetFile, targetDuplicationStartLine, duplicationStartLine, duplicatedLines);
+      data.cumulate(targetFile, targetDuplicationStartLine, duplicationStartLine, duplicatedLines, match);
    }
  }

  private static final class DuplicationsData {

    protected Set<Integer> duplicatedLines = new HashSet<Integer>();
-    protected double duplicatedBlocks;
+    protected Set<Match> duplicatedBlocks = new HashSet<Match>();
    protected Resource resource;
    private SensorContext context;
    private List<StringBuilder> duplicationXMLEntries = new ArrayList<StringBuilder>();
@@ -116,11 +116,12 @@ public class CpdAnalyser {
      this.resource = resource;
    }

-    protected void cumulate(Resource targetResource, int targetDuplicationStartLine, int duplicationStartLine, int duplicatedLines) {
+    protected void cumulate(Resource targetResource, int targetDuplicationStartLine, int duplicationStartLine, int duplicatedLines,
+        Match match) {
      StringBuilder xml = new StringBuilder();
-      xml.append("<duplication lines=\"").append(duplicatedLines).append("\" start=\"").append(duplicationStartLine).append(
-          "\" target-start=\"").append(targetDuplicationStartLine).append("\" target-resource=\"").append(
-          context.saveResource(targetResource)).append("\"/>");
+      xml.append("<duplication lines=\"").append(duplicatedLines).append("\" start=\"").append(duplicationStartLine)
+          .append("\" target-start=\"").append(targetDuplicationStartLine).append("\" target-resource=\"")
+          .append(context.saveResource(targetResource)).append("\"/>");

      duplicationXMLEntries.add(xml);

@@ -128,13 +129,13 @@ public class CpdAnalyser {
      for (int duplicatedLine = duplicationStartLine; duplicatedLine < duplicationStartLine + duplicatedLines; duplicatedLine++) {
        this.duplicatedLines.add(duplicatedLine);
      }
-      this.duplicatedBlocks++;
+      duplicatedBlocks.add(match);
    }

    protected void saveUsing(SensorContext context) {
      context.saveMeasure(resource, CoreMetrics.DUPLICATED_FILES, 1d);
      context.saveMeasure(resource, CoreMetrics.DUPLICATED_LINES, (double) duplicatedLines.size());
-      context.saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, duplicatedBlocks);
+      context.saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, (double) duplicatedBlocks.size());
      context.saveMeasure(resource, new Measure(CoreMetrics.DUPLICATIONS_DATA, getDuplicationXMLData()));
    }

diff --git a/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdAnalyserTest.java b/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdAnalyserTest.java
index 1e387206823..a31b826a483 100644
--- a/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdAnalyserTest.java
+++ b/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdAnalyserTest.java
@@ -194,7 +194,7 @@ public class CpdAnalyserTest {
    cpdAnalyser.analyse(Arrays.asList(match).iterator());

    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 1d);
    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_LINES, 200d);
    verify(context).saveMeasure(
        eq(resource1),
@@ -205,7 +205,7 @@ public class CpdAnalyserTest {

    verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_FILES, 1d);
    verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_LINES, 200d);
-    verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_BLOCKS, 1d);
    verify(context).saveMeasure(
        eq(resource2),
        argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications>"
@@ -215,7 +215,7 @@ public class CpdAnalyserTest {

    verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_FILES, 1d);
    verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_LINES, 200d);
-    verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_BLOCKS, 1d);
    verify(context).saveMeasure(
        eq(resource3),
        argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications>"
@@ -225,7 +225,7 @@ public class CpdAnalyserTest {

    verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_LINES, 200d);
    verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_BLOCKS, 1d);
    verify(context).saveMeasure(
        eq(resource4),
        argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications>"
@@ -261,18 +261,17 @@ public class CpdAnalyserTest {
    cpdAnalyser.analyse(Arrays.asList(match1).iterator());

    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 2d);
    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_LINES, 400d);
+    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 1d);
    verify(context).saveMeasure(
        eq(resource1),
        argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications>"
-            + "<duplication lines=\"200\" start=\"5\" target-start=\"215\" target-resource=\"key1\"/>"
-            + "<duplication lines=\"200\" start=\"215\" target-start=\"5\" target-resource=\"key1\"/>"
-            + "</duplications>")));
-
+        + "<duplication lines=\"200\" start=\"5\" target-start=\"215\" target-resource=\"key1\"/>"
+        + "<duplication lines=\"200\" start=\"215\" target-start=\"5\" target-resource=\"key1\"/>" + "</duplications>")));
+
    verify(context, atLeastOnce()).saveResource(resource1);
  }
-
+
  private static TokenEntry createTokenEntry(String sourceId, int line) {
    TokenEntry entry = new TokenEntry(null, sourceId, line);
    entry.setHashCode(sourceId.hashCode() + line);
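
The effect of the change can be summarised in a small standalone sketch (illustration only: plain Java, no SonarQube or CPD types; the Object value and the three-iteration loop are stand-ins for one CPD Match that reaches the same file through three block pairs, as in the test above). The old field was a plain double bumped on every cumulate() call, so one duplication was counted once per pair; the new field is a Set, which keeps each Match only once.

import java.util.HashSet;
import java.util.Set;

// Standalone illustration of why DUPLICATED_BLOCKS drops from 3 to 1
// for a file reached three times by the same match.
public class DuplicatedBlocksSketch {

  public static void main(String[] args) {
    Object match = new Object();                    // stand-in for a single CPD Match

    double oldCount = 0;                            // old field: protected double duplicatedBlocks
    Set<Object> matches = new HashSet<Object>();    // new field: Set<Match> duplicatedBlocks

    // cumulate(...) is invoked once per block pair of the same match
    for (int pair = 0; pair < 3; pair++) {
      oldCount++;                                   // counts the same duplication three times
      matches.add(match);                           // the set keeps a single entry per match
    }

    System.out.println("old DUPLICATED_BLOCKS = " + oldCount);        // 3.0
    System.out.println("new DUPLICATED_BLOCKS = " + matches.size());  // 1
  }
}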