From 26803e55544df9e5c40998224c3b9f55247bc0d8 Mon Sep 17 00:00:00 2001
From: Freddy Mallet
Date: Fri, 11 Mar 2011 11:55:25 +0100
Subject: [PATCH] fix SONAR-2267 : The "duplicated blocks" measure on each file is not correct when a duplication involved more than two blocks

---
 plugins/sonar-cpd-plugin/.gitignore          |  1 +
 .../org/sonar/plugins/cpd/CpdAnalyser.java   | 21 ++++++++++---------
 .../sonar/plugins/cpd/CpdAnalyserTest.java   | 19 ++++++++---------
 3 files changed, 21 insertions(+), 20 deletions(-)
 create mode 100644 plugins/sonar-cpd-plugin/.gitignore

diff --git a/plugins/sonar-cpd-plugin/.gitignore b/plugins/sonar-cpd-plugin/.gitignore
new file mode 100644
index 00000000000..ea8c4bf7f35
--- /dev/null
+++ b/plugins/sonar-cpd-plugin/.gitignore
@@ -0,0 +1 @@
+/target
diff --git a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/CpdAnalyser.java b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/CpdAnalyser.java
index 7693706f382..aef9524bd93 100644
--- a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/CpdAnalyser.java
+++ b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/CpdAnalyser.java
@@ -81,7 +81,7 @@ public class CpdAnalyser {
           continue;
         }
 
-        processClassMeasure(duplicationsData, firstFile, firstLine, secondFile, secondLine, match.getLineCount());
+        processClassMeasure(duplicationsData, firstFile, firstLine, secondFile, secondLine, match.getLineCount(), match);
       }
     }
   }
@@ -92,21 +92,21 @@ public class CpdAnalyser {
   }
 
   private void processClassMeasure(Map fileContainer, Resource file, int duplicationStartLine,
-      Resource targetFile, int targetDuplicationStartLine, int duplicatedLines) {
+      Resource targetFile, int targetDuplicationStartLine, int duplicatedLines, Match match) {
     if (file != null && targetFile != null) {
       DuplicationsData data = fileContainer.get(file);
       if (data == null) {
         data = new DuplicationsData(file, context);
         fileContainer.put(file, data);
       }
-      data.cumulate(targetFile, targetDuplicationStartLine, duplicationStartLine, duplicatedLines);
+      data.cumulate(targetFile, targetDuplicationStartLine, duplicationStartLine, duplicatedLines, match);
     }
   }
 
   private static final class DuplicationsData {
 
     protected Set duplicatedLines = new HashSet();
-    protected double duplicatedBlocks;
+    protected Set duplicatedBlocks = new HashSet();
     protected Resource resource;
     private SensorContext context;
     private List duplicationXMLEntries = new ArrayList();
@@ -116,11 +116,12 @@ public class CpdAnalyser {
       this.resource = resource;
     }
 
-    protected void cumulate(Resource targetResource, int targetDuplicationStartLine, int duplicationStartLine, int duplicatedLines) {
+    protected void cumulate(Resource targetResource, int targetDuplicationStartLine, int duplicationStartLine, int duplicatedLines,
+        Match match) {
      StringBuilder xml = new StringBuilder();
-      xml.append("");
+      xml.append("");
 
       duplicationXMLEntries.add(xml);
@@ -128,13 +129,13 @@ public class CpdAnalyser {
       for (int duplicatedLine = duplicationStartLine; duplicatedLine < duplicationStartLine + duplicatedLines; duplicatedLine++) {
         this.duplicatedLines.add(duplicatedLine);
       }
-      this.duplicatedBlocks++;
+      duplicatedBlocks.add(match);
     }
 
     protected void saveUsing(SensorContext context) {
       context.saveMeasure(resource, CoreMetrics.DUPLICATED_FILES, 1d);
       context.saveMeasure(resource, CoreMetrics.DUPLICATED_LINES, (double) duplicatedLines.size());
-      context.saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, duplicatedBlocks);
+      context.saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, (double) duplicatedBlocks.size());
       context.saveMeasure(resource, new Measure(CoreMetrics.DUPLICATIONS_DATA, getDuplicationXMLData()));
     }
 
diff --git a/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdAnalyserTest.java b/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdAnalyserTest.java
index 1e387206823..a31b826a483 100644
--- a/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdAnalyserTest.java
+++ b/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdAnalyserTest.java
@@ -194,7 +194,7 @@ public class CpdAnalyserTest {
     cpdAnalyser.analyse(Arrays.asList(match).iterator());
 
     verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 1d);
     verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_LINES, 200d);
     verify(context).saveMeasure(
         eq(resource1),
@@ -205,7 +205,7 @@ public class CpdAnalyserTest {
 
     verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_FILES, 1d);
     verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_LINES, 200d);
-    verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource3, CoreMetrics.DUPLICATED_BLOCKS, 1d);
     verify(context).saveMeasure(
         eq(resource2),
         argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, ""
@@ -215,7 +215,7 @@ public class CpdAnalyserTest {
 
     verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_FILES, 1d);
     verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_LINES, 200d);
-    verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource2, CoreMetrics.DUPLICATED_BLOCKS, 1d);
     verify(context).saveMeasure(
         eq(resource3),
         argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, ""
@@ -225,7 +225,7 @@ public class CpdAnalyserTest {
 
     verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_LINES, 200d);
     verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_BLOCKS, 3d);
+    verify(context).saveMeasure(resource4, CoreMetrics.DUPLICATED_BLOCKS, 1d);
     verify(context).saveMeasure(
         eq(resource4),
         argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, ""
@@ -261,18 +261,17 @@ public class CpdAnalyserTest {
     cpdAnalyser.analyse(Arrays.asList(match1).iterator());
 
     verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 2d);
     verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_LINES, 400d);
+    verify(context).saveMeasure(resource1, CoreMetrics.DUPLICATED_BLOCKS, 1d);
     verify(context).saveMeasure(
         eq(resource1),
         argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, ""
-            + ""
-            + ""
-            + "")));
-
+            + ""
+            + "" + "")));
+
     verify(context, atLeastOnce()).saveResource(resource1);
   }
-
+
   private static TokenEntry createTokenEntry(String sourceId, int line) {
     TokenEntry entry = new TokenEntry(null, sourceId, line);
     entry.setHashCode(sourceId.hashCode() + line);
-- 
2.39.5
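
For context, the sketch below illustrates the counting change this commit relies on: replacing a per-file-pair counter with a set keyed on the CPD match, so a match that involves more than two files still contributes exactly one duplicated block per file. It is a minimal, self-contained illustration only; the class, field, and method names are hypothetical stand-ins and do not mirror the real CpdAnalyser or PMD-CPD Match API.

import java.util.HashSet;
import java.util.Set;

// Hypothetical stand-in for the per-file accumulator (DuplicationsData in CpdAnalyser).
class FileDuplicationCounter {

  // Before the fix: a plain counter, bumped once per (file, otherFile) pair.
  // A single match involving N files therefore adds N-1 "blocks" to each file.
  private double counterBasedBlocks = 0;

  // After the fix: a set keyed on the match object, so the same match is
  // counted only once for this file, however many other files it touches.
  private final Set<Object> matchBasedBlocks = new HashSet<Object>();

  void cumulate(Object match) {
    counterBasedBlocks++;
    matchBasedBlocks.add(match);
  }

  double oldMeasure() {
    return counterBasedBlocks;
  }

  double newMeasure() {
    return matchBasedBlocks.size();
  }

  public static void main(String[] args) {
    Object match = new Object(); // one duplication match involving three files

    FileDuplicationCounter file1 = new FileDuplicationCounter();
    file1.cumulate(match); // pair file1 <-> file2
    file1.cumulate(match); // pair file1 <-> file3

    System.out.println("old DUPLICATED_BLOCKS for file1: " + file1.oldMeasure()); // 2.0 (inflated)
    System.out.println("new DUPLICATED_BLOCKS for file1: " + file1.newMeasure()); // 1.0 (one block)
  }
}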