*/
private static final int TIMEOUT = 5 * 60;
+ private static final int MAX_CLONE_GROUP_PER_FILE = 100;
+ private static final int MAX_CLONE_PART_PER_GROUP = 100;
+
private final IndexFactory indexFactory;
private final FileSystem fs;
private final Settings settings;
.setFromCore()
.save();
+ int cloneGroupCount = 0;
for (CloneGroup duplication : duplications) {
+ cloneGroupCount++;
+ if (cloneGroupCount > MAX_CLONE_GROUP_PER_FILE) {
+ LOG.warn("Too many duplication groups on file " + inputFile.relativePath() + ". Keep only the first " + MAX_CLONE_GROUP_PER_FILE + " groups.");
+ break;
+ }
NewDuplication builder = context.newDuplication();
ClonePart originPart = duplication.getOriginPart();
builder.originBlock(inputFile, originPart.getStartLine(), originPart.getEndLine());
+ int clonePartCount = 0;
for (ClonePart part : duplication.getCloneParts()) {
if (!part.equals(originPart)) {
+ clonePartCount++;
+ if (clonePartCount > MAX_CLONE_PART_PER_GROUP) {
+ LOG.warn("Too many duplication references on file " + inputFile.relativePath() + " for block at line " + originPart.getStartLine() + ". Keep only the first "
+ + MAX_CLONE_PART_PER_GROUP + " references.");
+ break;
+ }
((DefaultDuplication) builder).isDuplicatedBy(part.getResourceId(), part.getStartLine(), part.getEndLine());
}
}
import org.sonar.api.batch.fs.InputFile;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.batch.sensor.duplication.Duplication;
+import org.sonar.api.batch.sensor.measure.Measure;
+import org.sonar.api.batch.sensor.measure.internal.DefaultMeasure;
+import org.sonar.api.measures.CoreMetrics;
import org.sonar.batch.mediumtest.BatchMediumTester;
import org.sonar.batch.mediumtest.TaskResult;
import org.sonar.xoo.XooPlugin;
assertThat(cloneGroupFile2.duplicates().get(0).resourceKey()).isEqualTo(((DefaultInputFile) inputFile1).key());
}
+ // SONAR-6000
+ // Verifies that when a file contains a huge number of duplicated blocks,
+ // the reported clone references are truncated instead of being published
+ // in full (which would bloat the report).
+ @Test
+ public void truncateDuplication() throws IOException {
+ File srcDir = new File(baseDir, "src");
+ srcDir.mkdir();
+
+ String duplicatedStuff = "Sample xoo\nfoo\n";
+
+ // Append the duplicated snippet blockCount times, each followed by the
+ // loop index so consecutive repetitions stay distinct clone parts.
+ int blockCount = 10000;
+ File xooFile1 = new File(srcDir, "sample.xoo");
+ for (int i = 0; i < blockCount; i++) {
+ FileUtils.write(xooFile1, duplicatedStuff, true);
+ FileUtils.write(xooFile1, "" + i, true);
+ }
+
+ // Lower the CPD thresholds so every repeated block is detected as a clone.
+ TaskResult result = tester.newTask()
+ .properties(builder
+ .put("sonar.sources", "src")
+ .put("sonar.cpd.xoo.minimumTokens", "1")
+ .put("sonar.cpd.xoo.minimumLines", "1")
+ .build())
+ .start();
+
+ // Find the duplicated_blocks measure among all reported measures.
+ // NOTE(review): if the measure is absent, the dereference below throws an
+ // NPE instead of a clear assertion failure — an isNotNull check would
+ // make the failure mode more readable; confirm before changing.
+ Measure duplicatedBlocks = null;
+ for (Measure m : result.measures()) {
+ if (m.metric().key().equals("duplicated_blocks")) {
+ duplicatedBlocks = m;
+ }
+ }
+ assertThat(duplicatedBlocks.value()).isEqualTo(blockCount);
+
+ List<Duplication> duplicationGroups = result.duplicationsFor(result.inputFile("src/sample.xoo"));
+ assertThat(duplicationGroups).hasSize(1);
+
+ // All blocks are duplicated, but only 100 references are kept per group —
+ // presumably MAX_CLONE_PART_PER_GROUP from the publisher change in this
+ // patch; confirm against that constant.
+ Duplication cloneGroup = duplicationGroups.get(0);
+ assertThat(cloneGroup.duplicates()).hasSize(100);
+ }
+
@Test
public void testIntraFileDuplications() throws IOException {
File srcDir = new File(baseDir, "src");
assertThat(cloneGroup.duplicates().get(0).startLine()).isEqualTo(5);
assertThat(cloneGroup.duplicates().get(0).length()).isEqualTo(2);
- // assertThat(result.measures()).contains(new DefaultMeasure<String>()
- // .forMetric(CoreMetrics.DUPLICATION_LINES_DATA)
- // .onFile(inputFile)
- // .withValue("1=1;2=1;3=0;4=0;5=1;6=1;7=0"));
+ assertThat(result.measures()).contains(new DefaultMeasure<String>()
+ .forMetric(CoreMetrics.DUPLICATION_LINES_DATA)
+ .onFile(inputFile)
+ .withValue("1=1;2=1;3=0;4=0;5=1;6=1;7=0"));
}
}