author    Duarte Meneses <duarte.meneses@sonarsource.com>  2018-10-16 15:43:25 +0200
committer SonarTech <sonartech@sonarsource.com>            2018-11-07 20:21:01 +0100
commit    1ff8a5c8a88f2b25cc20358e3e82aac9aa5aa1c6 (patch)
tree      f9f46bc4fbd171096803e5a6abefb903227795a5 /sonar-scanner-engine/src
parent    a99a45d00950139a735babffc2b203be0dce4266 (diff)
SONAR-11367 Write duplication for changed files in PR and SLB
Diffstat (limited to 'sonar-scanner-engine/src')
 sonar-scanner-engine/src/main/java/org/sonar/scanner/cpd/CpdExecutor.java               | 19
 sonar-scanner-engine/src/main/java/org/sonar/scanner/cpd/JavaCpdBlockIndexerSensor.java |  2
 sonar-scanner-engine/src/main/java/org/sonar/scanner/sensor/DefaultSensorContext.java   |  2
 sonar-scanner-engine/src/test/java/org/sonar/scanner/cpd/CpdExecutorTest.java           | 91
 4 files changed, 54 insertions(+), 60 deletions(-)
diff --git a/sonar-scanner-engine/src/main/java/org/sonar/scanner/cpd/CpdExecutor.java b/sonar-scanner-engine/src/main/java/org/sonar/scanner/cpd/CpdExecutor.java
index d4c2e40041d..878cf807324 100644
--- a/sonar-scanner-engine/src/main/java/org/sonar/scanner/cpd/CpdExecutor.java
+++ b/sonar-scanner-engine/src/main/java/org/sonar/scanner/cpd/CpdExecutor.java
@@ -46,7 +46,6 @@ import org.sonar.scanner.protocol.output.ScannerReport;
import org.sonar.scanner.protocol.output.ScannerReport.Duplicate;
import org.sonar.scanner.protocol.output.ScannerReport.Duplication;
import org.sonar.scanner.report.ReportPublisher;
-import org.sonar.scanner.scan.branch.BranchConfiguration;
import org.sonar.scanner.scan.filesystem.InputComponentStore;
import org.sonar.scanner.util.ProgressReport;
@@ -67,25 +66,18 @@ public class CpdExecutor {
private final InputComponentStore componentStore;
private final ProgressReport progressReport;
private final CpdSettings settings;
- private final BranchConfiguration branchConfiguration;
private int count;
private int total;
- public CpdExecutor(CpdSettings settings, SonarCpdBlockIndex index, ReportPublisher publisher, InputComponentStore inputComponentCache,
- BranchConfiguration branchConfiguration) {
+ public CpdExecutor(CpdSettings settings, SonarCpdBlockIndex index, ReportPublisher publisher, InputComponentStore inputComponentCache) {
this.settings = settings;
this.index = index;
this.publisher = publisher;
this.componentStore = inputComponentCache;
- this.branchConfiguration = branchConfiguration;
this.progressReport = new ProgressReport("CPD computation", TimeUnit.SECONDS.toMillis(10));
}
public void execute() {
- if (branchConfiguration.isShortOrPullRequest()) {
- LOG.info("Skipping CPD calculation for short living branch and pull request");
- return;
- }
execute(TIMEOUT);
}
@@ -128,6 +120,10 @@ public class CpdExecutor {
}
InputFile inputFile = (InputFile) component;
+ if (inputFile.status() == InputFile.Status.SAME) {
+ return;
+ }
+
LOG.debug("Detection of duplications for {}", inputFile.absolutePath());
progressReport.message(String.format("%d/%d - current file: %s", count, total, inputFile.absolutePath()));
@@ -136,7 +132,7 @@ public class CpdExecutor {
try {
duplications = futureResult.get(timeout, TimeUnit.MILLISECONDS);
} catch (TimeoutException e) {
- LOG.warn("Timeout during detection of duplications for " + inputFile.absolutePath());
+ LOG.warn("Timeout during detection of duplications for {}", inputFile.absolutePath());
futureResult.cancel(true);
return;
} catch (Exception e) {
@@ -159,8 +155,7 @@ public class CpdExecutor {
@VisibleForTesting final void saveDuplications(final DefaultInputComponent component, List<CloneGroup> duplications) {
if (duplications.size() > MAX_CLONE_GROUP_PER_FILE) {
- LOG.warn("Too many duplication groups on file " + component + ". Keep only the first " + MAX_CLONE_GROUP_PER_FILE +
- " groups.");
+ LOG.warn("Too many duplication groups on file {}. Keep only the first {} groups.", component, MAX_CLONE_GROUP_PER_FILE);
}
Iterable<ScannerReport.Duplication> reportDuplications = duplications.stream()
.limit(MAX_CLONE_GROUP_PER_FILE)
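
Note on the CpdExecutor change above (not part of the patch): instead of skipping CPD entirely for short-lived branches and pull requests, the executor now runs on every analysis and skips only the individual files whose status is SAME, i.e. files unchanged with respect to the reference branch. A minimal sketch of that per-file guard, with an illustrative class and method name where the full executor is not shown:

import org.sonar.api.batch.fs.InputFile;

class PerFileCpdGuard {
  // Sketch only: decides whether duplication detection should run for a file.
  // In CpdExecutor this check sits at the start of the per-file analysis.
  boolean shouldComputeCpd(InputFile inputFile) {
    // Unchanged files (status SAME) keep the duplications computed on the
    // reference branch, so CPD is skipped for them.
    return inputFile.status() != InputFile.Status.SAME;
  }
}
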
diff --git a/sonar-scanner-engine/src/main/java/org/sonar/scanner/cpd/JavaCpdBlockIndexerSensor.java b/sonar-scanner-engine/src/main/java/org/sonar/scanner/cpd/JavaCpdBlockIndexerSensor.java
index b4e0f3f3006..8b2f2d011a7 100644
--- a/sonar-scanner-engine/src/main/java/org/sonar/scanner/cpd/JavaCpdBlockIndexerSensor.java
+++ b/sonar-scanner-engine/src/main/java/org/sonar/scanner/cpd/JavaCpdBlockIndexerSensor.java
@@ -87,7 +87,7 @@ public class JavaCpdBlockIndexerSensor implements Sensor {
for (InputFile inputFile : sourceFiles) {
LOG.debug("Populating index from {}", inputFile);
- String resourceEffectiveKey = ((DefaultInputFile) inputFile).key();
+ String resourceEffectiveKey = inputFile.key();
List<Statement> statements;
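
The JavaCpdBlockIndexerSensor change is a small cleanup: key() is available on the InputFile API type itself, so the cast to the scanner-internal DefaultInputFile is unnecessary. A hedged illustration of the resulting lookup, with the surrounding indexing logic omitted:

import org.sonar.api.batch.fs.InputFile;

class KeyLookupExample {
  // Sketch: the effective resource key is read straight from the API type,
  // without down-casting to DefaultInputFile.
  String resourceEffectiveKey(InputFile inputFile) {
    return inputFile.key();
  }
}
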
diff --git a/sonar-scanner-engine/src/main/java/org/sonar/scanner/sensor/DefaultSensorContext.java b/sonar-scanner-engine/src/main/java/org/sonar/scanner/sensor/DefaultSensorContext.java
index f79f09d4f2a..ed4b6859911 100644
--- a/sonar-scanner-engine/src/main/java/org/sonar/scanner/sensor/DefaultSensorContext.java
+++ b/sonar-scanner-engine/src/main/java/org/sonar/scanner/sensor/DefaultSensorContext.java
@@ -179,7 +179,7 @@ public class DefaultSensorContext implements SensorContext {
@Override
public NewCpdTokens newCpdTokens() {
- if (analysisMode.isIssues() || branchConfiguration.isShortOrPullRequest()) {
+ if (analysisMode.isIssues()) {
return NO_OP_NEW_CPD_TOKENS;
}
return new DefaultCpdTokens(config, sensorStorage);
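
With the branch condition removed from newCpdTokens(), sensors now receive a real NewCpdTokens instance on pull request and short-lived branch analyses as well (only issues mode still returns the no-op implementation), so CPD tokens for changed files reach the scanner report. A hedged sketch of how a sensor feeds tokens through this API; the file, offsets, and token image are made up for illustration:

import org.sonar.api.batch.fs.InputFile;
import org.sonar.api.batch.sensor.SensorContext;
import org.sonar.api.batch.sensor.cpd.NewCpdTokens;

class CpdTokenFeed {
  // Sketch: register one CPD token on the first line of a file.
  // On PR/SLB analyses this now stores real tokens instead of being a no-op.
  void saveTokens(SensorContext context, InputFile file) {
    NewCpdTokens cpdTokens = context.newCpdTokens()
      .onFile(file)
      .addToken(1, 0, 1, 3, "foo");
    cpdTokens.save();
  }
}
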
diff --git a/sonar-scanner-engine/src/test/java/org/sonar/scanner/cpd/CpdExecutorTest.java b/sonar-scanner-engine/src/test/java/org/sonar/scanner/cpd/CpdExecutorTest.java
index f685e4f22cf..732f36e2696 100644
--- a/sonar-scanner-engine/src/test/java/org/sonar/scanner/cpd/CpdExecutorTest.java
+++ b/sonar-scanner-engine/src/test/java/org/sonar/scanner/cpd/CpdExecutorTest.java
@@ -25,6 +25,9 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.function.Consumer;
+import javax.annotation.Nullable;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -65,15 +68,15 @@ public class CpdExecutorTest {
@Rule
public ExpectedException thrown = ExpectedException.none();
- private BranchConfiguration branchConfig;
private CpdExecutor executor;
- private CpdSettings settings;
- private SonarCpdBlockIndex index;
- private ReportPublisher publisher;
+ private CpdSettings settings = mock(CpdSettings.class);
+ private ReportPublisher publisher = mock(ReportPublisher.class);
+ private SonarCpdBlockIndex index = new SonarCpdBlockIndex(publisher, settings);
private ScannerReportReader reader;
private DefaultInputFile batchComponent1;
private DefaultInputFile batchComponent2;
private DefaultInputFile batchComponent3;
+ private DefaultInputFile batchComponent4;
private File baseDir;
private InputComponentStore componentStore;
@@ -81,50 +84,30 @@ public class CpdExecutorTest {
public void setUp() throws IOException {
File outputDir = temp.newFolder();
baseDir = temp.newFolder();
-
- branchConfig = mock(BranchConfiguration.class);
- settings = mock(CpdSettings.class);
- publisher = mock(ReportPublisher.class);
when(publisher.getWriter()).thenReturn(new ScannerReportWriter(outputDir));
- index = new SonarCpdBlockIndex(publisher, settings);
DefaultInputModule inputModule = TestInputFileBuilder.newDefaultInputModule("foo", baseDir);
componentStore = new InputComponentStore(inputModule, mock(BranchConfiguration.class));
- executor = new CpdExecutor(settings, index, publisher, componentStore, branchConfig);
+ executor = new CpdExecutor(settings, index, publisher, componentStore);
reader = new ScannerReportReader(outputDir);
batchComponent1 = createComponent("src/Foo.php", 5);
batchComponent2 = createComponent("src/Foo2.php", 5);
batchComponent3 = createComponent("src/Foo3.php", 5);
+ batchComponent4 = createComponent("src/Foo4.php", 5, f -> f.setStatus(InputFile.Status.SAME));
}
- @Test
- public void skipIfShortBranch() {
- when(branchConfig.isShortOrPullRequest()).thenReturn(true);
- index = mock(SonarCpdBlockIndex.class);
- executor = new CpdExecutor(settings, index, publisher, componentStore, branchConfig);
-
- executor.execute();
-
- verifyZeroInteractions(index);
- }
-
- @Test
- public void skip_if_pull_request() {
- when(branchConfig.isShortOrPullRequest()).thenReturn(true);
- index = mock(SonarCpdBlockIndex.class);
- executor = new CpdExecutor(settings, index, publisher, componentStore, branchConfig);
-
- executor.execute();
-
- verifyZeroInteractions(index);
+ private DefaultInputFile createComponent(String relativePath, int lines) {
+ return createComponent(relativePath, lines, f -> {
+ });
}
- private DefaultInputFile createComponent(String relativePath, int lines) {
- DefaultInputFile file = new TestInputFileBuilder("foo", relativePath)
+ private DefaultInputFile createComponent(String relativePath, int lines, Consumer<TestInputFileBuilder> config) {
+ TestInputFileBuilder fileBuilder = new TestInputFileBuilder("foo", relativePath)
.setModuleBaseDir(baseDir.toPath())
- .setLines(lines)
- .build();
+ .setLines(lines);
+ config.accept(fileBuilder);
+ DefaultInputFile file = fileBuilder.build();
componentStore.put(file);
return file;
}
@@ -137,7 +120,8 @@ public class CpdExecutorTest {
@Test
public void reportOneSimpleDuplicationBetweenTwoFiles() {
- List<CloneGroup> groups = Arrays.asList(newCloneGroup(new ClonePart(batchComponent1.key(), 0, 2, 4), new ClonePart(batchComponent2.key(), 0, 15, 17)));
+ List<CloneGroup> groups = Collections.singletonList(newCloneGroup(new ClonePart(batchComponent1.key(), 0, 2, 4),
+ new ClonePart(batchComponent2.key(), 0, 15, 17)));
executor.saveDuplications(batchComponent1, groups);
@@ -146,8 +130,17 @@ public class CpdExecutorTest {
}
@Test
- public void reportDuplicationOnSameFile() throws Exception {
- List<CloneGroup> groups = Arrays.asList(newCloneGroup(new ClonePart(batchComponent1.key(), 0, 5, 204), new ClonePart(batchComponent1.key(), 0, 215, 414)));
+ public void dontReportDuplicationOnUnmodifiedFileInSLB() {
+ ExecutorService executorService = mock(ExecutorService.class);
+ executor.runCpdAnalysis(executorService, batchComponent4.key(), Collections.emptyList(), 1000L);
+
+ readDuplications(batchComponent4, 0);
+ verifyZeroInteractions(executorService);
+ }
+
+ @Test
+ public void reportDuplicationOnSameFile() {
+ List<CloneGroup> groups = Collections.singletonList(newCloneGroup(new ClonePart(batchComponent1.key(), 0, 5, 204), new ClonePart(batchComponent1.key(), 0, 215, 414)));
executor.saveDuplications(batchComponent1, groups);
Duplication[] dups = readDuplications(1);
@@ -155,7 +148,7 @@ public class CpdExecutorTest {
}
@Test
- public void reportTooManyDuplicates() throws Exception {
+ public void reportTooManyDuplicates() {
// 1 origin part + 101 duplicates = 102
List<ClonePart> parts = new ArrayList<>(CpdExecutor.MAX_CLONE_PART_PER_GROUP + 2);
for (int i = 0; i < CpdExecutor.MAX_CLONE_PART_PER_GROUP + 2; i++) {
@@ -191,8 +184,8 @@ public class CpdExecutorTest {
@Test
public void reportOneDuplicatedGroupInvolvingMoreThanTwoFiles() throws Exception {
- List<CloneGroup> groups = Arrays
- .asList(newCloneGroup(new ClonePart(batchComponent1.key(), 0, 5, 204), new ClonePart(batchComponent2.key(), 0, 15, 214), new ClonePart(batchComponent3.key(), 0, 25, 224)));
+ List<CloneGroup> groups = Collections.singletonList(newCloneGroup(new ClonePart(batchComponent1.key(), 0, 5, 204),
+ new ClonePart(batchComponent2.key(), 0, 15, 214), new ClonePart(batchComponent3.key(), 0, 25, 224)));
executor.saveDuplications(batchComponent1, groups);
Duplication[] dups = readDuplications(1);
@@ -204,8 +197,10 @@ public class CpdExecutorTest {
@Test
public void reportTwoDuplicatedGroupsInvolvingThreeFiles() throws Exception {
List<CloneGroup> groups = Arrays.asList(
- newCloneGroup(new ClonePart(batchComponent1.key(), 0, 5, 204), new ClonePart(batchComponent2.key(), 0, 15, 214)),
- newCloneGroup(new ClonePart(batchComponent1.key(), 0, 15, 214), new ClonePart(batchComponent3.key(), 0, 15, 214)));
+ newCloneGroup(new ClonePart(batchComponent1.key(), 0, 5, 204),
+ new ClonePart(batchComponent2.key(), 0, 15, 214)),
+ newCloneGroup(new ClonePart(batchComponent1.key(), 0, 15, 214),
+ new ClonePart(batchComponent3.key(), 0, 15, 214)));
executor.saveDuplications(batchComponent1, groups);
Duplication[] dups = readDuplications(2);
@@ -234,7 +229,7 @@ public class CpdExecutorTest {
.setBlockHash(new ByteArray("abcd1234".getBytes()))
.build());
}
- index.insert((InputFile) component, blocks);
+ index.insert(component, blocks);
}
executor.execute(1);
@@ -247,9 +242,13 @@ public class CpdExecutorTest {
}
private Duplication[] readDuplications(int expected) {
- assertThat(reader.readComponentDuplications(batchComponent1.batchId())).hasSize(expected);
+ return readDuplications(batchComponent1, expected);
+ }
+
+ private Duplication[] readDuplications(DefaultInputFile file, int expected) {
+ assertThat(reader.readComponentDuplications(file.batchId())).hasSize(expected);
Duplication[] duplications = new Duplication[expected];
- CloseableIterator<Duplication> dups = reader.readComponentDuplications(batchComponent1.batchId());
+ CloseableIterator<Duplication> dups = reader.readComponentDuplications(file.batchId());
for (int i = 0; i < expected; i++) {
duplications[i] = dups.next();
@@ -270,7 +269,7 @@ public class CpdExecutorTest {
assertThat(d.getDuplicateList()).hasSize(numDuplicates);
}
- private void assertDuplication(Duplication d, int originStartLine, int originEndLine, Integer otherFileRef, int rangeStartLine, int rangeEndLine) {
+ private void assertDuplication(Duplication d, int originStartLine, int originEndLine, @Nullable Integer otherFileRef, int rangeStartLine, int rangeEndLine) {
assertThat(d.getOriginPosition().getStartLine()).isEqualTo(originStartLine);
assertThat(d.getOriginPosition().getEndLine()).isEqualTo(originEndLine);
assertThat(d.getDuplicateList()).hasSize(1);
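
The test changes mirror the production code: the two branch-level skip tests are replaced by a per-file test (dontReportDuplicationOnUnmodifiedFileInSLB), and createComponent gains a Consumer<TestInputFileBuilder> hook so a test can mark a file as unchanged before it is built. A small sketch of that customizer pattern, assuming the same helper shape as in the diff (module base dir setup omitted):

import java.util.function.Consumer;
import org.sonar.api.batch.fs.InputFile;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.batch.fs.internal.TestInputFileBuilder;

class ComponentFactorySketch {
  // Sketch of the builder-customizer used by the test helper: callers may
  // tweak the TestInputFileBuilder (e.g. set the file status) before build().
  DefaultInputFile createComponent(String relativePath, int lines, Consumer<TestInputFileBuilder> config) {
    TestInputFileBuilder fileBuilder = new TestInputFileBuilder("foo", relativePath)
      .setLines(lines);
    config.accept(fileBuilder);
    return fileBuilder.build();
  }

  DefaultInputFile unchangedFile() {
    // A file flagged as unchanged (status SAME) is skipped by CpdExecutor.
    return createComponent("src/Foo4.php", 5, f -> f.setStatus(InputFile.Status.SAME));
  }
}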