6 files changed, 79 insertions, 10 deletions
diff --git a/sonar-batch/src/main/java/org/sonar/batch/cpd/CpdExecutor.java b/sonar-batch/src/main/java/org/sonar/batch/cpd/CpdExecutor.java
index 41486ed09e7..48ad1edb682 100644
--- a/sonar-batch/src/main/java/org/sonar/batch/cpd/CpdExecutor.java
+++ b/sonar-batch/src/main/java/org/sonar/batch/cpd/CpdExecutor.java
@@ -33,6 +33,7 @@ import org.sonar.batch.protocol.output.BatchReport;
 import org.sonar.batch.protocol.output.BatchReport.Duplicate;
 import org.sonar.batch.protocol.output.BatchReport.Duplication;
 import org.sonar.batch.report.ReportPublisher;
+import org.sonar.batch.util.ProgressReport;
 import org.sonar.duplications.block.Block;
 import org.sonar.duplications.detector.suffixtree.SuffixTreeCloneDetectionAlgorithm;
 import org.sonar.duplications.index.CloneGroup;
@@ -42,6 +43,12 @@ import org.sonar.duplications.index.PackedMemoryCloneIndex.ResourceBlocks;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 
 import static com.google.common.collect.FluentIterable.from;
 
@@ -52,6 +59,8 @@ import static com.google.common.collect.FluentIterable.from;
  */
 public class CpdExecutor {
   private static final Logger LOG = Loggers.get(CpdExecutor.class);
+  // timeout for the computation of duplicates in a file (seconds)
+  private static final int TIMEOUT = 5 * 60;
   static final int MAX_CLONE_GROUP_PER_FILE = 100;
   static final int MAX_CLONE_PART_PER_GROUP = 100;
 
@@ -59,24 +68,39 @@ public class CpdExecutor {
   private final ReportPublisher publisher;
   private final BatchComponentCache batchComponentCache;
   private final Settings settings;
+  private final ExecutorService executorService;
+  private final ProgressReport progressReport;
+  private int count;
+  private int total;
 
   public CpdExecutor(Settings settings, SonarDuplicationsIndex index, ReportPublisher publisher, BatchComponentCache batchComponentCache) {
     this.settings = settings;
     this.index = index;
     this.publisher = publisher;
     this.batchComponentCache = batchComponentCache;
+    this.executorService = Executors.newSingleThreadExecutor();
+    this.progressReport = new ProgressReport("CPD computation", TimeUnit.SECONDS.toMillis(10));
   }
 
   public void execute() {
-    Iterator<ResourceBlocks> it = index.iterator();
+    total = index.noResources();
+    progressReport.start(String.format("Calculating CPD for %d files", total));
+    try {
+      Iterator<ResourceBlocks> it = index.iterator();
 
-    while (it.hasNext()) {
-      ResourceBlocks resourceBlocks = it.next();
-      runCpdAnalysis(resourceBlocks.resourceId(), resourceBlocks.blocks());
+      while (it.hasNext()) {
+        ResourceBlocks resourceBlocks = it.next();
+        runCpdAnalysis(resourceBlocks.resourceId(), resourceBlocks.blocks());
+        count++;
+      }
+      progressReport.stop("CPD calculation finished");
+    } catch (Exception e) {
+      progressReport.stop("");
+      throw e;
     }
   }
 
-  private void runCpdAnalysis(String resource, Collection<Block> fileBlocks) {
+  private void runCpdAnalysis(String resource, final Collection<Block> fileBlocks) {
     LOG.debug("Detection of duplications for {}", resource);
 
     BatchComponent component = batchComponentCache.get(resource);
@@ -85,15 +109,29 @@ public class CpdExecutor {
       return;
     }
 
+    InputFile inputFile = (InputFile) component.inputComponent();
+    progressReport.message(String.format("%d/%d - current file: %s", count, total, inputFile));
+
     List<CloneGroup> duplications;
+    Future<List<CloneGroup>> futureResult = null;
     try {
-      duplications = SuffixTreeCloneDetectionAlgorithm.detect(index, fileBlocks);
+      futureResult = executorService.submit(new Callable<List<CloneGroup>>() {
+        @Override
+        public List<CloneGroup> call() throws Exception {
+          return SuffixTreeCloneDetectionAlgorithm.detect(index, fileBlocks);
+        }
+      });
+      duplications = futureResult.get(TIMEOUT, TimeUnit.SECONDS);
+    } catch (TimeoutException e) {
+      LOG.warn("Timeout during detection of duplications for " + inputFile, e);
+      if (futureResult != null) {
+        futureResult.cancel(true);
+      }
+      return;
     } catch (Exception e) {
-      throw new IllegalStateException("Fail during detection of duplication for " + resource, e);
+      throw new IllegalStateException("Fail during detection of duplication for " + inputFile, e);
     }
 
-    InputFile inputFile = (InputFile) component.inputComponent();
-
     List<CloneGroup> filtered;
     if (!"java".equalsIgnoreCase(inputFile.language())) {
       Predicate<CloneGroup> minimumTokensPredicate = DuplicationPredicates.numberOfUnitsNotLessThan(getMinimumTokens(inputFile.language()));
diff --git a/sonar-batch/src/main/java/org/sonar/batch/cpd/index/SonarDuplicationsIndex.java b/sonar-batch/src/main/java/org/sonar/batch/cpd/index/SonarDuplicationsIndex.java
index e843ed8e0cf..b9c904bbe01 100644
--- a/sonar-batch/src/main/java/org/sonar/batch/cpd/index/SonarDuplicationsIndex.java
+++ b/sonar-batch/src/main/java/org/sonar/batch/cpd/index/SonarDuplicationsIndex.java
@@ -102,5 +102,10 @@ public class SonarDuplicationsIndex extends AbstractCloneIndex {
   public Iterator<ResourceBlocks> iterator() {
     return mem.iterator();
   }
+
+  @Override
+  public int noResources() {
+    return mem.noResources();
+  }
 
 }
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/index/CloneIndex.java b/sonar-duplications/src/main/java/org/sonar/duplications/index/CloneIndex.java
index e529a600f54..11cdc7e680f 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/index/CloneIndex.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/index/CloneIndex.java
@@ -52,4 +52,6 @@
    */
   Iterator<ResourceBlocks> iterator();
 
+
+  int noResources();
 }
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/index/MemoryCloneIndex.java b/sonar-duplications/src/main/java/org/sonar/duplications/index/MemoryCloneIndex.java
index 0acb4edc0bb..bbfff14680f 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/index/MemoryCloneIndex.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/index/MemoryCloneIndex.java
@@ -54,4 +54,9 @@
     throw new UnsupportedOperationException();
   }
 
+  @Override
+  public int noResources() {
+    return byResource.keySet().size();
+  }
+
 }
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java b/sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java
index a0e43ffab6f..97a97d792c4 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java
@@ -120,7 +120,7 @@ public class PackedMemoryCloneIndex extends AbstractCloneIndex {
   private Block createBlock(int index, String resourceId, @Nullable ByteArray byteHash) {
     int offset = index * blockInts;
     ByteArray blockHash;
-    
+
     if (byteHash == null) {
       int[] hash = new int[hashInts];
       for (int j = 0; j < hashInts; j++) {
@@ -371,4 +371,22 @@ public class PackedMemoryCloneIndex extends AbstractCloneIndex {
     }
   };
 
+  @Override
+  /**
+   * Computation is O(N)
+   */
+  public int noResources() {
+    ensureSorted();
+    int count = 0;
+    String lastResource = null;
+
+    for (int i = 0; i < size; i++) {
+      String resource = resourceIds[resourceIdsIndex[i]];
+      if (resource != null && !resource.equals(lastResource)) {
+        count++;
+        lastResource = resource;
+      }
+    }
+    return count;
+  }
 }
diff --git a/sonar-duplications/src/test/java/org/sonar/duplications/index/PackedMemoryCloneIndexTest.java b/sonar-duplications/src/test/java/org/sonar/duplications/index/PackedMemoryCloneIndexTest.java
index d96f6ebeaf9..09ff7f055cc 100644
--- a/sonar-duplications/src/test/java/org/sonar/duplications/index/PackedMemoryCloneIndexTest.java
+++ b/sonar-duplications/src/test/java/org/sonar/duplications/index/PackedMemoryCloneIndexTest.java
@@ -54,6 +54,7 @@ public class PackedMemoryCloneIndexTest {
     index.insert(newBlock("e", 2));
     index.insert(newBlock("e", 3));
 
+    assertThat(index.noResources()).isEqualTo(5);
    assertThat(index.getBySequenceHash(new ByteArray(1L)).size(), is(5));
    assertThat(index.getBySequenceHash(new ByteArray(2L)).size(), is(2));
    assertThat(index.getBySequenceHash(new ByteArray(3L)).size(), is(1));
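The core of this change is how runCpdAnalysis() bounds duplication detection for a single file: the detection runs in a single-thread ExecutorService, the result is fetched with Future.get(TIMEOUT, TimeUnit.SECONDS), and on TimeoutException the task is cancelled with an interrupt and the file is skipped. The standalone sketch below illustrates the same java.util.concurrent pattern outside SonarQube; the class and method names (BoundedComputation, computeWithTimeout, expensiveDetection) are hypothetical and do not exist in the patch.

import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

// Illustrative only: mirrors the submit / get(timeout) / cancel(true) pattern used in CpdExecutor.
public class BoundedComputation {

  // Same idea as the TIMEOUT constant added in the patch: budget per input, in seconds.
  private static final int TIMEOUT = 5 * 60;

  private final ExecutorService executorService = Executors.newSingleThreadExecutor();

  // Runs a potentially long computation, giving up (and interrupting it) after TIMEOUT seconds.
  public List<String> computeWithTimeout(final String input) {
    Future<List<String>> future = executorService.submit(new Callable<List<String>>() {
      @Override
      public List<String> call() throws Exception {
        return expensiveDetection(input); // stand-in for SuffixTreeCloneDetectionAlgorithm.detect(...)
      }
    });
    try {
      return future.get(TIMEOUT, TimeUnit.SECONDS);
    } catch (TimeoutException e) {
      future.cancel(true); // delivers an interrupt to the worker thread
      return Collections.emptyList();
    } catch (Exception e) {
      throw new IllegalStateException("Computation failed for " + input, e);
    }
  }

  // Hypothetical placeholder for the real work; sleeps briefly so the example is runnable.
  private List<String> expensiveDetection(String input) throws InterruptedException {
    Thread.sleep(100);
    return Collections.singletonList(input);
  }

  public void shutdown() {
    executorService.shutdown();
  }

  public static void main(String[] args) {
    BoundedComputation c = new BoundedComputation();
    System.out.println(c.computeWithTimeout("example"));
    c.shutdown();
  }
}

Note that Future.cancel(true) only delivers an interrupt: if the underlying computation never checks its interrupted status, the single worker thread stays busy and later submissions queue behind it, so the computation itself should remain interruptible.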