source.dussan.org Git - sonarqube.git/commitdiff
SONAR-7158 Time limit for calculation of duplications is ineffective
author    Duarte Meneses <duarte.meneses@sonarsource.com>
          Mon, 14 Mar 2016 12:21:26 +0000 (13:21 +0100)
committer Duarte Meneses <duarte.meneses@sonarsource.com>
          Tue, 15 Mar 2016 10:51:50 +0000 (11:51 +0100)
sonar-batch/src/main/java/org/sonar/batch/cpd/CpdExecutor.java
sonar-batch/src/main/java/org/sonar/batch/cpd/index/SonarDuplicationsIndex.java
sonar-duplications/src/main/java/org/sonar/duplications/index/CloneIndex.java
sonar-duplications/src/main/java/org/sonar/duplications/index/MemoryCloneIndex.java
sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java
sonar-duplications/src/test/java/org/sonar/duplications/index/PackedMemoryCloneIndexTest.java

diff --git a/sonar-batch/src/main/java/org/sonar/batch/cpd/CpdExecutor.java b/sonar-batch/src/main/java/org/sonar/batch/cpd/CpdExecutor.java
index 41486ed09e7e7c1fee39ab5e2c8da1e190635ac8..48ad1edb6824ec4b034257cf43d49029825f5828 100644
@@ -33,6 +33,7 @@ import org.sonar.batch.protocol.output.BatchReport;
 import org.sonar.batch.protocol.output.BatchReport.Duplicate;
 import org.sonar.batch.protocol.output.BatchReport.Duplication;
 import org.sonar.batch.report.ReportPublisher;
+import org.sonar.batch.util.ProgressReport;
 import org.sonar.duplications.block.Block;
 import org.sonar.duplications.detector.suffixtree.SuffixTreeCloneDetectionAlgorithm;
 import org.sonar.duplications.index.CloneGroup;
@@ -42,6 +43,12 @@ import org.sonar.duplications.index.PackedMemoryCloneIndex.ResourceBlocks;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 
 import static com.google.common.collect.FluentIterable.from;
 
@@ -52,6 +59,8 @@ import static com.google.common.collect.FluentIterable.from;
  */
 public class CpdExecutor {
   private static final Logger LOG = Loggers.get(CpdExecutor.class);
+  // timeout for the computation of duplicates in a file (seconds)
+  private static final int TIMEOUT = 5 * 60;
   static final int MAX_CLONE_GROUP_PER_FILE = 100;
   static final int MAX_CLONE_PART_PER_GROUP = 100;
 
@@ -59,24 +68,39 @@ public class CpdExecutor {
   private final ReportPublisher publisher;
   private final BatchComponentCache batchComponentCache;
   private final Settings settings;
+  private final ExecutorService executorService;
+  private final ProgressReport progressReport;
+  private int count;
+  private int total;
 
   public CpdExecutor(Settings settings, SonarDuplicationsIndex index, ReportPublisher publisher, BatchComponentCache batchComponentCache) {
     this.settings = settings;
     this.index = index;
     this.publisher = publisher;
     this.batchComponentCache = batchComponentCache;
+    this.executorService = Executors.newSingleThreadExecutor();
+    this.progressReport = new ProgressReport("CPD computation", TimeUnit.SECONDS.toMillis(10));
   }
 
   public void execute() {
-    Iterator<ResourceBlocks> it = index.iterator();
+    total = index.noResources();
+    progressReport.start(String.format("Calculating CPD for %d files", total));
+    try {
+      Iterator<ResourceBlocks> it = index.iterator();
 
-    while (it.hasNext()) {
-      ResourceBlocks resourceBlocks = it.next();
-      runCpdAnalysis(resourceBlocks.resourceId(), resourceBlocks.blocks());
+      while (it.hasNext()) {
+        ResourceBlocks resourceBlocks = it.next();
+        runCpdAnalysis(resourceBlocks.resourceId(), resourceBlocks.blocks());
+        count++;
+      }
+      progressReport.stop("CPD calculation finished");
+    } catch (Exception e) {
+      progressReport.stop("");
+      throw e;
     }
   }
 
-  private void runCpdAnalysis(String resource, Collection<Block> fileBlocks) {
+  private void runCpdAnalysis(String resource, final Collection<Block> fileBlocks) {
     LOG.debug("Detection of duplications for {}", resource);
 
     BatchComponent component = batchComponentCache.get(resource);
@@ -85,15 +109,29 @@ public class CpdExecutor {
       return;
     }
 
+    InputFile inputFile = (InputFile) component.inputComponent();
+    progressReport.message(String.format("%d/%d - current file: %s", count, total, inputFile));
+
     List<CloneGroup> duplications;
+    Future<List<CloneGroup>> futureResult = null;
     try {
-      duplications = SuffixTreeCloneDetectionAlgorithm.detect(index, fileBlocks);
+      futureResult = executorService.submit(new Callable<List<CloneGroup>>() {
+        @Override
+        public List<CloneGroup> call() throws Exception {
+          return SuffixTreeCloneDetectionAlgorithm.detect(index, fileBlocks);
+        }
+      });
+      duplications = futureResult.get(TIMEOUT, TimeUnit.SECONDS);
+    } catch (TimeoutException e) {
+      LOG.warn("Timeout during detection of duplications for " + inputFile, e);
+      if (futureResult != null) {
+        futureResult.cancel(true);
+      }
+      return;
     } catch (Exception e) {
-      throw new IllegalStateException("Fail during detection of duplication for " + resource, e);
+      throw new IllegalStateException("Fail during detection of duplication for " + inputFile, e);
     }
 
-    InputFile inputFile = (InputFile) component.inputComponent();
-
     List<CloneGroup> filtered;
     if (!"java".equalsIgnoreCase(inputFile.language())) {
       Predicate<CloneGroup> minimumTokensPredicate = DuplicationPredicates.numberOfUnitsNotLessThan(getMinimumTokens(inputFile.language()));
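
The heart of this fix is the pattern visible in runCpdAnalysis above: wrap the detection in a Callable, submit it to a single-thread ExecutorService, wait with Future.get(TIMEOUT, TimeUnit.SECONDS), and cancel(true) on timeout so the runaway computation is interrupted instead of blocking the analysis indefinitely. Below is a minimal, self-contained sketch of that pattern; TimeoutPatternSketch and slowDetection are hypothetical names standing in for the real SonarQube classes, not actual APIs.

import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class TimeoutPatternSketch {

  // One worker thread, as in CpdExecutor: files are analyzed sequentially,
  // but each analysis can be abandoned independently on timeout.
  private static final ExecutorService EXECUTOR = Executors.newSingleThreadExecutor();
  private static final int TIMEOUT = 5 * 60; // seconds, same value as the commit

  // Hypothetical stand-in for SuffixTreeCloneDetectionAlgorithm.detect(index, fileBlocks)
  static List<String> slowDetection() throws InterruptedException {
    Thread.sleep(1000); // simulate CPU-bound clone detection
    return Collections.singletonList("clone-group");
  }

  public static void main(String[] args) throws Exception {
    Future<List<String>> future = EXECUTOR.submit(new Callable<List<String>>() {
      @Override
      public List<String> call() throws Exception {
        return slowDetection();
      }
    });
    try {
      // Wait at most TIMEOUT seconds; this bounded get() is what makes the limit effective.
      List<String> duplications = future.get(TIMEOUT, TimeUnit.SECONDS);
      System.out.println("Detected: " + duplications);
    } catch (TimeoutException e) {
      // Interrupt the worker and skip this file instead of hanging the batch.
      future.cancel(true);
      System.out.println("Detection timed out, file skipped");
    } finally {
      EXECUTOR.shutdown();
    }
  }
}

Note that cancel(true) only delivers an interrupt to the worker thread; the computation actually stops early only if it checks the interrupt flag or calls an interruptible method, otherwise the thread keeps running in the background while the caller moves on.
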
diff --git a/sonar-batch/src/main/java/org/sonar/batch/cpd/index/SonarDuplicationsIndex.java b/sonar-batch/src/main/java/org/sonar/batch/cpd/index/SonarDuplicationsIndex.java
index e843ed8e0cf0dcf24c0fd721d53affe31abe288e..b9c904bbe01cfccfbd6e3713d873ef88453c5d7d 100644
@@ -102,5 +102,10 @@ public class SonarDuplicationsIndex extends AbstractCloneIndex {
   public Iterator<ResourceBlocks> iterator() {
     return mem.iterator();
   }
+  
+  @Override
+  public int noResources() {
+    return mem.noResources();
+  }
 
 }
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/index/CloneIndex.java b/sonar-duplications/src/main/java/org/sonar/duplications/index/CloneIndex.java
index e529a600f54bb3c209863d8f3abeaf11fb7e39c0..11cdc7e680f1384321aeb3d282576b6be2d59a05 100644
@@ -52,4 +52,6 @@ public interface CloneIndex {
    */
   Iterator<ResourceBlocks> iterator();
 
+  
+  int noResources();
 }
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/index/MemoryCloneIndex.java b/sonar-duplications/src/main/java/org/sonar/duplications/index/MemoryCloneIndex.java
index 0acb4edc0bb4f45d7861a11b318fdf9e720e5186..bbfff14680f5ff5672fad2e44d7bbef109d951f6 100644
@@ -54,4 +54,9 @@ public class MemoryCloneIndex implements CloneIndex {
     throw new UnsupportedOperationException();
   }
 
+  @Override
+  public int noResources() {
+    return byResource.keySet().size();
+  }
+
 }
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java b/sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java
index a0e43ffab6fb4223aae6f6bbfbe5bd671ebaad57..97a97d792c4fdf8904d9e4d7aadfb9ac1fe3359d 100644
@@ -120,7 +120,7 @@ public class PackedMemoryCloneIndex extends AbstractCloneIndex {
   private Block createBlock(int index, String resourceId, @Nullable ByteArray byteHash) {
     int offset = index * blockInts;
     ByteArray blockHash;
-    
+
     if (byteHash == null) {
       int[] hash = new int[hashInts];
       for (int j = 0; j < hashInts; j++) {
@@ -371,4 +371,22 @@ public class PackedMemoryCloneIndex extends AbstractCloneIndex {
     }
   };
 
+  /**
+   * Computation is O(N)
+   */
+  @Override
+  public int noResources() {
+    ensureSorted();
+    int count = 0;
+    String lastResource = null;
+
+    for (int i = 0; i < size; i++) {
+      String resource = resourceIds[resourceIdsIndex[i]];
+      if (resource != null && !resource.equals(lastResource)) {
+        count++;
+        lastResource = resource;
+      }
+    }
+    return count;
+  }
 }
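
noResources() can count resources in a single O(N) pass only because ensureSorted() orders the block entries by resource id, so all blocks of the same file are adjacent and each file shows up as exactly one run. Below is a toy illustration of the same counting trick over a plain sorted array instead of the packed index; DistinctCountSketch and countDistinctSorted are illustrative names, not SonarQube code.

import java.util.Arrays;

public class DistinctCountSketch {

  // Same idea as PackedMemoryCloneIndex.noResources(): in a sorted sequence,
  // counting transitions between adjacent values counts the distinct values.
  static int countDistinctSorted(String[] sorted) {
    int count = 0;
    String last = null;
    for (String resource : sorted) {
      if (resource != null && !resource.equals(last)) {
        count++;
        last = resource;
      }
    }
    return count;
  }

  public static void main(String[] args) {
    String[] resourceIds = {"b", "a", "c", "a", "c", "e", "c", "e"};
    Arrays.sort(resourceIds); // plays the role of ensureSorted()
    System.out.println(countDistinctSorted(resourceIds)); // prints 4 (a, b, c, e)
  }
}

MemoryCloneIndex can answer the same question directly from byResource.keySet().size() because it keeps a per-resource map; the packed index has no such map, hence the linear scan here.
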
diff --git a/sonar-duplications/src/test/java/org/sonar/duplications/index/PackedMemoryCloneIndexTest.java b/sonar-duplications/src/test/java/org/sonar/duplications/index/PackedMemoryCloneIndexTest.java
index d96f6ebeaf9cafb18d262f42442e4af52c47bf08..09ff7f055ccbadadb55687dab641dcb450202fd1 100644
@@ -54,6 +54,7 @@ public class PackedMemoryCloneIndexTest {
     index.insert(newBlock("e", 2));
     index.insert(newBlock("e", 3));
 
+    assertThat(index.noResources()).isEqualTo(5);
     assertThat(index.getBySequenceHash(new ByteArray(1L)).size(), is(5));
     assertThat(index.getBySequenceHash(new ByteArray(2L)).size(), is(2));
     assertThat(index.getBySequenceHash(new ByteArray(3L)).size(), is(1));