source.dussan.org Git - sonarqube.git/commitdiff
SONAR-6993 Persist cpd text blocks into DUPLICATIONS_INDEX
authorJulien Lancelot <julien.lancelot@sonarsource.com>
Thu, 12 Nov 2015 08:53:35 +0000 (09:53 +0100)
committerJulien Lancelot <julien.lancelot@sonarsource.com>
Thu, 12 Nov 2015 10:01:30 +0000 (11:01 +0100)
server/sonar-server/src/main/java/org/sonar/server/computation/step/LoadCrossProjectDuplicationsRepositoryStep.java
server/sonar-server/src/main/java/org/sonar/server/computation/step/PersistCrossProjectDuplicationIndexStep.java [new file with mode: 0644]
server/sonar-server/src/main/java/org/sonar/server/computation/step/ReportComputationSteps.java
server/sonar-server/src/test/java/org/sonar/server/computation/step/LoadCrossProjectDuplicationsRepositoryStepTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/step/PersistCrossProjectDuplicationIndexStepTest.java [new file with mode: 0644]
sonar-db/src/main/java/org/sonar/db/duplication/DuplicationDao.java
sonar-db/src/test/java/org/sonar/db/duplication/DuplicationDaoTest.java

index b351cc6b3457762489c068d180741c4cd729a2ec..689e58b9b7c18c3b77d741e033e2afbc757b0875 100644 (file)
@@ -108,8 +108,13 @@ public class LoadCrossProjectDuplicationsRepositoryStep implements ComputationSt
 
       Collection<String> hashes = from(cpdTextBlocks).transform(CpdTextBlockToHash.INSTANCE).toList();
       List<DuplicationUnitDto> dtos = selectDuplicates(component, hashes);
+      if (dtos.isEmpty()) {
+        return;
+      }
+
       Collection<Block> duplicatedBlocks = from(dtos).transform(DtoToBlock.INSTANCE).toList();
       Collection<Block> originBlocks = from(cpdTextBlocks).transform(new CpdTextBlockToBlock(component.getKey())).toList();
+      LOGGER.trace("Found {} duplicated cpd blocks on file {}", duplicatedBlocks.size(), component.getKey());
 
       integrateCrossProjectDuplications.computeCpd(component, originBlocks, duplicatedBlocks);
     }
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/step/PersistCrossProjectDuplicationIndexStep.java b/server/sonar-server/src/main/java/org/sonar/server/computation/step/PersistCrossProjectDuplicationIndexStep.java
new file mode 100644 (file)
index 0000000..b391740
--- /dev/null
@@ -0,0 +1,115 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+
+package org.sonar.server.computation.step;
+
+import org.sonar.batch.protocol.output.BatchReport;
+import org.sonar.core.util.CloseableIterator;
+import org.sonar.db.DbClient;
+import org.sonar.db.DbSession;
+import org.sonar.db.duplication.DuplicationUnitDto;
+import org.sonar.server.computation.batch.BatchReportReader;
+import org.sonar.server.computation.component.Component;
+import org.sonar.server.computation.component.CrawlerDepthLimit;
+import org.sonar.server.computation.component.DbIdsRepository;
+import org.sonar.server.computation.component.DepthTraversalTypeAwareCrawler;
+import org.sonar.server.computation.component.TreeRootHolder;
+import org.sonar.server.computation.component.TypeAwareVisitorAdapter;
+import org.sonar.server.computation.duplication.CrossProjectDuplicationStatusHolder;
+
+import static org.sonar.server.computation.component.ComponentVisitor.Order.PRE_ORDER;
+
+/**
+ * Persist cross project duplications text blocks into DUPLICATIONS_INDEX table
+ */
+public class PersistCrossProjectDuplicationIndexStep implements ComputationStep {
+
+  private final DbClient dbClient;
+  private final TreeRootHolder treeRootHolder;
+  private final BatchReportReader reportReader;
+  private final DbIdsRepository dbIdsRepository;
+  private final CrossProjectDuplicationStatusHolder crossProjectDuplicationStatusHolder;
+
+  public PersistCrossProjectDuplicationIndexStep(DbClient dbClient, DbIdsRepository dbIdsRepository, TreeRootHolder treeRootHolder, BatchReportReader reportReader,
+    CrossProjectDuplicationStatusHolder crossProjectDuplicationStatusHolder) {
+    this.dbClient = dbClient;
+    this.treeRootHolder = treeRootHolder;
+    this.reportReader = reportReader;
+    this.dbIdsRepository = dbIdsRepository;
+    this.crossProjectDuplicationStatusHolder = crossProjectDuplicationStatusHolder;
+  }
+
+  /**
+   * When cross project duplication is enabled, crawls the component tree from
+   * the root and persists the cpd text blocks of every file. The session is
+   * committed in all cases (a no-op commit when the feature is disabled) and
+   * always closed via the finally block.
+   */
+  @Override
+  public void execute() {
+    // openSession(true): presumably opens a batch session for bulk inserts -- confirm DbClient contract
+    DbSession session = dbClient.openSession(true);
+    try {
+      if (crossProjectDuplicationStatusHolder.isEnabled()) {
+        Component project = treeRootHolder.getRoot();
+        long projectSnapshotId = dbIdsRepository.getSnapshotId(project);
+        new DepthTraversalTypeAwareCrawler(new DuplicationVisitor(session, projectSnapshotId)).visit(project);
+      }
+      session.commit();
+    } finally {
+      dbClient.closeSession(session);
+    }
+  }
+
+  /**
+   * Visits components in PRE_ORDER down to FILE depth and inserts one
+   * DUPLICATIONS_INDEX row per cpd text block found in the batch report
+   * for each visited file.
+   */
+  private class DuplicationVisitor extends TypeAwareVisitorAdapter {
+
+    private final DbSession session;
+    private final long projectSnapshotId;
+
+    private DuplicationVisitor(DbSession session, long projectSnapshotId) {
+      super(CrawlerDepthLimit.FILE, PRE_ORDER);
+      this.session = session;
+      this.projectSnapshotId = projectSnapshotId;
+    }
+
+    @Override
+    public void visitFile(Component file) {
+      visitComponent(file);
+    }
+
+    private void visitComponent(Component component) {
+      // 0-based position of each block within the file, incremented per inserted row
+      int indexInFile = 0;
+      try (CloseableIterator<BatchReport.CpdTextBlock> blocks = reportReader.readCpdTextBlocks(component.getReportAttributes().getRef())) {
+        while (blocks.hasNext()) {
+          BatchReport.CpdTextBlock block = blocks.next();
+          dbClient.duplicationDao().insert(
+            session,
+            new DuplicationUnitDto()
+              .setHash(block.getHash())
+              .setStartLine(block.getStartLine())
+              .setEndLine(block.getEndLine())
+              .setIndexInFile(indexInFile++)
+              .setSnapshotId(dbIdsRepository.getSnapshotId(component))
+              .setProjectSnapshotId(projectSnapshotId)
+            );
+        }
+      }
+    }
+  }
+
+  @Override
+  public String getDescription() {
+    return "Persist cross project duplications index";
+  }
+
+}
index 74413a1057d2d0d853ffe566026d39a874c3fb6c..8d4c2f5cd91367cd9f98082f11033580ead3e590 100644 (file)
@@ -92,6 +92,7 @@ public class ReportComputationSteps implements ComputationSteps {
       PersistDuplicationsStep.class,
       PersistFileSourcesStep.class,
       PersistTestsStep.class,
+      PersistCrossProjectDuplicationIndexStep.class,
 
       // Switch snapshot and purge
       SwitchSnapshotStep.class,
index e643e5adfb372d43a1f536bfd69aa162383fdbdc..737c232b6915be539bcc140689a49435563e4c10 100644 (file)
@@ -49,7 +49,6 @@ import org.sonar.server.computation.duplication.IntegrateCrossProjectDuplication
 import org.sonar.server.computation.snapshot.Snapshot;
 
 import static java.util.Arrays.asList;
-import static java.util.Collections.singletonList;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyZeroInteractions;
@@ -134,7 +133,7 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
       .setIndexInFile(0)
       .setProjectSnapshotId(otherProjectSnapshot.getId())
       .setSnapshotId(otherFileSnapshot.getId());
-    dbClient.duplicationDao().insert(dbSession, singletonList(duplicate));
+    dbClient.duplicationDao().insert(dbSession, duplicate);
     dbSession.commit();
 
     BatchReport.CpdTextBlock originBlock = BatchReport.CpdTextBlock.newBuilder()
@@ -188,7 +187,7 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
       .setIndexInFile(0)
       .setProjectSnapshotId(otherProjectSnapshot.getId())
       .setSnapshotId(otherFileSnapshot.getId());
-    dbClient.duplicationDao().insert(dbSession, singletonList(duplicate));
+    dbClient.duplicationDao().insert(dbSession, duplicate);
     dbSession.commit();
 
     BatchReport.CpdTextBlock originBlock = BatchReport.CpdTextBlock.newBuilder()
@@ -219,6 +218,27 @@ public class LoadCrossProjectDuplicationsRepositoryStepTest {
     verifyZeroInteractions(integrateCrossProjectDuplications);
   }
 
+  @Test
+  public void nothing_to_do_when_cpd_text_blocks_exists_but_no_duplicated_found() throws Exception {
+    analysisMetadataHolder
+      .setCrossProjectDuplicationEnabled(true)
+      .setBranch(null)
+      .setBaseProjectSnapshot(baseProjectSnapshot);
+
+    BatchReport.CpdTextBlock originBlock = BatchReport.CpdTextBlock.newBuilder()
+      .setHash("a8998353e96320ec")
+      .setStartLine(30)
+      .setEndLine(45)
+      .setStartTokenIndex(0)
+      .setEndTokenIndex(10)
+      .build();
+    batchReportReader.putDuplicationBlocks(FILE_REF, asList(originBlock));
+
+    underTest.execute();
+
+    verifyZeroInteractions(integrateCrossProjectDuplications);
+  }
+
   private ComponentDto createProject(String projectKey) {
     ComponentDto project = ComponentTesting.newProjectDto().setKey(projectKey);
     dbClient.componentDao().insert(dbSession, project);
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/step/PersistCrossProjectDuplicationIndexStepTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/step/PersistCrossProjectDuplicationIndexStepTest.java
new file mode 100644 (file)
index 0000000..40d86ac
--- /dev/null
@@ -0,0 +1,121 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+
+package org.sonar.server.computation.step;
+
+import java.util.Collections;
+import java.util.Map;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.sonar.api.utils.System2;
+import org.sonar.batch.protocol.output.BatchReport;
+import org.sonar.db.DbClient;
+import org.sonar.db.DbTester;
+import org.sonar.server.computation.batch.BatchReportReaderRule;
+import org.sonar.server.computation.batch.TreeRootHolderRule;
+import org.sonar.server.computation.component.Component;
+import org.sonar.server.computation.component.DbIdsRepositoryImpl;
+import org.sonar.server.computation.component.ReportComponent;
+import org.sonar.server.computation.duplication.CrossProjectDuplicationStatusHolder;
+
+import static java.util.Collections.singletonList;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+/**
+ * Verifies that {@link PersistCrossProjectDuplicationIndexStep} writes cpd text
+ * blocks from the batch report into the DUPLICATIONS_INDEX table, and that it
+ * writes nothing when there are no blocks or when cross project duplication is
+ * disabled.
+ */
+public class PersistCrossProjectDuplicationIndexStepTest {
+
+  static final int FILE_REF = 2;
+  static final Component FILE = ReportComponent.builder(Component.Type.FILE, FILE_REF).build();
+  static final long FILE_SNAPSHOT_ID = 11L;
+
+  static final Component PROJECT = ReportComponent.builder(Component.Type.PROJECT, 1)
+    .addChildren(FILE)
+    .build();
+  static final long PROJECT_SNAPSHOT_ID = 10L;
+
+  static final BatchReport.CpdTextBlock CPD_TEXT_BLOCK = BatchReport.CpdTextBlock.newBuilder()
+    .setHash("a8998353e96320ec")
+    .setStartLine(30)
+    .setEndLine(45)
+    .build();
+
+  @Rule
+  public DbTester dbTester = DbTester.create(System2.INSTANCE);
+
+  @Rule
+  public BatchReportReaderRule reportReader = new BatchReportReaderRule();
+
+  @Rule
+  public TreeRootHolderRule treeRootHolder = new TreeRootHolderRule().setRoot(PROJECT);
+
+  CrossProjectDuplicationStatusHolder crossProjectDuplicationStatusHolder = mock(CrossProjectDuplicationStatusHolder.class);
+
+  DbIdsRepositoryImpl dbIdsRepository = new DbIdsRepositoryImpl();
+
+  DbClient dbClient = dbTester.getDbClient();
+
+  ComputationStep underTest = new PersistCrossProjectDuplicationIndexStep(dbClient, dbIdsRepository, treeRootHolder, reportReader, crossProjectDuplicationStatusHolder);
+
+  @Before
+  public void setUp() throws Exception {
+    // register the snapshot ids the step looks up for the project root and the file
+    dbIdsRepository.setSnapshotId(PROJECT, PROJECT_SNAPSHOT_ID);
+    dbIdsRepository.setSnapshotId(FILE, FILE_SNAPSHOT_ID);
+  }
+
+  @Test
+  public void persist_cpd_text_blocks() throws Exception {
+    when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
+    reportReader.putDuplicationBlocks(FILE_REF, singletonList(CPD_TEXT_BLOCK));
+
+    underTest.execute();
+
+    // numeric columns are read back as Long values, hence the 30L/45L/0L expectations
+    Map<String, Object> dto = dbTester.selectFirst("select hash as \"hash\", start_line as \"startLine\", end_line as \"endLine\", index_in_file as \"indexInFile\", " +
+      "snapshot_id as \"snapshotId\", project_snapshot_id as \"projectSnapshotId\" from duplications_index");
+    assertThat(dto.get("hash")).isEqualTo(CPD_TEXT_BLOCK.getHash());
+    assertThat(dto.get("startLine")).isEqualTo(30L);
+    assertThat(dto.get("endLine")).isEqualTo(45L);
+    assertThat(dto.get("indexInFile")).isEqualTo(0L);
+    assertThat(dto.get("snapshotId")).isEqualTo(FILE_SNAPSHOT_ID);
+    assertThat(dto.get("projectSnapshotId")).isEqualTo(PROJECT_SNAPSHOT_ID);
+  }
+
+  @Test
+  public void nothing_to_persist_when_no_cpd_text_blocks_in_report() throws Exception {
+    when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
+    reportReader.putDuplicationBlocks(FILE_REF, Collections.<BatchReport.CpdTextBlock>emptyList());
+
+    underTest.execute();
+
+    assertThat(dbTester.countRowsOfTable("duplications_index")).isEqualTo(0);
+  }
+
+  @Test
+  public void nothing_to_do_when_cross_project_duplication_is_disabled() throws Exception {
+    when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(false);
+    reportReader.putDuplicationBlocks(FILE_REF, singletonList(CPD_TEXT_BLOCK));
+
+    underTest.execute();
+
+    assertThat(dbTester.countRowsOfTable("duplications_index")).isEqualTo(0);
+  }
+
+}
index a53017ea7b10fb44751d3b7998e0fa6f0ecd308b..0e187ea0f76f108b684074aff2682a40a0e0774a 100644 (file)
@@ -46,11 +46,8 @@ public class DuplicationDao implements Dao {
    * Insert a single row in the table DUPLICATIONS_INDEX.
    * Note that the generated id is not returned.
    */
-  public void insert(DbSession session, Collection<DuplicationUnitDto> units) {
-    DuplicationMapper mapper = session.getMapper(DuplicationMapper.class);
-    for (DuplicationUnitDto unit : units) {
-      mapper.batchInsert(unit);
-    }
+  public void insert(DbSession session, DuplicationUnitDto dto) {
+    session.getMapper(DuplicationMapper.class).batchInsert(dto);
   }
 
 }
index 7936524d89872dacde7e5a9f861203c4ca65ea2d..4427b8c69c3a803b94e24744ba8945c5cbc4da83 100644 (file)
@@ -66,13 +66,13 @@ public class DuplicationDaoTest {
     db.prepareDbUnit(getClass(), "insert.xml");
     dbSession.commit();
 
-    dao.insert(dbSession, singletonList(new DuplicationUnitDto()
+    dao.insert(dbSession, new DuplicationUnitDto()
       .setProjectSnapshotId(1)
       .setSnapshotId(2)
       .setHash("bb")
       .setIndexInFile(0)
       .setStartLine(1)
-      .setEndLine(2)));
+      .setEndLine(2));
     dbSession.commit();
 
     db.assertDbUnit(getClass(), "insert-result.xml", "duplications_index");