--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+@ParametersAreNonnullByDefault
+package org.sonar.server.computation.snapshot;
+
+import javax.annotation.ParametersAreNonnullByDefault;
+
package org.sonar.server.computation.step;
import com.google.common.base.Function;
+
import java.util.Collection;
import java.util.List;
import javax.annotation.Nonnull;
+
import org.sonar.api.utils.log.Logger;
import org.sonar.api.utils.log.Loggers;
import org.sonar.batch.protocol.output.BatchReport.CpdTextBlock;
import org.sonar.duplications.block.ByteArray;
import org.sonar.server.computation.analysis.AnalysisMetadataHolder;
import org.sonar.server.computation.batch.BatchReportReader;
import org.sonar.server.computation.component.Component;
import org.sonar.server.computation.component.CrawlerDepthLimit;
import org.sonar.server.computation.component.DepthTraversalTypeAwareCrawler;
import org.sonar.server.computation.component.TreeRootHolder;
import org.sonar.server.computation.component.TypeAwareVisitorAdapter;
import org.sonar.server.computation.duplication.CrossProjectDuplicationStatusHolder;
import org.sonar.server.computation.duplication.IntegrateCrossProjectDuplications;
import org.sonar.server.computation.snapshot.Snapshot;
@Override
public void visitFile(Component file) {
- visitComponent(file);
- }
-
- private void visitComponent(Component component) {
- List<CpdTextBlock> cpdTextBlocks = newArrayList(reportReader.readCpdTextBlocks(component.getReportAttributes().getRef()));
- LOGGER.trace("Found {} cpd blocks on file {}", cpdTextBlocks.size(), component.getKey());
+ List<CpdTextBlock> cpdTextBlocks = newArrayList(reportReader.readCpdTextBlocks(file.getReportAttributes().getRef()));
+ LOGGER.trace("Found {} cpd blocks on file {}", cpdTextBlocks.size(), file.getKey());
if (cpdTextBlocks.isEmpty()) {
return;
}
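+ // Look up duplication units already persisted with the same block hashes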
Collection<String> hashes = from(cpdTextBlocks).transform(CpdTextBlockToHash.INSTANCE).toList();
- List<DuplicationUnitDto> dtos = selectDuplicates(component, hashes);
+ List<DuplicationUnitDto> dtos = selectDuplicates(file, hashes);
if (dtos.isEmpty()) {
return;
}
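+ // Convert persisted duplicates and report blocks into CPD blocks and run the cross-project computation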
Collection<Block> duplicatedBlocks = from(dtos).transform(DtoToBlock.INSTANCE).toList();
- Collection<Block> originBlocks = from(cpdTextBlocks).transform(new CpdTextBlockToBlock(component.getKey())).toList();
- LOGGER.trace("Found {} duplicated cpd blocks on file {}", duplicatedBlocks.size(), component.getKey());
+ Collection<Block> originBlocks = from(cpdTextBlocks).transform(new CpdTextBlockToBlock(file.getKey())).toList();
+ LOGGER.trace("Found {} duplicated cpd blocks on file {}", duplicatedBlocks.size(), file.getKey());
- integrateCrossProjectDuplications.computeCpd(component, originBlocks, duplicatedBlocks);
+ integrateCrossProjectDuplications.computeCpd(file, originBlocks, duplicatedBlocks);
}
private List<DuplicationUnitDto> selectDuplicates(Component file, Collection<String> hashes) {
@Override
public Block apply(@Nonnull DuplicationUnitDto dto) {
- // Not that the dto doesn't contains start/end token indexes
+ // Note that the dto doesn't contain start/end token indexes
return Block.builder()
.setResourceId(dto.getComponentKey())
.setBlockHash(new ByteArray(dto.getHash()))
@Override
public Block apply(@Nonnull CpdTextBlock duplicationBlock) {
- return Block.builder()
+ Block block = Block.builder()
.setResourceId(fileKey)
.setBlockHash(new ByteArray(duplicationBlock.getHash()))
- .setIndexInFile(indexInFile++)
+ .setIndexInFile(indexInFile)
.setLines(duplicationBlock.getStartLine(), duplicationBlock.getEndLine())
.setUnit(duplicationBlock.getStartTokenIndex(), duplicationBlock.getEndTokenIndex())
.build();
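+ // increment after building so each block keeps its position in the report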
+ indexInFile++;
+ return block;
}
}
import org.sonar.db.DbSession;
import org.sonar.db.duplication.DuplicationUnitDto;
import org.sonar.server.computation.batch.BatchReportReader;
import org.sonar.server.computation.component.Component;
import org.sonar.server.computation.component.CrawlerDepthLimit;
import org.sonar.server.computation.component.DbIdsRepository;
import org.sonar.server.computation.component.DepthTraversalTypeAwareCrawler;
import org.sonar.server.computation.component.TreeRootHolder;
import org.sonar.server.computation.component.TypeAwareVisitorAdapter;
import org.sonar.server.computation.duplication.CrossProjectDuplicationStatusHolder;
import static org.sonar.server.computation.component.ComponentVisitor.Order.PRE_ORDER;
private void visitComponent(Component component) {
int indexInFile = 0;
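+ // Persist each CPD text block of the file, preserving its position in the report (starting at 0)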
try (CloseableIterator<BatchReport.CpdTextBlock> blocks = reportReader.readCpdTextBlocks(component.getReportAttributes().getRef())) {
while (blocks.hasNext()) {
BatchReport.CpdTextBlock block = blocks.next();
dbClient.duplicationDao().insert(
.setHash(block.getHash())
.setStartLine(block.getStartLine())
.setEndLine(block.getEndLine())
- .setIndexInFile(indexInFile++)
+ .setIndexInFile(indexInFile)
.setSnapshotId(dbIdsRepository.getSnapshotId(component))
.setProjectSnapshotId(projectSnapshotId)
);
+ indexInFile++;
}
}
}
}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+@ParametersAreNonnullByDefault
+package org.sonar.server.computation.util;
+
+import javax.annotation.ParametersAreNonnullByDefault;
+
import java.util.Arrays;
import java.util.Collections;
+
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.sonar.server.computation.snapshot.Snapshot;
import static java.util.Arrays.asList;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import static org.sonar.server.computation.component.Component.Type.FILE;
import static org.sonar.server.computation.component.Component.Type.PROJECT;
}
@Test
- public void call_compute_cpd() throws Exception {
+ public void call_compute_cpd_on_one_duplication() throws Exception {
when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
analysisMetadataHolder.setBaseProjectSnapshot(baseProjectSnapshot);
);
}
+ @Test
+ public void call_compute_cpd_on_many_duplications() throws Exception {
+ when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
+ analysisMetadataHolder.setBaseProjectSnapshot(baseProjectSnapshot);
+
+ ComponentDto otherProject = createProject("OTHER_PROJECT_KEY");
+ SnapshotDto otherProjectSnapshot = createProjectSnapshot(otherProject);
+
+ ComponentDto otherFile = createFile("OTHER_FILE_KEY", otherProject);
+ SnapshotDto otherFileSnapshot = createFileSnapshot(otherFile, otherProjectSnapshot);
+
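+ // Two CPD text blocks are reported on the current file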
+ BatchReport.CpdTextBlock originBlock1 = BatchReport.CpdTextBlock.newBuilder()
+ .setHash("a8998353e96320ec")
+ .setStartLine(30)
+ .setEndLine(45)
+ .setStartTokenIndex(0)
+ .setEndTokenIndex(10)
+ .build();
+ BatchReport.CpdTextBlock originBlock2 = BatchReport.CpdTextBlock.newBuilder()
+ .setHash("b1234353e96320ff")
+ .setStartLine(10)
+ .setEndLine(25)
+ .setStartTokenIndex(5)
+ .setEndTokenIndex(15)
+ .build();
+ batchReportReader.putDuplicationBlocks(FILE_REF, asList(originBlock1, originBlock2));
+
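+ // Each origin block matches one duplication unit persisted for the other project's file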
+ DuplicationUnitDto duplicate1 = new DuplicationUnitDto()
+ .setHash(originBlock1.getHash())
+ .setStartLine(40)
+ .setEndLine(55)
+ .setIndexInFile(0)
+ .setProjectSnapshotId(otherProjectSnapshot.getId())
+ .setSnapshotId(otherFileSnapshot.getId());
+
+ DuplicationUnitDto duplicate2 = new DuplicationUnitDto()
+ .setHash(originBlock2.getHash())
+ .setStartLine(20)
+ .setEndLine(35)
+ .setIndexInFile(1)
+ .setProjectSnapshotId(otherProjectSnapshot.getId())
+ .setSnapshotId(otherFileSnapshot.getId());
+ dbClient.duplicationDao().insert(dbSession, duplicate1);
+ dbClient.duplicationDao().insert(dbSession, duplicate2);
+ dbSession.commit();
+
+ underTest.execute();
+
+ verify(integrateCrossProjectDuplications).computeCpd(CURRENT_FILE,
+ Arrays.asList(
+ new Block.Builder()
+ .setResourceId(CURRENT_FILE_KEY)
+ .setBlockHash(new ByteArray(originBlock1.getHash()))
+ .setIndexInFile(0)
+ .setLines(originBlock1.getStartLine(), originBlock1.getEndLine())
+ .setUnit(originBlock1.getStartTokenIndex(), originBlock1.getEndTokenIndex())
+ .build(),
+ new Block.Builder()
+ .setResourceId(CURRENT_FILE_KEY)
+ .setBlockHash(new ByteArray(originBlock2.getHash()))
+ .setIndexInFile(1)
+ .setLines(originBlock2.getStartLine(), originBlock2.getEndLine())
+ .setUnit(originBlock2.getStartTokenIndex(), originBlock2.getEndTokenIndex())
+ .build()
+ ),
+ Arrays.asList(
+ new Block.Builder()
+ .setResourceId(otherFile.getKey())
+ .setBlockHash(new ByteArray(originBlock1.getHash()))
+ .setIndexInFile(duplicate1.getIndexInFile())
+ .setLines(duplicate1.getStartLine(), duplicate1.getEndLine())
+ .build(),
+ new Block.Builder()
+ .setResourceId(otherFile.getKey())
+ .setBlockHash(new ByteArray(originBlock2.getHash()))
+ .setIndexInFile(duplicate2.getIndexInFile())
+ .setLines(duplicate2.getStartLine(), duplicate2.getEndLine())
+ .build()
+ )
+ );
+ }
+
@Test
public void nothing_to_do_when_cross_project_duplication_is_disabled() throws Exception {
when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(false);
@Test
public void nothing_to_do_when_no_cpd_text_blocks_found() throws Exception {
- analysisMetadataHolder
- .setCrossProjectDuplicationEnabled(true)
- .setBranch(null)
- .setBaseProjectSnapshot(baseProjectSnapshot);
+ when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
+ analysisMetadataHolder.setBaseProjectSnapshot(baseProjectSnapshot);
batchReportReader.putDuplicationBlocks(FILE_REF, Collections.<BatchReport.CpdTextBlock>emptyList());
@Test
public void nothing_to_do_when_cpd_text_blocks_exists_but_no_duplicated_found() throws Exception {
- analysisMetadataHolder
- .setCrossProjectDuplicationEnabled(true)
- .setBranch(null)
- .setBaseProjectSnapshot(baseProjectSnapshot);
+ when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
+ analysisMetadataHolder.setBaseProjectSnapshot(baseProjectSnapshot);
BatchReport.CpdTextBlock originBlock = BatchReport.CpdTextBlock.newBuilder()
.setHash("a8998353e96320ec")
package org.sonar.server.computation.step;
+import java.util.Arrays;
import java.util.Collections;
+import java.util.List;
import java.util.Map;
+
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
}
@Test
- public void persist_cpd_text_blocks() throws Exception {
+ public void persist_cpd_text_block() throws Exception {
when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
reportReader.putDuplicationBlocks(FILE_REF, singletonList(CPD_TEXT_BLOCK));
assertThat(dto.get("projectSnapshotId")).isEqualTo(PROJECT_SNAPSHOT_ID);
}
+ @Test
+ public void persist_many_cpd_text_blocks() throws Exception {
+ when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
+ reportReader.putDuplicationBlocks(FILE_REF, Arrays.asList(
+ CPD_TEXT_BLOCK,
+ BatchReport.CpdTextBlock.newBuilder()
+ .setHash("b1234353e96320ff")
+ .setStartLine(20)
+ .setEndLine(15)
+ .build()));
+
+ underTest.execute();
+
+ List<Map<String, Object>> dtos = dbTester.select("select hash as \"hash\", start_line as \"startLine\", end_line as \"endLine\", index_in_file as \"indexInFile\", " +
+ "snapshot_id as \"snapshotId\", project_snapshot_id as \"projectSnapshotId\" from duplications_index");
+ assertThat(dtos).extracting("hash").containsOnly(CPD_TEXT_BLOCK.getHash(), "b1234353e96320ff");
+ assertThat(dtos).extracting("startLine").containsOnly(30L, 20L);
+ assertThat(dtos).extracting("endLine").containsOnly(45L, 15L);
+ assertThat(dtos).extracting("indexInFile").containsOnly(0L, 1L);
+ assertThat(dtos).extracting("snapshotId").containsOnly(FILE_SNAPSHOT_ID);
+ assertThat(dtos).extracting("projectSnapshotId").containsOnly(PROJECT_SNAPSHOT_ID);
+ }
+
@Test
public void nothing_to_persist_when_no_cpd_text_blocks_in_report() throws Exception {
when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);