--- /dev/null
+/*
+ * SonarQube
+ * Copyright (C) 2009-2017 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.component;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.annotation.CheckForNull;
+import org.apache.commons.lang.StringUtils;
+import org.sonar.db.DbClient;
+import org.sonar.db.DbSession;
+import org.sonar.db.component.ComponentDto;
+import org.sonar.server.computation.task.projectanalysis.analysis.AnalysisMetadataHolder;
+
+/**
+ * Cache a map between component keys and uuids in the merge branch
+ */
+public class MergeBranchComponentUuids {
+ private final AnalysisMetadataHolder analysisMetadataHolder;
+ private final DbClient dbClient;
+ // Lazily populated on the first getUuid() call: component key (branch suffix removed) -> uuid
+ // of the matching component in the merge branch.
+ // NOTE(review): lazy initialization is not synchronized — presumably only used from a single
+ // CE task thread; confirm.
+ private Map<String, String> uuidsByKey;
+
+ public MergeBranchComponentUuids(AnalysisMetadataHolder analysisMetadataHolder, DbClient dbClient) {
+ this.analysisMetadataHolder = analysisMetadataHolder;
+ this.dbClient = dbClient;
+ }
+
+ // Loads every component of the merge branch into the key -> uuid cache.
+ // Calls Optional.get() on both the branch and its merge branch uuid without isPresent()
+ // checks: callers must only trigger this while analysing a branch that has a merge branch.
+ private void loadMergeBranchComponents() {
+ String mergeBranchUuid = analysisMetadataHolder.getBranch().get().getMergeBranchUuid().get();
+
+ uuidsByKey = new HashMap<>();
+ try (DbSession dbSession = dbClient.openSession(false)) {
+
+ List<ComponentDto> components = dbClient.componentDao().selectByProjectUuid(mergeBranchUuid, dbSession);
+ for (ComponentDto dto : components) {
+ // assumes ComponentDto.getKey() already excludes the ":BRANCH:" suffix, so it matches
+ // the cleaned keys looked up in getUuid() — TODO confirm against ComponentDto
+ uuidsByKey.put(dto.getKey(), dto.uuid());
+ }
+ }
+ }
+
+ /**
+ * Returns the uuid of the component with the same key in the merge branch, or null if the
+ * merge branch has no such component. Accepts either a db key (with branch suffix) or a
+ * plain key: the branch part is stripped before the lookup.
+ */
+ @CheckForNull
+ public String getUuid(String dbKey) {
+ if (uuidsByKey == null) {
+ loadMergeBranchComponents();
+ }
+
+ String cleanComponentKey = removeBranchFromKey(dbKey);
+ return uuidsByKey.get(cleanComponentKey);
+ }
+
+ // Strips the trailing branch qualifier: everything after the last BRANCH_KEY_SEPARATOR.
+ private static String removeBranchFromKey(String componentKey) {
+ return StringUtils.substringBeforeLast(componentKey, ComponentDto.BRANCH_KEY_SEPARATOR);
+ }
+}
import org.sonar.server.computation.task.projectanalysis.component.ConfigurationRepositoryImpl;
import org.sonar.server.computation.task.projectanalysis.component.DbIdsRepositoryImpl;
import org.sonar.server.computation.task.projectanalysis.component.DisabledComponentsHolderImpl;
+import org.sonar.server.computation.task.projectanalysis.component.MergeBranchComponentUuids;
import org.sonar.server.computation.task.projectanalysis.component.TreeRootHolderImpl;
import org.sonar.server.computation.task.projectanalysis.duplication.CrossProjectDuplicationStatusHolderImpl;
import org.sonar.server.computation.task.projectanalysis.duplication.DuplicationMeasures;
import org.sonar.server.computation.task.projectanalysis.qualitymodel.RatingSettings;
import org.sonar.server.computation.task.projectanalysis.qualitymodel.ReliabilityAndSecurityRatingMeasuresVisitor;
import org.sonar.server.computation.task.projectanalysis.qualityprofile.ActiveRulesHolderImpl;
+import org.sonar.server.computation.task.projectanalysis.scm.ScmInfoDbLoader;
import org.sonar.server.computation.task.projectanalysis.scm.ScmInfoRepositoryImpl;
import org.sonar.server.computation.task.projectanalysis.source.LastCommitVisitor;
import org.sonar.server.computation.task.projectanalysis.source.SourceHashRepositoryImpl;
MeasureComputersHolderImpl.class,
MutableTaskResultHolderImpl.class,
BatchReportReaderImpl.class,
+ MergeBranchComponentUuids.class,
// repositories
LanguageRepositoryImpl.class,
SourceLinesRepositoryImpl.class,
SourceHashRepositoryImpl.class,
ScmInfoRepositoryImpl.class,
+ ScmInfoDbLoader.class,
DuplicationRepositoryImpl.class,
// issues
package org.sonar.server.computation.task.projectanalysis.issue;
import java.util.Collections;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
-
import javax.annotation.Nullable;
-
-import org.apache.commons.lang.StringUtils;
import org.sonar.core.issue.DefaultIssue;
import org.sonar.core.issue.tracking.Input;
import org.sonar.core.issue.tracking.LazyInput;
import org.sonar.core.issue.tracking.LineHashSequence;
import org.sonar.db.DbClient;
import org.sonar.db.DbSession;
-import org.sonar.db.component.ComponentDto;
-import org.sonar.server.computation.task.projectanalysis.analysis.AnalysisMetadataHolder;
import org.sonar.server.computation.task.projectanalysis.component.Component;
+import org.sonar.server.computation.task.projectanalysis.component.MergeBranchComponentUuids;
public class TrackerMergeBranchInputFactory {
private static final LineHashSequence EMPTY_LINE_HASH_SEQUENCE = new LineHashSequence(Collections.<String>emptyList());
private final ComponentIssuesLoader mergeIssuesLoader;
private final DbClient dbClient;
- private final AnalysisMetadataHolder analysisMetadataHolder;
- private Map<String, String> uuidsByKey;
+ // Shared cache of merge-branch component uuids; replaces the local uuidsByKey map and
+ // the duplicated loading/key-cleaning logic previously kept in this class.
+ private final MergeBranchComponentUuids mergeBranchComponentUuids;
- public TrackerMergeBranchInputFactory(ComponentIssuesLoader mergeIssuesLoader, AnalysisMetadataHolder analysisMetadataHolder, DbClient dbClient) {
+ public TrackerMergeBranchInputFactory(ComponentIssuesLoader mergeIssuesLoader, MergeBranchComponentUuids mergeBranchComponentUuids, DbClient dbClient) {
this.mergeIssuesLoader = mergeIssuesLoader;
- this.analysisMetadataHolder = analysisMetadataHolder;
+ this.mergeBranchComponentUuids = mergeBranchComponentUuids;
this.dbClient = dbClient;
// TODO detect file moves?
}
- private void loadMergeBranchComponents() {
- String mergeBranchUuid = analysisMetadataHolder.getBranch().get().getMergeBranchUuid().get();
-
- uuidsByKey = new HashMap<>();
- try (DbSession dbSession = dbClient.openSession(false)) {
-
- List<ComponentDto> components = dbClient.componentDao().selectByProjectUuid(mergeBranchUuid, dbSession);
- for (ComponentDto dto : components) {
- uuidsByKey.put(removeBranchFromKey(dto.getDbKey()), dto.uuid());
- }
- }
- }
-
public Input<DefaultIssue> create(Component component) {
- if (uuidsByKey == null) {
- loadMergeBranchComponents();
- }
-
- String cleanComponentKey = removeBranchFromKey(component.getKey());
- String mergeBranchComponentUuid = uuidsByKey.get(cleanComponentKey);
+ // The lookup strips the branch suffix from the key itself; may return null when the
+ // merge branch has no component with this key.
+ String mergeBranchComponentUuid = mergeBranchComponentUuids.getUuid(component.getKey());
return new MergeLazyInput(component.getType(), mergeBranchComponentUuid);
}
}
}
- private static String removeBranchFromKey(String componentKey) {
- return StringUtils.substringBeforeLast(componentKey, ":BRANCH:");
- }
}
--- /dev/null
+/*
+ * SonarQube
+ * Copyright (C) 2009-2017 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.scm;
+
+import java.util.Optional;
+import org.sonar.api.utils.log.Logger;
+import org.sonar.api.utils.log.Loggers;
+import org.sonar.db.DbClient;
+import org.sonar.db.DbSession;
+import org.sonar.db.source.FileSourceDto;
+import org.sonar.server.computation.task.projectanalysis.analysis.AnalysisMetadataHolder;
+import org.sonar.server.computation.task.projectanalysis.analysis.Branch;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+import org.sonar.server.computation.task.projectanalysis.component.Component.Status;
+import org.sonar.server.computation.task.projectanalysis.component.MergeBranchComponentUuids;
+import org.sonar.server.computation.task.projectanalysis.scm.ScmInfoRepositoryImpl.NoScmInfo;
+import org.sonar.server.computation.task.projectanalysis.source.SourceHashRepository;
+
+/**
+ * Loads SCM information (changesets) for a file from the database: from the file's own
+ * previous analysis when one exists, or from the matching file in the merge branch on the
+ * first analysis of a branch.
+ */
+public class ScmInfoDbLoader {
+ private static final Logger LOGGER = Loggers.get(ScmInfoDbLoader.class);
+
+ private final AnalysisMetadataHolder analysisMetadataHolder;
+ private final DbClient dbClient;
+ private final SourceHashRepository sourceHashRepository;
+ private final MergeBranchComponentUuids mergeBranchComponentUuid;
+
+ public ScmInfoDbLoader(AnalysisMetadataHolder analysisMetadataHolder, DbClient dbClient,
+ SourceHashRepository sourceHashRepository, MergeBranchComponentUuids mergeBranchComponentUuid) {
+ this.analysisMetadataHolder = analysisMetadataHolder;
+ this.dbClient = dbClient;
+ this.sourceHashRepository = sourceHashRepository;
+ this.mergeBranchComponentUuid = mergeBranchComponentUuid;
+ }
+
+ /**
+ * Returns the SCM info stored in the DB for the given file, or {@code NoScmInfo.INSTANCE}
+ * when there is no uuid to read from (first analysis without a merge-branch match), no
+ * file_sources row, a stale source hash, or line data carrying no SCM information.
+ */
+ public ScmInfo getScmInfoFromDb(Component file) {
+ Optional<String> uuid = getFileUUid(file);
+
+ if (!uuid.isPresent()) {
+ return NoScmInfo.INSTANCE;
+ }
+
+ LOGGER.trace("Reading SCM info from db for file '{}'", uuid.get());
+ try (DbSession dbSession = dbClient.openSession(false)) {
+ FileSourceDto dto = dbClient.fileSourceDao().selectSourceByFileUuid(dbSession, uuid.get());
+ if (dto == null || !isDtoValid(file, dto)) {
+ return NoScmInfo.INSTANCE;
+ }
+ return DbScmInfo.create(file, dto.getSourceData().getLinesList()).or(NoScmInfo.INSTANCE);
+ }
+ }
+
+ // Decides which file uuid to read SCM info from:
+ // - not the first analysis: the file itself;
+ // - first analysis of a branch: the matching file in the merge branch (may be absent);
+ // - first analysis without a branch: nothing to read.
+ private Optional<String> getFileUUid(Component file) {
+ if (!analysisMetadataHolder.isFirstAnalysis()) {
+ return Optional.of(file.getUuid());
+ }
+
+ Optional<Branch> branch = analysisMetadataHolder.getBranch();
+ if (branch.isPresent()) {
+ return Optional.ofNullable(mergeBranchComponentUuid.getUuid(file.getKey()));
+ }
+
+ return Optional.empty();
+ }
+
+ // A DB row is reusable when the file is reported unmodified, otherwise only when its
+ // stored source hash matches the hash of the sources in the current report.
+ // NOTE(review): the code this replaces also required isIncrementalAnalysis() before
+ // trusting Status.SAME — verify that dropping that condition is intended.
+ private boolean isDtoValid(Component file, FileSourceDto dto) {
+ if (file.getStatus() == Status.SAME) {
+ return true;
+ }
+ return sourceHashRepository.getRawSourceHash(file).equals(dto.getSrcHash());
+ }
+}
import java.util.Map;
import org.sonar.api.utils.log.Logger;
import org.sonar.api.utils.log.Loggers;
-import org.sonar.db.DbClient;
-import org.sonar.db.DbSession;
-import org.sonar.db.source.FileSourceDto;
import org.sonar.scanner.protocol.output.ScannerReport;
-import org.sonar.server.computation.task.projectanalysis.analysis.AnalysisMetadataHolder;
import org.sonar.server.computation.task.projectanalysis.batch.BatchReportReader;
import org.sonar.server.computation.task.projectanalysis.component.Component;
-import org.sonar.server.computation.task.projectanalysis.component.Component.Status;
-import org.sonar.server.computation.task.projectanalysis.source.SourceHashRepository;
import static java.util.Objects.requireNonNull;
private static final Logger LOGGER = Loggers.get(ScmInfoRepositoryImpl.class);
private final BatchReportReader batchReportReader;
- private final AnalysisMetadataHolder analysisMetadataHolder;
- private final DbClient dbClient;
- private final SourceHashRepository sourceHashRepository;
-
private final Map<Component, ScmInfo> scmInfoCache = new HashMap<>();
+ private final ScmInfoDbLoader scmInfoDbLoader;
- public ScmInfoRepositoryImpl(BatchReportReader batchReportReader, AnalysisMetadataHolder analysisMetadataHolder, DbClient dbClient, SourceHashRepository sourceHashRepository) {
+ public ScmInfoRepositoryImpl(BatchReportReader batchReportReader, ScmInfoDbLoader scmInfoDbLoader) {
this.batchReportReader = batchReportReader;
- this.analysisMetadataHolder = analysisMetadataHolder;
- this.dbClient = dbClient;
- this.sourceHashRepository = sourceHashRepository;
+ this.scmInfoDbLoader = scmInfoDbLoader;
}
@Override
return NoScmInfo.INSTANCE;
}
if (changesets.getCopyFromPrevious()) {
- return getScmInfoFromDb(component);
+ return scmInfoDbLoader.getScmInfoFromDb(component);
}
return getScmInfoFromReport(component, changesets);
}
- private ScmInfo getScmInfoFromDb(Component file) {
- if (analysisMetadataHolder.isFirstAnalysis()) {
- return NoScmInfo.INSTANCE;
- }
-
- LOGGER.trace("Reading SCM info from db for file '{}'", file.getKey());
- try (DbSession dbSession = dbClient.openSession(false)) {
- FileSourceDto dto = dbClient.fileSourceDao().selectSourceByFileUuid(dbSession, file.getUuid());
- if (dto == null || !isDtoValid(file, dto)) {
- return NoScmInfo.INSTANCE;
- }
- return DbScmInfo.create(file, dto.getSourceData().getLinesList()).or(NoScmInfo.INSTANCE);
- }
- }
-
- private boolean isDtoValid(Component file, FileSourceDto dto) {
- if (analysisMetadataHolder.isIncrementalAnalysis() && file.getStatus() == Status.SAME) {
- return true;
- }
- return sourceHashRepository.getRawSourceHash(file).equals(dto.getSrcHash());
- }
-
private static ScmInfo getScmInfoFromReport(Component file, ScannerReport.Changesets changesets) {
LOGGER.trace("Reading SCM info from report for file '{}'", file.getKey());
return new ReportScmInfo(changesets);
/**
* Internally used to populate cache when no ScmInfo exist.
*/
- private enum NoScmInfo implements ScmInfo {
+ enum NoScmInfo implements ScmInfo {
INSTANCE {
@Override
public Changeset getLatestChangeset() {
--- /dev/null
+/*
+ * SonarQube
+ * Copyright (C) 2009-2017 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.component;
+
+import java.util.Optional;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.sonar.db.DbTester;
+import org.sonar.db.component.ComponentDto;
+import org.sonar.db.component.ComponentTesting;
+import org.sonar.server.computation.task.projectanalysis.analysis.AnalysisMetadataHolderRule;
+import org.sonar.server.computation.task.projectanalysis.analysis.Branch;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class MergeBranchComponentUuidsTest {
+ @Rule
+ public AnalysisMetadataHolderRule analysisMetadataHolder = new AnalysisMetadataHolderRule();
+
+ @Rule
+ public DbTester db = DbTester.create();
+
+ private MergeBranchComponentUuids underTest;
+ private Branch branch = mock(Branch.class);
+
+ private ComponentDto mergeBranch;
+ private ComponentDto mergeBranchFile;
+ private ComponentDto branchFile;
+
+ @Before
+ public void setUp() {
+ underTest = new MergeBranchComponentUuids(analysisMetadataHolder, db.getDbClient());
+ analysisMetadataHolder.setBranch(branch);
+
+ // Fixture: one project with a merge branch and a regular branch, each holding a file
+ // with the same short key ("file") but different uuids.
+ ComponentDto project = db.components().insertMainBranch();
+ mergeBranch = db.components().insertProjectBranch(project, b -> b.setKey("mergeBranch"));
+ ComponentDto branch = db.components().insertProjectBranch(project, b -> b.setKey("branch1"));
+ mergeBranchFile = ComponentTesting.newFileDto(mergeBranch, null, "file").setUuid("mergeFile");
+ branchFile = ComponentTesting.newFileDto(branch, null, "file").setUuid("file1");
+ db.components().insertComponents(mergeBranchFile, branchFile);
+ }
+
+ // A db key (including the ":BRANCH:branch1" suffix) resolves to the merge-branch file.
+ @Test
+ public void should_support_db_key() {
+ when(branch.getMergeBranchUuid()).thenReturn(Optional.of(mergeBranch.uuid()));
+ assertThat(underTest.getUuid(branchFile.getDbKey())).isEqualTo(mergeBranchFile.uuid());
+ }
+
+ // A plain key (no branch suffix) resolves to the same merge-branch file.
+ @Test
+ public void should_support_key() {
+ when(branch.getMergeBranchUuid()).thenReturn(Optional.of(mergeBranch.uuid()));
+ assertThat(underTest.getUuid(branchFile.getKey())).isEqualTo(mergeBranchFile.uuid());
+ }
+
+ // Unknown keys return null (getUuid is @CheckForNull).
+ @Test
+ public void return_null_if_file_doesnt_exist() {
+ when(branch.getMergeBranchUuid()).thenReturn(Optional.of(mergeBranch.uuid()));
+ assertThat(underTest.getUuid("doesnt exist")).isNull();
+ }
+}
import org.sonar.server.computation.task.projectanalysis.component.Component;
import org.sonar.server.computation.task.projectanalysis.component.Component.Status;
import org.sonar.server.computation.task.projectanalysis.component.DefaultBranchImpl;
+import org.sonar.server.computation.task.projectanalysis.component.MergeBranchComponentUuids;
import org.sonar.server.computation.task.projectanalysis.component.TreeRootHolderRule;
import org.sonar.server.computation.task.projectanalysis.component.TypeAwareVisitor;
import org.sonar.server.computation.task.projectanalysis.filemove.MovedFilesRepository;
private IssueLifecycle issueLifecycle;
@Mock
private IssueVisitor issueVisitor;
+ @Mock
+ private MergeBranchComponentUuids mergeBranchComponentsUuids;
ArgumentCaptor<DefaultIssue> defaultIssueCaptor = ArgumentCaptor.forClass(DefaultIssue.class);
TrackerRawInputFactory rawInputFactory = new TrackerRawInputFactory(treeRootHolder, reportReader, fileSourceRepository, new CommonRuleEngineImpl(), issueFilter);
TrackerBaseInputFactory baseInputFactory = new TrackerBaseInputFactory(issuesLoader, dbTester.getDbClient(), movedFilesRepository);
- TrackerMergeBranchInputFactory mergeInputFactory = new TrackerMergeBranchInputFactory(issuesLoader, analysisMetadataHolder, dbTester.getDbClient());
+ TrackerMergeBranchInputFactory mergeInputFactory = new TrackerMergeBranchInputFactory(issuesLoader, mergeBranchComponentsUuids, dbTester.getDbClient());
tracker = new TrackerExecution(baseInputFactory, rawInputFactory, new Tracker<>());
shortBranchTracker = new ShortBranchTrackerExecution(baseInputFactory, rawInputFactory, mergeInputFactory, new Tracker<>());
mergeBranchTracker = new MergeBranchTrackerExecution(rawInputFactory, mergeInputFactory, new Tracker<>());
--- /dev/null
+/*
+ * SonarQube
+ * Copyright (C) 2009-2017 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.scm;
+
+import com.google.common.collect.ImmutableList;
+import java.util.Iterator;
+import java.util.List;
+import javax.annotation.Nullable;
+import org.junit.Rule;
+import org.junit.Test;
+import org.sonar.api.utils.System2;
+import org.sonar.api.utils.log.LogTester;
+import org.sonar.core.hash.SourceHashComputer;
+import org.sonar.db.DbTester;
+import org.sonar.db.protobuf.DbFileSources;
+import org.sonar.db.source.FileSourceDto;
+import org.sonar.scanner.protocol.output.ScannerReport;
+import org.sonar.server.computation.task.projectanalysis.analysis.Analysis;
+import org.sonar.server.computation.task.projectanalysis.analysis.AnalysisMetadataHolderRule;
+import org.sonar.server.computation.task.projectanalysis.analysis.Branch;
+import org.sonar.server.computation.task.projectanalysis.batch.BatchReportReaderRule;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+import org.sonar.server.computation.task.projectanalysis.component.Component.Status;
+import org.sonar.server.computation.task.projectanalysis.component.MergeBranchComponentUuids;
+import org.sonar.server.computation.task.projectanalysis.scm.ScmInfoRepositoryImpl.NoScmInfo;
+import org.sonar.server.computation.task.projectanalysis.source.SourceHashRepositoryImpl;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepositoryImpl;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.sonar.api.utils.log.LoggerLevel.TRACE;
+import static org.sonar.server.computation.task.projectanalysis.component.ReportComponent.builder;
+
+public class ScmInfoDbLoaderTest {
+ static final int FILE_REF = 1;
+ static final Component FILE = builder(Component.Type.FILE, FILE_REF).setKey("FILE_KEY").setUuid("FILE_UUID").build();
+ static final long DATE_1 = 123456789L;
+ // NOTE(review): DATE_2 is not used by any test in this file — confirm it is needed.
+ static final long DATE_2 = 1234567810L;
+
+ static Analysis baseProjectAnalysis = new Analysis.Builder()
+ .setId(1)
+ .setUuid("uuid_1")
+ .setCreatedAt(123456789L)
+ .build();
+
+ @Rule
+ public LogTester logTester = new LogTester();
+ @Rule
+ public AnalysisMetadataHolderRule analysisMetadataHolder = new AnalysisMetadataHolderRule();
+ @Rule
+ public DbTester dbTester = DbTester.create(System2.INSTANCE);
+ @Rule
+ public BatchReportReaderRule reportReader = new BatchReportReaderRule();
+
+ private Branch branch = mock(Branch.class);
+ private SourceHashRepositoryImpl sourceHashRepository = new SourceHashRepositoryImpl(new SourceLinesRepositoryImpl(reportReader));
+ private MergeBranchComponentUuids mergeBranchComponentUuids = mock(MergeBranchComponentUuids.class);
+
+ private ScmInfoDbLoader underTest = new ScmInfoDbLoader(analysisMetadataHolder, dbTester.getDbClient(), sourceHashRepository, mergeBranchComponentUuids);
+
+ // Status.SAME files skip the source-hash comparison entirely (no source put in the report).
+ @Test
+ public void dont_check_hash_for_unmodified_files_incremental_analysis() {
+ analysisMetadataHolder.setIncrementalAnalysis(true);
+ analysisMetadataHolder.setBranch(null);
+ analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
+
+ addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1));
+
+ Component file = builder(Component.Type.FILE, FILE_REF).setKey("FILE_KEY").setUuid("FILE_UUID").setStatus(Status.SAME).build();
+ ScmInfo scmInfo = underTest.getScmInfoFromDb(file);
+ assertThat(scmInfo.getAllChangesets()).hasSize(1);
+
+ assertThat(logTester.logs(TRACE)).containsOnly("Reading SCM info from db for file 'FILE_UUID'");
+ }
+
+ // Non-first analysis: DB data is used when the stored hash matches the report sources.
+ @Test
+ public void returns_ScmInfo_from_DB_if_hashes_are_the_same() throws Exception {
+ analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
+ analysisMetadataHolder.setBranch(null);
+ analysisMetadataHolder.setIncrementalAnalysis(false);
+
+ addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1));
+ addFileSourceInReport(1);
+
+ ScmInfo scmInfo = underTest.getScmInfoFromDb(FILE);
+ assertThat(scmInfo.getAllChangesets()).hasSize(1);
+
+ assertThat(logTester.logs(TRACE)).containsOnly("Reading SCM info from db for file 'FILE_UUID'");
+ }
+
+ // First analysis of a branch: SCM info is read from the merge branch's file uuid.
+ @Test
+ public void read_from_merge_branch_if_no_base() {
+ analysisMetadataHolder.setBaseAnalysis(null);
+ analysisMetadataHolder.setBranch(branch);
+ analysisMetadataHolder.setIncrementalAnalysis(false);
+ String mergeFileUuid = "mergeFileUuid";
+
+ when(mergeBranchComponentUuids.getUuid(FILE.getKey())).thenReturn(mergeFileUuid);
+ addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1), mergeFileUuid);
+ addFileSourceInReport(1);
+
+ ScmInfo scmInfo = underTest.getScmInfoFromDb(FILE);
+ assertThat(scmInfo.getAllChangesets()).hasSize(1);
+ assertThat(logTester.logs(TRACE)).containsOnly("Reading SCM info from db for file 'mergeFileUuid'");
+ }
+
+ // Merge-branch data is rejected when its stored hash differs from the report sources.
+ @Test
+ public void returns_absent_when_branch_and_source_is_different() {
+ analysisMetadataHolder.setBaseAnalysis(null);
+ analysisMetadataHolder.setBranch(branch);
+ analysisMetadataHolder.setIncrementalAnalysis(false);
+ String mergeFileUuid = "mergeFileUuid";
+
+ when(mergeBranchComponentUuids.getUuid(FILE.getKey())).thenReturn(mergeFileUuid);
+ addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1) + "dif", mergeFileUuid);
+ addFileSourceInReport(1);
+
+ assertThat(underTest.getScmInfoFromDb(FILE)).isEqualTo(NoScmInfo.INSTANCE);
+ assertThat(logTester.logs(TRACE)).containsOnly("Reading SCM info from db for file 'mergeFileUuid'");
+ }
+
+ // NOTE(review): double underscore in the method name looks like a typo.
+ @Test
+ public void returns_absent_when__hashes_are_not_the_same() throws Exception {
+ analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
+ analysisMetadataHolder.setBranch(null);
+ analysisMetadataHolder.setIncrementalAnalysis(false);
+
+ addFileSourceInReport(1);
+ addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1) + "_different");
+
+ assertThat(underTest.getScmInfoFromDb(FILE)).isEqualTo(NoScmInfo.INSTANCE);
+ assertThat(logTester.logs(TRACE)).containsOnly("Reading SCM info from db for file 'FILE_UUID'");
+ }
+
+ // First analysis without a branch: no DB access at all (no trace log emitted).
+ @Test
+ public void not_read_in_db_on_first_analysis() throws Exception {
+ analysisMetadataHolder.setBaseAnalysis(null);
+ analysisMetadataHolder.setBranch(null);
+
+ addFileSourceInReport(1);
+
+ assertThat(underTest.getScmInfoFromDb(FILE)).isEqualTo(NoScmInfo.INSTANCE);
+ assertThat(logTester.logs(TRACE)).isEmpty();
+ }
+
+ private static List<String> generateLines(int lineCount) {
+ ImmutableList.Builder<String> builder = ImmutableList.builder();
+ for (int i = 0; i < lineCount; i++) {
+ builder.add("line " + i);
+ }
+ return builder.build();
+ }
+
+ // Computes the same hash SourceHashRepository derives from the report, for DB fixtures.
+ private static String computeSourceHash(int lineCount) {
+ SourceHashComputer sourceHashComputer = new SourceHashComputer();
+ Iterator<String> lines = generateLines(lineCount).iterator();
+ while (lines.hasNext()) {
+ sourceHashComputer.addLine(lines.next(), lines.hasNext());
+ }
+ return sourceHashComputer.getHash();
+ }
+
+ private void addFileSourceInDb(@Nullable String author, @Nullable Long date, @Nullable String revision, String srcHash) {
+ addFileSourceInDb(author, date, revision, srcHash, FILE.getUuid());
+ }
+
+ // Inserts a one-line file_sources row with the given SCM fields and source hash.
+ private void addFileSourceInDb(@Nullable String author, @Nullable Long date, @Nullable String revision, String srcHash, String fileUuid) {
+ DbFileSources.Data.Builder fileDataBuilder = DbFileSources.Data.newBuilder();
+ DbFileSources.Line.Builder builder = fileDataBuilder.addLinesBuilder()
+ .setLine(1);
+ if (author != null) {
+ builder.setScmAuthor(author);
+ }
+ if (date != null) {
+ builder.setScmDate(date);
+ }
+ if (revision != null) {
+ builder.setScmRevision(revision);
+ }
+ dbTester.getDbClient().fileSourceDao().insert(dbTester.getSession(), new FileSourceDto()
+ .setFileUuid(fileUuid)
+ .setProjectUuid("PROJECT_UUID")
+ .setSourceData(fileDataBuilder.build())
+ .setSrcHash(srcHash));
+ dbTester.commit();
+ }
+
+ // Puts source lines for FILE_REF into the scanner report (used for hash comparison).
+ private void addFileSourceInReport(int lineCount) {
+ reportReader.putFileSourceLines(FILE_REF, generateLines(lineCount));
+ reportReader.putComponent(ScannerReport.Component.newBuilder()
+ .setRef(FILE_REF)
+ .setLines(lineCount)
+ .build());
+ }
+}
*/
package org.sonar.server.computation.task.projectanalysis.scm;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.guava.api.Assertions.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verifyNoMoreInteractions;
-import static org.sonar.api.utils.log.LoggerLevel.TRACE;
-import static org.sonar.server.computation.task.projectanalysis.component.ReportComponent.builder;
-
+import com.google.common.collect.ImmutableList;
+import com.tngtech.java.junit.dataprovider.DataProvider;
+import com.tngtech.java.junit.dataprovider.DataProviderRunner;
+import com.tngtech.java.junit.dataprovider.UseDataProvider;
import java.util.EnumSet;
-import java.util.Iterator;
import java.util.List;
-
-import javax.annotation.Nullable;
-
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
-import org.sonar.api.utils.System2;
import org.sonar.api.utils.log.LogTester;
-import org.sonar.core.hash.SourceHashComputer;
-import org.sonar.db.DbClient;
-import org.sonar.db.DbTester;
-import org.sonar.db.protobuf.DbFileSources;
-import org.sonar.db.source.FileSourceDto;
import org.sonar.scanner.protocol.output.ScannerReport;
-import org.sonar.server.computation.task.projectanalysis.analysis.Analysis;
-import org.sonar.server.computation.task.projectanalysis.analysis.AnalysisMetadataHolder;
-import org.sonar.server.computation.task.projectanalysis.analysis.AnalysisMetadataHolderRule;
import org.sonar.server.computation.task.projectanalysis.batch.BatchReportReader;
import org.sonar.server.computation.task.projectanalysis.batch.BatchReportReaderRule;
import org.sonar.server.computation.task.projectanalysis.component.Component;
-import org.sonar.server.computation.task.projectanalysis.component.Component.Status;
import org.sonar.server.computation.task.projectanalysis.component.ReportComponent;
import org.sonar.server.computation.task.projectanalysis.component.ViewsComponent;
-import org.sonar.server.computation.task.projectanalysis.source.SourceHashRepository;
-import org.sonar.server.computation.task.projectanalysis.source.SourceHashRepositoryImpl;
-import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepositoryImpl;
-import com.google.common.collect.ImmutableList;
-import com.tngtech.java.junit.dataprovider.DataProvider;
-import com.tngtech.java.junit.dataprovider.DataProviderRunner;
-import com.tngtech.java.junit.dataprovider.UseDataProvider;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.guava.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.when;
+import static org.sonar.api.utils.log.LoggerLevel.TRACE;
+import static org.sonar.server.computation.task.projectanalysis.component.ReportComponent.builder;
@RunWith(DataProviderRunner.class)
public class ScmInfoRepositoryImplTest {
-
static final int FILE_REF = 1;
static final Component FILE = builder(Component.Type.FILE, FILE_REF).setKey("FILE_KEY").setUuid("FILE_UUID").build();
static final long DATE_1 = 123456789L;
static final long DATE_2 = 1234567810L;
- static Analysis baseProjectAnalysis = new Analysis.Builder()
- .setId(1)
- .setUuid("uuid_1")
- .setCreatedAt(123456789L)
- .build();
-
@Rule
public ExpectedException thrown = ExpectedException.none();
@Rule
public LogTester logTester = new LogTester();
@Rule
public BatchReportReaderRule reportReader = new BatchReportReaderRule();
- @Rule
- public AnalysisMetadataHolderRule analysisMetadataHolder = new AnalysisMetadataHolderRule();
- @Rule
- public DbTester dbTester = DbTester.create(System2.INSTANCE);
-
- DbClient dbClient = dbTester.getDbClient();
-
- ScmInfoRepositoryImpl underTest = new ScmInfoRepositoryImpl(reportReader, analysisMetadataHolder, dbClient,
- new SourceHashRepositoryImpl(new SourceLinesRepositoryImpl(reportReader)));
-
- @Test
- public void dont_check_hash_for_unmodified_files_incremental_analysis() {
- analysisMetadataHolder.setIncrementalAnalysis(true);
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
- addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1));
- addCopyFromPreviousChangesetInReport();
+ private ScmInfoDbLoader dbLoader = mock(ScmInfoDbLoader.class);
- Component file = builder(Component.Type.FILE, FILE_REF).setKey("FILE_KEY").setUuid("FILE_UUID").setStatus(Status.SAME).build();
- ScmInfo scmInfo = underTest.getScmInfo(file).get();
- assertThat(scmInfo.getAllChangesets()).hasSize(1);
-
- assertThat(logTester.logs(TRACE)).containsOnly("Reading SCM info from db for file 'FILE_KEY'");
- }
+ private ScmInfoRepositoryImpl underTest = new ScmInfoRepositoryImpl(reportReader, dbLoader);
@Test
public void read_from_report() throws Exception {
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
addChangesetInReport("john", DATE_1, "rev-1");
ScmInfo scmInfo = underTest.getScmInfo(FILE).get();
@Test
public void getScmInfo_returns_absent_if_CopyFromPrevious_is_false_and_there_is_no_changeset_in_report() {
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
- // put data in DB, which should not be used
- addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1));
addFileSourceInReport(1);
assertThat(underTest.getScmInfo(FILE)).isAbsent();
- }
-
- @Test
- public void getScmInfo_returns_ScmInfo_from_DB_CopyFromPrevious_is_true_if_hashes_are_the_same() throws Exception {
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
- analysisMetadataHolder.setIncrementalAnalysis(false);
-
- addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1));
- addFileSourceInReport(1);
- addCopyFromPreviousChangesetInReport();
-
- ScmInfo scmInfo = underTest.getScmInfo(FILE).get();
- assertThat(scmInfo.getAllChangesets()).hasSize(1);
-
- assertThat(logTester.logs(TRACE)).containsOnly("Reading SCM info from db for file 'FILE_KEY'");
- }
-
- @Test
- public void getScmInfo_returns_absent_when_CopyFromPrevious_is_true_but_hashes_are_not_the_same() throws Exception {
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
- analysisMetadataHolder.setIncrementalAnalysis(false);
-
- addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1) + "_different");
- addFileSourceInReport(1);
- addCopyFromPreviousChangesetInReport();
-
- assertThat(underTest.getScmInfo(FILE)).isAbsent();
-
- assertThat(logTester.logs(TRACE)).containsOnly("Reading SCM info from db for file 'FILE_KEY'");
+ verifyZeroInteractions(dbLoader);
}
@Test
public void read_from_report_even_if_data_in_db_exists() throws Exception {
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
- addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1));
addChangesetInReport("john", DATE_2, "rev-2");
ScmInfo scmInfo = underTest.getScmInfo(FILE).get();
assertThat(changeset.getAuthor()).isEqualTo("john");
assertThat(changeset.getDate()).isEqualTo(DATE_2);
assertThat(changeset.getRevision()).isEqualTo("rev-2");
+ verifyZeroInteractions(dbLoader);
}
@Test
public void read_from_db_even_if_data_in_report_exists_when_CopyFromPrevious_is_true() throws Exception {
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
- analysisMetadataHolder.setIncrementalAnalysis(false);
+ ScmInfo info = mock(ScmInfo.class);
+ when(dbLoader.getScmInfoFromDb(FILE)).thenReturn(info);
- addFileSourceInDb("henry", DATE_1, "rev-1", computeSourceHash(1));
addFileSourceInReport(1);
addChangesetInReport("john", DATE_2, "rev-2", true);
ScmInfo scmInfo = underTest.getScmInfo(FILE).get();
-
- Changeset changeset = scmInfo.getChangesetForLine(1);
- assertThat(changeset.getAuthor()).isEqualTo("henry");
- assertThat(changeset.getDate()).isEqualTo(DATE_1);
- assertThat(changeset.getRevision()).isEqualTo("rev-1");
+ assertThat(scmInfo).isEqualTo(info);
}
@Test
public void return_nothing_when_no_data_in_report_nor_db() throws Exception {
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
assertThat(underTest.getScmInfo(FILE)).isAbsent();
}
@Test
public void return_nothing_when_nothing_in_report_and_db_has_no_scm() throws Exception {
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
- addFileSourceInDb(null, null, null, "don't care");
addFileSourceInReport(1);
-
assertThat(underTest.getScmInfo(FILE)).isAbsent();
}
@Test
public void fail_with_NPE_when_component_is_null() throws Exception {
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
-
thrown.expect(NullPointerException.class);
thrown.expectMessage("Component cannot be bull");
@UseDataProvider("allTypeComponentButFile")
public void do_not_query_db_nor_report_if_component_type_is_not_FILE(Component component) {
BatchReportReader batchReportReader = mock(BatchReportReader.class);
- AnalysisMetadataHolder analysisMetadataHolder = mock(AnalysisMetadataHolder.class);
- DbClient dbClient = mock(DbClient.class);
- SourceHashRepository sourceHashRepository = mock(SourceHashRepository.class);
- ScmInfoRepositoryImpl underTest = new ScmInfoRepositoryImpl(batchReportReader, analysisMetadataHolder, dbClient, sourceHashRepository);
+ ScmInfoRepositoryImpl underTest = new ScmInfoRepositoryImpl(batchReportReader, dbLoader);
assertThat(underTest.getScmInfo(component)).isAbsent();
- verifyNoMoreInteractions(batchReportReader, analysisMetadataHolder, dbClient, sourceHashRepository);
+ verifyZeroInteractions(batchReportReader, dbLoader);
}
@Test
public void load_scm_info_from_cache_when_already_read() throws Exception {
- analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
addChangesetInReport("john", DATE_1, "rev-1");
ScmInfo scmInfo = underTest.getScmInfo(FILE).get();
assertThat(scmInfo.getAllChangesets()).hasSize(1);
assertThat(logTester.logs(TRACE)).isEmpty();
}
- @Test
- public void not_read_in_db_on_first_analysis_when_CopyFromPrevious_is_true() throws Exception {
- analysisMetadataHolder.setBaseAnalysis(null);
- addFileSourceInDb("henry", DATE_1, "rev-1", "don't care");
- addFileSourceInReport(1);
- addCopyFromPreviousChangesetInReport();
-
- assertThat(underTest.getScmInfo(FILE)).isAbsent();
- assertThat(logTester.logs(TRACE)).isEmpty();
- }
-
- private void addFileSourceInDb(@Nullable String author, @Nullable Long date, @Nullable String revision, String srcHash) {
- DbFileSources.Data.Builder fileDataBuilder = DbFileSources.Data.newBuilder();
- DbFileSources.Line.Builder builder = fileDataBuilder.addLinesBuilder()
- .setLine(1);
- if (author != null) {
- builder.setScmAuthor(author);
- }
- if (date != null) {
- builder.setScmDate(date);
- }
- if (revision != null) {
- builder.setScmRevision(revision);
- }
- dbTester.getDbClient().fileSourceDao().insert(dbTester.getSession(), new FileSourceDto()
- .setFileUuid(FILE.getUuid())
- .setProjectUuid("PROJECT_UUID")
- .setSourceData(fileDataBuilder.build())
- .setSrcHash(srcHash));
- dbTester.commit();
- }
-
- private void addCopyFromPreviousChangesetInReport() {
- reportReader.putChangesets(ScannerReport.Changesets.newBuilder()
- .setComponentRef(FILE_REF)
- .setCopyFromPrevious(true)
- .build());
- }
-
private void addChangesetInReport(String author, Long date, String revision) {
addChangesetInReport(author, date, revision, false);
}
}
return builder.build();
}
-
- private static String computeSourceHash(int lineCount) {
- SourceHashComputer sourceHashComputer = new SourceHashComputer();
- Iterator<String> lines = generateLines(lineCount).iterator();
- while (lines.hasNext()) {
- sourceHashComputer.addLine(lines.next(), lines.hasNext());
- }
- return sourceHashComputer.getHash();
- }
}