import com.google.common.collect.Sets;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.stream.Collectors;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import org.apache.ibatis.session.ResultContext;
import org.sonar.db.component.FileMoveRowDto;
import org.sonar.db.source.LineHashesWithUuidDto;
-import static com.google.common.collect.FluentIterable.from;
import static org.sonar.ce.task.projectanalysis.component.ComponentVisitor.Order.POST_ORDER;
public class FileMoveDetectionStep implements ComputationStep {
Set<String> removedFileUuids = difference(dbFilesByUuid.keySet(), reportFilesByUuid.keySet());
- // can find matches if at least one of the added or removed files groups is empty => abort
+ // can't find matches if at least one of the added or removed files groups is empty => abort
if (addedFileUuids.isEmpty() || removedFileUuids.isEmpty()) {
registerAddedFiles(addedFileUuids, reportFilesByUuid, null);
LOG.debug("Either no files added or no files removed. Do nothing.");
}
// retrieve file data from report
- Map<String, File> reportFileSourcesByUuid = getReportFileSourcesByUuid(reportFilesByUuid, addedFileUuids);
+ Map<String, File> addedFileHashesByUuid = getReportFileHashesByUuid(reportFilesByUuid, addedFileUuids);
p.stopTrace("loaded");
// compute score matrix
p.start();
- ScoreMatrix scoreMatrix = computeScoreMatrix(dbFilesByUuid, removedFileUuids, reportFileSourcesByUuid);
+ ScoreMatrix scoreMatrix = computeScoreMatrix(dbFilesByUuid, removedFileUuids, addedFileHashesByUuid);
p.stopTrace("Score matrix computed");
scoreMatrixDumper.dumpAsCsv(scoreMatrix);
p.start();
MatchesByScore matchesByScore = MatchesByScore.create(scoreMatrix);
- ElectedMatches electedMatches = electMatches(removedFileUuids, reportFileSourcesByUuid, matchesByScore);
+ ElectedMatches electedMatches = electMatches(removedFileUuids, addedFileHashesByUuid, matchesByScore);
p.stopTrace("Matches elected");
context.getStatistics().add("movedFiles", electedMatches.size());
return builder.build();
}
- private Map<String, File> getReportFileSourcesByUuid(Map<String, Component> reportFilesByUuid, Set<String> addedFileUuids) {
- ImmutableMap.Builder<String, File> builder = ImmutableMap.builder();
- for (String fileUuid : addedFileUuids) {
+ private Map<String, File> getReportFileHashesByUuid(Map<String, Component> reportFilesByUuid, Set<String> addedFileUuids) {
+ return addedFileUuids.stream().collect(Collectors.toMap(fileUuid -> fileUuid, fileUuid -> {
Component component = reportFilesByUuid.get(fileUuid);
- File file = new LazyFileImpl(
- component.getName(),
- () -> getReportFileLineHashes(component),
- component.getFileAttributes().getLines());
- builder.put(fileUuid, file);
- }
- return builder.build();
+ return new LazyFileImpl(() -> getReportFileLineHashes(component), component.getFileAttributes().getLines());
+ }));
}
private List<String> getReportFileLineHashes(Component component) {
+ // This is not ideal: if the file was moved, no component with the same UUID exists in the DB.
+ // Assuming the file also had significant code before the move, this will be fine.
return sourceLinesHash.getLineHashesMatchingDBVersion(component);
}
- private ScoreMatrix computeScoreMatrix(Map<String, DbComponent> dtosByUuid, Set<String> removedFileUuids, Map<String, File> newFileSourcesByUuid) {
- ScoreMatrix.ScoreFile[] newFiles = newFileSourcesByUuid.entrySet().stream()
+ private ScoreMatrix computeScoreMatrix(Map<String, DbComponent> dtosByUuid, Set<String> removedFileUuids, Map<String, File> addedFileHashesByUuid) {
+ ScoreMatrix.ScoreFile[] addedFiles = addedFileHashesByUuid.entrySet().stream()
.map(e -> new ScoreMatrix.ScoreFile(e.getKey(), e.getValue().getLineCount()))
.toArray(ScoreMatrix.ScoreFile[]::new);
ScoreMatrix.ScoreFile[] removedFiles = removedFileUuids.stream()
return new ScoreMatrix.ScoreFile(dbComponent.getUuid(), dbComponent.getLineCount());
})
.toArray(ScoreMatrix.ScoreFile[]::new);
+
// sort by highest line count first
- Arrays.sort(newFiles, SCORE_FILE_COMPARATOR);
+ Arrays.sort(addedFiles, SCORE_FILE_COMPARATOR);
Arrays.sort(removedFiles, SCORE_FILE_COMPARATOR);
- int[][] scoreMatrix = new int[removedFiles.length][newFiles.length];
- int lastNewFileIndex = newFiles.length - 1;
+ int[][] scoreMatrix = new int[removedFiles.length][addedFiles.length];
+ int smallestAddedFileSize = addedFiles[0].getLineCount();
+ int largestAddedFileSize = addedFiles[addedFiles.length - 1].getLineCount();
- Map<String, Integer> removedFilesIndexes = new HashMap<>(removedFileUuids.size());
+ Map<String, Integer> removedFilesIndexesByUuid = new HashMap<>(removedFileUuids.size());
for (int removeFileIndex = 0; removeFileIndex < removedFiles.length; removeFileIndex++) {
ScoreMatrix.ScoreFile removedFile = removedFiles[removeFileIndex];
int lowerBound = (int) Math.floor(removedFile.getLineCount() * LOWER_BOUND_RATIO);
int upperBound = (int) Math.ceil(removedFile.getLineCount() * UPPER_BOUND_RATIO);
// no need to compute the score if all files are out of bounds, so no need to load line hashes from the DB
- if (newFiles[0].getLineCount() <= lowerBound || newFiles[lastNewFileIndex].getLineCount() >= upperBound) {
+ if (smallestAddedFileSize <= lowerBound || largestAddedFileSize >= upperBound) {
continue;
}
- removedFilesIndexes.put(removedFile.getFileUuid(), removeFileIndex);
+ removedFilesIndexesByUuid.put(removedFile.getFileUuid(), removeFileIndex);
}
- LineHashesWithKeyDtoResultHandler rowHandler = new LineHashesWithKeyDtoResultHandler(removedFilesIndexes, removedFiles,
- newFiles, newFileSourcesByUuid, scoreMatrix);
+ LineHashesWithKeyDtoResultHandler rowHandler = new LineHashesWithKeyDtoResultHandler(removedFilesIndexesByUuid, removedFiles,
+ addedFiles, addedFileHashesByUuid, scoreMatrix);
try (DbSession dbSession = dbClient.openSession(false)) {
- dbClient.fileSourceDao().scrollLineHashes(dbSession, removedFilesIndexes.keySet(), rowHandler);
+ dbClient.fileSourceDao().scrollLineHashes(dbSession, removedFilesIndexesByUuid.keySet(), rowHandler);
}
- return new ScoreMatrix(removedFiles, newFiles, scoreMatrix, rowHandler.getMaxScore());
+ return new ScoreMatrix(removedFiles, addedFiles, scoreMatrix, rowHandler.getMaxScore());
}
private final class LineHashesWithKeyDtoResultHandler implements ResultHandler<LineHashesWithUuidDto> {
- private final Map<String, Integer> removedFilesIndexes;
+ private final Map<String, Integer> removedFileIndexesByUuid;
private final ScoreMatrix.ScoreFile[] removedFiles;
private final ScoreMatrix.ScoreFile[] newFiles;
- private final Map<String, File> newFileSourcesByKey;
+ private final Map<String, File> newFilesByUuid;
private final int[][] scoreMatrix;
private int maxScore;
- private LineHashesWithKeyDtoResultHandler(Map<String, Integer> removedFilesIndexes, ScoreMatrix.ScoreFile[] removedFiles,
- ScoreMatrix.ScoreFile[] newFiles, Map<String, File> newFileSourcesByKey,
+ private LineHashesWithKeyDtoResultHandler(Map<String, Integer> removedFileIndexesByUuid, ScoreMatrix.ScoreFile[] removedFiles,
+ ScoreMatrix.ScoreFile[] newFiles, Map<String, File> newFilesByUuid,
int[][] scoreMatrix) {
- this.removedFilesIndexes = removedFilesIndexes;
+ this.removedFileIndexesByUuid = removedFileIndexesByUuid;
this.removedFiles = removedFiles;
this.newFiles = newFiles;
- this.newFileSourcesByKey = newFileSourcesByKey;
+ this.newFilesByUuid = newFilesByUuid;
this.scoreMatrix = scoreMatrix;
}
if (lineHashesDto.getPath() == null) {
return;
}
- int removeFileIndex = removedFilesIndexes.get(lineHashesDto.getUuid());
- ScoreMatrix.ScoreFile removedFile = removedFiles[removeFileIndex];
+ int removedFileIndex = removedFileIndexesByUuid.get(lineHashesDto.getUuid());
+ ScoreMatrix.ScoreFile removedFile = removedFiles[removedFileIndex];
int lowerBound = (int) Math.floor(removedFile.getLineCount() * LOWER_BOUND_RATIO);
int upperBound = (int) Math.ceil(removedFile.getLineCount() * UPPER_BOUND_RATIO);
break;
}
- File fileInDb = new FileImpl(lineHashesDto.getPath(), lineHashesDto.getLineHashes());
- File unmatchedFile = newFileSourcesByKey.get(newFile.getFileUuid());
- int score = fileSimilarity.score(fileInDb, unmatchedFile);
- scoreMatrix[removeFileIndex][newFileIndex] = score;
+ File fileHashesInDb = new FileImpl(lineHashesDto.getLineHashes());
+ File unmatchedFile = newFilesByUuid.get(newFile.getFileUuid());
+ int score = fileSimilarity.score(fileHashesInDb, unmatchedFile);
+ scoreMatrix[removedFileIndex][newFileIndex] = score;
if (score > maxScore) {
maxScore = score;
}
private final List<Match> matches;
private final Set<String> matchedFileUuids;
- public ElectedMatches(MatchesByScore matchesByScore, Set<String> dbFileUuids, Map<String, File> reportFileSourcesByUuid) {
+ public ElectedMatches(MatchesByScore matchesByScore, Set<String> dbFileUuids, Map<String, File> reportFileHashesByUuid) {
this.matches = new ArrayList<>(matchesByScore.getSize());
- this.matchedFileUuids = new HashSet<>(dbFileUuids.size() + reportFileSourcesByUuid.size());
+ this.matchedFileUuids = new HashSet<>(dbFileUuids.size() + reportFileHashesByUuid.size());
}
public void add(Match match) {
matchedFileUuids.add(match.getReportUuid());
}
- public List<Match> filter(Iterable<Match> matches) {
- return from(matches).filter(this::notAlreadyMatched).toList();
+ public List<Match> filter(Collection<Match> matches) {
+ return matches.stream().filter(this::notAlreadyMatched).collect(Collectors.toList());
}
private boolean notAlreadyMatched(Match input) {
public interface FileSimilarity {
interface File {
- String getPath();
-
List<String> getLineHashes();
int getLineCount();
}
final class FileImpl implements File {
- private final String path;
private final List<String> lineHashes;
private final int lineCount;
- FileImpl(String path, List<String> lineHashes) {
- this.path = requireNonNull(path, "path can not be null");
+ FileImpl(List<String> lineHashes) {
this.lineHashes = requireNonNull(lineHashes, "lineHashes can not be null");
this.lineCount = lineHashes.size();
}
- public String getPath() {
- return path;
- }
-
/**
 * Hash of each line, in order. An empty list is returned
 * if the file content is empty.
}
final class LazyFileImpl implements File {
- private final String path;
private final Supplier<List<String>> supplier;
private final int lineCount;
private List<String> lineHashes;
- LazyFileImpl(String path, Supplier<List<String>> supplier, int lineCount) {
- this.path = requireNonNull(path, "path can not be null");
+ LazyFileImpl(Supplier<List<String>> supplier, int lineCount) {
this.supplier = requireNonNull(supplier, "supplier can not be null");
this.lineCount = lineCount;
}
- public String getPath() {
- return path;
- }
-
/**
 * Hash of each line, in order. An empty list is returned
 * if the file content is empty.
 */
package org.sonar.ce.task.projectanalysis.filemove;
-import com.google.common.base.Optional;
+import java.util.Optional;
import javax.annotation.Nullable;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.ce.task.projectanalysis.component.Component;
/**
* The original file for the specified component if it was registered as a moved file in the repository.
* <p>
- * Calling this method with a Component which is not a file, will always return {@link Optional#absent()}.
+ * Calling this method with a Component that is not a file will always return {@link Optional#empty()}.
* </p>
*/
Optional<OriginalFile> getOriginalFile(Component file);
*/
package org.sonar.ce.task.projectanalysis.filemove;
-import com.google.common.base.Optional;
import java.util.HashMap;
import java.util.Map;
+import java.util.Optional;
import org.sonar.ce.task.projectanalysis.component.Component;
import static com.google.common.base.Preconditions.checkArgument;
public Optional<OriginalFile> getOriginalFile(Component file) {
requireNonNull(file, "file can't be null");
if (file.getType() != Component.Type.FILE) {
- return Optional.absent();
+ return Optional.empty();
}
- return Optional.fromNullable(originalFiles.get(file.getDbKey()));
+ return Optional.ofNullable(originalFiles.get(file.getDbKey()));
}
}
}
public Input<DefaultIssue> create(Component component) {
- return new ClosedIssuesLazyInput(dbClient, component, movedFilesRepository.getOriginalFile(component).orNull());
+ return new ClosedIssuesLazyInput(dbClient, component, movedFilesRepository.getOriginalFile(component).orElse(null));
}
private class ClosedIssuesLazyInput extends BaseLazyInput {
*/
package org.sonar.ce.task.projectanalysis.issue;
-import com.google.common.base.Optional;
import java.util.Date;
+import java.util.Optional;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.core.issue.DefaultIssue;
import org.sonar.core.issue.IssueChangeContext;
package org.sonar.ce.task.projectanalysis.issue;
import org.sonar.ce.task.projectanalysis.component.Component;
+import java.util.Optional;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.ce.task.projectanalysis.filemove.MovedFilesRepository;
-import com.google.common.base.Optional;
-
public class RemoveProcessedComponentsVisitor extends IssueVisitor {
private final ComponentsWithUnprocessedIssues componentsWithUnprocessedIssues;
private final MovedFilesRepository movedFilesRepository;
// Folders have no issues
return new EmptyTrackerBaseLazyInput(dbClient, component);
}
- return new FileTrackerBaseLazyInput(dbClient, component, movedFilesRepository.getOriginalFile(component).orNull());
+ return new FileTrackerBaseLazyInput(dbClient, component, movedFilesRepository.getOriginalFile(component).orElse(null));
}
private class FileTrackerBaseLazyInput extends BaseLazyInput {
import org.sonar.ce.task.projectanalysis.analysis.Branch;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.ce.task.projectanalysis.component.MergeAndTargetBranchComponentUuids;
+import org.sonar.ce.task.projectanalysis.filemove.MovedFilesRepository;
import org.sonar.db.DbClient;
import org.sonar.db.DbSession;
import org.sonar.db.source.FileSourceDto;
private static final Logger LOGGER = Loggers.get(ScmInfoDbLoader.class);
private final AnalysisMetadataHolder analysisMetadataHolder;
+ private final MovedFilesRepository movedFilesRepository;
private final DbClient dbClient;
private final MergeAndTargetBranchComponentUuids mergeBranchComponentUuid;
- public ScmInfoDbLoader(AnalysisMetadataHolder analysisMetadataHolder, DbClient dbClient, MergeAndTargetBranchComponentUuids mergeBranchComponentUuid) {
+ public ScmInfoDbLoader(AnalysisMetadataHolder analysisMetadataHolder, MovedFilesRepository movedFilesRepository, DbClient dbClient,
+ MergeAndTargetBranchComponentUuids mergeBranchComponentUuid) {
this.analysisMetadataHolder = analysisMetadataHolder;
+ this.movedFilesRepository = movedFilesRepository;
this.dbClient = dbClient;
this.mergeBranchComponentUuid = mergeBranchComponentUuid;
}
private Optional<String> getFileUUid(Component file) {
if (!analysisMetadataHolder.isFirstAnalysis() && !analysisMetadataHolder.isSLBorPR()) {
+ Optional<MovedFilesRepository.OriginalFile> originalFile = movedFilesRepository.getOriginalFile(file);
+ if (originalFile.isPresent()) {
+ return originalFile.map(MovedFilesRepository.OriginalFile::getUuid);
+ }
return Optional.of(file.getUuid());
}
import java.util.HashMap;
import java.util.Map;
+import javax.annotation.CheckForNull;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.db.DbClient;
import org.sonar.db.DbSession;
import org.sonar.db.source.LineHashVersion;
-import org.sonar.ce.task.projectanalysis.component.Component;
public class DbLineHashVersion {
private final Map<Component, LineHashVersion> lineHashVersionPerComponent = new HashMap<>();
}
/**
- * Reads from DB the version of line hashes for a component and returns if it was generated taking into account the ranges of significant code.
+ * Reads from DB the version of line hashes for a component and returns whether it was generated taking into account the ranges of significant code.
* The response is cached.
* Returns false if the component is not in the DB.
*/
return lineHashVersionPerComponent.computeIfAbsent(component, this::compute) == LineHashVersion.WITH_SIGNIFICANT_CODE;
}
+ /**
+ * Reads from DB the version of line hashes for a component and returns whether it was generated taking into account the ranges of significant code.
+ * The response is cached.
+ * Returns false if the component is not in the DB.
+ */
+ public boolean hasLineHashesWithoutSignificantCode(Component component) {
+ return lineHashVersionPerComponent.computeIfAbsent(component, this::compute) == LineHashVersion.WITHOUT_SIGNIFICANT_CODE;
+ }
+
+ @CheckForNull
private LineHashVersion compute(Component component) {
try (DbSession session = dbClient.openSession(false)) {
return dbClient.fileSourceDao().selectLineHashesVersion(session, component.getUuid());
Changeset latestChangeWithRevision = fileSourceData.getLatestChangeWithRevision();
int lineHashesVersion = sourceLinesHash.getLineHashesVersion(file);
FileSourceDto previousDto = previousFileSourcesByUuid.get(file.getUuid());
-
if (previousDto == null) {
FileSourceDto dto = new FileSourceDto()
.setProjectUuid(projectUuid)
import java.util.Collections;
import java.util.List;
+import java.util.Optional;
import org.sonar.ce.task.projectanalysis.analysis.AnalysisMetadataHolder;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.ce.task.projectanalysis.component.MergeAndTargetBranchComponentUuids;
+import org.sonar.ce.task.projectanalysis.filemove.MovedFilesRepository;
import org.sonar.db.DbClient;
import org.sonar.db.DbSession;
import org.sonar.db.source.FileSourceDao;
private final FileSourceDao fileSourceDao;
private final SourceLinesHashRepository sourceLinesHash;
private final MergeAndTargetBranchComponentUuids mergeAndTargetBranchComponentUuids;
+ private final MovedFilesRepository movedFilesRepository;
private final AnalysisMetadataHolder analysisMetadataHolder;
public SourceLinesDiffImpl(DbClient dbClient, FileSourceDao fileSourceDao, SourceLinesHashRepository sourceLinesHash,
- MergeAndTargetBranchComponentUuids mergeAndTargetBranchComponentUuids, AnalysisMetadataHolder analysisMetadataHolder) {
+ MergeAndTargetBranchComponentUuids mergeAndTargetBranchComponentUuids, MovedFilesRepository movedFilesRepository, AnalysisMetadataHolder analysisMetadataHolder) {
this.dbClient = dbClient;
this.fileSourceDao = fileSourceDao;
this.sourceLinesHash = sourceLinesHash;
this.mergeAndTargetBranchComponentUuids = mergeAndTargetBranchComponentUuids;
+ this.movedFilesRepository = movedFilesRepository;
this.analysisMetadataHolder = analysisMetadataHolder;
}
uuid = mergeAndTargetBranchComponentUuids.getMergeBranchComponentUuid(component.getDbKey());
}
} else {
- uuid = component.getUuid();
+ Optional<MovedFilesRepository.OriginalFile> originalFile = movedFilesRepository.getOriginalFile(component);
+ uuid = originalFile.map(MovedFilesRepository.OriginalFile::getUuid).orElse(component.getUuid());
}
if (uuid == null) {
boolean cacheHit = cache.contains(component);
// check if line hashes are cached and if we can use it
- if (cacheHit && dbLineHashesVersion.hasLineHashesWithSignificantCode(component)) {
+ if (cacheHit && !dbLineHashesVersion.hasLineHashesWithoutSignificantCode(component)) {
return new CachedLineHashesComputer(cache.get(component));
}
}
private List<String> createLineHashesMatchingDBVersion(Component component) {
- if (!dbLineHashesVersion.hasLineHashesWithSignificantCode(component)) {
+ if (dbLineHashesVersion.hasLineHashesWithoutSignificantCode(component)) {
return createLineHashes(component, Optional.empty());
}
+ // if the file is not in the DB, this will be used too
Optional<LineRange[]> significantCodePerLine = significantCodeRepository.getRangesPerLine(component);
return createLineHashes(component, significantCodePerLine);
}
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
-import static org.assertj.guava.api.Assertions.assertThat;
import static org.sonar.ce.task.projectanalysis.component.ReportComponent.builder;
public class MutableMovedFilesRepositoryImplTest {
@Test
public void getOriginalFile_returns_absent_for_any_component_type_when_empty() {
- assertThat(underTest.getOriginalFile(SOME_FILE)).isAbsent();
+ assertThat(underTest.getOriginalFile(SOME_FILE)).isEmpty();
for (Component component : COMPONENTS_EXCEPT_FILE) {
- assertThat(underTest.getOriginalFile(component)).isAbsent();
+ assertThat(underTest.getOriginalFile(component)).isEmpty();
}
}
underTest.setOriginalFile(SOME_FILE, SOME_ORIGINAL_FILE);
for (Component component : COMPONENTS_EXCEPT_FILE) {
- assertThat(underTest.getOriginalFile(component)).isAbsent();
+ assertThat(underTest.getOriginalFile(component)).isEmpty();
}
assertThat(underTest.getOriginalFile(SOME_FILE)).contains(SOME_ORIGINAL_FILE);
}
*/
package org.sonar.ce.task.projectanalysis.filemove;
-import com.google.common.base.Optional;
import java.util.HashSet;
+import java.util.Optional;
import java.util.Set;
import javax.annotation.CheckForNull;
import org.junit.rules.ExternalResource;
*/
package org.sonar.ce.task.projectanalysis.issue;
-import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import java.util.List;
+import java.util.Optional;
import org.junit.Test;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.ce.task.projectanalysis.component.ReportComponent;
public void underTest_returns_inputFactory_loading_closed_issues_only_when_getIssues_is_called() {
String componentUuid = randomAlphanumeric(12);
ReportComponent component = ReportComponent.builder(Component.Type.FILE, 1).setUuid(componentUuid).build();
- when(movedFilesRepository.getOriginalFile(component)).thenReturn(Optional.absent());
+ when(movedFilesRepository.getOriginalFile(component)).thenReturn(Optional.empty());
Input<DefaultIssue> input = underTest.create(component);
public void underTest_returns_inputFactory_which_caches_loaded_issues() {
String componentUuid = randomAlphanumeric(12);
ReportComponent component = ReportComponent.builder(Component.Type.FILE, 1).setUuid(componentUuid).build();
- when(movedFilesRepository.getOriginalFile(component)).thenReturn(Optional.absent());
+ when(movedFilesRepository.getOriginalFile(component)).thenReturn(Optional.empty());
Input<DefaultIssue> input = underTest.create(component);
*/
package org.sonar.ce.task.projectanalysis.issue;
-import com.google.common.base.Optional;
import java.util.Collections;
import java.util.List;
+import java.util.Optional;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
IssueVisitors issueVisitors = new IssueVisitors(new IssueVisitor[] {issueVisitor});
defaultIssueCaptor = ArgumentCaptor.forClass(DefaultIssue.class);
- when(movedFilesRepository.getOriginalFile(any(Component.class))).thenReturn(Optional.absent());
+ when(movedFilesRepository.getOriginalFile(any(Component.class))).thenReturn(Optional.empty());
DbClient dbClient = dbTester.getDbClient();
TrackerRawInputFactory rawInputFactory = new TrackerRawInputFactory(treeRootHolder, reportReader, sourceLinesHash, new CommonRuleEngineImpl(),
*/
package org.sonar.ce.task.projectanalysis.issue;
-import com.google.common.base.Optional;
import java.util.Date;
+import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import org.junit.rules.ExpectedException;
public void setUp() throws Exception {
analysisMetadataHolder.setAnalysisDate(ANALYSIS_DATE);
when(movedFilesRepository.getOriginalFile(any(Component.class)))
- .thenReturn(Optional.absent());
+ .thenReturn(Optional.empty());
}
@Test
*/
package org.sonar.ce.task.projectanalysis.issue;
-import com.google.common.base.Optional;
+import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import org.sonar.ce.task.projectanalysis.component.Component;
@Test
public void remove_processed_files() {
- when(movedFilesRepository.getOriginalFile(any(Component.class))).thenReturn(Optional.absent());
+ when(movedFilesRepository.getOriginalFile(any(Component.class))).thenReturn(Optional.empty());
underTest.afterComponent(component);
verify(movedFilesRepository).getOriginalFile(component);
*/
package org.sonar.ce.task.projectanalysis.issue;
-import com.google.common.base.Optional;
+import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import org.sonar.ce.task.projectanalysis.analysis.AnalysisMetadataHolderRule;
when(dbClient.openSession(false)).thenReturn(dbSession);
when(dbClient.fileSourceDao()).thenReturn(fileSourceDao);
when(movedFilesRepository.getOriginalFile(any(Component.class)))
- .thenReturn(Optional.absent());
+ .thenReturn(Optional.empty());
}
@Test
import org.sonar.ce.task.projectanalysis.batch.BatchReportReaderRule;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.ce.task.projectanalysis.component.MergeAndTargetBranchComponentUuids;
+import org.sonar.ce.task.projectanalysis.filemove.MutableMovedFilesRepositoryRule;
import org.sonar.core.hash.SourceHashComputer;
import org.sonar.db.DbTester;
import org.sonar.db.component.BranchType;
public DbTester dbTester = DbTester.create(System2.INSTANCE);
@Rule
public BatchReportReaderRule reportReader = new BatchReportReaderRule();
+ @Rule
+ public MutableMovedFilesRepositoryRule movedFiles = new MutableMovedFilesRepositoryRule();
private Branch branch = mock(Branch.class);
private MergeAndTargetBranchComponentUuids mergeAndTargetBranchComponentUuids = mock(MergeAndTargetBranchComponentUuids.class);
- private ScmInfoDbLoader underTest = new ScmInfoDbLoader(analysisMetadataHolder, dbTester.getDbClient(), mergeAndTargetBranchComponentUuids);
+ private ScmInfoDbLoader underTest = new ScmInfoDbLoader(analysisMetadataHolder, movedFiles, dbTester.getDbClient(), mergeAndTargetBranchComponentUuids);
@Test
public void returns_ScmInfo_from_DB() {
import java.util.Arrays;
import javax.annotation.Nullable;
import org.junit.Before;
+import org.junit.Rule;
import org.junit.Test;
import org.sonar.ce.task.projectanalysis.analysis.AnalysisMetadataHolder;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.ce.task.projectanalysis.component.MergeAndTargetBranchComponentUuids;
+import org.sonar.ce.task.projectanalysis.filemove.MutableMovedFilesRepositoryRule;
import org.sonar.db.DbClient;
import org.sonar.db.DbSession;
import org.sonar.db.component.ComponentDao;
private FileSourceDao fileSourceDao = mock(FileSourceDao.class);
private SourceLinesHashRepository sourceLinesHash = mock(SourceLinesHashRepository.class);
private AnalysisMetadataHolder analysisMetadataHolder = mock(AnalysisMetadataHolder.class);
+ @Rule
+ public MutableMovedFilesRepositoryRule movedFiles = new MutableMovedFilesRepositoryRule();
private MergeAndTargetBranchComponentUuids mergeAndTargetBranchComponentUuids = mock(MergeAndTargetBranchComponentUuids.class);
private SourceLinesDiffImpl underTest = new SourceLinesDiffImpl(dbClient, fileSourceDao, sourceLinesHash,
- mergeAndTargetBranchComponentUuids, analysisMetadataHolder);
+ mergeAndTargetBranchComponentUuids, movedFiles, analysisMetadataHolder);
private static final int FILE_REF = 1;
@Test
public void should_create_hash_without_significant_code_if_db_has_no_significant_code() {
- when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(false);
+ when(dbLineHashVersion.hasLineHashesWithoutSignificantCode(file)).thenReturn(true);
List<String> lineHashes = underTest.getLineHashesMatchingDBVersion(file);
assertLineHashes(lineHashes, "line1", "line2", "line3");
- verify(dbLineHashVersion).hasLineHashesWithSignificantCode(file);
+ verify(dbLineHashVersion).hasLineHashesWithoutSignificantCode(file);
verifyNoMoreInteractions(dbLineHashVersion);
verifyZeroInteractions(significantCodeRepository);
}
@Test
public void should_create_hash_without_significant_code_if_report_has_no_significant_code() {
- when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(true);
+ when(dbLineHashVersion.hasLineHashesWithoutSignificantCode(file)).thenReturn(false);
when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.empty());
List<String> lineHashes = underTest.getLineHashesMatchingDBVersion(file);
assertLineHashes(lineHashes, "line1", "line2", "line3");
- verify(dbLineHashVersion).hasLineHashesWithSignificantCode(file);
+ verify(dbLineHashVersion).hasLineHashesWithoutSignificantCode(file);
verifyNoMoreInteractions(dbLineHashVersion);
verify(significantCodeRepository).getRangesPerLine(file);
verifyNoMoreInteractions(significantCodeRepository);
List<String> lineHashes = underTest.getLineHashesMatchingDBVersion(file);
assertLineHashes(lineHashes, "l", "", "ine3");
- verify(dbLineHashVersion).hasLineHashesWithSignificantCode(file);
+ verify(dbLineHashVersion).hasLineHashesWithoutSignificantCode(file);
verifyNoMoreInteractions(dbLineHashVersion);
verify(significantCodeRepository).getRangesPerLine(file);
verifyNoMoreInteractions(significantCodeRepository);
LineRange[] lineRanges = {new LineRange(0, 1), null, new LineRange(1, 5)};
sourceLinesHashCache.computeIfAbsent(file, c -> lineHashes);
- when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(true);
+ when(dbLineHashVersion.hasLineHashesWithoutSignificantCode(file)).thenReturn(false);
when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));
LineHashesComputer hashesComputer = underTest.getLineHashesComputerToPersist(file);
List<String> lineHashes = Lists.newArrayList("line1", "line2", "line3");
sourceLinesHashCache.computeIfAbsent(file, c -> lineHashes);
- when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(false);
+ when(dbLineHashVersion.hasLineHashesWithoutSignificantCode(file)).thenReturn(true);
when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.empty());
LineHashesComputer hashesComputer = underTest.getLineHashesComputerToPersist(file);
sourceLinesHashCache.computeIfAbsent(file, c -> lineHashes);
// DB has line hashes without significant code and significant code is available in the report, so we need to generate new line hashes
- when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(false);
+ when(dbLineHashVersion.hasLineHashesWithoutSignificantCode(file)).thenReturn(true);
when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));
LineHashesComputer hashesComputer = underTest.getLineHashesComputerToPersist(file);
@CheckForNull
public LineHashVersion selectLineHashesVersion(DbSession dbSession, String fileUuid) {
Integer version = mapper(dbSession).selectLineHashesVersion(fileUuid);
- return version == null ? LineHashVersion.WITHOUT_SIGNIFICANT_CODE : LineHashVersion.valueOf(version);
+ return version == null ? null : LineHashVersion.valueOf(version);
}
@CheckForNull
 * uuids in no specific order with 'SOURCE' source and a non-null path.
*/
public void scrollLineHashes(DbSession dbSession, Collection<String> fileUUids, ResultHandler<LineHashesWithUuidDto> rowHandler) {
- for (List<String> partition : toUniqueAndSortedPartitions(fileUUids)) {
- mapper(dbSession).scrollLineHashes(partition, rowHandler);
+ for (List<String> fileUuidsPartition : toUniqueAndSortedPartitions(fileUUids)) {
+ mapper(dbSession).scrollLineHashes(fileUuidsPartition, rowHandler);
}
}
repo.done();
}
- private void verifyIndicesMarkedAsInitialized() {
- verify(metadataIndex).setInitialized(RuleIndexDefinition.TYPE_RULE, true);
- verify(metadataIndex).setInitialized(RuleIndexDefinition.TYPE_RULE_EXTENSION, true);
- verify(metadataIndex).setInitialized(RuleIndexDefinition.TYPE_ACTIVE_RULE, true);
- reset(metadataIndex);
- }
-
private RuleParamDto getParam(List<RuleParamDto> params, String key) {
for (RuleParamDto param : params) {
if (param.getName().equals(key)) {