package org.sonar.ce.task.projectanalysis.issue;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.ce.task.projectanalysis.component.CrawlerDepthLimit;
if (fileStatuses.isDataUnchanged(component)) {
// we assume there's a previous analysis of the same branch
Input<DefaultIssue> baseIssues = baseInputFactory.create(component);
- useBaseIssues(component, baseIssues.getIssues(), cacheAppender);
+ var issues = new LinkedList<>(baseIssues.getIssues());
+ processIssues(component, issues);
+ issueVisitors.beforeCaching(component);
+ appendIssuesToCache(cacheAppender, issues);
} else {
Input<DefaultIssue> rawInput = rawInputFactory.create(component);
TrackingResult tracking = issueTracking.track(component, rawInput);
- fillNewOpenIssues(component, tracking.newIssues(), rawInput, cacheAppender);
- fillExistingOpenIssues(component, tracking.issuesToMerge(), cacheAppender);
- closeIssues(component, tracking.issuesToClose(), cacheAppender);
- copyIssues(component, tracking.issuesToCopy(), cacheAppender);
+ var newOpenIssues = fillNewOpenIssues(component, tracking.newIssues(), rawInput);
+ var existingOpenIssues = fillExistingOpenIssues(tracking.issuesToMerge());
+ var closedIssues = closeIssues(tracking.issuesToClose());
+ var copiedIssues = copyIssues(tracking.issuesToCopy());
+
+ var issues = Stream.of(newOpenIssues, existingOpenIssues, closedIssues, copiedIssues)
+ .flatMap(Collection::stream)
+ .collect(Collectors.toList());
+ processIssues(component, issues);
+ issueVisitors.beforeCaching(component);
+ appendIssuesToCache(cacheAppender, issues);
}
issueVisitors.afterComponent(component);
} catch (Exception e) {
}
}
- private void useBaseIssues(Component component, Collection<DefaultIssue> dbIssues, CacheAppender<DefaultIssue> cacheAppender) {
- for (DefaultIssue issue : dbIssues) {
- process(component, issue, cacheAppender);
- }
+ private void processIssues(Component component, Collection<DefaultIssue> issues) {
+ issues.forEach(issue -> processIssue(component, issue));
}
- private void fillNewOpenIssues(Component component, Stream<DefaultIssue> newIssues, Input<DefaultIssue> rawInput, CacheAppender<DefaultIssue> cacheAppender) {
+ private List<DefaultIssue> fillNewOpenIssues(Component component, Stream<DefaultIssue> newIssues, Input<DefaultIssue> rawInput) {
List<DefaultIssue> newIssuesList = newIssues
.peek(issueLifecycle::initNewOpenIssue)
.collect(MoreCollectors.toList());
if (newIssuesList.isEmpty()) {
- return;
+ return newIssuesList;
}
pullRequestSourceBranchMerger.tryMergeIssuesFromSourceBranchOfPullRequest(component, newIssuesList, rawInput);
issueStatusCopier.tryMerge(component, newIssuesList);
-
- for (DefaultIssue issue : newIssuesList) {
- process(component, issue, cacheAppender);
- }
+ return newIssuesList;
}
- private void copyIssues(Component component, Map<DefaultIssue, DefaultIssue> matched, CacheAppender<DefaultIssue> cacheAppender) {
+ private List<DefaultIssue> fillExistingOpenIssues(Map<DefaultIssue, DefaultIssue> matched) {
+ List<DefaultIssue> newIssuesList = new LinkedList<>();
for (Map.Entry<DefaultIssue, DefaultIssue> entry : matched.entrySet()) {
DefaultIssue raw = entry.getKey();
DefaultIssue base = entry.getValue();
- issueLifecycle.copyExistingOpenIssueFromBranch(raw, base, referenceBranchComponentUuids.getReferenceBranchName());
- process(component, raw, cacheAppender);
+ issueLifecycle.mergeExistingOpenIssue(raw, base);
+ newIssuesList.add(raw);
}
+ return newIssuesList;
+ }
+
+ private static List<DefaultIssue> closeIssues(Stream<DefaultIssue> issues) {
+ return issues.map(issue ->
+ // TODO should replace flag "beingClosed" by express call to transition "automaticClose"
+ issue.setBeingClosed(true)
+ // TODO manual issues -> was updater.setResolution(newIssue, Issue.RESOLUTION_REMOVED, changeContext);. Is it a problem ?
+ ).collect(Collectors.toList());
}
- private void fillExistingOpenIssues(Component component, Map<DefaultIssue, DefaultIssue> matched, CacheAppender<DefaultIssue> cacheAppender) {
+ private List<DefaultIssue> copyIssues(Map<DefaultIssue, DefaultIssue> matched) {
+ List<DefaultIssue> newIssuesList = new LinkedList<>();
for (Map.Entry<DefaultIssue, DefaultIssue> entry : matched.entrySet()) {
DefaultIssue raw = entry.getKey();
DefaultIssue base = entry.getValue();
- issueLifecycle.mergeExistingOpenIssue(raw, base);
- process(component, raw, cacheAppender);
+ issueLifecycle.copyExistingOpenIssueFromBranch(raw, base, referenceBranchComponentUuids.getReferenceBranchName());
+ newIssuesList.add(raw);
}
+ return newIssuesList;
}
- private void closeIssues(Component component, Stream<DefaultIssue> issues, CacheAppender<DefaultIssue> cacheAppender) {
- issues.forEach(issue -> {
- // TODO should replace flag "beingClosed" by express call to transition "automaticClose"
- issue.setBeingClosed(true);
- // TODO manual issues -> was updater.setResolution(newIssue, Issue.RESOLUTION_REMOVED, changeContext);. Is it a problem ?
- process(component, issue, cacheAppender);
- });
- }
-
- private void process(Component component, DefaultIssue issue, CacheAppender<DefaultIssue> cacheAppender) {
+ private void processIssue(Component component, DefaultIssue issue) {
issueLifecycle.doAutomaticTransition(issue);
issueVisitors.onIssue(component, issue);
+ }
+
+ private static void appendIssuesToCache(CacheAppender<DefaultIssue> cacheAppender, Collection<DefaultIssue> issues) {
+ issues.forEach(issue -> appendIssue(issue, cacheAppender));
+ }
+
+ private static void appendIssue(DefaultIssue issue, CacheAppender<DefaultIssue> cacheAppender) {
if (issue.isNew() || issue.isChanged() || issue.isCopied() || issue.isNoLongerNewCodeReferenceIssue() || issue.isToBeMigratedAsNewCodeReferenceIssue()) {
cacheAppender.append(issue);
}
underTest.beforeComponent(FILE_1);
underTest.onIssue(FILE_1, issue);
    underTest.beforeCaching(FILE_1);
DbIssues.Locations locations = issue.getLocations();
assertThat(locations.getChecksum()).isEmpty();
.setLocations(DbIssues.Locations.newBuilder().setTextRange(createRange(1, 0, 3, EXAMPLE_LINE_OF_CODE_FORMAT.length() - 1)).build());
underTest.onIssue(FILE_1, issue);
    underTest.beforeCaching(FILE_1);
DbIssues.Locations locations = issue.getLocations();
assertThat(locations.getChecksum()).isEmpty();
.setLocations(DbIssues.Locations.newBuilder().setTextRange(createRange(1, 0, 3, EXAMPLE_LINE_OF_CODE_FORMAT.length() - 1)).build());
underTest.onIssue(FILE_1, issue);
    underTest.beforeCaching(FILE_1);
assertLocationHashIsMadeOf(issue, "intexample=line+of+code+1;intexample=line+of+code+2;intexample=line+of+code+3;");
}
.setLocations(DbIssues.Locations.newBuilder().setTextRange(createRange(1, 0, 1, LINE_IN_ANOTHER_FILE.length())).build());
underTest.onIssue(FILE_1, issue1);
    underTest.beforeCaching(FILE_1);
underTest.onIssue(FILE_2, issue2);
    underTest.beforeCaching(FILE_2);
assertLocationHashIsMadeOf(issue1, "intexample=line+of+code+1;intexample=line+of+code+2;intexample=line+of+code+3;");
assertLocationHashIsMadeOf(issue2, "Stringstring='line-in-the-another-file';");
.setLocations(DbIssues.Locations.newBuilder().setTextRange(createRange(1, 13, 1, EXAMPLE_LINE_OF_CODE_FORMAT.length() - 1)).build());
underTest.onIssue(FILE_1, issue);
    underTest.beforeCaching(FILE_1);
assertLocationHashIsMadeOf(issue, "line+of+code+1;");
}
.setLocations(DbIssues.Locations.newBuilder().setTextRange(createRange(1, 13, 3, 11)).build());
underTest.onIssue(FILE_1, issue);
    underTest.beforeCaching(FILE_1);
assertLocationHashIsMadeOf(issue, "line+of+code+1;intexample=line+of+code+2;intexample");
}
when(sourceLinesRepository.readLines(FILE_1)).thenReturn(newOneLineIterator(LINE_IN_THE_MAIN_FILE));
underTest.onIssue(FILE_1, issue);
    underTest.beforeCaching(FILE_1);
verify(sourceLinesRepository).readLines(FILE_1);
verifyNoMoreInteractions(sourceLinesRepository);
when(sourceLinesRepository.readLines(FILE_2)).thenReturn(newOneLineIterator(LINE_IN_ANOTHER_FILE));
underTest.onIssue(FILE_1, issue);
    underTest.beforeCaching(FILE_1);
DbIssues.Locations locations = issue.getLocations();
when(sourceLinesRepository.readLines(FILE_1)).thenReturn(manyLinesIterator(LINE_IN_THE_MAIN_FILE, ANOTHER_LINE_IN_THE_MAIN_FILE));
underTest.onIssue(FILE_1, issue);
    underTest.beforeCaching(FILE_1);
DbIssues.Locations locations = issue.getLocations();