source.dussan.org Git - sonarqube.git/commitdiff
SONAR-16583 Fix location hashing to happen right before caching
authorJacek <jacek.poreda@sonarsource.com>
Fri, 5 Aug 2022 11:24:25 +0000 (13:24 +0200)
committersonartech <sonartech@sonarsource.com>
Mon, 8 Aug 2022 20:03:03 +0000 (20:03 +0000)
server/sonar-ce-task-projectanalysis/src/main/java/org/sonar/ce/task/projectanalysis/issue/ComputeLocationHashesVisitor.java
server/sonar-ce-task-projectanalysis/src/main/java/org/sonar/ce/task/projectanalysis/issue/IntegrateIssuesVisitor.java
server/sonar-ce-task-projectanalysis/src/main/java/org/sonar/ce/task/projectanalysis/issue/IssueVisitor.java
server/sonar-ce-task-projectanalysis/src/main/java/org/sonar/ce/task/projectanalysis/issue/IssueVisitors.java
server/sonar-ce-task-projectanalysis/src/test/java/org/sonar/ce/task/projectanalysis/issue/ComputeLocationHashesVisitorTest.java

index b85ccaeff9298c68d3f886e06f07c015c994ad40..9fd3df1993ee16eca78d48c6969dbc5037cf064e 100644 (file)
@@ -67,7 +67,7 @@ public class ComputeLocationHashesVisitor extends IssueVisitor {
   }
 
   @Override
-  public void afterComponent(Component component) {
+  public void beforeCaching(Component component) {
     Map<Component, List<Location>> locationsByComponent = new HashMap<>();
     List<LocationToSet> locationsToSet = new LinkedList<>();
 
index 8f4c2405d8205546cb8256d82ce78f2a5c7919a5..805354d96e0ec94a06cdfc17e2ba5d2a5ffe276d 100644 (file)
 package org.sonar.ce.task.projectanalysis.issue;
 
 import java.util.Collection;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.sonar.ce.task.projectanalysis.component.Component;
 import org.sonar.ce.task.projectanalysis.component.CrawlerDepthLimit;
@@ -80,14 +82,24 @@ public class IntegrateIssuesVisitor extends TypeAwareVisitorAdapter {
       if (fileStatuses.isDataUnchanged(component)) {
         // we assume there's a previous analysis of the same branch
         Input<DefaultIssue> baseIssues = baseInputFactory.create(component);
-        useBaseIssues(component, baseIssues.getIssues(), cacheAppender);
+        var issues = new LinkedList<>(baseIssues.getIssues());
+        processIssues(component, issues);
+        issueVisitors.beforeCaching(component);
+        appendIssuesToCache(cacheAppender, issues);
       } else {
         Input<DefaultIssue> rawInput = rawInputFactory.create(component);
         TrackingResult tracking = issueTracking.track(component, rawInput);
-        fillNewOpenIssues(component, tracking.newIssues(), rawInput, cacheAppender);
-        fillExistingOpenIssues(component, tracking.issuesToMerge(), cacheAppender);
-        closeIssues(component, tracking.issuesToClose(), cacheAppender);
-        copyIssues(component, tracking.issuesToCopy(), cacheAppender);
+        var newOpenIssues = fillNewOpenIssues(component, tracking.newIssues(), rawInput);
+        var existingOpenIssues = fillExistingOpenIssues(tracking.issuesToMerge());
+        var closedIssues = closeIssues(tracking.issuesToClose());
+        var copiedIssues = copyIssues(tracking.issuesToCopy());
+
+        var issues = Stream.of(newOpenIssues, existingOpenIssues, closedIssues, copiedIssues)
+          .flatMap(Collection::stream)
+          .collect(Collectors.toList());
+        processIssues(component, issues);
+        issueVisitors.beforeCaching(component);
+        appendIssuesToCache(cacheAppender, issues);
       }
       issueVisitors.afterComponent(component);
     } catch (Exception e) {
@@ -95,59 +107,64 @@ public class IntegrateIssuesVisitor extends TypeAwareVisitorAdapter {
     }
   }
 
-  private void useBaseIssues(Component component, Collection<DefaultIssue> dbIssues, CacheAppender<DefaultIssue> cacheAppender) {
-    for (DefaultIssue issue : dbIssues) {
-      process(component, issue, cacheAppender);
-    }
+  private void processIssues(Component component, Collection<DefaultIssue> issues) {
+    issues.forEach(issue -> processIssue(component, issue));
   }
 
-  private void fillNewOpenIssues(Component component, Stream<DefaultIssue> newIssues, Input<DefaultIssue> rawInput, CacheAppender<DefaultIssue> cacheAppender) {
+  private List<DefaultIssue> fillNewOpenIssues(Component component, Stream<DefaultIssue> newIssues, Input<DefaultIssue> rawInput) {
     List<DefaultIssue> newIssuesList = newIssues
       .peek(issueLifecycle::initNewOpenIssue)
       .collect(MoreCollectors.toList());
 
     if (newIssuesList.isEmpty()) {
-      return;
+      return newIssuesList;
     }
 
     pullRequestSourceBranchMerger.tryMergeIssuesFromSourceBranchOfPullRequest(component, newIssuesList, rawInput);
     issueStatusCopier.tryMerge(component, newIssuesList);
-
-    for (DefaultIssue issue : newIssuesList) {
-      process(component, issue, cacheAppender);
-    }
+    return newIssuesList;
   }
 
-  private void copyIssues(Component component, Map<DefaultIssue, DefaultIssue> matched, CacheAppender<DefaultIssue> cacheAppender) {
+  private List<DefaultIssue> fillExistingOpenIssues(Map<DefaultIssue, DefaultIssue> matched) {
+    List<DefaultIssue> newIssuesList = new LinkedList<>();
     for (Map.Entry<DefaultIssue, DefaultIssue> entry : matched.entrySet()) {
       DefaultIssue raw = entry.getKey();
       DefaultIssue base = entry.getValue();
-      issueLifecycle.copyExistingOpenIssueFromBranch(raw, base, referenceBranchComponentUuids.getReferenceBranchName());
-      process(component, raw, cacheAppender);
+      issueLifecycle.mergeExistingOpenIssue(raw, base);
+      newIssuesList.add(raw);
     }
+    return newIssuesList;
+  }
+
+  private static List<DefaultIssue> closeIssues(Stream<DefaultIssue> issues) {
+    return issues.map(issue ->
+    // TODO should replace flag "beingClosed" by express call to transition "automaticClose"
+    issue.setBeingClosed(true)
+    // TODO manual issues -> was updater.setResolution(newIssue, Issue.RESOLUTION_REMOVED, changeContext);. Is it a problem ?
+    ).collect(Collectors.toList());
   }
 
-  private void fillExistingOpenIssues(Component component, Map<DefaultIssue, DefaultIssue> matched, CacheAppender<DefaultIssue> cacheAppender) {
+  private List<DefaultIssue> copyIssues(Map<DefaultIssue, DefaultIssue> matched) {
+    List<DefaultIssue> newIssuesList = new LinkedList<>();
     for (Map.Entry<DefaultIssue, DefaultIssue> entry : matched.entrySet()) {
       DefaultIssue raw = entry.getKey();
       DefaultIssue base = entry.getValue();
-      issueLifecycle.mergeExistingOpenIssue(raw, base);
-      process(component, raw, cacheAppender);
+      issueLifecycle.copyExistingOpenIssueFromBranch(raw, base, referenceBranchComponentUuids.getReferenceBranchName());
+      newIssuesList.add(raw);
     }
+    return newIssuesList;
   }
 
-  private void closeIssues(Component component, Stream<DefaultIssue> issues, CacheAppender<DefaultIssue> cacheAppender) {
-    issues.forEach(issue -> {
-      // TODO should replace flag "beingClosed" by express call to transition "automaticClose"
-      issue.setBeingClosed(true);
-      // TODO manual issues -> was updater.setResolution(newIssue, Issue.RESOLUTION_REMOVED, changeContext);. Is it a problem ?
-      process(component, issue, cacheAppender);
-    });
-  }
-
-  private void process(Component component, DefaultIssue issue, CacheAppender<DefaultIssue> cacheAppender) {
+  private void processIssue(Component component, DefaultIssue issue) {
     issueLifecycle.doAutomaticTransition(issue);
     issueVisitors.onIssue(component, issue);
+  }
+
+  private static void appendIssuesToCache(CacheAppender<DefaultIssue> cacheAppender, Collection<DefaultIssue> issues) {
+    issues.forEach(issue -> appendIssue(issue, cacheAppender));
+  }
+
+  private static void appendIssue(DefaultIssue issue, CacheAppender<DefaultIssue> cacheAppender) {
     if (issue.isNew() || issue.isChanged() || issue.isCopied() || issue.isNoLongerNewCodeReferenceIssue() || issue.isToBeMigratedAsNewCodeReferenceIssue()) {
       cacheAppender.append(issue);
     }
index 5d70ac002180434aeb009690e981eea1be926bba..b438b7666305feca8b765e1e54e5f83a747d36e7 100644 (file)
@@ -44,6 +44,13 @@ public abstract class IssueVisitor {
 
   }
 
+  /**
+   * This method is called on a component before issues are persisted to cache.
+   */
+  public void beforeCaching(Component component) {
+
+  }
+
   public void afterComponent(Component component) {
 
   }
index 7efd0507ff38952e84264bd413a8de2a148ba9a7..d838fae9b1e04917d83c9f3fe7e44f71dab3f103 100644 (file)
@@ -47,4 +47,10 @@ public class IssueVisitors {
       visitor.afterComponent(component);
     }
   }
+
+  public void beforeCaching(Component component) {
+    for (IssueVisitor visitor : visitors) {
+      visitor.beforeCaching(component);
+    }
+  }
 }
index 24c3ee84534a08cc3f98706e4bde411d4c283f42..e40e2efbb1757d38b4cea94160e54ee2f17715ea 100644 (file)
@@ -88,7 +88,7 @@ public class ComputeLocationHashesVisitorTest {
 
     underTest.beforeComponent(FILE_1);
     underTest.onIssue(FILE_1, issue);
-    underTest.afterComponent(FILE_1);
+    underTest.beforeCaching(FILE_1);
 
     DbIssues.Locations locations = issue.getLocations();
     assertThat(locations.getChecksum()).isEmpty();
@@ -102,7 +102,7 @@ public class ComputeLocationHashesVisitorTest {
       .setLocations(DbIssues.Locations.newBuilder().setTextRange(createRange(1, 0, 3, EXAMPLE_LINE_OF_CODE_FORMAT.length() - 1)).build());
 
     underTest.onIssue(FILE_1, issue);
-    underTest.afterComponent(FILE_1);
+    underTest.beforeCaching(FILE_1);
 
     DbIssues.Locations locations = issue.getLocations();
     assertThat(locations.getChecksum()).isEmpty();
@@ -115,7 +115,7 @@ public class ComputeLocationHashesVisitorTest {
       .setLocations(DbIssues.Locations.newBuilder().setTextRange(createRange(1, 0, 3, EXAMPLE_LINE_OF_CODE_FORMAT.length() - 1)).build());
 
     underTest.onIssue(FILE_1, issue);
-    underTest.afterComponent(FILE_1);
+    underTest.beforeCaching(FILE_1);
 
     assertLocationHashIsMadeOf(issue, "intexample=line+of+code+1;intexample=line+of+code+2;intexample=line+of+code+3;");
   }
@@ -128,10 +128,10 @@ public class ComputeLocationHashesVisitorTest {
       .setLocations(DbIssues.Locations.newBuilder().setTextRange(createRange(1, 0, 1, LINE_IN_ANOTHER_FILE.length())).build());
 
     underTest.onIssue(FILE_1, issue1);
-    underTest.afterComponent(FILE_1);
+    underTest.beforeCaching(FILE_1);
 
     underTest.onIssue(FILE_2, issue2);
-    underTest.afterComponent(FILE_2);
+    underTest.beforeCaching(FILE_2);
 
     assertLocationHashIsMadeOf(issue1, "intexample=line+of+code+1;intexample=line+of+code+2;intexample=line+of+code+3;");
     assertLocationHashIsMadeOf(issue2, "Stringstring='line-in-the-another-file';");
@@ -143,7 +143,7 @@ public class ComputeLocationHashesVisitorTest {
       .setLocations(DbIssues.Locations.newBuilder().setTextRange(createRange(1, 13, 1, EXAMPLE_LINE_OF_CODE_FORMAT.length() - 1)).build());
 
     underTest.onIssue(FILE_1, issue);
-    underTest.afterComponent(FILE_1);
+    underTest.beforeCaching(FILE_1);
 
     assertLocationHashIsMadeOf(issue, "line+of+code+1;");
   }
@@ -154,7 +154,7 @@ public class ComputeLocationHashesVisitorTest {
       .setLocations(DbIssues.Locations.newBuilder().setTextRange(createRange(1, 13, 3, 11)).build());
 
     underTest.onIssue(FILE_1, issue);
-    underTest.afterComponent(FILE_1);
+    underTest.beforeCaching(FILE_1);
 
     assertLocationHashIsMadeOf(issue, "line+of+code+1;intexample=line+of+code+2;intexample");
   }
@@ -178,7 +178,7 @@ public class ComputeLocationHashesVisitorTest {
     when(sourceLinesRepository.readLines(FILE_1)).thenReturn(newOneLineIterator(LINE_IN_THE_MAIN_FILE));
 
     underTest.onIssue(FILE_1, issue);
-    underTest.afterComponent(FILE_1);
+    underTest.beforeCaching(FILE_1);
 
     verify(sourceLinesRepository).readLines(FILE_1);
     verifyNoMoreInteractions(sourceLinesRepository);
@@ -208,7 +208,7 @@ public class ComputeLocationHashesVisitorTest {
     when(sourceLinesRepository.readLines(FILE_2)).thenReturn(newOneLineIterator(LINE_IN_ANOTHER_FILE));
 
     underTest.onIssue(FILE_1, issue);
-    underTest.afterComponent(FILE_1);
+    underTest.beforeCaching(FILE_1);
 
     DbIssues.Locations locations = issue.getLocations();
 
@@ -237,7 +237,7 @@ public class ComputeLocationHashesVisitorTest {
     when(sourceLinesRepository.readLines(FILE_1)).thenReturn(manyLinesIterator(LINE_IN_THE_MAIN_FILE, ANOTHER_LINE_IN_THE_MAIN_FILE));
 
     underTest.onIssue(FILE_1, issue);
-    underTest.afterComponent(FILE_1);
+    underTest.beforeCaching(FILE_1);
 
     DbIssues.Locations locations = issue.getLocations();