source.dussan.org Git - sonarqube.git/commitdiff
SONAR-10647 Apply feedback
author     Duarte Meneses <duarte.meneses@sonarsource.com>
           Fri, 4 May 2018 08:13:43 +0000 (10:13 +0200)
committer  SonarTech <sonartech@sonarsource.com>
           Wed, 9 May 2018 18:20:46 +0000 (20:20 +0200)
26 files changed:
server/sonar-db-core/src/main/resources/org/sonar/db/version/schema-h2.ddl
server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceDao.java
server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceDto.java
server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceMapper.java
server/sonar-db-dao/src/main/java/org/sonar/db/source/LineHashVersion.java
server/sonar-db-dao/src/main/resources/org/sonar/db/source/FileSourceMapper.xml
server/sonar-db-dao/src/test/java/org/sonar/db/source/FileSourceDaoTest.java
server/sonar-db-dao/src/test/java/org/sonar/db/source/LineHashVersionTest.java
server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/insert-result.xml
server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/update-result.xml
server/sonar-db-migration/src/test/resources/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSourcesTest/fileSources.sql
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStep.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/issue/TrackerRawInputFactory.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SignificantCodeRepository.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesDiffImpl.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepository.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImpl.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/step/PersistFileSourcesStep.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStepTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/issue/TrackerRawInputFactoryTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesDiffImplTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashImplTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImplTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/step/PersistFileSourcesStepTest.java
sonar-core/src/main/java/org/sonar/core/hash/LineRange.java
sonar-core/src/test/java/org/sonar/core/hash/LineRangeTest.java

diff --git a/server/sonar-db-core/src/main/resources/org/sonar/db/version/schema-h2.ddl b/server/sonar-db-core/src/main/resources/org/sonar/db/version/schema-h2.ddl
index 8e879dc8a7a19cef1aab6f3bee4afe877fdfdce8..ee63fd3464e3b6bb3b0eefebc319b6023909b909 100644 (file)
@@ -615,14 +615,14 @@ CREATE TABLE "FILE_SOURCES" (
   "PROJECT_UUID" VARCHAR(50) NOT NULL,
   "FILE_UUID" VARCHAR(50) NOT NULL,
   "LINE_HASHES" CLOB,
+  "LINE_HASHES_VERSION" INTEGER,
   "BINARY_DATA" BLOB,
   "DATA_TYPE" VARCHAR(20),
   "DATA_HASH" VARCHAR(50),
   "SRC_HASH" VARCHAR(50),
   "REVISION" VARCHAR(100),
   "CREATED_AT" BIGINT NOT NULL,
-  "UPDATED_AT" BIGINT NOT NULL,
-  "LINE_HASHES_VERSION" INTEGER
+  "UPDATED_AT" BIGINT NOT NULL
 );
 CREATE INDEX "FILE_SOURCES_PROJECT_UUID" ON "FILE_SOURCES" ("PROJECT_UUID");
 CREATE UNIQUE INDEX "FILE_SOURCES_UUID_TYPE" ON "FILE_SOURCES" ("FILE_UUID", "DATA_TYPE");
diff --git a/server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceDao.java b/server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceDao.java
index faf0471d474e11828de63ae5d34c24a4f61c5ac4..dcdafcbc2517892ceedc121a09fac97611a3167b 100644 (file)
@@ -51,23 +51,8 @@ public class FileSourceDao implements Dao {
 
   @CheckForNull
   public LineHashVersion selectLineHashesVersion(DbSession dbSession, String fileUuid) {
-    Connection connection = dbSession.getConnection();
-    PreparedStatement pstmt = null;
-    ResultSet rs = null;
-    try {
-      pstmt = connection.prepareStatement("SELECT line_hashes_version FROM file_sources WHERE file_uuid=? AND data_type=?");
-      pstmt.setString(1, fileUuid);
-      pstmt.setString(2, Type.SOURCE);
-      rs = pstmt.executeQuery();
-      if (rs.next()) {
-        return LineHashVersion.valueOf(rs.getInt(1));
-      }
-      return null;
-    } catch (SQLException e) {
-      throw new IllegalStateException("Fail to read FILE_SOURCES.LINE_HASHES_VERSION of file " + fileUuid, e);
-    } finally {
-      DbUtils.closeQuietly(connection, pstmt, rs);
-    }
+    Integer version = mapper(dbSession).selectLineHashesVersion(fileUuid, Type.SOURCE);
+    return version == null ? LineHashVersion.WITHOUT_SIGNIFICANT_CODE : LineHashVersion.valueOf(version);
   }
 
   @CheckForNull
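
   For context, a caller now resolves the stored hash version through this DAO method. A minimal
   sketch (the dbClient accessor, dbSession and fileUuid variables are assumed here, not part of this commit):

     // Hypothetical caller: find out how the line hashes stored in DB were computed
     LineHashVersion dbVersion = dbClient.fileSourceDao().selectLineHashesVersion(dbSession, fileUuid);
     boolean storedWithSignificantCode = dbVersion == LineHashVersion.WITH_SIGNIFICANT_CODE;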
diff --git a/server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceDto.java b/server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceDto.java
index c8623847451f61145490988b2b02276749211eb1..0e30488e1d787eae0a902465edd37edd5dadf769 100644 (file)
@@ -58,7 +58,7 @@ public class FileSourceDto {
     return lineHashesVersion != null ? lineHashesVersion : LineHashVersion.WITHOUT_SIGNIFICANT_CODE.getDbValue();
   }
 
-  public FileSourceDto setLineHashesVersion(@Nullable Integer lineHashesVersion) {
+  public FileSourceDto setLineHashesVersion(int lineHashesVersion) {
     this.lineHashesVersion = lineHashesVersion;
     return this;
   }
diff --git a/server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceMapper.java b/server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceMapper.java
index df139dd6523707606fa5a97f63ae8dcacd64d214..38deb679be07de2ed868b1b6469fe2f8576c946d 100644 (file)
@@ -30,6 +30,9 @@ public interface FileSourceMapper {
   @CheckForNull
   FileSourceDto select(@Param("fileUuid") String fileUuid, @Param("dataType") String dataType);
 
+  @CheckForNull
+  Integer selectLineHashesVersion(@Param("fileUuid") String fileUuid, @Param("dataType") String dataType);
+
   void insert(FileSourceDto dto);
 
   void update(FileSourceDto dto);
diff --git a/server/sonar-db-dao/src/main/java/org/sonar/db/source/LineHashVersion.java b/server/sonar-db-dao/src/main/java/org/sonar/db/source/LineHashVersion.java
index b327c0b0ba6a566c6fa64e37b717590e381d136b..58460832468ce8a8ca57125beff606a260d9cfae 100644 (file)
@@ -19,8 +19,6 @@
  */
 package org.sonar.db.source;
 
-import javax.annotation.Nullable;
-
 public enum LineHashVersion {
   WITHOUT_SIGNIFICANT_CODE(0), WITH_SIGNIFICANT_CODE(1);
 
@@ -34,10 +32,7 @@ public enum LineHashVersion {
     return value;
   }
 
-  public static LineHashVersion valueOf(@Nullable Integer version) {
-    if (version == null) {
-      return LineHashVersion.WITHOUT_SIGNIFICANT_CODE;
-    }
+  public static LineHashVersion valueOf(int version) {
     if (version > 1 || version < 0) {
       throw new IllegalArgumentException("Unknown line hash version: " + version);
     }
diff --git a/server/sonar-db-dao/src/main/resources/org/sonar/db/source/FileSourceMapper.xml b/server/sonar-db-dao/src/main/resources/org/sonar/db/source/FileSourceMapper.xml
index 26b3b5779a0c383f77b94f138a4d6eb279b1324b..2fc4b3f6bc2ec168af04a8479817705e0cbb294d 100644 (file)
     WHERE project_uuid = #{projectUuid} and data_type=#{dataType}
   </select>
 
+  <select id="selectLineHashesVersion" parameterType="map" resultType="Integer">
+    SELECT line_hashes_version 
+    FROM file_sources
+    WHERE file_uuid = #{fileUuid,jdbcType=VARCHAR} and data_type=#{dataType,jdbcType=VARCHAR}
+  </select>
+
   <insert id="insert" parameterType="org.sonar.db.source.FileSourceDto" useGeneratedKeys="false">
     INSERT INTO file_sources (project_uuid, file_uuid, created_at, updated_at, binary_data, line_hashes, data_hash,
     src_hash, data_type, revision, line_hashes_version)
diff --git a/server/sonar-db-dao/src/test/java/org/sonar/db/source/FileSourceDaoTest.java b/server/sonar-db-dao/src/test/java/org/sonar/db/source/FileSourceDaoTest.java
index 1bcb3107689ced4d808bcb373be38aab1a1f85e5..c8fd1b8a734ea3af8e04caf78aaa4b8a33aa62a5 100644 (file)
@@ -105,7 +105,7 @@ public class FileSourceDaoTest {
       .setDataType(Type.SOURCE)
       .setCreatedAt(1500000000000L)
       .setUpdatedAt(1500000000001L)
-      .setLineHashesVersion(2)
+      .setLineHashesVersion(1)
       .setRevision("123456789"));
     session.commit();
 
@@ -133,9 +133,7 @@ public class FileSourceDaoTest {
   }
 
   @Test
-  public void selectLineHashesVersion_returns_by_default() {
-    dbTester.prepareDbUnit(getClass(), "shared.xml");
-
+  public void selectLineHashesVersion_returns_without_significant_code_by_default() {
     underTest.insert(session, new FileSourceDto()
       .setProjectUuid("PRJ_UUID")
       .setFileUuid("FILE2_UUID")
@@ -154,8 +152,6 @@ public class FileSourceDaoTest {
 
   @Test
   public void selectLineHashesVersion_succeeds() {
-    dbTester.prepareDbUnit(getClass(), "shared.xml");
-
     underTest.insert(session, new FileSourceDto()
       .setProjectUuid("PRJ_UUID")
       .setFileUuid("FILE2_UUID")
@@ -214,7 +210,7 @@ public class FileSourceDaoTest {
       .setLineHashes("NEW_LINE_HASHES")
       .setDataType(Type.SOURCE)
       .setUpdatedAt(1500000000002L)
-      .setLineHashesVersion(4)
+      .setLineHashesVersion(1)
       .setRevision("987654321"));
     session.commit();
 
diff --git a/server/sonar-db-dao/src/test/java/org/sonar/db/source/LineHashVersionTest.java b/server/sonar-db-dao/src/test/java/org/sonar/db/source/LineHashVersionTest.java
index d8b27dc4c36bd11d3671305d853f05fc383f01d9..cbd0598abcd3967515fd516ac342773cf1a00ab3 100644 (file)
@@ -31,16 +31,21 @@ public class LineHashVersionTest {
 
   @Test
   public void should_create_from_int() {
-    assertThat(LineHashVersion.valueOf((Integer) null)).isEqualTo(LineHashVersion.WITHOUT_SIGNIFICANT_CODE);
     assertThat(LineHashVersion.valueOf(0)).isEqualTo(LineHashVersion.WITHOUT_SIGNIFICANT_CODE);
     assertThat(LineHashVersion.valueOf(1)).isEqualTo(LineHashVersion.WITH_SIGNIFICANT_CODE);
   }
 
   @Test
-  public void should_throw_exception_if_version_is_unknown() {
+  public void should_throw_exception_if_version_is_too_high() {
     exception.expect(IllegalArgumentException.class);
     exception.expectMessage("Unknown line hash version: 2");
     LineHashVersion.valueOf(2);
+  }
 
+  @Test
+  public void should_throw_exception_if_version_is_too_low() {
+    exception.expect(IllegalArgumentException.class);
+    exception.expectMessage("Unknown line hash version: -1");
+    LineHashVersion.valueOf(-1);
   }
 }
diff --git a/server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/insert-result.xml b/server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/insert-result.xml
index b1f280de7948526bb361a54a2b122ae8126d27b6..5aec962e987b532057edf15d38922f121a088dc2 100644 (file)
@@ -14,6 +14,6 @@
                 line_hashes="LINE1_HASH\nLINE2_HASH"
                 src_hash="FILE2_HASH" revision="123456789"
                 created_at="1500000000000" updated_at="1500000000001"  data_type="SOURCE" 
-                line_hashes_version="2" />
+                line_hashes_version="1" />
 
 </dataset>
diff --git a/server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/update-result.xml b/server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/update-result.xml
index 204974d7ab02182e9e2a54c61c4bc72a82f00561..f57da523a84049ec0e41c59ddafbd5572c425f5d 100644 (file)
@@ -6,7 +6,7 @@
                 line_hashes="NEW_LINE_HASHES"
                 src_hash="NEW_FILE_HASH" revision="987654321"
                 created_at="1500000000000" updated_at="1500000000002"  data_type="SOURCE"
-                line_hashes_version="4" />
+                line_hashes_version="1" />
 
 
 </dataset>
diff --git a/server/sonar-db-migration/src/test/resources/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSourcesTest/fileSources.sql b/server/sonar-db-migration/src/test/resources/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSourcesTest/fileSources.sql
index ed2564fe5287bc8fd8f37fe0bd88268173d4a3ac..48ceb31c0a1a9be53900125906e0edbb37a0be0d 100644 (file)
@@ -13,4 +13,5 @@ CREATE TABLE "FILE_SOURCES" (
 );
 CREATE INDEX "FILE_SOURCES_PROJECT_UUID" ON "FILE_SOURCES" ("PROJECT_UUID");
 CREATE UNIQUE INDEX "FILE_SOURCES_UUID_TYPE" ON "FILE_SOURCES" ("FILE_UUID", "DATA_TYPE");
-CREATE INDEX "FILE_SOURCES_UPDATED_AT" ON "FILE_SOURCES" ("UPDATED_AT");
\ No newline at end of file
+CREATE INDEX "FILE_SOURCES_UPDATED_AT" ON "FILE_SOURCES" ("UPDATED_AT");
+
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStep.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStep.java
index e018f77d0a46c2a337355f9368e8a1cfadeee799..3a622ac76803d4843fb730667fd241eca227da34 100644 (file)
@@ -178,7 +178,7 @@ public class FileMoveDetectionStep implements ComputationStep {
     ImmutableMap.Builder<String, File> builder = ImmutableMap.builder();
     for (String fileKey : addedFileKeys) {
       Component component = reportFilesByKey.get(fileKey);
-      List<String> lineHashes = sourceLinesHash.getMatchingDB(component);
+      List<String> lineHashes = sourceLinesHash.getLineHashesMatchingDBVersion(component);
       builder.put(fileKey, new File(component.getReportAttributes().getPath(), lineHashes));
     }
     return builder.build();
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/issue/TrackerRawInputFactory.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/issue/TrackerRawInputFactory.java
index e2e5fbdf432420710f40b780488335979fc90459..878651b2c00a480ef10c7e38339f5a6d90ee5405 100644 (file)
@@ -80,7 +80,7 @@ public class TrackerRawInputFactory {
     @Override
     protected LineHashSequence loadLineHashSequence() {
       if (component.getType() == Component.Type.FILE) {
-        return new LineHashSequence(sourceLinesHash.getMatchingDB(component));
+        return new LineHashSequence(sourceLinesHash.getLineHashesMatchingDBVersion(component));
       } else {
         return new LineHashSequence(Collections.emptyList());
       }
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SignificantCodeRepository.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SignificantCodeRepository.java
index 10d16755bcb2992ab620fe10a88c974ba1aeb81b..d97bca8d16867abe3ec54b21fd57448fc1119c89 100644 (file)
@@ -52,7 +52,7 @@ public class SignificantCodeRepository {
         currentLine = lineRanges.next();
       }
 
-      if (currentLine.getLine() == i+1) {
+      if (currentLine.getLine() == i + 1) {
         ranges[i] = new LineRange(currentLine.getStartOffset(), currentLine.getEndOffset());
         currentLine = null;
       }
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesDiffImpl.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesDiffImpl.java
index dcf587c0763068b8fe8637549d717b270a52b643..b881e85c37310f08815bb059ba4345a7746da397 100644 (file)
@@ -57,7 +57,7 @@ public class SourceLinesDiffImpl implements SourceLinesDiff {
   }
 
   private List<String> getReportLines(Component component) {
-    return sourceLinesHash.getMatchingDB(component);
+    return sourceLinesHash.getLineHashesMatchingDBVersion(component);
   }
 
 }
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepository.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepository.java
index 8c616407a4e307ab510d0f980553e4b0139c2b44..be0ef67b44e6ae5c9c98f32f6bd076583c24db07 100644 (file)
@@ -23,22 +23,31 @@ import java.util.List;
 import org.sonar.server.computation.task.projectanalysis.component.Component;
 import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer;
 
+/**
+ * Generates line hashes from source code included in the report.
+ * Line hashes are versioned. Currently there are 2 possible versions: Hashes created using the entire line, or hashes created using
+ * only the "significant code" part of the line. The "significant code" can be optionally provided by code analyzers, meaning that 
+ * the line hash for a given file can be of either versions.
+ * We always persist line hashes taking into account "significant code", if it's provided. 
+ * When the line hashes are used for comparison with line hashes stored in the DB, we try to generate them using the same version 
+ * as the ones in the DB. This ensures that the line hashes are actually comparable.
+ */
 public interface SourceLinesHashRepository {
   /**
-   * Get line hashes from the report matching the version of the line hashes existing in the report, if possible.
-   * The line hashes are cached.
+   * Read from the report the line hashes for a file.
+   * The line hashes will have the version matching the version of the line hashes existing in the report, if possible.
    */
-  List<String> getMatchingDB(Component component);
+  List<String> getLineHashesMatchingDBVersion(Component component);
 
   /**
-   * The line computer will compute line hashes taking into account significant code (if it was provided by a code analyzer).
-   * It will use a cached value, if possible. If it's generated, it's not cached since it's assumed that it won't be 
-   * needed again after it is persisted.
+   * Get a line hash computer that can be used when persisting the line hashes in the DB.
+   * The version of the line hashes that are generated by the computer will be the one that takes into account significant code,
+   * if it was provided by a code analyzer.
    */
-  LineHashesComputer getLineProcessorToPersist(Component component);
+  LineHashesComputer getLineHashesComputerToPersist(Component component);
 
   /**
-   * Get the version of line hashes in the report
+   * Get the version of the line hashes for a given component in the report
    */
-  Integer getLineHashesVersion(Component component);
+  int getLineHashesVersion(Component component);
 }
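
   As a rough usage sketch of this interface, with the method names taken from the callers changed
   further down in this diff; `component` is assumed to be a file-level Component and `sourceLinesHash`
   an injected SourceLinesHashRepository:

     // Compare report content with stored content, using hashes of the same version as the DB
     List<String> hashes = sourceLinesHash.getLineHashesMatchingDBVersion(component);

     // Persist: the computer hashes significant code when an analyzer provided it
     LineHashesComputer computer = sourceLinesHash.getLineHashesComputerToPersist(component);
     // each source line is fed to the computer; the result is stored together with
     // sourceLinesHash.getLineHashesVersion(component)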
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImpl.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImpl.java
index fb56b853a805d5c6bf69ea7a6b935e839cb62f91..6e8ba5e726853506c9999660aee85e59513343dd 100644 (file)
@@ -43,12 +43,12 @@ public class SourceLinesHashRepositoryImpl implements SourceLinesHashRepository
   }
 
   @Override
-  public List<String> getMatchingDB(Component component) {
+  public List<String> getLineHashesMatchingDBVersion(Component component) {
     return cache.computeIfAbsent(component, this::createLineHashesMatchingDBVersion);
   }
 
   @Override
-  public Integer getLineHashesVersion(Component component) {
+  public int getLineHashesVersion(Component component) {
     if (significantCodeRepository.getRangesPerLine(component).isPresent()) {
       return LineHashVersion.WITH_SIGNIFICANT_CODE.getDbValue();
     } else {
@@ -57,7 +57,7 @@ public class SourceLinesHashRepositoryImpl implements SourceLinesHashRepository
   }
 
   @Override
-  public LineHashesComputer getLineProcessorToPersist(Component component) {
+  public LineHashesComputer getLineHashesComputerToPersist(Component component) {
     boolean cacheHit = cache.contains(component);
 
     // check if line hashes are cached and if we can use it
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/step/PersistFileSourcesStep.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/step/PersistFileSourcesStep.java
index 0e0427510faf951c56902634ec702d20eb3a4c37..1773bf03bc2cbafd3f998af979261da8bbad8617 100644 (file)
@@ -120,7 +120,7 @@ public class PersistFileSourcesStep implements ComputationStep {
     public void visitFile(Component file) {
       try (CloseableIterator<String> linesIterator = sourceLinesRepository.readLines(file);
         LineReaders lineReaders = new LineReaders(reportReader, scmInfoRepository, duplicationRepository, file)) {
-        LineHashesComputer lineHashesComputer = sourceLinesHash.getLineProcessorToPersist(file);
+        LineHashesComputer lineHashesComputer = sourceLinesHash.getLineHashesComputerToPersist(file);
         ComputeFileSourceData computeFileSourceData = new ComputeFileSourceData(linesIterator, lineReaders.readers(), lineHashesComputer);
         ComputeFileSourceData.Data fileSourceData = computeFileSourceData.compute();
         persistSource(fileSourceData, file, lineReaders.getLatestChangeWithRevision());
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStepTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStepTest.java
index 8d7226141a6c56864226e20cb73c1c7ecb854db2..f630dd11e4235a5f44991b704421fbae2ee109b2 100644 (file)
@@ -509,7 +509,7 @@ public class FileMoveDetectionStepTest {
     for (String line : content) {
       computer.addLine(line);
     }
-    when(sourceLinesHash.getMatchingDB(file)).thenReturn(computer.getLineHashes());
+    when(sourceLinesHash.getLineHashesMatchingDBVersion(file)).thenReturn(computer.getLineHashes());
   }
 
   private void mockContentOfFileInDb(String key, @Nullable String[] content) {
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/issue/TrackerRawInputFactoryTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/issue/TrackerRawInputFactoryTest.java
index 651557e3bc96c6e15e4f0b6f1799af9caf6b1f83..e04d29817883b1536f753e01519679b5034d6693 100644 (file)
@@ -72,7 +72,7 @@ public class TrackerRawInputFactoryTest {
 
   @Test
   public void load_source_hash_sequences() {
-    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
+    when(sourceLinesHash.getLineHashesMatchingDBVersion(FILE)).thenReturn(Collections.singletonList("line"));
     Input<DefaultIssue> input = underTest.create(FILE);
 
     assertThat(input.getLineHashSequence()).isNotNull();
@@ -94,7 +94,7 @@ public class TrackerRawInputFactoryTest {
   @Test
   public void load_issues_from_report() {
     when(issueFilter.accept(any(DefaultIssue.class), eq(FILE))).thenReturn(true);
-    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
+    when(sourceLinesHash.getLineHashesMatchingDBVersion(FILE)).thenReturn(Collections.singletonList("line"));
     ScannerReport.Issue reportIssue = ScannerReport.Issue.newBuilder()
       .setTextRange(TextRange.newBuilder().setStartLine(2).build())
       .setMsg("the message")
@@ -127,7 +127,7 @@ public class TrackerRawInputFactoryTest {
 
   @Test
   public void load_external_issues_from_report() {
-    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
+    when(sourceLinesHash.getLineHashesMatchingDBVersion(FILE)).thenReturn(Collections.singletonList("line"));
     ScannerReport.ExternalIssue reportIssue = ScannerReport.ExternalIssue.newBuilder()
       .setTextRange(TextRange.newBuilder().setStartLine(2).build())
       .setMsg("the message")
@@ -158,7 +158,7 @@ public class TrackerRawInputFactoryTest {
 
   @Test
   public void load_external_issues_from_report_with_default_effort() {
-    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
+    when(sourceLinesHash.getLineHashesMatchingDBVersion(FILE)).thenReturn(Collections.singletonList("line"));
     ScannerReport.ExternalIssue reportIssue = ScannerReport.ExternalIssue.newBuilder()
       .setTextRange(TextRange.newBuilder().setStartLine(2).build())
       .setMsg("the message")
@@ -189,7 +189,7 @@ public class TrackerRawInputFactoryTest {
   @Test
   public void ignore_issue_from_report() {
     when(issueFilter.accept(any(DefaultIssue.class), eq(FILE))).thenReturn(false);
-    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
+    when(sourceLinesHash.getLineHashesMatchingDBVersion(FILE)).thenReturn(Collections.singletonList("line"));
     ScannerReport.Issue reportIssue = ScannerReport.Issue.newBuilder()
       .setTextRange(TextRange.newBuilder().setStartLine(2).build())
       .setMsg("the message")
@@ -207,7 +207,7 @@ public class TrackerRawInputFactoryTest {
 
   @Test
   public void ignore_report_issues_on_common_rules() {
-    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
+    when(sourceLinesHash.getLineHashesMatchingDBVersion(FILE)).thenReturn(Collections.singletonList("line"));
     ScannerReport.Issue reportIssue = ScannerReport.Issue.newBuilder()
       .setMsg("the message")
       .setRuleRepository(CommonRuleKeys.commonRepositoryForLang("java"))
@@ -224,7 +224,7 @@ public class TrackerRawInputFactoryTest {
   @Test
   public void load_issues_of_compute_engine_common_rules() {
     when(issueFilter.accept(any(DefaultIssue.class), eq(FILE))).thenReturn(true);
-    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
+    when(sourceLinesHash.getLineHashesMatchingDBVersion(FILE)).thenReturn(Collections.singletonList("line"));
     DefaultIssue ceIssue = new DefaultIssue()
       .setRuleKey(RuleKey.of(CommonRuleKeys.commonRepositoryForLang("java"), "InsufficientCoverage"))
       .setMessage("not enough coverage")
@@ -240,7 +240,7 @@ public class TrackerRawInputFactoryTest {
   @Test
   public void ignore_issue_from_common_rule() {
     when(issueFilter.accept(any(DefaultIssue.class), eq(FILE))).thenReturn(false);
-    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
+    when(sourceLinesHash.getLineHashesMatchingDBVersion(FILE)).thenReturn(Collections.singletonList("line"));
     DefaultIssue ceIssue = new DefaultIssue()
       .setRuleKey(RuleKey.of(CommonRuleKeys.commonRepositoryForLang("java"), "InsufficientCoverage"))
       .setMessage("not enough coverage")
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesDiffImplTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesDiffImplTest.java
index 7af8b9553a81f419c9050856f68a6c1e572045c8..364b76756baefbe8d2e2624babca00c45f932fbb 100644 (file)
@@ -94,6 +94,6 @@ public class SourceLinesDiffImplTest {
   }
 
   private void setLineHashesInReport(Component component, String[] content) {
-    when(sourceLinesHash.getMatchingDB(component)).thenReturn(Arrays.asList(content));
+    when(sourceLinesHash.getLineHashesMatchingDBVersion(component)).thenReturn(Arrays.asList(content));
   }
 }
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashImplTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashImplTest.java
index fb51eb512be0650280ecd83983a8bed92f9e008b..d541c8702d154d597dec55f603c324fc1f153aa0 100644 (file)
@@ -42,7 +42,7 @@ public class SourceLinesHashImplTest {
   public void should_generate_correct_version_of_line_hashes() {
     Component component = createComponent(1);
 
-    underTest.getMatchingDB(component);
+    underTest.getLineHashesMatchingDBVersion(component);
 
   }
 
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImplTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImplTest.java
index 78e5e3aaa74951a9c0babb769bc611b2afb39ebd..0b607c689dbead8bbc31c3074dc692b773ff82e9 100644 (file)
@@ -89,7 +89,7 @@ public class SourceLinesHashRepositoryImplTest {
   @Test
   public void should_create_hash_without_significant_code_if_db_has_no_significant_code() {
     when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(false);
-    List<String> lineHashes = underTest.getMatchingDB(file);
+    List<String> lineHashes = underTest.getLineHashesMatchingDBVersion(file);
 
     assertLineHashes(lineHashes, "line1", "line2", "line3");
     verify(dbLineHashVersion).hasLineHashesWithSignificantCode(file);
@@ -102,7 +102,7 @@ public class SourceLinesHashRepositoryImplTest {
     when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(true);
     when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.empty());
 
-    List<String> lineHashes = underTest.getMatchingDB(file);
+    List<String> lineHashes = underTest.getLineHashesMatchingDBVersion(file);
 
     assertLineHashes(lineHashes, "line1", "line2", "line3");
     verify(dbLineHashVersion).hasLineHashesWithSignificantCode(file);
@@ -118,7 +118,7 @@ public class SourceLinesHashRepositoryImplTest {
     when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(true);
     when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));
 
-    List<String> lineHashes = underTest.getMatchingDB(file);
+    List<String> lineHashes = underTest.getLineHashesMatchingDBVersion(file);
 
     assertLineHashes(lineHashes, "l", "", "ine3");
     verify(dbLineHashVersion).hasLineHashesWithSignificantCode(file);
@@ -157,7 +157,7 @@ public class SourceLinesHashRepositoryImplTest {
     when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(true);
     when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));
 
-    LineHashesComputer hashesComputer = underTest.getLineProcessorToPersist(file);
+    LineHashesComputer hashesComputer = underTest.getLineHashesComputerToPersist(file);
 
     assertThat(hashesComputer).isInstanceOf(CachedLineHashesComputer.class);
     assertThat(hashesComputer.getResult()).isEqualTo(lineHashes);
@@ -171,7 +171,7 @@ public class SourceLinesHashRepositoryImplTest {
     when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(false);
     when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.empty());
 
-    LineHashesComputer hashesComputer = underTest.getLineProcessorToPersist(file);
+    LineHashesComputer hashesComputer = underTest.getLineHashesComputerToPersist(file);
 
     assertThat(hashesComputer).isInstanceOf(CachedLineHashesComputer.class);
     assertThat(hashesComputer.getResult()).isEqualTo(lineHashes);
@@ -188,7 +188,7 @@ public class SourceLinesHashRepositoryImplTest {
     when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(false);
     when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));
 
-    LineHashesComputer hashesComputer = underTest.getLineProcessorToPersist(file);
+    LineHashesComputer hashesComputer = underTest.getLineHashesComputerToPersist(file);
 
     assertThat(hashesComputer).isInstanceOf(SignificantCodeLineHashesComputer.class);
   }
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/step/PersistFileSourcesStepTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/step/PersistFileSourcesStepTest.java
index 9db2571b9c2e35c3bf6296761ce89964c51be605..ab910b36ac2caea1412b5133323c1807ce46db6b 100644 (file)
@@ -94,7 +94,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
   @Before
   public void setup() {
     when(system2.now()).thenReturn(NOW);
-    when(sourceLinesHashRepository.getLineProcessorToPersist(Mockito.any(Component.class))).thenReturn(lineHashesComputer);
+    when(sourceLinesHashRepository.getLineHashesComputerToPersist(Mockito.any(Component.class))).thenReturn(lineHashesComputer);
     underTest = new PersistFileSourcesStep(dbClient, system2, treeRootHolder, reportReader, fileSourceRepository, scmInfoRepository,
       duplicationRepository, sourceLinesHashRepository);
   }
diff --git a/sonar-core/src/main/java/org/sonar/core/hash/LineRange.java b/sonar-core/src/main/java/org/sonar/core/hash/LineRange.java
index 7eb1592a16dfe7cf7378645c15c2d87b6a87f200..438298825143f41bf6ec77b55ef4b952640e562f 100644 (file)
@@ -26,6 +26,7 @@ public class LineRange {
   private final int endOffset;
 
   public LineRange(int startOffset, int endOffset) {
+    Preconditions.checkArgument(startOffset >= 0, "Start offset not valid: %s", startOffset);
     Preconditions.checkArgument(startOffset <= endOffset, "Line range is not valid: %s must be greater or equal than %s", endOffset, startOffset);
     this.startOffset = startOffset;
     this.endOffset = endOffset;
diff --git a/sonar-core/src/test/java/org/sonar/core/hash/LineRangeTest.java b/sonar-core/src/test/java/org/sonar/core/hash/LineRangeTest.java
index 07cc5fe6bddddda03d7ef78e2c5c687e4b2164d9..2e1b5146d4f36b330c1576ff8120493059a3bce8 100644 (file)
@@ -35,6 +35,13 @@ public class LineRangeTest {
     exception.expectMessage("Line range is not valid: 1 must be greater or equal than 2");
     new LineRange(2, 1);
   }
+  
+  @Test
+  public void should_throw_ISE_if_startOffset_is_invalid() {
+    exception.expect(IllegalArgumentException.class);
+    exception.expectMessage("Start offset not valid: -1");
+    new LineRange(-1, 1);
+  }
 
   @Test
   public void check_getters() {