source.dussan.org Git - sonarqube.git/commitdiff
SONAR-10647 Compare lines of code taking into account significant code
author    Duarte Meneses <duarte.meneses@sonarsource.com>
Wed, 25 Apr 2018 15:18:39 +0000 (17:18 +0200)
committer SonarTech <sonartech@sonarsource.com>
Wed, 9 May 2018 18:20:46 +0000 (20:20 +0200)
51 files changed:
server/sonar-db-core/src/main/resources/org/sonar/db/version/schema-h2.ddl
server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceDao.java
server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceDto.java
server/sonar-db-dao/src/main/java/org/sonar/db/source/LineHashVersion.java [new file with mode: 0644]
server/sonar-db-dao/src/main/protobuf/db-file-sources.proto
server/sonar-db-dao/src/main/resources/org/sonar/db/source/FileSourceMapper.xml
server/sonar-db-dao/src/test/java/org/sonar/db/source/FileSourceDaoTest.java
server/sonar-db-dao/src/test/java/org/sonar/db/source/LineHashVersionTest.java [new file with mode: 0644]
server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/insert-result.xml
server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/no_line_hashes_when_only_test_data.xml
server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/shared.xml
server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/update-result.xml
server/sonar-db-migration/src/main/java/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSources.java [new file with mode: 0644]
server/sonar-db-migration/src/main/java/org/sonar/server/platform/db/migration/version/v72/DbVersion72.java
server/sonar-db-migration/src/test/java/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSourcesTest.java [new file with mode: 0644]
server/sonar-db-migration/src/test/java/org/sonar/server/platform/db/migration/version/v72/DbVersion72Test.java
server/sonar-db-migration/src/test/resources/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSourcesTest/fileSources.sql [new file with mode: 0644]
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/batch/BatchReportReader.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/batch/BatchReportReaderImpl.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/container/ProjectAnalysisTaskContainerPopulator.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStep.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/issue/TrackerRawInputFactory.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/ComputeFileSourceData.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/DbLineHashVersion.java [new file with mode: 0644]
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SignificantCodeRepository.java [new file with mode: 0644]
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesDiffImpl.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashCache.java [new file with mode: 0644]
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepository.java [new file with mode: 0644]
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImpl.java [new file with mode: 0644]
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/linereader/SymbolsLineReader.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/step/PersistFileSourcesStep.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/batch/BatchReportReaderRule.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStepTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/issue/IntegrateIssuesVisitorTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/issue/IssueCreationDateCalculatorTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/issue/TrackerRawInputFactoryTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/ComputeFileSourceDataTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/DbLineHashVersionTest.java [new file with mode: 0644]
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SignificantCodeRepositoryTest.java [new file with mode: 0644]
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesDiffImplTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashCacheTest.java [new file with mode: 0644]
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashImplTest.java [new file with mode: 0644]
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImplTest.java [new file with mode: 0644]
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/step/PersistFileSourcesStepTest.java
sonar-core/src/main/java/org/sonar/core/hash/LineRange.java [new file with mode: 0644]
sonar-core/src/main/java/org/sonar/core/hash/SourceLineHashesComputer.java [new file with mode: 0644]
sonar-core/src/main/java/org/sonar/core/hash/SourceLinesHashesComputer.java [deleted file]
sonar-core/src/main/java/org/sonar/core/issue/tracking/LineHashSequence.java
sonar-core/src/test/java/org/sonar/core/hash/LineRangeTest.java [new file with mode: 0644]
sonar-core/src/test/java/org/sonar/core/hash/SourceLinesHashesComputerTest.java
sonar-scanner-protocol/src/main/java/org/sonar/scanner/protocol/output/ScannerReportReader.java
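
In short: when an analyzer reports the "significant code" range of each line, the Compute Engine now hashes only that range instead of the full line, and the new FILE_SOURCES.LINE_HASHES_VERSION column records which variant was persisted so later analyses compare like with like. The sketch below is illustrative only — the class and variable names are invented, and it assumes a line hash is the hex MD5 of the line with whitespace stripped, roughly what SourceLineHashesComputer produces.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Illustrative sketch: shows why hashing only the significant code range keeps
// a line hash stable when the non-significant part (e.g. a trailing comment) changes.
public class SignificantCodeHashSketch {

  // Assumption: a line hash is the hex MD5 of the line with whitespace removed.
  static String lineHash(String line) {
    try {
      MessageDigest md5 = MessageDigest.getInstance("MD5");
      byte[] digest = md5.digest(line.replaceAll("\\s", "").getBytes(StandardCharsets.UTF_8));
      StringBuilder sb = new StringBuilder();
      for (byte b : digest) {
        sb.append(String.format("%02x", b));
      }
      return sb.toString();
    } catch (NoSuchAlgorithmException e) {
      throw new IllegalStateException(e);
    }
  }

  public static void main(String[] args) {
    String before = "int answer = 42; // initial guess";
    String after  = "int answer = 42; // reviewed and confirmed";

    // Hashing the whole line: the comment edit changes the hash,
    // so the line would be reported as modified.
    System.out.println(lineHash(before).equals(lineHash(after)));   // false

    // Hashing only the significant code range [0, 16) -> "int answer = 42;":
    // the hash is unchanged, so the line is treated as untouched.
    System.out.println(lineHash(before.substring(0, 16)).equals(lineHash(after.substring(0, 16)))); // true
  }
}

Because the two variants produce different hashes for the same source, the version column prevents an analysis from diffing report hashes of one kind against DB hashes of the other.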

index fc72c134c975ee4416e8cb014eabfdddb5c010ec..8e879dc8a7a19cef1aab6f3bee4afe877fdfdce8 100644 (file)
@@ -621,7 +621,8 @@ CREATE TABLE "FILE_SOURCES" (
   "SRC_HASH" VARCHAR(50),
   "REVISION" VARCHAR(100),
   "CREATED_AT" BIGINT NOT NULL,
-  "UPDATED_AT" BIGINT NOT NULL
+  "UPDATED_AT" BIGINT NOT NULL,
+  "LINE_HASHES_VERSION" INTEGER
 );
 CREATE INDEX "FILE_SOURCES_PROJECT_UUID" ON "FILE_SOURCES" ("PROJECT_UUID");
 CREATE UNIQUE INDEX "FILE_SOURCES_UUID_TYPE" ON "FILE_SOURCES" ("FILE_UUID", "DATA_TYPE");
index 7921adb039f29bef19c4617c67422e15771773e2..faf0471d474e11828de63ae5d34c24a4f61c5ac4 100644 (file)
@@ -49,6 +49,27 @@ public class FileSourceDao implements Dao {
     return mapper(dbSession).select(fileUuid, Type.TEST);
   }
 
+  @CheckForNull
+  public LineHashVersion selectLineHashesVersion(DbSession dbSession, String fileUuid) {
+    Connection connection = dbSession.getConnection();
+    PreparedStatement pstmt = null;
+    ResultSet rs = null;
+    try {
+      pstmt = connection.prepareStatement("SELECT line_hashes_version FROM file_sources WHERE file_uuid=? AND data_type=?");
+      pstmt.setString(1, fileUuid);
+      pstmt.setString(2, Type.SOURCE);
+      rs = pstmt.executeQuery();
+      if (rs.next()) {
+        return LineHashVersion.valueOf(rs.getInt(1));
+      }
+      return null;
+    } catch (SQLException e) {
+      throw new IllegalStateException("Fail to read FILE_SOURCES.LINE_HASHES_VERSION of file " + fileUuid, e);
+    } finally {
+      DbUtils.closeQuietly(connection, pstmt, rs);
+    }
+  }
+
   @CheckForNull
   public List<String> selectLineHashes(DbSession dbSession, String fileUuid) {
     Connection connection = dbSession.getConnection();
index 1ac9fbe4043a4422849418e4f0fd6fdb4f1b7505..c8623847451f61145490988b2b02276749211eb1 100644 (file)
@@ -52,6 +52,16 @@ public class FileSourceDto {
   private String dataType;
   private String dataHash;
   private String revision;
+  private Integer lineHashesVersion;
+
+  public Integer getLineHashesVersion() {
+    return lineHashesVersion != null ? lineHashesVersion : LineHashVersion.WITHOUT_SIGNIFICANT_CODE.getDbValue();
+  }
+
+  public FileSourceDto setLineHashesVersion(@Nullable Integer lineHashesVersion) {
+    this.lineHashesVersion = lineHashesVersion;
+    return this;
+  }
 
   public Long getId() {
     return id;
diff --git a/server/sonar-db-dao/src/main/java/org/sonar/db/source/LineHashVersion.java b/server/sonar-db-dao/src/main/java/org/sonar/db/source/LineHashVersion.java
new file mode 100644 (file)
index 0000000..b327c0b
--- /dev/null
@@ -0,0 +1,47 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.db.source;
+
+import javax.annotation.Nullable;
+
+public enum LineHashVersion {
+  WITHOUT_SIGNIFICANT_CODE(0), WITH_SIGNIFICANT_CODE(1);
+
+  private int value;
+
+  LineHashVersion(int value) {
+    this.value = value;
+  }
+
+  public int getDbValue() {
+    return value;
+  }
+
+  public static LineHashVersion valueOf(@Nullable Integer version) {
+    if (version == null) {
+      return LineHashVersion.WITHOUT_SIGNIFICANT_CODE;
+    }
+    if (version > 1 || version < 0) {
+      throw new IllegalArgumentException("Unknown line hash version: " + version);
+    }
+
+    return LineHashVersion.values()[version];
+  }
+}
index f62226f15785df3f7f4c7252aaa766afd087d849..e330e53676321b1766cd3c17c73d8549b6f4b90e 100644 (file)
@@ -62,6 +62,11 @@ message Line {
   optional int32 covered_conditions = 20;
 }
 
+message Range {
+  optional int32 startOffset = 1;
+  optional int32 endOffset = 2;
+}
+
 // TODO should be dropped as it prevents streaming
 message Data {
   repeated Line lines = 1;
index f8f9c6498f7c70afae54571990eb1d62909ab831..26b3b5779a0c383f77b94f138a4d6eb279b1324b 100644 (file)
@@ -7,7 +7,7 @@
   <select id="select" parameterType="map" resultType="org.sonar.db.source.FileSourceDto">
     SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt,
     binary_data as binaryData, line_hashes as lineHashes, data_hash as dataHash, src_hash as srcHash, data_type as
-    dataType, revision
+    dataType, revision, line_hashes_version as lineHashesVersion
     FROM file_sources
     WHERE file_uuid = #{fileUuid} and data_type = #{dataType}
   </select>
 
   <insert id="insert" parameterType="org.sonar.db.source.FileSourceDto" useGeneratedKeys="false">
     INSERT INTO file_sources (project_uuid, file_uuid, created_at, updated_at, binary_data, line_hashes, data_hash,
-    src_hash, data_type, revision)
+    src_hash, data_type, revision, line_hashes_version)
     VALUES (#{projectUuid,jdbcType=VARCHAR}, #{fileUuid,jdbcType=VARCHAR}, #{createdAt,jdbcType=BIGINT},
     #{updatedAt,jdbcType=BIGINT}, #{binaryData,jdbcType=BLOB}, #{lineHashes,jdbcType=CLOB},
     #{dataHash,jdbcType=VARCHAR}, #{srcHash,jdbcType=VARCHAR},#{dataType,jdbcType=VARCHAR},
-    #{revision,jdbcType=VARCHAR})
+    #{revision,jdbcType=VARCHAR}, #{lineHashesVersion,jdbcType=INTEGER})
   </insert>
 
   <update id="update" parameterType="org.sonar.db.source.FileSourceDto" useGeneratedKeys="false">
@@ -34,7 +34,8 @@
     line_hashes = #{lineHashes,jdbcType=CLOB},
     data_hash = #{dataHash,jdbcType=VARCHAR},
     src_hash = #{srcHash,jdbcType=VARCHAR},
-    revision = #{revision,jdbcType=VARCHAR}
+    revision = #{revision,jdbcType=VARCHAR},
+    line_hashes_version = #{lineHashesVersion,jdbcType=INTEGER}
     WHERE id = #{id}
   </update>
 
index 13df64a6fec8c6e9ed557e5a588ba5e2efaa597a..1bcb3107689ced4d808bcb373be38aab1a1f85e5 100644 (file)
@@ -57,6 +57,8 @@ public class FileSourceDaoTest {
     assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(1500000000000L);
     assertThat(fileSourceDto.getDataType()).isEqualTo(Type.SOURCE);
     assertThat(fileSourceDto.getRevision()).isEqualTo("123456789");
+    assertThat(fileSourceDto.getLineHashesVersion()).isEqualTo(0);
+
   }
 
   @Test
@@ -103,11 +105,12 @@ public class FileSourceDaoTest {
       .setDataType(Type.SOURCE)
       .setCreatedAt(1500000000000L)
       .setUpdatedAt(1500000000001L)
+      .setLineHashesVersion(2)
       .setRevision("123456789"));
     session.commit();
 
     dbTester.assertDbUnitTable(getClass(), "insert-result.xml", "file_sources",
-      "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at", "data_type", "revision");
+      "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at", "data_type", "revision", "line_hashes_version");
   }
 
   @Test
@@ -129,6 +132,47 @@ public class FileSourceDaoTest {
     assertThat(underTest.selectLineHashes(dbTester.getSession(), "FILE2_UUID")).isEmpty();
   }
 
+  @Test
+  public void selectLineHashesVersion_returns_by_default() {
+    dbTester.prepareDbUnit(getClass(), "shared.xml");
+
+    underTest.insert(session, new FileSourceDto()
+      .setProjectUuid("PRJ_UUID")
+      .setFileUuid("FILE2_UUID")
+      .setBinaryData("FILE2_BINARY_DATA".getBytes())
+      .setDataHash("FILE2_DATA_HASH")
+      .setLineHashes("hashes")
+      .setSrcHash("FILE2_HASH")
+      .setDataType(Type.SOURCE)
+      .setCreatedAt(1500000000000L)
+      .setUpdatedAt(1500000000001L)
+      .setRevision("123456789"));
+    session.commit();
+
+    assertThat(underTest.selectLineHashesVersion(dbTester.getSession(), "FILE2_UUID")).isEqualTo(LineHashVersion.WITHOUT_SIGNIFICANT_CODE);
+  }
+
+  @Test
+  public void selectLineHashesVersion_succeeds() {
+    dbTester.prepareDbUnit(getClass(), "shared.xml");
+
+    underTest.insert(session, new FileSourceDto()
+      .setProjectUuid("PRJ_UUID")
+      .setFileUuid("FILE2_UUID")
+      .setBinaryData("FILE2_BINARY_DATA".getBytes())
+      .setDataHash("FILE2_DATA_HASH")
+      .setLineHashes("hashes")
+      .setSrcHash("FILE2_HASH")
+      .setDataType(Type.SOURCE)
+      .setCreatedAt(1500000000000L)
+      .setUpdatedAt(1500000000001L)
+      .setLineHashesVersion(1)
+      .setRevision("123456789"));
+    session.commit();
+
+    assertThat(underTest.selectLineHashesVersion(dbTester.getSession(), "FILE2_UUID")).isEqualTo(LineHashVersion.WITH_SIGNIFICANT_CODE);
+  }
+
   @Test
   public void readLineHashesStream_does_not_fail_when_lineshashes_is_null() {
     dbTester.prepareDbUnit(getClass(), "shared.xml");
@@ -170,11 +214,12 @@ public class FileSourceDaoTest {
       .setLineHashes("NEW_LINE_HASHES")
       .setDataType(Type.SOURCE)
       .setUpdatedAt(1500000000002L)
+      .setLineHashesVersion(4)
       .setRevision("987654321"));
     session.commit();
 
-    dbTester.assertDbUnitTable(getClass(), "update-result.xml", "file_sources",
-      "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at", "data_type", "revision");
+    dbTester.assertDbUnitTable(getClass(), "update-result.xml", "file_sources", "project_uuid", "file_uuid",
+      "data_hash", "line_hashes", "src_hash", "created_at", "updated_at", "data_type", "revision", "line_hashes_version");
   }
 
   private static class ReaderToStringConsumer implements Consumer<Reader> {
diff --git a/server/sonar-db-dao/src/test/java/org/sonar/db/source/LineHashVersionTest.java b/server/sonar-db-dao/src/test/java/org/sonar/db/source/LineHashVersionTest.java
new file mode 100644 (file)
index 0000000..d8b27dc
--- /dev/null
@@ -0,0 +1,46 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.db.source;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class LineHashVersionTest {
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  @Test
+  public void should_create_from_int() {
+    assertThat(LineHashVersion.valueOf((Integer) null)).isEqualTo(LineHashVersion.WITHOUT_SIGNIFICANT_CODE);
+    assertThat(LineHashVersion.valueOf(0)).isEqualTo(LineHashVersion.WITHOUT_SIGNIFICANT_CODE);
+    assertThat(LineHashVersion.valueOf(1)).isEqualTo(LineHashVersion.WITH_SIGNIFICANT_CODE);
+  }
+
+  @Test
+  public void should_throw_exception_if_version_is_unknown() {
+    exception.expect(IllegalArgumentException.class);
+    exception.expectMessage("Unknown line hash version: 2");
+    LineHashVersion.valueOf(2);
+
+  }
+}
index 95f9882e86fb758ec90066b64b4ebb864877f1f1..b1f280de7948526bb361a54a2b122ae8126d27b6 100644 (file)
@@ -4,7 +4,8 @@
                 binary_data="abcde" data_hash="hash"
                 line_hashes="ABC\nDEF\nGHI"
                 src_hash="FILE_HASH" revision="123456789"
-                created_at="1500000000000" updated_at="1500000000000"  data_type="SOURCE" />
+                created_at="1500000000000" updated_at="1500000000000"  data_type="SOURCE" 
+                line_hashes_version="[null]" />
 
 
   <file_sources id="102" project_uuid="PRJ_UUID" file_uuid="FILE2_UUID"
@@ -12,6 +13,7 @@
                 data_hash="FILE2_DATA_HASH"
                 line_hashes="LINE1_HASH\nLINE2_HASH"
                 src_hash="FILE2_HASH" revision="123456789"
-                created_at="1500000000000" updated_at="1500000000001"  data_type="SOURCE" />
+                created_at="1500000000000" updated_at="1500000000001"  data_type="SOURCE" 
+                line_hashes_version="2" />
 
 </dataset>
index 010394a156eba5053cafdbcfc0955c807f17ff81..481445affdc87ba69f8af3b9a57f5ce3c147dceb 100644 (file)
@@ -4,6 +4,7 @@
                   binary_data="abcde" data_hash="[null]"
                   line_hashes="[null]"
                   src_hash="[null]"
+                  line_hashes_version="[null]"
                   created_at="1500000000000" updated_at="1500000000000"  data_type="TEST" />
 
 </dataset>
index f7374caf041715cdb97f5dd66ac28c07b3392049..a2d372095c0880f9ccdf74f29b3fa47c77972148 100644 (file)
@@ -4,6 +4,7 @@
                   binary_data="abcde" data_hash="hash"
                   line_hashes="ABC\nDEF\nGHI"
                   src_hash="FILE_HASH" revision="123456789"
-                  created_at="1500000000000" updated_at="1500000000000"  data_type="SOURCE"/>
+                  created_at="1500000000000" updated_at="1500000000000"  data_type="SOURCE"
+                  line_hashes_version="[null]"/>
 
 </dataset>
index 1bcf34dac4e5d0ce853ee8b91ab1a5340dcb8a70..204974d7ab02182e9e2a54c61c4bc72a82f00561 100644 (file)
@@ -5,7 +5,8 @@
                 data_hash="NEW_DATA_HASH"
                 line_hashes="NEW_LINE_HASHES"
                 src_hash="NEW_FILE_HASH" revision="987654321"
-                created_at="1500000000000" updated_at="1500000000002"  data_type="SOURCE" />
+                created_at="1500000000000" updated_at="1500000000002"  data_type="SOURCE"
+                line_hashes_version="4" />
 
 
 </dataset>
diff --git a/server/sonar-db-migration/src/main/java/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSources.java b/server/sonar-db-migration/src/main/java/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSources.java
new file mode 100644 (file)
index 0000000..f10c395
--- /dev/null
@@ -0,0 +1,44 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.platform.db.migration.version.v72;
+
+import java.sql.SQLException;
+import org.sonar.db.Database;
+import org.sonar.server.platform.db.migration.def.IntegerColumnDef;
+import org.sonar.server.platform.db.migration.sql.AddColumnsBuilder;
+import org.sonar.server.platform.db.migration.step.DdlChange;
+
+public class AddLineHashesVersionToFileSources extends DdlChange {
+
+  public AddLineHashesVersionToFileSources(Database db) {
+    super(db);
+  }
+
+  @Override
+  public void execute(Context context) throws SQLException {
+    context.execute(new AddColumnsBuilder(getDialect(), "file_sources")
+      .addColumn(IntegerColumnDef.newIntegerColumnDefBuilder()
+        .setColumnName("line_hashes_version")
+        .setIsNullable(true)
+        .build())
+      .build());
+  }
+
+}
index 3613d9144b39c4872de159bce7ad8c0994c48ab2..803381a42343289553b5e4ee71c967742adaaa54 100644 (file)
@@ -32,6 +32,7 @@ public class DbVersion72 implements DbVersion {
       .add(2102, "Populate HASH_METHOD on table users", PopulateHashMethodOnUsers.class)
       .add(2103, "Add isExternal boolean to rules", AddRuleExternal.class)
       .add(2104, "Create ALM_APP_INSTALLS table", CreateAlmAppInstallsTable.class)
+      .add(2105, "Add LINE_HASHES_VERSION to table FILE_SOURCES", AddLineHashesVersionToFileSources.class)
     ;
   }
 }
diff --git a/server/sonar-db-migration/src/test/java/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSourcesTest.java b/server/sonar-db-migration/src/test/java/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSourcesTest.java
new file mode 100644 (file)
index 0000000..97cbe28
--- /dev/null
@@ -0,0 +1,53 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.platform.db.migration.version.v72;
+
+import java.sql.SQLException;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.sonar.db.CoreDbTester;
+
+import static java.sql.Types.INTEGER;
+
+public class AddLineHashesVersionToFileSourcesTest {
+  @Rule
+  public final CoreDbTester dbTester = CoreDbTester.createForSchema(AddLineHashesVersionToFileSourcesTest.class, "fileSources.sql");
+
+  @Rule
+  public ExpectedException expectedException = ExpectedException.none();
+
+  private AddLineHashesVersionToFileSources underTest = new AddLineHashesVersionToFileSources(dbTester.database());
+
+  @Test
+  public void column_is_added_to_table() throws SQLException {
+    underTest.execute();
+    dbTester.assertColumnDefinition("file_sources", "line_hashes_version", INTEGER, null, true);
+  }
+  
+  @Test
+  public void migration_is_not_reentrant() throws SQLException {
+    underTest.execute();
+
+    expectedException.expect(IllegalStateException.class);
+
+    underTest.execute();
+  }
+}
index 8e86219a76f1a9d249c50ad3c8e2a06031ceb74c..eb251a545de958361cf91980224e3e47931bf338 100644 (file)
@@ -34,7 +34,7 @@ public class DbVersion72Test {
 
   @Test
   public void verify_migration_count() {
-    verifyMigrationCount(underTest, 5);
+    verifyMigrationCount(underTest, 6);
   }
 
 }
diff --git a/server/sonar-db-migration/src/test/resources/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSourcesTest/fileSources.sql b/server/sonar-db-migration/src/test/resources/org/sonar/server/platform/db/migration/version/v72/AddLineHashesVersionToFileSourcesTest/fileSources.sql
new file mode 100644 (file)
index 0000000..ed2564f
--- /dev/null
@@ -0,0 +1,16 @@
+CREATE TABLE "FILE_SOURCES" (
+  "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
+  "PROJECT_UUID" VARCHAR(50) NOT NULL,
+  "FILE_UUID" VARCHAR(50) NOT NULL,
+  "LINE_HASHES" CLOB,
+  "BINARY_DATA" BLOB,
+  "DATA_TYPE" VARCHAR(20),
+  "DATA_HASH" VARCHAR(50),
+  "SRC_HASH" VARCHAR(50),
+  "REVISION" VARCHAR(100),
+  "CREATED_AT" BIGINT NOT NULL,
+  "UPDATED_AT" BIGINT NOT NULL
+);
+CREATE INDEX "FILE_SOURCES_PROJECT_UUID" ON "FILE_SOURCES" ("PROJECT_UUID");
+CREATE UNIQUE INDEX "FILE_SOURCES_UUID_TYPE" ON "FILE_SOURCES" ("FILE_UUID", "DATA_TYPE");
+CREATE INDEX "FILE_SOURCES_UPDATED_AT" ON "FILE_SOURCES" ("UPDATED_AT");
\ No newline at end of file
index 80adfdee00a3acbbd914bc5e287b960b63cd9822..a2f2f7f5b99210df2199c1d08d04c9b3e1214f5c 100644 (file)
@@ -63,5 +63,5 @@ public interface BatchReportReader {
 
   CloseableIterator<ScannerReport.ContextProperty> readContextProperties();
   
-  CloseableIterator<ScannerReport.LineSgnificantCode> readComponentSignificantCode(int fileRef);
+  Optional<CloseableIterator<ScannerReport.LineSgnificantCode>> readComponentSignificantCode(int fileRef);
 }
index e9f2c81ace3ec8c3dafc92cfe6ac6748bcb85df1..1708534ab34902b1a33a05f62ca11b724928c192 100644 (file)
@@ -255,10 +255,14 @@ public class BatchReportReaderImpl implements BatchReportReader {
       fileInputStream.close();
     }
   }
+  
+  public boolean hasSignificantCode(int fileRef) {
+    return delegate.hasSignificantCode(fileRef);
+  }
 
   @Override
-  public CloseableIterator<LineSgnificantCode> readComponentSignificantCode(int fileRef) {
+  public Optional<CloseableIterator<LineSgnificantCode>> readComponentSignificantCode(int fileRef) {
     ensureInitialized();
-    return delegate.readComponentSignificantCode(fileRef);
+    return Optional.ofNullable(delegate.readComponentSignificantCode(fileRef));
   }
 }
index f40d4dece13806e9afecc54d7c4bd2b49268a180..948f56d7d329801fc31fec3379c3ffeb3eed0290 100644 (file)
@@ -112,9 +112,13 @@ import org.sonar.server.computation.task.projectanalysis.qualitymodel.Reliabilit
 import org.sonar.server.computation.task.projectanalysis.qualityprofile.ActiveRulesHolderImpl;
 import org.sonar.server.computation.task.projectanalysis.scm.ScmInfoDbLoader;
 import org.sonar.server.computation.task.projectanalysis.scm.ScmInfoRepositoryImpl;
+import org.sonar.server.computation.task.projectanalysis.source.DbLineHashVersion;
 import org.sonar.server.computation.task.projectanalysis.source.LastCommitVisitor;
+import org.sonar.server.computation.task.projectanalysis.source.SignificantCodeRepository;
 import org.sonar.server.computation.task.projectanalysis.source.SourceHashRepositoryImpl;
 import org.sonar.server.computation.task.projectanalysis.source.SourceLinesDiffImpl;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashCache;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepositoryImpl;
 import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepositoryImpl;
 import org.sonar.server.computation.task.projectanalysis.step.ReportComputationSteps;
 import org.sonar.server.computation.task.projectanalysis.step.SmallChangesetQualityGateSpecialCase;
@@ -194,6 +198,10 @@ public final class ProjectAnalysisTaskContainerPopulator implements ContainerPop
       ScmInfoRepositoryImpl.class,
       ScmInfoDbLoader.class,
       DuplicationRepositoryImpl.class,
+      SourceLinesHashRepositoryImpl.class,
+      DbLineHashVersion.class,
+      SignificantCodeRepository.class,
+      SourceLinesHashCache.class,
 
       // issues
       RuleRepositoryImpl.class,
index c6457341afbfe45be42151565686b6f8bc3e588f..e018f77d0a46c2a337355f9368e8a1cfadeee799 100644 (file)
@@ -37,8 +37,6 @@ import javax.annotation.concurrent.Immutable;
 import org.sonar.api.resources.Qualifiers;
 import org.sonar.api.utils.log.Logger;
 import org.sonar.api.utils.log.Loggers;
-import org.sonar.core.hash.SourceLinesHashesComputer;
-import org.sonar.core.util.CloseableIterator;
 import org.sonar.db.DbClient;
 import org.sonar.db.DbSession;
 import org.sonar.db.component.ComponentDto;
@@ -52,7 +50,7 @@ import org.sonar.server.computation.task.projectanalysis.component.DepthTraversa
 import org.sonar.server.computation.task.projectanalysis.component.TreeRootHolder;
 import org.sonar.server.computation.task.projectanalysis.component.TypeAwareVisitorAdapter;
 import org.sonar.server.computation.task.projectanalysis.filemove.FileSimilarity.File;
-import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepository;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepository;
 import org.sonar.server.computation.task.step.ComputationStep;
 
 import static com.google.common.base.MoreObjects.firstNonNull;
@@ -70,18 +68,18 @@ public class FileMoveDetectionStep implements ComputationStep {
   private final AnalysisMetadataHolder analysisMetadataHolder;
   private final TreeRootHolder rootHolder;
   private final DbClient dbClient;
-  private final SourceLinesRepository sourceLinesRepository;
   private final FileSimilarity fileSimilarity;
   private final MutableMovedFilesRepository movedFilesRepository;
+  private final SourceLinesHashRepository sourceLinesHash;
 
   public FileMoveDetectionStep(AnalysisMetadataHolder analysisMetadataHolder, TreeRootHolder rootHolder, DbClient dbClient,
-    SourceLinesRepository sourceLinesRepository, FileSimilarity fileSimilarity, MutableMovedFilesRepository movedFilesRepository) {
+    FileSimilarity fileSimilarity, MutableMovedFilesRepository movedFilesRepository, SourceLinesHashRepository sourceLinesHash) {
     this.analysisMetadataHolder = analysisMetadataHolder;
     this.rootHolder = rootHolder;
     this.dbClient = dbClient;
-    this.sourceLinesRepository = sourceLinesRepository;
     this.fileSimilarity = fileSimilarity;
     this.movedFilesRepository = movedFilesRepository;
+    this.sourceLinesHash = sourceLinesHash;
   }
 
   @Override
@@ -179,17 +177,9 @@ public class FileMoveDetectionStep implements ComputationStep {
   private Map<String, File> getReportFileSourcesByKey(Map<String, Component> reportFilesByKey, Set<String> addedFileKeys) {
     ImmutableMap.Builder<String, File> builder = ImmutableMap.builder();
     for (String fileKey : addedFileKeys) {
-      // FIXME computation of sourceHash and lineHashes might be done multiple times for some files: here, in ComputeFileSourceData, in
-      // SourceHashRepository
       Component component = reportFilesByKey.get(fileKey);
-      SourceLinesHashesComputer linesHashesComputer = new SourceLinesHashesComputer();
-      try (CloseableIterator<String> lineIterator = sourceLinesRepository.readLines(component)) {
-        while (lineIterator.hasNext()) {
-          String line = lineIterator.next();
-          linesHashesComputer.addLine(line);
-        }
-      }
-      builder.put(fileKey, new File(component.getReportAttributes().getPath(), linesHashesComputer.getLineHashes()));
+      List<String> lineHashes = sourceLinesHash.getMatchingDB(component);
+      builder.put(fileKey, new File(component.getReportAttributes().getPath(), lineHashes));
     }
     return builder.build();
   }
index 5344410c4590c55ac4f4c55918defb4819550320..e2e5fbdf432420710f40b780488335979fc90459 100644 (file)
@@ -42,26 +42,25 @@ import org.sonar.server.computation.task.projectanalysis.component.Component;
 import org.sonar.server.computation.task.projectanalysis.component.TreeRootHolder;
 import org.sonar.server.computation.task.projectanalysis.issue.commonrule.CommonRuleEngine;
 import org.sonar.server.computation.task.projectanalysis.issue.filter.IssueFilter;
-import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepository;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepository;
 import org.sonar.server.rule.CommonRuleKeys;
 
-import static com.google.common.collect.Lists.newArrayList;
 import static org.apache.commons.lang.StringUtils.isNotEmpty;
 
 public class TrackerRawInputFactory {
   private static final long DEFAULT_EXTERNAL_ISSUE_EFFORT = 0l;
   private final TreeRootHolder treeRootHolder;
   private final BatchReportReader reportReader;
-  private final SourceLinesRepository sourceLinesRepository;
   private final CommonRuleEngine commonRuleEngine;
   private final IssueFilter issueFilter;
+  private final SourceLinesHashRepository sourceLinesHash;
   private final RuleRepository ruleRepository;
 
   public TrackerRawInputFactory(TreeRootHolder treeRootHolder, BatchReportReader reportReader,
-    SourceLinesRepository sourceLinesRepository, CommonRuleEngine commonRuleEngine, IssueFilter issueFilter, RuleRepository ruleRepository) {
+    SourceLinesHashRepository sourceLinesHash, CommonRuleEngine commonRuleEngine, IssueFilter issueFilter, RuleRepository ruleRepository) {
     this.treeRootHolder = treeRootHolder;
     this.reportReader = reportReader;
-    this.sourceLinesRepository = sourceLinesRepository;
+    this.sourceLinesHash = sourceLinesHash;
     this.commonRuleEngine = commonRuleEngine;
     this.issueFilter = issueFilter;
     this.ruleRepository = ruleRepository;
@@ -80,15 +79,11 @@ public class TrackerRawInputFactory {
 
     @Override
     protected LineHashSequence loadLineHashSequence() {
-      List<String> lines;
       if (component.getType() == Component.Type.FILE) {
-        try (CloseableIterator<String> linesIt = sourceLinesRepository.readLines(component)) {
-          lines = newArrayList(linesIt);
-        }
+        return new LineHashSequence(sourceLinesHash.getMatchingDB(component));
       } else {
-        lines = Collections.emptyList();
+        return new LineHashSequence(Collections.emptyList());
       }
-      return LineHashSequence.createForLines(lines);
     }
 
     @Override
index dd06717d66d78c27c196947884db347d68a1a50a..d15efb9b1c0c34bdc2ec1af42988aff6eb0fec40 100644 (file)
@@ -23,68 +23,72 @@ import com.google.common.base.Joiner;
 import java.util.Iterator;
 import java.util.List;
 import org.sonar.core.hash.SourceHashComputer;
-import org.sonar.core.hash.SourceLinesHashesComputer;
 import org.sonar.db.protobuf.DbFileSources;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer;
 import org.sonar.server.computation.task.projectanalysis.source.linereader.LineReader;
 
 public class ComputeFileSourceData {
+  private static final Joiner LINE_RETURN_JOINER = Joiner.on('\n');
 
   private final List<LineReader> lineReaders;
   private final Iterator<String> linesIterator;
+  private final SourceHashComputer sourceHashComputer;
+  private final LineHashesComputer lineHashesComputer;
 
-  private final int numberOfLines;
-  private int currentLine;
-
-  public ComputeFileSourceData(Iterator<String> sourceLinesIterator, List<LineReader> dataLineReaders, int numberOfLines) {
+  public ComputeFileSourceData(Iterator<String> sourceLinesIterator, List<LineReader> dataLineReaders, LineHashesComputer lineHashesComputer) {
     this.lineReaders = dataLineReaders;
     this.linesIterator = sourceLinesIterator;
-    this.numberOfLines = numberOfLines;
-    this.currentLine = 0;
+    this.lineHashesComputer = lineHashesComputer;
+    this.sourceHashComputer = new SourceHashComputer();
   }
 
   public Data compute() {
-    Data data = new Data(numberOfLines);
+    DbFileSources.Data.Builder fileSourceBuilder = DbFileSources.Data.newBuilder();
+    int currentLine = 0;
+
     while (linesIterator.hasNext()) {
       currentLine++;
-      read(data, linesIterator.next(), linesIterator.hasNext());
+      read(fileSourceBuilder, currentLine, linesIterator.next(), linesIterator.hasNext());
     }
-    return data;
+
+    return new Data(fileSourceBuilder.build(), LINE_RETURN_JOINER.join(lineHashesComputer.getResult()), sourceHashComputer.getHash());
   }
 
-  private void read(Data data, String lineSource, boolean hasNextLine) {
-    data.linesHashesComputer.addLine(lineSource);
-    data.sourceHashComputer.addLine(lineSource, hasNextLine);
+  private void read(DbFileSources.Data.Builder fileSourceBuilder, int currentLine, String lineSource, boolean hasNextLine) {
+    sourceHashComputer.addLine(lineSource, hasNextLine);
+    lineHashesComputer.addLine(lineSource);
 
-    DbFileSources.Line.Builder lineBuilder = data.fileSourceBuilder
+    DbFileSources.Line.Builder lineBuilder = fileSourceBuilder
       .addLinesBuilder()
       .setSource(lineSource)
       .setLine(currentLine);
+
     for (LineReader lineReader : lineReaders) {
       lineReader.read(lineBuilder);
     }
   }
 
   public static class Data {
-    private static final Joiner LINE_RETURN_JOINER = Joiner.on('\n');
-
-    private final SourceLinesHashesComputer linesHashesComputer;
-    private final SourceHashComputer sourceHashComputer = new SourceHashComputer();
-    private final DbFileSources.Data.Builder fileSourceBuilder = DbFileSources.Data.newBuilder();
+    private final DbFileSources.Data fileSourceData;
+    private final String lineHashes;
+    private final String srcHash;
 
-    public Data(int lineCount) {
-      this.linesHashesComputer = new SourceLinesHashesComputer(lineCount);
+    private Data(DbFileSources.Data fileSourceData, String lineHashes, String srcHash) {
+      this.fileSourceData = fileSourceData;
+      this.lineHashes = lineHashes;
+      this.srcHash = srcHash;
     }
 
     public String getSrcHash() {
-      return sourceHashComputer.getHash();
+      return srcHash;
     }
 
     public String getLineHashes() {
-      return LINE_RETURN_JOINER.join(linesHashesComputer.getLineHashes());
+      return lineHashes;
     }
 
     public DbFileSources.Data getFileSourceData() {
-      return fileSourceBuilder.build();
+      return fileSourceData;
     }
   }
 
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/DbLineHashVersion.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/DbLineHashVersion.java
new file mode 100644 (file)
index 0000000..75e2d77
--- /dev/null
@@ -0,0 +1,51 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.source;
+
+import java.util.HashMap;
+import java.util.Map;
+import org.sonar.db.DbClient;
+import org.sonar.db.DbSession;
+import org.sonar.db.source.LineHashVersion;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+
+public class DbLineHashVersion {
+  private final Map<Component, LineHashVersion> lineHashVersionPerComponent = new HashMap<>();
+  private final DbClient dbClient;
+
+  public DbLineHashVersion(DbClient dbClient) {
+    this.dbClient = dbClient;
+  }
+
+  /**
+   * Reads from DB the version of line hashes for a component and returns if it was generated taking into account the ranges of significant code.
+   * The response is cached.
+   * Returns false if the component is not in the DB.
+   */
+  public boolean hasLineHashesWithSignificantCode(Component component) {
+    return lineHashVersionPerComponent.computeIfAbsent(component, this::compute) == LineHashVersion.WITH_SIGNIFICANT_CODE;
+  }
+
+  private LineHashVersion compute(Component component) {
+    try (DbSession session = dbClient.openSession(false)) {
+      return dbClient.fileSourceDao().selectLineHashesVersion(session, component.getUuid());
+    }
+  }
+}
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SignificantCodeRepository.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SignificantCodeRepository.java
new file mode 100644 (file)
index 0000000..10d1675
--- /dev/null
@@ -0,0 +1,63 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.source;
+
+import java.util.Optional;
+import org.sonar.core.hash.LineRange;
+import org.sonar.core.util.CloseableIterator;
+import org.sonar.scanner.protocol.output.ScannerReport.LineSgnificantCode;
+import org.sonar.server.computation.task.projectanalysis.batch.BatchReportReader;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+
+public class SignificantCodeRepository {
+  private final BatchReportReader reportReader;
+
+  public SignificantCodeRepository(BatchReportReader reportReader) {
+    this.reportReader = reportReader;
+  }
+
+  public Optional<LineRange[]> getRangesPerLine(Component component) {
+    int numLines = component.getFileAttributes().getLines();
+
+    Optional<CloseableIterator<LineSgnificantCode>> significantCode = reportReader.readComponentSignificantCode(component.getReportAttributes().getRef());
+    return significantCode.map(s -> toArray(s, numLines));
+  }
+
+  private static LineRange[] toArray(CloseableIterator<LineSgnificantCode> lineRanges, int numLines) {
+    LineRange[] ranges = new LineRange[numLines];
+    LineSgnificantCode currentLine = null;
+
+    for (int i = 0; i < numLines; i++) {
+      if (currentLine == null) {
+        if (!lineRanges.hasNext()) {
+          break;
+        }
+        currentLine = lineRanges.next();
+      }
+
+      if (currentLine.getLine() == i+1) {
+        ranges[i] = new LineRange(currentLine.getStartOffset(), currentLine.getEndOffset());
+        currentLine = null;
+      }
+    }
+
+    return ranges;
+  }
+}
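
A rough illustration of the mapping performed by toArray above, using hypothetical stand-ins for ScannerReport.LineSgnificantCode and LineRange (field names assumed): entries arrive ordered by line number, and any line the analyzer did not report ends up as a null slot, which the hash computer later treats as a line with no significant code.

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

// Hypothetical stand-in for ScannerReport.LineSgnificantCode: 1-based line
// number plus start/end offsets of the significant code on that line.
record SignificantLine(int line, int startOffset, int endOffset) {}

// Minimal stand-in for org.sonar.core.hash.LineRange.
record Range(int start, int end) {}

public class SignificantCodeMappingSketch {

  // Same shape as SignificantCodeRepository.toArray: index i holds the range
  // for line i + 1, or null when the report has no entry for that line.
  static Range[] toArray(Iterator<SignificantLine> entries, int numLines) {
    Range[] ranges = new Range[numLines];
    SignificantLine current = null;
    for (int i = 0; i < numLines; i++) {
      if (current == null) {
        if (!entries.hasNext()) {
          break;
        }
        current = entries.next();
      }
      if (current.line() == i + 1) {
        ranges[i] = new Range(current.startOffset(), current.endOffset());
        current = null;
      }
    }
    return ranges;
  }

  public static void main(String[] args) {
    // Significant code reported for lines 1 and 3 of a 4-line file.
    List<SignificantLine> reported = List.of(new SignificantLine(1, 0, 10), new SignificantLine(3, 2, 8));
    System.out.println(Arrays.toString(toArray(reported.iterator(), 4)));
    // -> [Range[start=0, end=10], null, Range[start=2, end=8], null]
  }
}
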
index 02e08eea5a79f9a14c1259fa4e59c2cfce924d9f..dcf587c0763068b8fe8637549d717b270a52b643 100644 (file)
@@ -21,8 +21,6 @@ package org.sonar.server.computation.task.projectanalysis.source;
 
 import java.util.Collections;
 import java.util.List;
-import org.sonar.core.hash.SourceLinesHashesComputer;
-import org.sonar.core.util.CloseableIterator;
 import org.sonar.db.DbClient;
 import org.sonar.db.DbSession;
 import org.sonar.db.source.FileSourceDao;
@@ -30,20 +28,18 @@ import org.sonar.server.computation.task.projectanalysis.component.Component;
 
 public class SourceLinesDiffImpl implements SourceLinesDiff {
 
-  private final SourceLinesRepository sourceLinesRepository;
-
   private final DbClient dbClient;
   private final FileSourceDao fileSourceDao;
+  private final SourceLinesHashRepository sourceLinesHash;
 
-  public SourceLinesDiffImpl(DbClient dbClient, FileSourceDao fileSourceDao, SourceLinesRepository sourceLinesRepository) {
+  public SourceLinesDiffImpl(DbClient dbClient, FileSourceDao fileSourceDao, SourceLinesHashRepository sourceLinesHash) {
     this.dbClient = dbClient;
     this.fileSourceDao = fileSourceDao;
-    this.sourceLinesRepository = sourceLinesRepository;
+    this.sourceLinesHash = sourceLinesHash;
   }
 
   @Override
   public int[] computeMatchingLines(Component component) {
-
     List<String> database = getDBLines(component);
     List<String> report = getReportLines(component);
 
@@ -61,14 +57,7 @@ public class SourceLinesDiffImpl implements SourceLinesDiff {
   }
 
   private List<String> getReportLines(Component component) {
-    SourceLinesHashesComputer linesHashesComputer = new SourceLinesHashesComputer();
-    try (CloseableIterator<String> lineIterator = sourceLinesRepository.readLines(component)) {
-      while (lineIterator.hasNext()) {
-        String line = lineIterator.next();
-        linesHashesComputer.addLine(line);
-      }
-    }
-    return linesHashesComputer.getLineHashes();
+    return sourceLinesHash.getMatchingDB(component);
   }
 
 }
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashCache.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashCache.java
new file mode 100644 (file)
index 0000000..a35fe93
--- /dev/null
@@ -0,0 +1,93 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.source;
+
+import com.google.common.base.Preconditions;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.function.Function;
+import org.sonar.api.utils.TempFolder;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+
+public class SourceLinesHashCache {
+  private static final String FILE_NAME_PREFIX = "hashes-";
+
+  private final Path cacheDirectoryPath;
+  private final Set<Integer> cacheFileIds = new HashSet<>();
+
+  public SourceLinesHashCache(TempFolder tempFolder) {
+    this.cacheDirectoryPath = tempFolder.newDir().toPath();
+  }
+
+  public List<String> computeIfAbsent(Component component, Function<Component, List<String>> hashesComputer) {
+    int ref = getId(component);
+
+    if (cacheFileIds.add(ref)) {
+      List<String> hashes = hashesComputer.apply(component);
+      save(ref, hashes);
+      return hashes;
+    } else {
+      return load(ref);
+    }
+  }
+
+  /**
+   * @throws IllegalStateException if the requested value is not cached
+   */
+  public List<String> get(Component component) {
+    Preconditions.checkState(contains(component), "Source line hashes for component %s not cached", component);
+    return load(getId(component));
+  }
+
+  public boolean contains(Component component) {
+    return cacheFileIds.contains(getId(component));
+  }
+
+  private static int getId(Component component) {
+    return component.getReportAttributes().getRef();
+  }
+
+  private void save(int fileId, List<String> hashes) {
+    Path filePath = getFilePath(fileId);
+    try {
+      Files.write(filePath, hashes, StandardCharsets.UTF_8);
+    } catch (IOException e) {
+      throw new IllegalStateException(String.format("Failed to write to '%s'", filePath), e);
+    }
+  }
+
+  private List<String> load(int fileId) {
+    Path filePath = getFilePath(fileId);
+    try {
+      return Files.readAllLines(filePath, StandardCharsets.UTF_8);
+    } catch (IOException e) {
+      throw new IllegalStateException(String.format("Failed to read '%s'", filePath), e);
+    }
+  }
+
+  private Path getFilePath(int fileId) {
+    return cacheDirectoryPath.resolve(FILE_NAME_PREFIX + fileId);
+  }
+}
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepository.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepository.java
new file mode 100644 (file)
index 0000000..60fc808
--- /dev/null
@@ -0,0 +1,44 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.source;
+
+import java.util.List;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer;
+
+public interface SourceLinesHashRepository {
+  /**
+   * Get line hashes from the report, computed to match the version of the line hashes existing in the DB, if possible.
+   * The line hashes are cached.
+   */
+  List<String> getMatchingDB(Component component);
+
+  /**
+   * The line processor will return line hashes taking into account significant code (if it was provided by a code analyzer).
+   * It will use a cached value, if possible. If it's generated, it's not cached since it's assumed that it won't be 
+   * needed again after it is persisted.
+   */
+  LineHashesComputer getLineProcessorToPersist(Component component);
+
+  /**
+   * Get the version of line hashes in the report
+   */
+  Integer getLineHashesVersion(Component component);
+}
diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImpl.java b/server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImpl.java
new file mode 100644 (file)
index 0000000..fb56b85
--- /dev/null
@@ -0,0 +1,179 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.source;
+
+import java.util.List;
+import java.util.Optional;
+import org.apache.commons.lang.StringUtils;
+import org.sonar.core.hash.LineRange;
+import org.sonar.core.hash.SourceLineHashesComputer;
+import org.sonar.core.util.CloseableIterator;
+import org.sonar.db.source.LineHashVersion;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+
+public class SourceLinesHashRepositoryImpl implements SourceLinesHashRepository {
+  private final SourceLinesRepository sourceLinesRepository;
+  private final SignificantCodeRepository significantCodeRepository;
+  private final SourceLinesHashCache cache;
+  private final DbLineHashVersion dbLineHashesVersion;
+
+  public SourceLinesHashRepositoryImpl(SourceLinesRepository sourceLinesRepository, SignificantCodeRepository significantCodeRepository,
+    SourceLinesHashCache cache, DbLineHashVersion dbLineHashVersion) {
+    this.sourceLinesRepository = sourceLinesRepository;
+    this.significantCodeRepository = significantCodeRepository;
+    this.cache = cache;
+    this.dbLineHashesVersion = dbLineHashVersion;
+  }
+
+  @Override
+  public List<String> getMatchingDB(Component component) {
+    return cache.computeIfAbsent(component, this::createLineHashesMatchingDBVersion);
+  }
+
+  @Override
+  public Integer getLineHashesVersion(Component component) {
+    if (significantCodeRepository.getRangesPerLine(component).isPresent()) {
+      return LineHashVersion.WITH_SIGNIFICANT_CODE.getDbValue();
+    } else {
+      return LineHashVersion.WITHOUT_SIGNIFICANT_CODE.getDbValue();
+    }
+  }
+
+  @Override
+  public LineHashesComputer getLineProcessorToPersist(Component component) {
+    boolean cacheHit = cache.contains(component);
+
+    // check if line hashes are cached and if we can use it
+    if (cacheHit && dbLineHashesVersion.hasLineHashesWithSignificantCode(component)) {
+      return new CachedLineHashesComputer(cache.get(component));
+    }
+
+    Optional<LineRange[]> significantCodePerLine = significantCodeRepository.getRangesPerLine(component);
+    if (cacheHit && !significantCodePerLine.isPresent()) {
+      return new CachedLineHashesComputer(cache.get(component));
+    }
+
+    // Generate the line hashes taking into account significant code ranges
+    return createLineHashesProcessor(component.getFileAttributes().getLines(), significantCodePerLine);
+  }
+
+  private List<String> createLineHashesMatchingDBVersion(Component component) {
+    if (!dbLineHashesVersion.hasLineHashesWithSignificantCode(component)) {
+      return createLineHashes(component, Optional.empty());
+    }
+
+    Optional<LineRange[]> significantCodePerLine = significantCodeRepository.getRangesPerLine(component);
+    return createLineHashes(component, significantCodePerLine);
+  }
+
+  private List<String> createLineHashes(Component component, Optional<LineRange[]> significantCodePerLine) {
+    LineHashesComputer processor = createLineHashesProcessor(component.getFileAttributes().getLines(), significantCodePerLine);
+    CloseableIterator<String> lines = sourceLinesRepository.readLines(component);
+
+    while (lines.hasNext()) {
+      processor.addLine(lines.next());
+    }
+
+    return processor.getResult();
+  }
+
+  public interface LineHashesComputer {
+    void addLine(String line);
+
+    List<String> getResult();
+  }
+
+  private static LineHashesComputer createLineHashesProcessor(int numLines, Optional<LineRange[]> significantCodePerLine) {
+    if (significantCodePerLine.isPresent()) {
+      return new SignificantCodeLineHashesComputer(new SourceLineHashesComputer(numLines), significantCodePerLine.get());
+    } else {
+      return new SimpleLineHashesComputer(numLines);
+    }
+  }
+
+  static class CachedLineHashesComputer implements LineHashesComputer {
+    private final List<String> lineHashes;
+
+    public CachedLineHashesComputer(List<String> lineHashes) {
+      this.lineHashes = lineHashes;
+    }
+
+    @Override
+    public void addLine(String line) {
+      // no op
+    }
+
+    @Override
+    public List<String> getResult() {
+      return lineHashes;
+    }
+  }
+
+  static class SimpleLineHashesComputer implements LineHashesComputer {
+    private final SourceLineHashesComputer delegate;
+
+    public SimpleLineHashesComputer(int numLines) {
+      this.delegate = new SourceLineHashesComputer(numLines);
+    }
+
+    @Override
+    public void addLine(String line) {
+      delegate.addLine(line);
+    }
+
+    @Override
+    public List<String> getResult() {
+      return delegate.getLineHashes();
+    }
+  }
+
+  static class SignificantCodeLineHashesComputer implements LineHashesComputer {
+    private final SourceLineHashesComputer delegate;
+    private final LineRange[] rangesPerLine;
+
+    private int i = 0;
+
+    public SignificantCodeLineHashesComputer(SourceLineHashesComputer hashComputer, LineRange[] rangesPerLine) {
+      this.rangesPerLine = rangesPerLine;
+      this.delegate = hashComputer;
+    }
+
+    @Override
+    public void addLine(String line) {
+      LineRange range = null;
+      if (i < rangesPerLine.length) {
+        range = rangesPerLine[i];
+      }
+
+      if (range == null) {
+        delegate.addLine("");
+      } else {
+        delegate.addLine(StringUtils.substring(line, range.startOffset(), range.endOffset()));
+      }
+      i++;
+    }
+
+    @Override
+    public List<String> getResult() {
+      return delegate.getLineHashes();
+    }
+  }
+
+}
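For illustration only (not part of the commit): the SignificantCodeLineHashesComputer above hashes only the reported significant range of each line, so edits outside that range, such as a changed trailing comment, do not alter the line hash. A minimal sketch of that behaviour, assuming a two-line file where only the first line has significant code; the example class, line contents and offsets are invented for the sketch:

package org.sonar.server.computation.task.projectanalysis.source;

import java.util.List;
import org.sonar.core.hash.LineRange;
import org.sonar.core.hash.SourceLineHashesComputer;

class SignificantCodeHashingSketch {
  static List<String> example() {
    // line 1: significant code at offsets [0, 10); line 2: no significant code reported
    LineRange[] ranges = {new LineRange(0, 10), null};
    SourceLinesHashRepositoryImpl.SignificantCodeLineHashesComputer computer =
      new SourceLinesHashRepositoryImpl.SignificantCodeLineHashesComputer(new SourceLineHashesComputer(2), ranges);
    computer.addLine("int x = 1; // trailing comment is ignored"); // only "int x = 1;" contributes to the hash
    computer.addLine("// comment-only line");                      // hashed as an empty line
    return computer.getResult();                                   // two hashes, one per input line
  }
}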
index 6526d0c4e30923c27ba3e9de2c3783df2b041c98..dacf39027633dc78ee3b446ed5ffa6dddeafe143 100644 (file)
@@ -34,7 +34,6 @@ import org.sonar.api.utils.log.Loggers;
 import org.sonar.db.protobuf.DbFileSources;
 import org.sonar.scanner.protocol.output.ScannerReport;
 import org.sonar.server.computation.task.projectanalysis.component.Component;
-import org.sonar.server.computation.task.projectanalysis.source.linereader.RangeOffsetConverter.RangeOffsetConverterException;
 
 import static java.lang.String.format;
 import static org.sonar.server.computation.task.projectanalysis.source.linereader.RangeOffsetConverter.OFFSET_SEPARATOR;
index 2a9e3de22b8e1e57870c28cf01ec99d88d3450d8..0e0427510faf951c56902634ec702d20eb3a4c37 100644 (file)
@@ -48,6 +48,8 @@ import org.sonar.server.computation.task.projectanalysis.scm.Changeset;
 import org.sonar.server.computation.task.projectanalysis.scm.ScmInfo;
 import org.sonar.server.computation.task.projectanalysis.scm.ScmInfoRepository;
 import org.sonar.server.computation.task.projectanalysis.source.ComputeFileSourceData;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepository;
 import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepository;
 import org.sonar.server.computation.task.projectanalysis.source.linereader.CoverageLineReader;
 import org.sonar.server.computation.task.projectanalysis.source.linereader.DuplicationLineReader;
@@ -69,9 +71,10 @@ public class PersistFileSourcesStep implements ComputationStep {
   private final SourceLinesRepository sourceLinesRepository;
   private final ScmInfoRepository scmInfoRepository;
   private final DuplicationRepository duplicationRepository;
+  private final SourceLinesHashRepository sourceLinesHash;
 
   public PersistFileSourcesStep(DbClient dbClient, System2 system2, TreeRootHolder treeRootHolder, BatchReportReader reportReader, SourceLinesRepository sourceLinesRepository,
-    ScmInfoRepository scmInfoRepository, DuplicationRepository duplicationRepository) {
+    ScmInfoRepository scmInfoRepository, DuplicationRepository duplicationRepository, SourceLinesHashRepository sourceLinesHash) {
     this.dbClient = dbClient;
     this.system2 = system2;
     this.treeRootHolder = treeRootHolder;
@@ -79,6 +82,7 @@ public class PersistFileSourcesStep implements ComputationStep {
     this.sourceLinesRepository = sourceLinesRepository;
     this.scmInfoRepository = scmInfoRepository;
     this.duplicationRepository = duplicationRepository;
+    this.sourceLinesHash = sourceLinesHash;
   }
 
   @Override
@@ -116,32 +120,35 @@ public class PersistFileSourcesStep implements ComputationStep {
     public void visitFile(Component file) {
       try (CloseableIterator<String> linesIterator = sourceLinesRepository.readLines(file);
         LineReaders lineReaders = new LineReaders(reportReader, scmInfoRepository, duplicationRepository, file)) {
-        ComputeFileSourceData computeFileSourceData = new ComputeFileSourceData(linesIterator, lineReaders.readers(), file.getFileAttributes().getLines());
+        LineHashesComputer lineHashesComputer = sourceLinesHash.getLineProcessorToPersist(file);
+        ComputeFileSourceData computeFileSourceData = new ComputeFileSourceData(linesIterator, lineReaders.readers(), lineHashesComputer);
         ComputeFileSourceData.Data fileSourceData = computeFileSourceData.compute();
-        persistSource(fileSourceData, file.getUuid(), lineReaders.getLatestChangeWithRevision());
+        persistSource(fileSourceData, file, lineReaders.getLatestChangeWithRevision());
       } catch (Exception e) {
         throw new IllegalStateException(String.format("Cannot persist sources of %s", file.getKey()), e);
       }
     }
 
-    private void persistSource(ComputeFileSourceData.Data fileSourceData, String componentUuid, @Nullable Changeset latestChangeWithRevision) {
+    private void persistSource(ComputeFileSourceData.Data fileSourceData, Component file, @Nullable Changeset latestChangeWithRevision) {
       DbFileSources.Data fileData = fileSourceData.getFileSourceData();
 
       byte[] data = FileSourceDto.encodeSourceData(fileData);
       String dataHash = DigestUtils.md5Hex(data);
       String srcHash = fileSourceData.getSrcHash();
       String lineHashes = fileSourceData.getLineHashes();
-      FileSourceDto previousDto = previousFileSourcesByUuid.get(componentUuid);
+      FileSourceDto previousDto = previousFileSourcesByUuid.get(file.getUuid());
+      Integer lineHashesVersion = sourceLinesHash.getLineHashesVersion(file);
 
       if (previousDto == null) {
         FileSourceDto dto = new FileSourceDto()
           .setProjectUuid(projectUuid)
-          .setFileUuid(componentUuid)
+          .setFileUuid(file.getUuid())
           .setDataType(Type.SOURCE)
           .setBinaryData(data)
           .setSrcHash(srcHash)
           .setDataHash(dataHash)
           .setLineHashes(lineHashes)
+          .setLineHashesVersion(lineHashesVersion)
           .setCreatedAt(system2.now())
           .setUpdatedAt(system2.now())
           .setRevision(computeRevision(latestChangeWithRevision));
@@ -153,12 +160,14 @@ public class PersistFileSourcesStep implements ComputationStep {
         boolean srcHashUpdated = !srcHash.equals(previousDto.getSrcHash());
         String revision = computeRevision(latestChangeWithRevision);
         boolean revisionUpdated = !ObjectUtils.equals(revision, previousDto.getRevision());
-        if (binaryDataUpdated || srcHashUpdated || revisionUpdated) {
+        boolean lineHashesVersionUpdated = previousDto.getLineHashesVersion() != lineHashesVersion;
+        if (binaryDataUpdated || srcHashUpdated || revisionUpdated || lineHashesVersionUpdated) {
           previousDto
             .setBinaryData(data)
             .setDataHash(dataHash)
             .setSrcHash(srcHash)
             .setLineHashes(lineHashes)
+            .setLineHashesVersion(lineHashesVersion)
             .setRevision(revision)
             .setUpdatedAt(system2.now());
           dbClient.fileSourceDao().update(session, previousDto);
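The line_hashes_version value persisted above comes from the LineHashVersion enum added to org.sonar.db.source in this commit (see the file list); its body is not shown in this excerpt. Judging from how getDbValue() is used here and in SourceLinesHashRepositoryImpl, it presumably maps the two hashing variants to stable integer database values, roughly as in this sketch (not the committed source):

package org.sonar.db.source;

public enum LineHashVersion {
  WITHOUT_SIGNIFICANT_CODE(0),
  WITH_SIGNIFICANT_CODE(1);

  private final int dbValue;

  LineHashVersion(int dbValue) {
    this.dbValue = dbValue;
  }

  // value stored in the file_sources.line_hashes_version column
  public int getDbValue() {
    return dbValue;
  }
}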
index 5fc58b012cb6a7e9e55505d254a88d74a157bf9f..c428cc00a202bd8d904e1488bb66e7a387659e90 100644 (file)
@@ -34,6 +34,7 @@ import org.junit.runner.Description;
 import org.junit.runners.model.Statement;
 import org.sonar.core.util.CloseableIterator;
 import org.sonar.scanner.protocol.output.ScannerReport;
+import org.sonar.scanner.protocol.output.ScannerReport.LineSgnificantCode;
 
 public class BatchReportReaderRule implements TestRule, BatchReportReader {
   private ScannerReport.Metadata metadata;
@@ -53,6 +54,7 @@ public class BatchReportReaderRule implements TestRule, BatchReportReader {
   private Map<Integer, List<String>> fileSources = new HashMap<>();
   private Map<Integer, List<ScannerReport.Test>> tests = new HashMap<>();
   private Map<Integer, List<ScannerReport.CoverageDetail>> coverageDetails = new HashMap<>();
+  private Map<Integer, List<ScannerReport.LineSgnificantCode>> significantCode = new HashMap<>();
 
   @Override
   public Statement apply(final Statement statement, Description description) {
@@ -83,6 +85,7 @@ public class BatchReportReaderRule implements TestRule, BatchReportReader {
     this.fileSources.clear();
     this.tests.clear();
     this.coverageDetails.clear();
+    this.significantCode.clear();
   }
 
   @Override
@@ -136,11 +139,7 @@ public class BatchReportReaderRule implements TestRule, BatchReportReader {
 
   @Override
   public CloseableIterator<ScannerReport.Measure> readComponentMeasures(int componentRef) {
-    List<ScannerReport.Measure> res = this.measures.get(componentRef);
-    if (res == null) {
-      return CloseableIterator.emptyCloseableIterator();
-    }
-    return CloseableIterator.from(res.iterator());
+    return closeableIterator(this.measures.get(componentRef));
   }
 
   public BatchReportReaderRule putMeasures(int componentRef, List<ScannerReport.Measure> measures) {
@@ -223,14 +222,20 @@ public class BatchReportReaderRule implements TestRule, BatchReportReader {
     return this;
   }
 
+  public BatchReportReaderRule putSignificantCode(int fileRef, List<ScannerReport.LineSgnificantCode> significantCode) {
+    this.significantCode.put(fileRef, significantCode);
+    return this;
+  }
+
   @Override
-  public CloseableIterator<ScannerReport.SyntaxHighlightingRule> readComponentSyntaxHighlighting(int fileRef) {
-    List<ScannerReport.SyntaxHighlightingRule> res = this.syntaxHighlightings.get(fileRef);
-    if (res == null) {
-      return CloseableIterator.emptyCloseableIterator();
-    }
+  public Optional<CloseableIterator<LineSgnificantCode>> readComponentSignificantCode(int fileRef) {
+    List<LineSgnificantCode> list = significantCode.get(fileRef);
+    return list == null ? Optional.empty() : Optional.of(CloseableIterator.from(list.iterator()));
+  }
 
-    return CloseableIterator.from(res.iterator());
+  @Override
+  public CloseableIterator<ScannerReport.SyntaxHighlightingRule> readComponentSyntaxHighlighting(int fileRef) {
+    return closeableIterator(this.syntaxHighlightings.get(fileRef));
   }
 
   public BatchReportReaderRule putSyntaxHighlighting(int fileRef, List<ScannerReport.SyntaxHighlightingRule> syntaxHighlightings) {
@@ -240,12 +245,7 @@ public class BatchReportReaderRule implements TestRule, BatchReportReader {
 
   @Override
   public CloseableIterator<ScannerReport.LineCoverage> readComponentCoverage(int fileRef) {
-    List<ScannerReport.LineCoverage> res = this.coverages.get(fileRef);
-    if (res == null) {
-      return CloseableIterator.emptyCloseableIterator();
-    }
-
-    return CloseableIterator.from(res.iterator());
+    return closeableIterator(this.coverages.get(fileRef));
   }
 
   public BatchReportReaderRule putCoverage(int fileRef, List<ScannerReport.LineCoverage> coverages) {
@@ -276,12 +276,7 @@ public class BatchReportReaderRule implements TestRule, BatchReportReader {
 
   @Override
   public CloseableIterator<ScannerReport.Test> readTests(int testFileRef) {
-    List<ScannerReport.Test> res = this.tests.get(testFileRef);
-    if (res == null) {
-      return CloseableIterator.emptyCloseableIterator();
-    }
-
-    return CloseableIterator.from(res.iterator());
+    return closeableIterator(this.tests.get(testFileRef));
   }
 
   public BatchReportReaderRule putTests(int testFileRed, List<ScannerReport.Test> tests) {
@@ -291,16 +286,12 @@ public class BatchReportReaderRule implements TestRule, BatchReportReader {
 
   @Override
   public CloseableIterator<ScannerReport.CoverageDetail> readCoverageDetails(int testFileRef) {
-    List<ScannerReport.CoverageDetail> res = this.coverageDetails.get(testFileRef);
-    if (res == null) {
-      return CloseableIterator.emptyCloseableIterator();
-    }
-
-    return CloseableIterator.from(res.iterator());
+    return closeableIterator(this.coverageDetails.get(testFileRef));
   }
 
   public BatchReportReaderRule putCoverageDetails(int testFileRef, List<ScannerReport.CoverageDetail> coverageDetails) {
     this.coverageDetails.put(testFileRef, coverageDetails);
     return this;
   }
+
 }
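The refactored read* methods above delegate their repeated null check to a private closeableIterator(...) helper whose body lies outside the hunks shown here. Given the code it replaces, the helper presumably looks like this sketch (signature and annotation are assumed):

  private static <T> CloseableIterator<T> closeableIterator(@Nullable List<T> list) {
    // empty iterator when nothing was registered for the ref, otherwise iterate the registered items
    return list == null ? CloseableIterator.emptyCloseableIterator() : CloseableIterator.from(list.iterator());
  }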
index 60636baba3128a0f249d4a1b96a64e48ecc329aa..8d7226141a6c56864226e20cb73c1c7ecb854db2 100644 (file)
@@ -34,7 +34,7 @@ import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
-import org.sonar.core.hash.SourceLinesHashesComputer;
+import org.sonar.core.hash.SourceLineHashesComputer;
 import org.sonar.db.DbClient;
 import org.sonar.db.DbSession;
 import org.sonar.db.component.ComponentDao;
@@ -47,7 +47,7 @@ import org.sonar.server.computation.task.projectanalysis.analysis.AnalysisMetada
 import org.sonar.server.computation.task.projectanalysis.component.Component;
 import org.sonar.server.computation.task.projectanalysis.component.ReportComponent;
 import org.sonar.server.computation.task.projectanalysis.component.TreeRootHolderRule;
-import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepositoryRule;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepository;
 
 import static com.google.common.base.Joiner.on;
 import static java.util.Arrays.stream;
@@ -219,19 +219,18 @@ public class FileMoveDetectionStepTest {
   @Rule
   public TreeRootHolderRule treeRootHolder = new TreeRootHolderRule();
   @Rule
-  public SourceLinesRepositoryRule sourceLinesRepository = new SourceLinesRepositoryRule();
-  @Rule
   public MutableMovedFilesRepositoryRule movedFilesRepository = new MutableMovedFilesRepositoryRule();
 
   private DbClient dbClient = mock(DbClient.class);
   private DbSession dbSession = mock(DbSession.class);
   private ComponentDao componentDao = mock(ComponentDao.class);
+  private SourceLinesHashRepository sourceLinesHash = mock(SourceLinesHashRepository.class);
   private FileSourceDao fileSourceDao = mock(FileSourceDao.class);
   private FileSimilarity fileSimilarity = new FileSimilarityImpl(new SourceSimilarityImpl());
   private long dbIdGenerator = 0;
 
   private FileMoveDetectionStep underTest = new FileMoveDetectionStep(analysisMetadataHolder, treeRootHolder, dbClient,
-    sourceLinesRepository, fileSimilarity, movedFilesRepository);
+    fileSimilarity, movedFilesRepository, sourceLinesHash);
 
   @Before
   public void setUp() throws Exception {
@@ -316,7 +315,7 @@ public class FileMoveDetectionStepTest {
     ComponentDto[] dtos = mockComponents(FILE_1.getKey());
     mockContentOfFileInDb(FILE_1.getKey(), CONTENT1);
     setFilesInReport(FILE_2);
-    setFileContentInReport(FILE_2_REF, CONTENT1);
+    setFileLineHashesInReport(FILE_2, CONTENT1);
 
     underTest.execute();
 
@@ -333,7 +332,7 @@ public class FileMoveDetectionStepTest {
     mockComponents(FILE_1.getKey());
     mockContentOfFileInDb(FILE_1.getKey(), CONTENT1);
     setFilesInReport(FILE_2);
-    setFileContentInReport(FILE_2_REF, LESS_CONTENT1);
+    setFileLineHashesInReport(FILE_2, LESS_CONTENT1);
 
     underTest.execute();
 
@@ -346,7 +345,7 @@ public class FileMoveDetectionStepTest {
     mockComponents(FILE_1.getKey());
     mockContentOfFileInDb(FILE_1.getKey(), CONTENT_EMPTY);
     setFilesInReport(FILE_2);
-    setFileContentInReport(FILE_2_REF, CONTENT1);
+    setFileLineHashesInReport(FILE_2, CONTENT1);
 
     underTest.execute();
 
@@ -360,7 +359,7 @@ public class FileMoveDetectionStepTest {
     mockComponents(key -> newComponentDto(key).setPath(null), FILE_1.getKey());
     mockContentOfFileInDb(FILE_1.getKey(), CONTENT1);
     setFilesInReport(FILE_2);
-    setFileContentInReport(FILE_2_REF, CONTENT1);
+    setFileLineHashesInReport(FILE_2, CONTENT1);
 
     underTest.execute();
 
@@ -373,7 +372,7 @@ public class FileMoveDetectionStepTest {
     mockComponents(FILE_1.getKey());
     mockContentOfFileInDb(FILE_1.getKey(), CONTENT1);
     setFilesInReport(FILE_2);
-    setFileContentInReport(FILE_2_REF, CONTENT_EMPTY);
+    setFileLineHashesInReport(FILE_2, CONTENT_EMPTY);
 
     underTest.execute();
 
@@ -386,8 +385,8 @@ public class FileMoveDetectionStepTest {
     mockComponents(FILE_1.getKey());
     mockContentOfFileInDb(FILE_1.getKey(), CONTENT1);
     setFilesInReport(FILE_2, FILE_3);
-    setFileContentInReport(FILE_2_REF, CONTENT1);
-    setFileContentInReport(FILE_3_REF, CONTENT1);
+    setFileLineHashesInReport(FILE_2, CONTENT1);
+    setFileLineHashesInReport(FILE_3, CONTENT1);
 
     underTest.execute();
 
@@ -401,7 +400,7 @@ public class FileMoveDetectionStepTest {
     mockContentOfFileInDb(FILE_1.getKey(), CONTENT1);
     mockContentOfFileInDb(FILE_2.getKey(), CONTENT1);
     setFilesInReport(FILE_3);
-    setFileContentInReport(FILE_3_REF, CONTENT1);
+    setFileLineHashesInReport(FILE_3, CONTENT1);
 
     underTest.execute();
 
@@ -437,9 +436,9 @@ public class FileMoveDetectionStepTest {
     mockContentOfFileInDb(file4.getKey(), new String[] {"e", "f", "g", "h", "i"});
     mockContentOfFileInDb(file5.getKey(), CONTENT2);
     setFilesInReport(FILE_3, file4, file6);
-    setFileContentInReport(FILE_3_REF, CONTENT1);
-    setFileContentInReport(file4.getReportAttributes().getRef(), new String[] {"a", "b"});
-    setFileContentInReport(file6.getReportAttributes().getRef(), LESS_CONTENT2);
+    setFileLineHashesInReport(FILE_3, CONTENT1);
+    setFileLineHashesInReport(file4, new String[] {"a", "b"});
+    setFileLineHashesInReport(file6, LESS_CONTENT2);
 
     underTest.execute();
 
@@ -470,11 +469,13 @@ public class FileMoveDetectionStepTest {
     Map<String, Component> comps = new HashMap<>();
     int i = 1;
     for (File f : FileUtils.listFiles(new File("src/test/resources/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStepTest/v2"), null, false)) {
-      comps.put(f.getName(), builder(Component.Type.FILE, i)
+      Component c = builder(Component.Type.FILE, i++)
         .setKey(f.getName())
         .setPath(f.getName())
-        .build());
-      setFileContentInReport(i++, readLines(f));
+        .build();
+
+      comps.put(f.getName(), c);
+      setFileLineHashesInReport(c, readLines(f));
     }
 
     setFilesInReport(comps.values().toArray(new Component[0]));
@@ -503,14 +504,18 @@ public class FileMoveDetectionStepTest {
       .toArray(new String[0]);
   }
 
-  private void setFileContentInReport(int ref, String[] content) {
-    sourceLinesRepository.addLines(ref, content);
+  private void setFileLineHashesInReport(Component file, String[] content) {
+    SourceLineHashesComputer computer = new SourceLineHashesComputer();
+    for (String line : content) {
+      computer.addLine(line);
+    }
+    when(sourceLinesHash.getMatchingDB(file)).thenReturn(computer.getLineHashes());
   }
 
   private void mockContentOfFileInDb(String key, @Nullable String[] content) {
     FileSourceDto dto = new FileSourceDto();
     if (content != null) {
-      SourceLinesHashesComputer linesHashesComputer = new SourceLinesHashesComputer();
+      SourceLineHashesComputer linesHashesComputer = new SourceLineHashesComputer();
       stream(content).forEach(linesHashesComputer::addLine);
       dto.setLineHashes(on('\n').join(linesHashesComputer.getLineHashes()));
     }
index 807cc1176b4c939d3431f2da77f0e7896021ee46..085820dae944d85246f4f4b6d8184ea850e8ca62 100644 (file)
@@ -54,6 +54,7 @@ import org.sonar.server.computation.task.projectanalysis.filemove.MovedFilesRepo
 import org.sonar.server.computation.task.projectanalysis.issue.commonrule.CommonRuleEngineImpl;
 import org.sonar.server.computation.task.projectanalysis.issue.filter.IssueFilter;
 import org.sonar.server.computation.task.projectanalysis.qualityprofile.ActiveRulesHolderRule;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepository;
 import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepositoryRule;
 
 import static com.google.common.collect.Lists.newArrayList;
@@ -113,6 +114,7 @@ public class IntegrateIssuesVisitorTest {
   private MergeBranchComponentUuids mergeBranchComponentsUuids = mock(MergeBranchComponentUuids.class);
   private ShortBranchIssueMerger issueStatusCopier = mock(ShortBranchIssueMerger.class);
   private MergeBranchComponentUuids mergeBranchComponentUuids = mock(MergeBranchComponentUuids.class);
+  private SourceLinesHashRepository sourceLinesHash = mock(SourceLinesHashRepository.class);
 
   ArgumentCaptor<DefaultIssue> defaultIssueCaptor;
 
@@ -132,8 +134,7 @@ public class IntegrateIssuesVisitorTest {
     defaultIssueCaptor = ArgumentCaptor.forClass(DefaultIssue.class);
     when(movedFilesRepository.getOriginalFile(any(Component.class))).thenReturn(Optional.absent());
 
-    TrackerRawInputFactory rawInputFactory = new TrackerRawInputFactory(treeRootHolder, reportReader, fileSourceRepository, new CommonRuleEngineImpl(),
-      issueFilter, ruleRepository);
+    TrackerRawInputFactory rawInputFactory = new TrackerRawInputFactory(treeRootHolder, reportReader, sourceLinesHash, new CommonRuleEngineImpl(), issueFilter, ruleRepository);
     TrackerBaseInputFactory baseInputFactory = new TrackerBaseInputFactory(issuesLoader, dbTester.getDbClient(), movedFilesRepository);
     TrackerMergeBranchInputFactory mergeInputFactory = new TrackerMergeBranchInputFactory(issuesLoader, mergeBranchComponentsUuids, dbTester.getDbClient());
     tracker = new TrackerExecution(baseInputFactory, rawInputFactory, new Tracker<>());
index cde7eea7a1b26a6aeea4df391d2bf72a362d8cdc..479ec0f78754c644409796362ed481d39758717d 100644 (file)
@@ -376,7 +376,7 @@ public class IssueCreationDateCalculatorTest {
     if (scmInfo == null) {
       scmInfo = mock(ScmInfo.class);
       when(scmInfoRepository.getScmInfo(component))
-        .thenReturn(java.util.Optional.empty());
+        .thenReturn(java.util.Optional.of(scmInfo));
     }
   }
 
index 8b3d48dfc5d3c83f570b90fb44e3251113f8fb36..651557e3bc96c6e15e4f0b6f1799af9caf6b1f83 100644 (file)
@@ -21,6 +21,7 @@ package org.sonar.server.computation.task.projectanalysis.issue;
 
 import com.google.common.collect.Iterators;
 import java.util.Collection;
+import java.util.Collections;
 import org.junit.Rule;
 import org.junit.Test;
 import org.sonar.api.issue.Issue;
@@ -38,7 +39,7 @@ import org.sonar.server.computation.task.projectanalysis.component.ReportCompone
 import org.sonar.server.computation.task.projectanalysis.component.TreeRootHolderRule;
 import org.sonar.server.computation.task.projectanalysis.issue.commonrule.CommonRuleEngine;
 import org.sonar.server.computation.task.projectanalysis.issue.filter.IssueFilter;
-import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepositoryRule;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepository;
 import org.sonar.server.rule.CommonRuleKeys;
 
 import static java.util.Arrays.asList;
@@ -61,24 +62,22 @@ public class TrackerRawInputFactoryTest {
   @Rule
   public BatchReportReaderRule reportReader = new BatchReportReaderRule();
 
-  @Rule
-  public SourceLinesRepositoryRule fileSourceRepository = new SourceLinesRepositoryRule();
-
   @Rule
   public RuleRepositoryRule ruleRepository = new RuleRepositoryRule();
 
+  SourceLinesHashRepository sourceLinesHash = mock(SourceLinesHashRepository.class);
   CommonRuleEngine commonRuleEngine = mock(CommonRuleEngine.class);
   IssueFilter issueFilter = mock(IssueFilter.class);
-  TrackerRawInputFactory underTest = new TrackerRawInputFactory(treeRootHolder, reportReader, fileSourceRepository, commonRuleEngine, issueFilter, ruleRepository);
+  TrackerRawInputFactory underTest = new TrackerRawInputFactory(treeRootHolder, reportReader, sourceLinesHash, commonRuleEngine, issueFilter, ruleRepository);
 
   @Test
   public void load_source_hash_sequences() {
-    fileSourceRepository.addLines(FILE_REF, "line 1;", "line 2;");
+    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
     Input<DefaultIssue> input = underTest.create(FILE);
 
     assertThat(input.getLineHashSequence()).isNotNull();
-    assertThat(input.getLineHashSequence().getHashForLine(1)).isNotEmpty();
-    assertThat(input.getLineHashSequence().getHashForLine(2)).isNotEmpty();
+    assertThat(input.getLineHashSequence().getHashForLine(1)).isEqualTo("line");
+    assertThat(input.getLineHashSequence().getHashForLine(2)).isEmpty();
     assertThat(input.getLineHashSequence().getHashForLine(3)).isEmpty();
 
     assertThat(input.getBlockHashSequence()).isNotNull();
@@ -95,7 +94,7 @@ public class TrackerRawInputFactoryTest {
   @Test
   public void load_issues_from_report() {
     when(issueFilter.accept(any(DefaultIssue.class), eq(FILE))).thenReturn(true);
-    fileSourceRepository.addLines(FILE_REF, "line 1;", "line 2;");
+    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
     ScannerReport.Issue reportIssue = ScannerReport.Issue.newBuilder()
       .setTextRange(TextRange.newBuilder().setStartLine(2).build())
       .setMsg("the message")
@@ -128,7 +127,7 @@ public class TrackerRawInputFactoryTest {
 
   @Test
   public void load_external_issues_from_report() {
-    fileSourceRepository.addLines(FILE_REF, "line 1;", "line 2;");
+    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
     ScannerReport.ExternalIssue reportIssue = ScannerReport.ExternalIssue.newBuilder()
       .setTextRange(TextRange.newBuilder().setStartLine(2).build())
       .setMsg("the message")
@@ -159,7 +158,7 @@ public class TrackerRawInputFactoryTest {
 
   @Test
   public void load_external_issues_from_report_with_default_effort() {
-    fileSourceRepository.addLines(FILE_REF, "line 1;", "line 2;");
+    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
     ScannerReport.ExternalIssue reportIssue = ScannerReport.ExternalIssue.newBuilder()
       .setTextRange(TextRange.newBuilder().setStartLine(2).build())
       .setMsg("the message")
@@ -190,7 +189,7 @@ public class TrackerRawInputFactoryTest {
   @Test
   public void ignore_issue_from_report() {
     when(issueFilter.accept(any(DefaultIssue.class), eq(FILE))).thenReturn(false);
-    fileSourceRepository.addLines(FILE_REF, "line 1;", "line 2;");
+    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
     ScannerReport.Issue reportIssue = ScannerReport.Issue.newBuilder()
       .setTextRange(TextRange.newBuilder().setStartLine(2).build())
       .setMsg("the message")
@@ -208,7 +207,7 @@ public class TrackerRawInputFactoryTest {
 
   @Test
   public void ignore_report_issues_on_common_rules() {
-    fileSourceRepository.addLines(FILE_REF, "line 1;", "line 2;");
+    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
     ScannerReport.Issue reportIssue = ScannerReport.Issue.newBuilder()
       .setMsg("the message")
       .setRuleRepository(CommonRuleKeys.commonRepositoryForLang("java"))
@@ -225,7 +224,7 @@ public class TrackerRawInputFactoryTest {
   @Test
   public void load_issues_of_compute_engine_common_rules() {
     when(issueFilter.accept(any(DefaultIssue.class), eq(FILE))).thenReturn(true);
-    fileSourceRepository.addLines(FILE_REF, "line 1;", "line 2;");
+    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
     DefaultIssue ceIssue = new DefaultIssue()
       .setRuleKey(RuleKey.of(CommonRuleKeys.commonRepositoryForLang("java"), "InsufficientCoverage"))
       .setMessage("not enough coverage")
@@ -241,7 +240,7 @@ public class TrackerRawInputFactoryTest {
   @Test
   public void ignore_issue_from_common_rule() {
     when(issueFilter.accept(any(DefaultIssue.class), eq(FILE))).thenReturn(false);
-    fileSourceRepository.addLines(FILE_REF, "line 1;", "line 2;");
+    when(sourceLinesHash.getMatchingDB(FILE)).thenReturn(Collections.singletonList("line"));
     DefaultIssue ceIssue = new DefaultIssue()
       .setRuleKey(RuleKey.of(CommonRuleKeys.commonRepositoryForLang("java"), "InsufficientCoverage"))
       .setMessage("not enough coverage")
index c6282f6dcc8e2ba53e3fd83572d5238209fdc457..b3436685c3403ef5b63ab2f82a76786a7442b272 100644 (file)
@@ -22,33 +22,47 @@ package org.sonar.server.computation.task.projectanalysis.source;
 import com.google.common.collect.Lists;
 import org.junit.Test;
 import org.sonar.db.protobuf.DbFileSources;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer;
 import org.sonar.server.computation.task.projectanalysis.source.linereader.LineReader;
 
 import static com.google.common.collect.Lists.newArrayList;
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
 
 public class ComputeFileSourceDataTest {
 
+  private LineHashesComputer lineHashesComputer = mock(LineHashesComputer.class);
+
   @Test
   public void compute_one_line() {
+    when(lineHashesComputer.getResult()).thenReturn(Lists.newArrayList("137f72c3708c6bd0de00a0e5a69c699b"));
     ComputeFileSourceData computeFileSourceData = new ComputeFileSourceData(
       newArrayList("line1").iterator(),
       Lists.newArrayList(new MockLineReader()),
-      1);
+      lineHashesComputer);
 
     ComputeFileSourceData.Data data = computeFileSourceData.compute();
     assertThat(data.getLineHashes()).isEqualTo("137f72c3708c6bd0de00a0e5a69c699b");
     assertThat(data.getSrcHash()).isEqualTo("137f72c3708c6bd0de00a0e5a69c699b");
     assertThat(data.getFileSourceData().getLinesList()).hasSize(1);
     assertThat(data.getFileSourceData().getLines(0).getHighlighting()).isEqualTo("h-1");
+
+    verify(lineHashesComputer).addLine("line1");
+    verify(lineHashesComputer).getResult();
+    verifyNoMoreInteractions(lineHashesComputer);
   }
 
   @Test
   public void compute_two_lines() {
+    when(lineHashesComputer.getResult()).thenReturn(Lists.newArrayList("137f72c3708c6bd0de00a0e5a69c699b", "e6251bcf1a7dc3ba5e7933e325bbe605"));
+
     ComputeFileSourceData computeFileSourceData = new ComputeFileSourceData(
       newArrayList("line1", "line2").iterator(),
       Lists.newArrayList(new MockLineReader()),
-      2);
+      lineHashesComputer);
 
     ComputeFileSourceData.Data data = computeFileSourceData.compute();
     assertThat(data.getLineHashes()).isEqualTo("137f72c3708c6bd0de00a0e5a69c699b\ne6251bcf1a7dc3ba5e7933e325bbe605");
@@ -56,27 +70,11 @@ public class ComputeFileSourceDataTest {
     assertThat(data.getFileSourceData().getLinesList()).hasSize(2);
     assertThat(data.getFileSourceData().getLines(0).getHighlighting()).isEqualTo("h-1");
     assertThat(data.getFileSourceData().getLines(1).getHighlighting()).isEqualTo("h-2");
-  }
 
-  @Test
-  public void remove_tabs_and_spaces_in_line_hashes() {
-    String refLineHashes = new ComputeFileSourceData(
-      newArrayList("line1").iterator(),
-      Lists.newArrayList(new MockLineReader()),
-      1).compute().getLineHashes();
-
-    assertThat(new ComputeFileSourceData(
-      newArrayList(" line\t \t 1  ").iterator(),
-      Lists.newArrayList(new MockLineReader()),
-      1).compute().getLineHashes()).isEqualTo(refLineHashes);
-  }
-
-  @Test
-  public void compute_line_hashes_of_empty_lines() {
-    assertThat(new ComputeFileSourceData(
-      newArrayList("   ").iterator(),
-      Lists.newArrayList(new MockLineReader()),
-      1).compute().getLineHashes()).isEqualTo("");
+    verify(lineHashesComputer).addLine("line1");
+    verify(lineHashesComputer).addLine("line2");
+    verify(lineHashesComputer).getResult();
+    verifyNoMoreInteractions(lineHashesComputer);
   }
 
   private static class MockLineReader implements LineReader {
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/DbLineHashVersionTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/DbLineHashVersionTest.java
new file mode 100644 (file)
index 0000000..0d9256b
--- /dev/null
@@ -0,0 +1,72 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.source;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.sonar.db.DbTester;
+import org.sonar.db.component.ComponentDto;
+import org.sonar.db.component.ComponentTesting;
+import org.sonar.db.source.LineHashVersion;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+import org.sonar.server.computation.task.projectanalysis.component.ReportComponent;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class DbLineHashVersionTest {
+  @Rule
+  public DbTester db = DbTester.create();
+
+  private DbLineHashVersion underTest = new DbLineHashVersion(db.getDbClient());
+
+  @Test
+  public void hasLineHashWithSignificantCode_should_return_true() {
+    ComponentDto project = db.components().insertPublicProject();
+    ComponentDto file = db.components().insertComponent(ComponentTesting.newFileDto(project));
+
+    db.fileSources().insertFileSource(file, dto -> dto.setLineHashesVersion(LineHashVersion.WITH_SIGNIFICANT_CODE.getDbValue()));
+    Component component = ReportComponent.builder(Component.Type.FILE, 1).setKey("key").setUuid(file.uuid()).build();
+    assertThat(underTest.hasLineHashesWithSignificantCode(component)).isTrue();
+  }
+
+  @Test
+  public void hasLineHashWithSignificantCode_should_return_false_if_file_is_not_found() {
+    Component component = ReportComponent.builder(Component.Type.FILE, 1).setKey("key").setUuid("123").build();
+    assertThat(underTest.hasLineHashesWithSignificantCode(component)).isFalse();
+  }
+
+  @Test
+  public void should_cache_line_hash_version_from_db() {
+    ComponentDto project = db.components().insertPublicProject();
+    ComponentDto file = db.components().insertComponent(ComponentTesting.newFileDto(project));
+
+    db.fileSources().insertFileSource(file, dto -> dto.setLineHashesVersion(LineHashVersion.WITH_SIGNIFICANT_CODE.getDbValue()));
+    Component component = ReportComponent.builder(Component.Type.FILE, 1).setKey("key").setUuid(file.uuid()).build();
+    assertThat(underTest.hasLineHashesWithSignificantCode(component)).isTrue();
+
+    assertThat(db.countRowsOfTable("file_sources")).isOne();
+    db.executeUpdateSql("delete from file_sources");
+    db.commit();
+    assertThat(db.countRowsOfTable("file_sources")).isZero();
+
+    // still true because it uses cache
+    assertThat(underTest.hasLineHashesWithSignificantCode(component)).isTrue();
+  }
+}
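DbLineHashVersionTest exercises DbLineHashVersion, whose implementation is not included in this excerpt. From the behaviour the test pins down (false when no file_sources row exists, and a single database read per component thanks to a cache), a plausible sketch is the following; the FileSourceDao accessor name and its exact signature are assumptions, not taken from the commit:

package org.sonar.server.computation.task.projectanalysis.source;

import java.util.HashMap;
import java.util.Map;
import org.sonar.db.DbClient;
import org.sonar.db.DbSession;
import org.sonar.db.source.LineHashVersion;
import org.sonar.server.computation.task.projectanalysis.component.Component;

public class DbLineHashVersion {
  private final Map<Component, Boolean> hasSignificantCodeByComponent = new HashMap<>();
  private final DbClient dbClient;

  public DbLineHashVersion(DbClient dbClient) {
    this.dbClient = dbClient;
  }

  public boolean hasLineHashesWithSignificantCode(Component component) {
    return hasSignificantCodeByComponent.computeIfAbsent(component, this::loadFromDb);
  }

  private boolean loadFromDb(Component component) {
    try (DbSession session = dbClient.openSession(false)) {
      // assumed DAO accessor: reads line_hashes_version from the file_sources row, null if the row is missing
      LineHashVersion version = dbClient.fileSourceDao().selectLineHashesVersion(session, component.getUuid());
      return version == LineHashVersion.WITH_SIGNIFICANT_CODE;
    }
  }
}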
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SignificantCodeRepositoryTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SignificantCodeRepositoryTest.java
new file mode 100644 (file)
index 0000000..baf59f7
--- /dev/null
@@ -0,0 +1,106 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.source;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.sonar.core.hash.LineRange;
+import org.sonar.scanner.protocol.output.ScannerReport;
+import org.sonar.scanner.protocol.output.ScannerReport.LineSgnificantCode;
+import org.sonar.server.computation.task.projectanalysis.batch.BatchReportReaderRule;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+import org.sonar.server.computation.task.projectanalysis.component.FileAttributes;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.sonar.server.computation.task.projectanalysis.component.ReportComponent.builder;
+
+public class SignificantCodeRepositoryTest {
+  private static final String FILE_UUID = "FILE_UUID";
+  private static final String FILE_KEY = "FILE_KEY";
+  private static final int FILE_REF = 2;
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Rule
+  public BatchReportReaderRule reportReader = new BatchReportReaderRule();
+
+  private SignificantCodeRepository underTest = new SignificantCodeRepository(reportReader);
+
+  @Test
+  public void return_empty_if_information_not_available() {
+    assertThat(underTest.getRangesPerLine(createComponent(3))).isEmpty();
+  }
+
+  @Test
+  public void return_null_for_lines_without_information() {
+    Component component = createComponent(5);
+    List<ScannerReport.LineSgnificantCode> significantCode = new ArrayList<>();
+
+    // line 3 and 5 missing
+    significantCode.add(createLineSignificantCode(1, 1, 2));
+    significantCode.add(createLineSignificantCode(2, 1, 2));
+    significantCode.add(createLineSignificantCode(4, 1, 2));
+
+    reportReader.putSignificantCode(component.getReportAttributes().getRef(), significantCode);
+    assertThat(underTest.getRangesPerLine(component)).isNotEmpty();
+    LineRange[] lines = underTest.getRangesPerLine(component).get();
+    assertThat(lines).hasSize(5);
+    assertThat(lines[0]).isNotNull();
+    assertThat(lines[1]).isNotNull();
+    assertThat(lines[2]).isNull();
+    assertThat(lines[3]).isNotNull();
+    assertThat(lines[4]).isNull();
+  }
+
+  @Test
+  public void translate_offset_for_each_line() {
+    Component component = createComponent(1);
+    List<ScannerReport.LineSgnificantCode> significantCode = new ArrayList<>();
+
+    significantCode.add(createLineSignificantCode(1, 1, 2));
+
+    reportReader.putSignificantCode(component.getReportAttributes().getRef(), significantCode);
+    assertThat(underTest.getRangesPerLine(component)).isNotEmpty();
+    LineRange[] lines = underTest.getRangesPerLine(component).get();
+    assertThat(lines).hasSize(1);
+    assertThat(lines[0].startOffset()).isEqualTo(1);
+    assertThat(lines[0].endOffset()).isEqualTo(2);
+  }
+
+  private static LineSgnificantCode createLineSignificantCode(int line, int start, int end) {
+    return LineSgnificantCode.newBuilder()
+      .setLine(line)
+      .setStartOffset(start)
+      .setEndOffset(end)
+      .build();
+  }
+
+  private static Component createComponent(int lineCount) {
+    return builder(Component.Type.FILE, FILE_REF)
+      .setKey(FILE_KEY)
+      .setUuid(FILE_UUID)
+      .setFileAttributes(new FileAttributes(false, null, lineCount))
+      .build();
+  }
+}
index c692c663686282fc47d81eee2afb2bea8a7cba1f..7af8b9553a81f419c9050856f68a6c1e572045c8 100644 (file)
  */
 package org.sonar.server.computation.task.projectanalysis.source;
 
-import com.google.common.base.Splitter;
+import java.util.Arrays;
 import javax.annotation.Nullable;
 import org.junit.Before;
-import org.junit.Rule;
 import org.junit.Test;
-import org.sonar.core.hash.SourceLinesHashesComputer;
 import org.sonar.db.DbClient;
 import org.sonar.db.DbSession;
 import org.sonar.db.component.ComponentDao;
 import org.sonar.db.source.FileSourceDao;
-import org.sonar.db.source.FileSourceDto;
 import org.sonar.server.computation.task.projectanalysis.component.Component;
 
-import static com.google.common.base.Joiner.on;
 import static java.lang.String.valueOf;
-import static java.util.Arrays.stream;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -43,17 +38,13 @@ import static org.sonar.server.computation.task.projectanalysis.component.Report
 
 public class SourceLinesDiffImplTest {
 
-  @Rule
-  public SourceLinesRepositoryRule sourceLinesRepository = new SourceLinesRepositoryRule();
-
   private DbClient dbClient = mock(DbClient.class);
   private DbSession dbSession = mock(DbSession.class);
   private ComponentDao componentDao = mock(ComponentDao.class);
   private FileSourceDao fileSourceDao = mock(FileSourceDao.class);
+  private SourceLinesHashRepository sourceLinesHash = mock(SourceLinesHashRepository.class);
 
-  private static final Splitter END_OF_LINE_SPLITTER = Splitter.on('\n');
-
-  private SourceLinesDiffImpl underTest = new SourceLinesDiffImpl(dbClient, fileSourceDao, sourceLinesRepository);
+  private SourceLinesDiffImpl underTest = new SourceLinesDiffImpl(dbClient, fileSourceDao, sourceLinesHash);
 
   private static final int FILE_REF = 1;
   private static final String FILE_KEY = valueOf(FILE_REF);
@@ -77,25 +68,18 @@ public class SourceLinesDiffImplTest {
 
   @Test
   public void should_find_no_diff_when_report_and_db_content_are_identical() {
+    Component component = fileComponent(FILE_REF);
 
-    mockContentOfFileInDb("" + FILE_KEY, CONTENT);
-    setFileContentInReport(FILE_REF, CONTENT);
+    mockLineHashesInDb("" + FILE_KEY, CONTENT);
+    setLineHashesInReport(component, CONTENT);
 
-    Component component = fileComponent(FILE_REF);
     assertThat(underTest.computeMatchingLines(component)).containsExactly(1, 2, 3, 4, 5, 6, 7);
 
   }
 
-  private void mockContentOfFileInDb(String key, @Nullable String[] content) {
-    FileSourceDto dto = new FileSourceDto();
-    if (content != null) {
-      SourceLinesHashesComputer linesHashesComputer = new SourceLinesHashesComputer();
-      stream(content).forEach(linesHashesComputer::addLine);
-      dto.setLineHashes(on('\n').join(linesHashesComputer.getLineHashes()));
-    }
-
+  private void mockLineHashesInDb(String key, @Nullable String[] lineHashes) {
     when(fileSourceDao.selectLineHashes(dbSession, componentUuidOf(key)))
-      .thenReturn(END_OF_LINE_SPLITTER.splitToList(dto.getLineHashes()));
+      .thenReturn(Arrays.asList(lineHashes));
   }
 
   private static String componentUuidOf(String key) {
@@ -109,7 +93,7 @@ public class SourceLinesDiffImplTest {
       .build();
   }
 
-  private void setFileContentInReport(int ref, String[] content) {
-    sourceLinesRepository.addLines(ref, content);
+  private void setLineHashesInReport(Component component, String[] content) {
+    when(sourceLinesHash.getMatchingDB(component)).thenReturn(Arrays.asList(content));
   }
 }
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashCacheTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashCacheTest.java
new file mode 100644 (file)
index 0000000..a738d8e
--- /dev/null
@@ -0,0 +1,95 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.source;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.function.Function;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.sonar.api.utils.internal.JUnitTempFolder;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.sonar.server.computation.task.projectanalysis.component.ReportComponent.builder;
+
+public class SourceLinesHashCacheTest {
+  private static final String FILE_UUID = "FILE_UUID";
+  private static final String FILE_KEY = "FILE_KEY";
+
+  @Rule
+  public JUnitTempFolder tempFolder = new JUnitTempFolder();
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  private SourceLinesHashCache underTest;
+
+  @Before
+  public void setUp() {
+    underTest = new SourceLinesHashCache(tempFolder);
+  }
+
+  @Test
+  public void should_computeIfAbsent() {
+    Component component = createComponent(1);
+    Function<Component, List<String>> f = mock(Function.class);
+    List<String> list = Collections.singletonList("hash1");
+    when(f.apply(component)).thenReturn(list);
+
+    assertThat(underTest.contains(component)).isFalse();
+    List<String> returned = underTest.computeIfAbsent(component, f);
+    assertThat(returned).isEqualTo(list);
+    assertThat(underTest.contains(component)).isTrue();
+    returned = underTest.computeIfAbsent(component, f);
+    assertThat(returned).isEqualTo(list);
+
+    verify(f).apply(component);
+  }
+
+  @Test
+  public void get_throws_ISE_if_not_cached() {
+    Component component = createComponent(1);
+
+    exception.expect(IllegalStateException.class);
+    exception.expectMessage("Source line hashes for component ReportComponent{ref=1, key='FILE_KEY', type=FILE} not cached");
+    underTest.get(component);
+  }
+
+  @Test
+  public void get_returns_value_if_cached() {
+    List<String> list = Collections.singletonList("hash1");
+    Component component = createComponent(1);
+    underTest.computeIfAbsent(component, c -> list);
+    assertThat(underTest.get(component)).isEqualTo(list);
+  }
+
+  private static Component createComponent(int ref) {
+    return builder(Component.Type.FILE, ref)
+      .setKey(FILE_KEY)
+      .setUuid(FILE_UUID)
+      .build();
+  }
+}
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashImplTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashImplTest.java
new file mode 100644 (file)
index 0000000..fb51eb5
--- /dev/null
@@ -0,0 +1,55 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.source;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+
+import static org.mockito.Mockito.mock;
+import static org.sonar.server.computation.task.projectanalysis.component.ReportComponent.builder;
+
+public class SourceLinesHashImplTest {
+  private static final String FILE_UUID = "FILE_UUID";
+  private static final String FILE_KEY = "FILE_KEY";
+
+  @Rule
+  public SourceLinesRepositoryRule sourceLinesRepository = new SourceLinesRepositoryRule();
+  public SignificantCodeRepository significantCodeRepository = mock(SignificantCodeRepository.class);
+  public SourceLinesHashCache cache = mock(SourceLinesHashCache.class);
+  public DbLineHashVersion dbLineHashVersion = mock(DbLineHashVersion.class);
+
+  private SourceLinesHashRepositoryImpl underTest = new SourceLinesHashRepositoryImpl(sourceLinesRepository, significantCodeRepository, cache, dbLineHashVersion);
+
+  @Test
+  public void should_generate_correct_version_of_line_hashes() {
+    Component component = createComponent(1);
+
+    underTest.getMatchingDB(component);
+
+  }
+
+  private static Component createComponent(int ref) {
+    return builder(Component.Type.FILE, ref)
+      .setKey(FILE_KEY)
+      .setUuid(FILE_UUID)
+      .build();
+  }
+}
diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImplTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/SourceLinesHashRepositoryImplTest.java
new file mode 100644 (file)
index 0000000..78e5e3a
--- /dev/null
@@ -0,0 +1,240 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.computation.task.projectanalysis.source;
+
+import com.google.common.collect.Lists;
+import java.util.List;
+import java.util.Optional;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.sonar.api.utils.internal.JUnitTempFolder;
+import org.sonar.core.hash.LineRange;
+import org.sonar.core.hash.SourceLineHashesComputer;
+import org.sonar.db.source.LineHashVersion;
+import org.sonar.server.computation.task.projectanalysis.component.Component;
+import org.sonar.server.computation.task.projectanalysis.component.Component.Type;
+import org.sonar.server.computation.task.projectanalysis.component.ReportComponent;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepositoryImpl.CachedLineHashesComputer;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepositoryImpl.SignificantCodeLineHashesComputer;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.when;
+
+public class SourceLinesHashRepositoryImplTest {
+  private static final int FILE_REF = 1;
+
+  @Rule
+  public JUnitTempFolder temp = new JUnitTempFolder();
+  @Rule
+  public SourceLinesRepositoryRule sourceLinesRepository = new SourceLinesRepositoryRule();
+
+  private SourceLinesHashCache sourceLinesHashCache;
+  private SignificantCodeRepository significantCodeRepository = mock(SignificantCodeRepository.class);
+  private DbLineHashVersion dbLineHashVersion = mock(DbLineHashVersion.class);
+  private Component file = ReportComponent.builder(Type.FILE, FILE_REF).build();
+  private SourceLinesHashRepositoryImpl underTest;
+
+  @Before
+  public void setUp() {
+    sourceLinesHashCache = new SourceLinesHashCache(temp);
+    underTest = new SourceLinesHashRepositoryImpl(sourceLinesRepository, significantCodeRepository,
+      sourceLinesHashCache, dbLineHashVersion);
+    sourceLinesRepository.addLines(FILE_REF, "line1", "line2", "line3");
+  }
+
+  @Test
+  public void should_return_with_significant_code_if_report_contains_it() {
+    when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(new LineRange[0]));
+    assertThat(underTest.getLineHashesVersion(file)).isEqualTo(LineHashVersion.WITH_SIGNIFICANT_CODE.getDbValue());
+
+    verify(significantCodeRepository).getRangesPerLine(file);
+    verifyNoMoreInteractions(significantCodeRepository);
+    verifyZeroInteractions(dbLineHashVersion);
+  }
+
+  @Test
+  public void should_return_without_significant_code_if_report_does_not_contain_it() {
+    when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.empty());
+    assertThat(underTest.getLineHashesVersion(file)).isEqualTo(LineHashVersion.WITHOUT_SIGNIFICANT_CODE.getDbValue());
+
+    verify(significantCodeRepository).getRangesPerLine(file);
+    verifyNoMoreInteractions(significantCodeRepository);
+    verifyZeroInteractions(dbLineHashVersion);
+  }
+
+  @Test
+  public void should_create_hash_without_significant_code_if_db_has_no_significant_code() {
+    when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(false);
+    List<String> lineHashes = underTest.getMatchingDB(file);
+
+    assertLineHashes(lineHashes, "line1", "line2", "line3");
+    verify(dbLineHashVersion).hasLineHashesWithSignificantCode(file);
+    verifyNoMoreInteractions(dbLineHashVersion);
+    verifyZeroInteractions(significantCodeRepository);
+  }
+
+  @Test
+  public void should_create_hash_without_significant_code_if_report_has_no_significant_code() {
+    when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(true);
+    when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.empty());
+
+    List<String> lineHashes = underTest.getMatchingDB(file);
+
+    assertLineHashes(lineHashes, "line1", "line2", "line3");
+    verify(dbLineHashVersion).hasLineHashesWithSignificantCode(file);
+    verifyNoMoreInteractions(dbLineHashVersion);
+    verify(significantCodeRepository).getRangesPerLine(file);
+    verifyNoMoreInteractions(significantCodeRepository);
+  }
+
+  @Test
+  public void should_create_hash_with_significant_code() {
+    LineRange[] lineRanges = {new LineRange(0, 1), null, new LineRange(1, 5)};
+
+    when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(true);
+    when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));
+
+    List<String> lineHashes = underTest.getMatchingDB(file);
+
+    assertLineHashes(lineHashes, "l", "", "ine3");
+    verify(dbLineHashVersion).hasLineHashesWithSignificantCode(file);
+    verifyNoMoreInteractions(dbLineHashVersion);
+    verify(significantCodeRepository).getRangesPerLine(file);
+    verifyNoMoreInteractions(significantCodeRepository);
+  }
+
+  @Test
+  public void should_return_version_of_line_hashes_with_significant_code_in_the_report() {
+    LineRange[] lineRanges = {new LineRange(0, 1), null, new LineRange(1, 5)};
+    when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));
+    assertThat(underTest.getLineHashesVersion(file)).isEqualTo(LineHashVersion.WITH_SIGNIFICANT_CODE.getDbValue());
+
+    verify(significantCodeRepository).getRangesPerLine(file);
+    verifyNoMoreInteractions(significantCodeRepository);
+    verifyZeroInteractions(dbLineHashVersion);
+  }
+
+  @Test
+  public void should_return_version_of_line_hashes_without_significant_code_in_the_report() {
+    when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.empty());
+    assertThat(underTest.getLineHashesVersion(file)).isEqualTo(LineHashVersion.WITHOUT_SIGNIFICANT_CODE.getDbValue());
+
+    verify(significantCodeRepository).getRangesPerLine(file);
+    verifyNoMoreInteractions(significantCodeRepository);
+    verifyZeroInteractions(dbLineHashVersion);
+  }
+
+  @Test
+  public void should_persist_with_significant_code_from_cache_if_possible() {
+    List<String> lineHashes = Lists.newArrayList("line1", "line2", "line3");
+    LineRange[] lineRanges = {new LineRange(0, 1), null, new LineRange(1, 5)};
+    sourceLinesHashCache.computeIfAbsent(file, c -> lineHashes);
+
+    when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(true);
+    when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));
+
+    LineHashesComputer hashesComputer = underTest.getLineProcessorToPersist(file);
+
+    assertThat(hashesComputer).isInstanceOf(CachedLineHashesComputer.class);
+    assertThat(hashesComputer.getResult()).isEqualTo(lineHashes);
+  }
+
+  @Test
+  public void should_persist_without_significant_code_from_cache_if_possible() {
+    List<String> lineHashes = Lists.newArrayList("line1", "line2", "line3");
+    sourceLinesHashCache.computeIfAbsent(file, c -> lineHashes);
+
+    when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(false);
+    when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.empty());
+
+    LineHashesComputer hashesComputer = underTest.getLineProcessorToPersist(file);
+
+    assertThat(hashesComputer).isInstanceOf(CachedLineHashesComputer.class);
+    assertThat(hashesComputer.getResult()).isEqualTo(lineHashes);
+  }
+
+  @Test
+  public void should_generate_to_persist_if_needed() {
+    List<String> lineHashes = Lists.newArrayList("line1", "line2", "line3");
+    LineRange[] lineRanges = {new LineRange(0, 1), null, new LineRange(1, 5)};
+
+    sourceLinesHashCache.computeIfAbsent(file, c -> lineHashes);
+
+    // DB has line hashes without significant code and significant code is available in the report, so we need to generate new line hashes
+    when(dbLineHashVersion.hasLineHashesWithSignificantCode(file)).thenReturn(false);
+    when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));
+
+    LineHashesComputer hashesComputer = underTest.getLineProcessorToPersist(file);
+
+    assertThat(hashesComputer).isInstanceOf(SignificantCodeLineHashesComputer.class);
+  }
+
+  @Test
+  public void SignificantCodeLineHashesComputer_delegates_after_taking_ranges_into_account() {
+    LineRange[] lineRanges = {
+      new LineRange(0, 1),
+      null,
+      new LineRange(1, 5),
+      new LineRange(2, 7),
+      new LineRange(4, 5)
+    };
+
+    SourceLineHashesComputer lineHashComputer = mock(SourceLineHashesComputer.class);
+    SignificantCodeLineHashesComputer computer = new SignificantCodeLineHashesComputer(lineHashComputer, lineRanges);
+    computer.addLine("testline");
+    computer.addLine("testline");
+    computer.addLine("testline");
+    computer.addLine("testline");
+    computer.addLine("testline");
+    computer.addLine("testline");
+
+    verify(lineHashComputer).addLine("t");
+    // the extra line at the end has no matching range, so its content is ignored and it is added as an empty line
+    verify(lineHashComputer, times(2)).addLine("");
+    verify(lineHashComputer).addLine("estl");
+    verify(lineHashComputer).addLine("stlin");
+    verify(lineHashComputer).addLine("l");
+
+    verifyNoMoreInteractions(lineHashComputer);
+  }
+
+  private void assertLineHashes(List<String> actualLines, String... lines) {
+    assertThat(actualLines).hasSize(lines.length);
+    SourceLineHashesComputer computer = new SourceLineHashesComputer();
+    for (String line : lines) {
+      computer.addLine(line);
+    }
+
+    List<String> expectedLines = computer.getLineHashes();
+
+    for (int i = 0; i < expectedLines.size(); i++) {
+      assertThat(actualLines.get(i))
+        .withFailMessage("Line hash is different for line %d", i)
+        .isEqualTo(expectedLines.get(i));
+    }
+  }
+}
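
Taken together, these tests pin down how SourceLinesHashRepositoryImpl chooses a line-hash version and decides whether cached hashes can be reused. A rough sketch inferred from the assertions only (the committed implementation lives in SourceLinesHashRepositoryImpl and may differ in detail):

    // Inferred from the tests above; illustrative pseudocode, not the committed implementation.
    // Version to persist: driven solely by whether the report carries significant-code ranges.
    int version = significantCodeRepository.getRangesPerLine(file).isPresent()
      ? LineHashVersion.WITH_SIGNIFICANT_CODE.getDbValue()
      : LineHashVersion.WITHOUT_SIGNIFICANT_CODE.getDbValue();

    // getMatchingDB(file): hash using significant code only if the DB row was stored with that
    // version AND the report provides ranges; otherwise hash the raw source lines.
    // getLineProcessorToPersist(file): reuse the cached hashes when the DB version already matches
    // the report version; otherwise return a SignificantCodeLineHashesComputer to regenerate them.
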
index 8d5246ef961bd685ddf97f37e01a72686f0c26dc..9db2571b9c2e35c3bf6296761ce89964c51be605 100644 (file)
  */
 package org.sonar.server.computation.task.projectanalysis.step;
 
+import com.google.common.collect.Lists;
 import java.util.Arrays;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
+import org.mockito.Mockito;
 import org.sonar.api.utils.System2;
 import org.sonar.db.DbClient;
 import org.sonar.db.DbSession;
@@ -31,6 +33,7 @@ import org.sonar.db.DbTester;
 import org.sonar.db.protobuf.DbFileSources;
 import org.sonar.db.source.FileSourceDto;
 import org.sonar.db.source.FileSourceDto.Type;
+import org.sonar.db.source.LineHashVersion;
 import org.sonar.scanner.protocol.output.ScannerReport;
 import org.sonar.scanner.protocol.output.ScannerReport.Component.ComponentType;
 import org.sonar.scanner.protocol.output.ScannerReport.SyntaxHighlightingRule.HighlightingType;
@@ -45,6 +48,8 @@ import org.sonar.server.computation.task.projectanalysis.duplication.InnerDuplic
 import org.sonar.server.computation.task.projectanalysis.duplication.TextBlock;
 import org.sonar.server.computation.task.projectanalysis.scm.Changeset;
 import org.sonar.server.computation.task.projectanalysis.scm.ScmInfoRepositoryRule;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepository;
+import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer;
 import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepositoryRule;
 import org.sonar.server.computation.task.step.ComputationStep;
 
@@ -56,18 +61,15 @@ import static org.mockito.Mockito.when;
 public class PersistFileSourcesStepTest extends BaseStepTest {
 
   private static final int FILE1_REF = 3;
-  private static final int FILE2_REF = 4;
   private static final String PROJECT_UUID = "PROJECT";
   private static final String PROJECT_KEY = "PROJECT_KEY";
   private static final String FILE1_UUID = "FILE1";
-  private static final String FILE2_UUID = "FILE2";
   private static final long NOW = 123456789L;
 
   private System2 system2 = mock(System2.class);
 
   @Rule
   public ExpectedException thrown = ExpectedException.none();
-
   @Rule
   public DbTester dbTester = DbTester.create(system2);
   @Rule
@@ -81,6 +83,9 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
   @Rule
   public DuplicationRepositoryRule duplicationRepository = DuplicationRepositoryRule.create(treeRootHolder);
 
+  private SourceLinesHashRepository sourceLinesHashRepository = mock(SourceLinesHashRepository.class);
+  private LineHashesComputer lineHashesComputer = mock(LineHashesComputer.class);
+
   private DbClient dbClient = dbTester.getDbClient();
   private DbSession session = dbTester.getSession();
 
@@ -89,8 +94,9 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
   @Before
   public void setup() {
     when(system2.now()).thenReturn(NOW);
+    when(sourceLinesHashRepository.getLineProcessorToPersist(Mockito.any(Component.class))).thenReturn(lineHashesComputer);
     underTest = new PersistFileSourcesStep(dbClient, system2, treeRootHolder, reportReader, fileSourceRepository, scmInfoRepository,
-      duplicationRepository);
+      duplicationRepository, sourceLinesHashRepository);
   }
 
   @Override
@@ -101,7 +107,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
   @Test
   public void persist_sources() {
     initBasicReport(2);
-
+    when(lineHashesComputer.getResult()).thenReturn(Lists.newArrayList("137f72c3708c6bd0de00a0e5a69c699b", "e6251bcf1a7dc3ba5e7933e325bbe605"));
     underTest.execute();
 
     assertThat(dbTester.countRowsOfTable("file_sources")).isEqualTo(1);
@@ -110,6 +116,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
     assertThat(fileSourceDto.getFileUuid()).isEqualTo(FILE1_UUID);
     assertThat(fileSourceDto.getBinaryData()).isNotEmpty();
     assertThat(fileSourceDto.getDataHash()).isNotEmpty();
+    assertThat(fileSourceDto.getLineHashesVersion()).isEqualTo(LineHashVersion.WITHOUT_SIGNIFICANT_CODE.getDbValue());
     assertThat(fileSourceDto.getLineHashes()).isNotEmpty();
     assertThat(fileSourceDto.getCreatedAt()).isEqualTo(NOW);
     assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(NOW);
@@ -125,7 +132,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
   @Test
   public void persist_source_hashes() {
     initBasicReport(2);
-
+    when(lineHashesComputer.getResult()).thenReturn(Lists.newArrayList("137f72c3708c6bd0de00a0e5a69c699b", "e6251bcf1a7dc3ba5e7933e325bbe605"));
     underTest.execute();
 
     assertThat(dbTester.countRowsOfTable("file_sources")).isEqualTo(1);
@@ -328,6 +335,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
       .setSrcHash(srcHash)
       .setLineHashes(lineHashes)
       .setDataHash(dataHash)
+      .setLineHashesVersion(LineHashVersion.WITHOUT_SIGNIFICANT_CODE.getDbValue())
       .setSourceData(DbFileSources.Data.newBuilder()
         .addLines(DbFileSources.Line.newBuilder()
           .setLine(1)
diff --git a/sonar-core/src/main/java/org/sonar/core/hash/LineRange.java b/sonar-core/src/main/java/org/sonar/core/hash/LineRange.java
new file mode 100644 (file)
index 0000000..7eb1592
--- /dev/null
@@ -0,0 +1,41 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.core.hash;
+
+import com.google.common.base.Preconditions;
+
+public class LineRange {
+  private final int startOffset;
+  private final int endOffset;
+
+  public LineRange(int startOffset, int endOffset) {
+    Preconditions.checkArgument(startOffset <= endOffset, "Line range is not valid: %s must be greater than or equal to %s", endOffset, startOffset);
+    this.startOffset = startOffset;
+    this.endOffset = endOffset;
+  }
+
+  public int startOffset() {
+    return startOffset;
+  }
+
+  public int endOffset() {
+    return endOffset;
+  }
+}
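
As a reading aid (not part of this commit): LineRange is a pair of character offsets within a single line, validated at construction time. The SignificantCodeLineHashesComputer test earlier in this diff suggests the range is applied substring-style, with the end offset exclusive:

    // Illustrative fragment; LineRange is the class added above.
    LineRange keep = new LineRange(1, 5);   // over "testline" this keeps "estl" for hashing
    LineRange bad  = new LineRange(2, 1);   // rejected: IllegalArgumentException from Preconditions.checkArgument
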
diff --git a/sonar-core/src/main/java/org/sonar/core/hash/SourceLineHashesComputer.java b/sonar-core/src/main/java/org/sonar/core/hash/SourceLineHashesComputer.java
new file mode 100644 (file)
index 0000000..df4a586
--- /dev/null
@@ -0,0 +1,65 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.core.hash;
+
+import java.security.MessageDigest;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.lang.StringUtils;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static java.util.Objects.requireNonNull;
+
+/**
+ * Computes the hash of each line of a given file by simply adding the lines of that file one by one, in order, with
+ * {@link #addLine(String)}.
+ */
+public class SourceLineHashesComputer {
+  private final MessageDigest md5Digest = DigestUtils.getMd5Digest();
+  private final List<String> lineHashes;
+
+  public SourceLineHashesComputer() {
+    this.lineHashes = new ArrayList<>();
+  }
+
+  public SourceLineHashesComputer(int expectedLineCount) {
+    this.lineHashes = new ArrayList<>(expectedLineCount);
+  }
+
+  public void addLine(String line) {
+    requireNonNull(line, "line can not be null");
+    lineHashes.add(computeHash(line));
+  }
+
+  public List<String> getLineHashes() {
+    return Collections.unmodifiableList(lineHashes);
+  }
+
+  private String computeHash(String line) {
+    String reducedLine = StringUtils.replaceChars(line, "\t ", "");
+    if (reducedLine.isEmpty()) {
+      return "";
+    }
+    return Hex.encodeHexString(md5Digest.digest(reducedLine.getBytes(UTF_8)));
+  }
+}
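
For reviewers skimming the diff, here is a minimal usage sketch of the renamed SourceLineHashesComputer (illustrative only, not part of this commit; the example class name is made up, while the constructor and methods are the ones defined in the file above):

    import java.util.List;
    import org.sonar.core.hash.SourceLineHashesComputer;

    public class SourceLineHashesComputerExample {
      public static void main(String[] args) {
        // size the computer to the expected line count to avoid resizing the backing list
        SourceLineHashesComputer computer = new SourceLineHashesComputer(3);
        computer.addLine("int a = 1;");  // tabs and spaces are stripped before MD5 hashing
        computer.addLine("int b = 2;");
        computer.addLine("\t ");         // whitespace-only lines are recorded as an empty hash
        List<String> hashes = computer.getLineHashes(); // unmodifiable, one entry per added line
        System.out.println(hashes);
      }
    }
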
diff --git a/sonar-core/src/main/java/org/sonar/core/hash/SourceLinesHashesComputer.java b/sonar-core/src/main/java/org/sonar/core/hash/SourceLinesHashesComputer.java
deleted file mode 100644 (file)
index b64413f..0000000
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * SonarQube
- * Copyright (C) 2009-2018 SonarSource SA
- * mailto:info AT sonarsource DOT com
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
- */
-package org.sonar.core.hash;
-
-import java.security.MessageDigest;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.lang.StringUtils;
-
-import static java.nio.charset.StandardCharsets.UTF_8;
-import static java.util.Objects.requireNonNull;
-
-/**
- * Computes the hash of each line of a given file by simply added lines of that file one by one in order with
- * {@link #addLine(String)}.
- */
-public class SourceLinesHashesComputer {
-  private final MessageDigest md5Digest = DigestUtils.getMd5Digest();
-  private final List<String> lineHashes;
-
-  public SourceLinesHashesComputer() {
-    this.lineHashes = new ArrayList<>();
-  }
-
-  public SourceLinesHashesComputer(int expectedLineCount) {
-    this.lineHashes = new ArrayList<>(expectedLineCount);
-  }
-
-  public void addLine(String line) {
-    requireNonNull(line, "line can not be null");
-    lineHashes.add(computeHash(line));
-  }
-
-  public List<String> getLineHashes() {
-    return Collections.unmodifiableList(lineHashes);
-  }
-
-  private String computeHash(String line) {
-    String reducedLine = StringUtils.replaceChars(line, "\t ", "");
-    if (reducedLine.isEmpty()) {
-      return "";
-    }
-    return Hex.encodeHexString(md5Digest.digest(reducedLine.getBytes(UTF_8)));
-  }
-}
index e7d9a93101967216ac1881acdb54fbd0a2e66329..94aff1000b549695b08d07605cc7eedd8c35708c 100644 (file)
@@ -26,7 +26,7 @@ import com.google.common.base.Strings;
 import java.util.List;
 import java.util.Set;
 
-import org.sonar.core.hash.SourceLinesHashesComputer;
+import org.sonar.core.hash.SourceLineHashesComputer;
 
 /**
  * Sequence of hash of lines for a given file
@@ -88,7 +88,7 @@ public class LineHashSequence {
   }
 
   public static LineHashSequence createForLines(List<String> lines) {
-    SourceLinesHashesComputer hashesComputer = new SourceLinesHashesComputer(lines.size());
+    SourceLineHashesComputer hashesComputer = new SourceLineHashesComputer(lines.size());
     for (String line : lines) {
       hashesComputer.addLine(line);
     }
diff --git a/sonar-core/src/test/java/org/sonar/core/hash/LineRangeTest.java b/sonar-core/src/test/java/org/sonar/core/hash/LineRangeTest.java
new file mode 100644 (file)
index 0000000..07cc5fe
--- /dev/null
@@ -0,0 +1,45 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.core.hash;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class LineRangeTest {
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  @Test
+  public void should_throw_IAE_if_range_is_invalid() {
+    exception.expect(IllegalArgumentException.class);
+    exception.expectMessage("Line range is not valid: 1 must be greater or equal than 2");
+    new LineRange(2, 1);
+  }
+
+  @Test
+  public void check_getters() {
+    LineRange range = new LineRange(1, 2);
+    assertThat(range.startOffset()).isEqualTo(1);
+    assertThat(range.endOffset()).isEqualTo(2);
+  }
+}
index 967e5638a9c6a9f694cffda67ec068026f367697..ee07becc43d26dbaa4de152416b12d3625110bb2 100644 (file)
@@ -37,7 +37,7 @@ public class SourceLinesHashesComputerTest {
     expectedException.expect(NullPointerException.class);
     expectedException.expectMessage("line can not be null");
 
-    new SourceLinesHashesComputer(1).addLine(null);
+    new SourceLineHashesComputer(1).addLine(null);
   }
 
   @Test
@@ -70,7 +70,7 @@ public class SourceLinesHashesComputerTest {
     String line2 = "line 1 + 1";
     String line3 = "line 10 - 7";
 
-    SourceLinesHashesComputer underTest = new SourceLinesHashesComputer();
+    SourceLineHashesComputer underTest = new SourceLineHashesComputer();
     underTest.addLine(line1);
     underTest.addLine(line2);
     underTest.addLine(line3);
@@ -80,7 +80,7 @@ public class SourceLinesHashesComputerTest {
   }
 
   private static String hashSingleLine(@Nullable String line) {
-    SourceLinesHashesComputer sourceLinesHashesComputer = new SourceLinesHashesComputer(1);
+    SourceLineHashesComputer sourceLinesHashesComputer = new SourceLineHashesComputer(1);
     sourceLinesHashesComputer.addLine(line);
     return sourceLinesHashesComputer.getLineHashes().iterator().next();
   }
index d5e66955ab309b45b80a2c2c9cfa398f80331f58..7da8a34470e96219fe744f8ea369cc078ef6ead2 100644 (file)
@@ -120,12 +120,13 @@ public class ScannerReportReader {
     return file.exists();
   }
 
+  @CheckForNull
   public CloseableIterator<ScannerReport.LineSgnificantCode> readComponentSignificantCode(int fileRef) {
     File file = fileStructure.fileFor(FileStructure.Domain.SGNIFICANT_CODE, fileRef);
     if (fileExists(file)) {
       return Protobuf.readStream(file, ScannerReport.LineSgnificantCode.parser());
     }
-    return emptyCloseableIterator();
+    return null;
   }
 
   public boolean hasSignificantCode(int fileRef) {