source.dussan.org Git - sonarqube.git/commitdiff
SONAR-10430 add FileSourceDto#getLineCount()
authorSébastien Lesaint <sebastien.lesaint@sonarsource.com>
Mon, 23 Apr 2018 13:05:23 +0000 (15:05 +0200)
committerSonarTech <sonartech@sonarsource.com>
Mon, 28 May 2018 18:20:44 +0000 (20:20 +0200)
and do not expose hash list as raw string anymore

19 files changed:
server/sonar-db-dao/src/main/java/org/sonar/db/source/FileSourceDto.java
server/sonar-db-dao/src/main/resources/org/sonar/db/source/FileSourceMapper.xml
server/sonar-db-dao/src/test/java/org/sonar/db/source/FileSourceDaoTest.java
server/sonar-db-dao/src/test/java/org/sonar/db/source/FileSourceDtoTest.java
server/sonar-db-dao/src/test/java/org/sonar/db/source/FileSourceTester.java
server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/insert-result.xml [deleted file]
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStep.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/source/ComputeFileSourceData.java
server/sonar-server/src/main/java/org/sonar/server/computation/task/projectanalysis/step/PersistFileSourcesStep.java
server/sonar-server/src/test/java/org/sonar/server/batch/ProjectDataLoaderTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/filemove/FileMoveDetectionStepTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/source/ComputeFileSourceDataTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/task/projectanalysis/step/PersistFileSourcesStepTest.java
server/sonar-server/src/test/resources/org/sonar/server/source/ws/HashActionTest/shared.xml
server/sonar-server/src/test/resources/org/sonar/server/source/ws/HashActionTest/show_hashes_on_test_file.xml
server/sonar-server/src/test/resources/org/sonar/server/test/index/TestIndexerTest/db.xml
server/sonar-server/src/test/resources/org/sonar/server/test/index/TestResultSetIteratorTest/filter_by_project.xml
server/sonar-server/src/test/resources/org/sonar/server/test/index/TestResultSetIteratorTest/filter_by_project_and_date.xml
server/sonar-server/src/test/resources/org/sonar/server/test/index/TestResultSetIteratorTest/shared.xml

index 0e30488e1d787eae0a902465edd37edd5dadf769..0d19dbff9348e38445aac93064a10885bb0603f2 100644 (file)
@@ -19,6 +19,8 @@
  */
 package org.sonar.db.source;
 
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
 import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.InvalidProtocolBufferException;
 import java.io.ByteArrayInputStream;
@@ -26,6 +28,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import javax.annotation.CheckForNull;
 import javax.annotation.Nullable;
@@ -34,12 +37,16 @@ import net.jpountz.lz4.LZ4BlockOutputStream;
 import org.apache.commons.io.IOUtils;
 import org.sonar.db.protobuf.DbFileSources;
 
+import static com.google.common.base.Splitter.on;
 import static java.lang.String.format;
 
 public class FileSourceDto {
 
   private static final String SIZE_LIMIT_EXCEEDED_EXCEPTION_MESSAGE = "Protocol message was too large.  May be malicious.  " +
     "Use CodedInputStream.setSizeLimit() to increase the size limit.";
+  private static final Joiner LINE_RETURN_JOINER = Joiner.on('\n');
+  public static final Splitter LINES_HASHES_SPLITTER = on('\n');
+  public static final int LINE_COUNT_NOT_POPULATED = -1;
 
   private Long id;
   private String projectUuid;
@@ -47,8 +54,20 @@ public class FileSourceDto {
   private long createdAt;
   private long updatedAt;
   private String lineHashes;
+  /**
+   * When the {@code line_count} column was added, it was populated with the value {@link #LINE_COUNT_NOT_POPULATED -1},
+   * which implies all existing file sources have this value at the time SonarQube is upgraded.
+   * <p>
+   * Column {@code line_count} is populated with the correct value for every new file, and for existing files as the
+   * project they belong to is analyzed for the first time after the migration.
+   * <p>
+   * Method {@link #getLineCount()} hides this migration-only complexity by either returning the value
+   * of column {@code line_count} when it's been populated, or computing the returned value from the value of column
+   * {@code line_hashes}.
+   */
+  private int lineCount = LINE_COUNT_NOT_POPULATED;
   private String srcHash;
-  private byte[] binaryData;
+  private byte[] binaryData = new byte[0];
   private String dataType;
   private String dataHash;
   private String revision;
@@ -241,13 +260,43 @@ public class FileSourceDto {
     return this;
   }
 
-  @CheckForNull
-  public String getLineHashes() {
+  /** Used by MyBatis */
+  public String getRawLineHashes() {
     return lineHashes;
   }
 
-  public FileSourceDto setLineHashes(@Nullable String lineHashes) {
+  public void setRawLineHashes(@Nullable String lineHashes) {
     this.lineHashes = lineHashes;
+  }
+
+  public List<String> getLineHashes() {
+    if (lineHashes == null) {
+      return Collections.emptyList();
+    }
+    return LINES_HASHES_SPLITTER.splitToList(lineHashes);
+  }
+
+  /**
+   * @return the value of column {@code line_count} if populated, otherwise the size of {@link #getLineHashes()}.
+   */
+  public int getLineCount() {
+    if (lineCount == LINE_COUNT_NOT_POPULATED) {
+      return getLineHashes().size();
+    }
+    return lineCount;
+  }
+
+  public FileSourceDto setLineHashes(@Nullable List<String> lineHashes) {
+    if (lineHashes == null) {
+      this.lineHashes = null;
+      this.lineCount = 0;
+    } else if (lineHashes.isEmpty()) {
+      this.lineHashes = null;
+      this.lineCount = 1;
+    } else {
+      this.lineHashes = LINE_RETURN_JOINER.join(lineHashes);
+      this.lineCount = lineHashes.size();
+    }
     return this;
   }
 
index 2fc4b3f6bc2ec168af04a8479817705e0cbb294d..e449b0e0803eadd5a64932b7c0eca77de8e332da 100644 (file)
@@ -5,11 +5,26 @@
 <mapper namespace="org.sonar.db.source.FileSourceMapper">
 
   <select id="select" parameterType="map" resultType="org.sonar.db.source.FileSourceDto">
-    SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt,
-    binary_data as binaryData, line_hashes as lineHashes, data_hash as dataHash, src_hash as srcHash, data_type as
-    dataType, revision, line_hashes_version as lineHashesVersion
-    FROM file_sources
-    WHERE file_uuid = #{fileUuid} and data_type = #{dataType}
+    select
+      id,
+      project_uuid as projectUuid,
+      file_uuid as fileUuid,
+      created_at as createdAt,
+      updated_at as updatedAt,
+      binary_data as binaryData,
+      line_hashes as rawLineHashes,
+      line_hashes_version as lineHashesVersion,
+      line_count as lineCount,
+      data_hash as dataHash,
+      src_hash as srcHash,
+      data_type as
+      dataType,
+      revision
+    from
+      file_sources
+    where
+      file_uuid = #{fileUuid}
+      and data_type = #{dataType}
   </select>
 
   <select id="selectHashesForProject" parameterType="map" resultType="org.sonar.db.source.FileSourceDto">
   </select>
 
   <select id="selectLineHashesVersion" parameterType="map" resultType="Integer">
-    SELECT line_hashes_version 
+    SELECT line_hashes_version
     FROM file_sources
     WHERE file_uuid = #{fileUuid,jdbcType=VARCHAR} and data_type=#{dataType,jdbcType=VARCHAR}
   </select>
 
   <insert id="insert" parameterType="org.sonar.db.source.FileSourceDto" useGeneratedKeys="false">
-    INSERT INTO file_sources (project_uuid, file_uuid, created_at, updated_at, binary_data, line_hashes, data_hash,
-    src_hash, data_type, revision, line_hashes_version)
-    VALUES (#{projectUuid,jdbcType=VARCHAR}, #{fileUuid,jdbcType=VARCHAR}, #{createdAt,jdbcType=BIGINT},
-    #{updatedAt,jdbcType=BIGINT}, #{binaryData,jdbcType=BLOB}, #{lineHashes,jdbcType=CLOB},
-    #{dataHash,jdbcType=VARCHAR}, #{srcHash,jdbcType=VARCHAR},#{dataType,jdbcType=VARCHAR},
-    #{revision,jdbcType=VARCHAR}, #{lineHashesVersion,jdbcType=INTEGER})
+    insert into file_sources
+    (
+      project_uuid,
+      file_uuid,
+      created_at,
+      updated_at,
+      binary_data,
+      line_hashes,
+      line_hashes_version,
+      line_count,
+      data_hash,
+      src_hash,
+      data_type,
+      revision
+    )
+    values
+    (
+      #{projectUuid,jdbcType=VARCHAR},
+      #{fileUuid,jdbcType=VARCHAR},
+      #{createdAt,jdbcType=BIGINT},
+      #{updatedAt,jdbcType=BIGINT},
+      #{binaryData,jdbcType=BLOB},
+      #{rawLineHashes,jdbcType=CLOB},
+      #{lineHashesVersion,jdbcType=INTEGER},
+      #{lineCount,jdbcType=INTEGER},
+      #{dataHash,jdbcType=VARCHAR},
+      #{srcHash,jdbcType=VARCHAR},
+      #{dataType,jdbcType=VARCHAR},
+      #{revision,jdbcType=VARCHAR}
+    )
   </insert>
 
   <update id="update" parameterType="org.sonar.db.source.FileSourceDto" useGeneratedKeys="false">
-    UPDATE file_sources SET
-    updated_at = #{updatedAt,jdbcType=BIGINT},
-    binary_data = #{binaryData,jdbcType=BLOB},
-    line_hashes = #{lineHashes,jdbcType=CLOB},
-    data_hash = #{dataHash,jdbcType=VARCHAR},
-    src_hash = #{srcHash,jdbcType=VARCHAR},
-    revision = #{revision,jdbcType=VARCHAR},
-    line_hashes_version = #{lineHashesVersion,jdbcType=INTEGER}
-    WHERE id = #{id}
+    update
+      file_sources
+    set
+      updated_at = #{updatedAt,jdbcType=BIGINT},
+      binary_data = #{binaryData,jdbcType=BLOB},
+      line_hashes = #{rawLineHashes,jdbcType=CLOB},
+      line_hashes_version = #{lineHashesVersion,jdbcType=INTEGER},
+      line_count = #{lineCount,jdbcType=INTEGER},
+      data_hash = #{dataHash,jdbcType=VARCHAR},
+      src_hash = #{srcHash,jdbcType=VARCHAR},
+      revision = #{revision,jdbcType=VARCHAR}
+    where
+      id = #{id}
   </update>
 
 </mapper>
index c8fd1b8a734ea3af8e04caf78aaa4b8a33aa62a5..efd8bfc85787ffcd7e76828272a40eefb1bb0545 100644 (file)
@@ -19,6 +19,7 @@
  */
 package org.sonar.db.source;
 
+import com.google.common.collect.ImmutableList;
 import java.io.IOException;
 import java.io.Reader;
 import java.util.function.Consumer;
@@ -31,6 +32,9 @@ import org.sonar.db.DbSession;
 import org.sonar.db.DbTester;
 import org.sonar.db.source.FileSourceDto.Type;
 
+import static com.google.common.collect.ImmutableList.of;
+import static java.util.Collections.emptyList;
+import static java.util.Collections.singletonList;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.fail;
 
@@ -93,24 +97,79 @@ public class FileSourceDaoTest {
 
   @Test
   public void insert() {
-    dbTester.prepareDbUnit(getClass(), "shared.xml");
-
-    underTest.insert(session, new FileSourceDto()
+    FileSourceDto expected = new FileSourceDto()
       .setProjectUuid("PRJ_UUID")
       .setFileUuid("FILE2_UUID")
       .setBinaryData("FILE2_BINARY_DATA".getBytes())
       .setDataHash("FILE2_DATA_HASH")
-      .setLineHashes("LINE1_HASH\\nLINE2_HASH")
+      .setLineHashes(of("LINE1_HASH", "LINE2_HASH"))
       .setSrcHash("FILE2_HASH")
       .setDataType(Type.SOURCE)
       .setCreatedAt(1500000000000L)
       .setUpdatedAt(1500000000001L)
       .setLineHashesVersion(1)
-      .setRevision("123456789"));
+      .setRevision("123456789");
+    underTest.insert(session, expected);
+    session.commit();
+
+    FileSourceDto fileSourceDto = underTest.selectSourceByFileUuid(session, expected.getFileUuid());
+
+    assertThat(fileSourceDto.getProjectUuid()).isEqualTo(expected.getProjectUuid());
+    assertThat(fileSourceDto.getFileUuid()).isEqualTo(expected.getFileUuid());
+    assertThat(fileSourceDto.getBinaryData()).isEqualTo(expected.getBinaryData());
+    assertThat(fileSourceDto.getDataHash()).isEqualTo(expected.getDataHash());
+    assertThat(fileSourceDto.getRawLineHashes()).isEqualTo(expected.getRawLineHashes());
+    assertThat(fileSourceDto.getLineHashes()).isEqualTo(expected.getLineHashes());
+    assertThat(fileSourceDto.getLineCount()).isEqualTo(expected.getLineCount());
+    assertThat(fileSourceDto.getSrcHash()).isEqualTo(expected.getSrcHash());
+    assertThat(fileSourceDto.getCreatedAt()).isEqualTo(expected.getCreatedAt());
+    assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(expected.getUpdatedAt());
+    assertThat(fileSourceDto.getRevision()).isEqualTo(expected.getRevision());
+  }
+
+  @Test
+  public void insert_does_not_fail_on_FileSourceDto_with_only_non_nullable_data() {
+    FileSourceDto fileSourceDto = new FileSourceDto()
+      .setProjectUuid("Foo")
+      .setFileUuid("Bar")
+      .setCreatedAt(1500000000000L)
+      .setUpdatedAt(1500000000001L);
+    underTest.insert(session, fileSourceDto);
+    session.commit();
+  }
+
+  @Test
+  public void selectSourceByFileUuid_reads_source_without_line_hashes() {
+    FileSourceDto fileSourceDto = new FileSourceDto()
+      .setProjectUuid("Foo")
+      .setFileUuid("Bar")
+      .setDataType(Type.SOURCE)
+      .setCreatedAt(1500000000000L)
+      .setUpdatedAt(1500000000001L);
+    underTest.insert(session, fileSourceDto);
+    session.commit();
+
+    FileSourceDto res = underTest.selectSourceByFileUuid(session, fileSourceDto.getFileUuid());
+
+    assertThat(res.getLineCount()).isEqualTo(0);
+    assertThat(res.getLineHashes()).isEmpty();
+  }
+
+  @Test
+  public void selectTest_reads_test_without_line_hashes() {
+    FileSourceDto fileSourceDto = new FileSourceDto()
+      .setProjectUuid("Foo")
+      .setFileUuid("Bar")
+      .setDataType(Type.TEST)
+      .setCreatedAt(1500000000000L)
+      .setUpdatedAt(1500000000001L);
+    underTest.insert(session, fileSourceDto);
     session.commit();
 
-    dbTester.assertDbUnitTable(getClass(), "insert-result.xml", "file_sources",
-      "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at", "data_type", "revision", "line_hashes_version");
+    FileSourceDto res = underTest.selectTestByFileUuid(session, fileSourceDto.getFileUuid());
+
+    assertThat(res.getLineCount()).isEqualTo(0);
+    assertThat(res.getLineHashes()).isEmpty();
   }
 
   @Test
@@ -139,7 +198,7 @@ public class FileSourceDaoTest {
       .setFileUuid("FILE2_UUID")
       .setBinaryData("FILE2_BINARY_DATA".getBytes())
       .setDataHash("FILE2_DATA_HASH")
-      .setLineHashes("hashes")
+      .setLineHashes(singletonList("hashes"))
       .setSrcHash("FILE2_HASH")
       .setDataType(Type.SOURCE)
       .setCreatedAt(1500000000000L)
@@ -157,7 +216,7 @@ public class FileSourceDaoTest {
       .setFileUuid("FILE2_UUID")
       .setBinaryData("FILE2_BINARY_DATA".getBytes())
       .setDataHash("FILE2_DATA_HASH")
-      .setLineHashes("hashes")
+      .setLineHashes(singletonList("hashes"))
       .setSrcHash("FILE2_HASH")
       .setDataType(Type.SOURCE)
       .setCreatedAt(1500000000000L)
@@ -207,7 +266,7 @@ public class FileSourceDaoTest {
       .setBinaryData("updated data".getBytes())
       .setDataHash("NEW_DATA_HASH")
       .setSrcHash("NEW_FILE_HASH")
-      .setLineHashes("NEW_LINE_HASHES")
+      .setLineHashes(singletonList("NEW_LINE_HASHES"))
       .setDataType(Type.SOURCE)
       .setUpdatedAt(1500000000002L)
       .setLineHashesVersion(1)
@@ -218,6 +277,33 @@ public class FileSourceDaoTest {
       "data_hash", "line_hashes", "src_hash", "created_at", "updated_at", "data_type", "revision", "line_hashes_version");
   }
 
+  @Test
+  public void update_to_no_line_hashes() {
+    ImmutableList<String> lineHashes = of("a", "b", "c");
+    FileSourceDto fileSourceDto = new FileSourceDto()
+      .setProjectUuid("Foo")
+      .setFileUuid("Bar")
+      .setDataType(Type.SOURCE)
+      .setLineHashes(lineHashes)
+      .setCreatedAt(1500000000000L)
+      .setUpdatedAt(1500000000001L);
+    underTest.insert(session, fileSourceDto);
+    session.commit();
+
+    FileSourceDto resBefore = underTest.selectSourceByFileUuid(session, fileSourceDto.getFileUuid());
+    assertThat(resBefore.getLineCount()).isEqualTo(lineHashes.size());
+    assertThat(resBefore.getLineHashes()).isEqualTo(lineHashes);
+
+    fileSourceDto.setId(resBefore.getId());
+    fileSourceDto.setLineHashes(emptyList());
+    underTest.update(session, fileSourceDto);
+    session.commit();
+
+    FileSourceDto res = underTest.selectSourceByFileUuid(session, fileSourceDto.getFileUuid());
+    assertThat(res.getLineHashes()).isEmpty();
+    assertThat(res.getLineCount()).isEqualTo(1);
+  }
+
   private static class ReaderToStringConsumer implements Consumer<Reader> {
 
     String result = null;
index 6ec45c589ddf7eec6e4ef2a0256728ef64523751..4089a700f3bb7af8780637fee662f59315c32228 100644 (file)
  */
 package org.sonar.db.source;
 
+import com.google.common.base.Joiner;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
+import java.util.Random;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -91,4 +96,44 @@ public class FileSourceDtoTest {
     }
     return dataBuilder.build();
   }
+
+  @Test
+  public void new_FileSourceDto_as_lineCount_0_and_rawLineHashes_to_null()  {
+    FileSourceDto underTest = new FileSourceDto();
+
+    assertThat(underTest.getLineCount()).isZero();
+    assertThat(underTest.getLineHashes()).isEmpty();
+    assertThat(underTest.getRawLineHashes()).isNull();
+  }
+
+  @Test
+  public void setLineHashes_null_sets_lineCount_to_0_and_rawLineHashes_to_null() {
+    FileSourceDto underTest = new FileSourceDto();
+    underTest.setLineHashes(null);
+
+    assertThat(underTest.getLineCount()).isZero();
+    assertThat(underTest.getLineHashes()).isEmpty();
+    assertThat(underTest.getRawLineHashes()).isNull();
+  }
+
+  @Test
+  public void setLineHashes_empty_sets_lineCount_to_1_and_rawLineHashes_to_null() {
+    FileSourceDto underTest = new FileSourceDto();
+    underTest.setLineHashes(Collections.emptyList());
+
+    assertThat(underTest.getLineCount()).isEqualTo(1);
+    assertThat(underTest.getLineHashes()).isEmpty();
+    assertThat(underTest.getRawLineHashes()).isNull();
+  }
+
+  @Test
+  public void setLineHashes_sets_lineCount_to_size_of_list_and_rawLineHashes_to_join_by_line_return() {
+    FileSourceDto underTest = new FileSourceDto();
+    int expected = 1 + new Random().nextInt(96);
+    List<String> lineHashes = IntStream.range(0, expected).mapToObj(String::valueOf).collect(Collectors.toList());
+    underTest.setLineHashes(lineHashes);
+
+    assertThat(underTest.getLineCount()).isEqualTo(expected);
+    assertThat(underTest.getRawLineHashes()).isEqualTo(Joiner.on('\n').join(lineHashes));
+  }
 }
index f850837c3313863a3ee5f273f6ab8fac4d19055f..2da66345b72d886280f0d20b5e5e194bd0007e12 100644 (file)
@@ -21,8 +21,11 @@ package org.sonar.db.source;
 
 import java.util.Arrays;
 import java.util.Date;
+import java.util.Random;
 import java.util.function.Consumer;
+import java.util.stream.IntStream;
 import org.apache.commons.lang.math.RandomUtils;
+import org.sonar.core.util.stream.MoreCollectors;
 import org.sonar.db.DbTester;
 import org.sonar.db.component.ComponentDto;
 import org.sonar.db.protobuf.DbFileSources;
@@ -44,7 +47,7 @@ public class FileSourceTester {
       .setFileUuid(file.uuid())
       .setSrcHash(randomAlphanumeric(50))
       .setDataHash(randomAlphanumeric(50))
-      .setLineHashes(randomAlphanumeric(50))
+      .setLineHashes(IntStream.range(0, new Random().nextInt(21)).mapToObj(String::valueOf).collect(MoreCollectors.toList()))
       .setRevision(randomAlphanumeric(100))
       .setSourceData(newRandomData(3).build())
       .setCreatedAt(new Date().getTime())
diff --git a/server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/insert-result.xml b/server/sonar-db-dao/src/test/resources/org/sonar/db/source/FileSourceDaoTest/insert-result.xml
deleted file mode 100644 (file)
index 5aec962..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-<dataset>
-
-  <file_sources id="101" project_uuid="PRJ_UUID" file_uuid="FILE1_UUID"
-                binary_data="abcde" data_hash="hash"
-                line_hashes="ABC\nDEF\nGHI"
-                src_hash="FILE_HASH" revision="123456789"
-                created_at="1500000000000" updated_at="1500000000000"  data_type="SOURCE" 
-                line_hashes_version="[null]" />
-
-
-  <file_sources id="102" project_uuid="PRJ_UUID" file_uuid="FILE2_UUID"
-                binary_data="[ignore]"
-                data_hash="FILE2_DATA_HASH"
-                line_hashes="LINE1_HASH\nLINE2_HASH"
-                src_hash="FILE2_HASH" revision="123456789"
-                created_at="1500000000000" updated_at="1500000000001"  data_type="SOURCE" 
-                line_hashes_version="1" />
-
-</dataset>
index 3a622ac76803d4843fb730667fd241eca227da34..f1a27767fb54dcfe9887aab47e540835611ddea7 100644 (file)
@@ -53,7 +53,6 @@ import org.sonar.server.computation.task.projectanalysis.filemove.FileSimilarity
 import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepository;
 import org.sonar.server.computation.task.step.ComputationStep;
 
-import static com.google.common.base.MoreObjects.firstNonNull;
 import static com.google.common.base.Splitter.on;
 import static com.google.common.collect.FluentIterable.from;
 import static java.util.Arrays.asList;
@@ -222,8 +221,7 @@ public class FileMoveDetectionStep implements ComputationStep {
     if (fileSourceDto == null) {
       return null;
     }
-    String lineHashes = firstNonNull(fileSourceDto.getLineHashes(), "");
-    return new File(dbComponent.getPath(), LINES_HASHES_SPLITTER.splitToList(lineHashes));
+    return new File(dbComponent.getPath(), fileSourceDto.getLineHashes());
   }
 
   private static void printIfDebug(ScoreMatrix scoreMatrix) {
index d15efb9b1c0c34bdc2ec1af42988aff6eb0fec40..85992b45241c2cf2648925e3741bd00ea13dfbff 100644 (file)
@@ -51,7 +51,7 @@ public class ComputeFileSourceData {
       read(fileSourceBuilder, currentLine, linesIterator.next(), linesIterator.hasNext());
     }
 
-    return new Data(fileSourceBuilder.build(), LINE_RETURN_JOINER.join(lineHashesComputer.getResult()), sourceHashComputer.getHash());
+    return new Data(fileSourceBuilder.build(), lineHashesComputer.getResult(), sourceHashComputer.getHash());
   }
 
   private void read(DbFileSources.Data.Builder fileSourceBuilder, int currentLine, String lineSource, boolean hasNextLine) {
@@ -70,10 +70,10 @@ public class ComputeFileSourceData {
 
   public static class Data {
     private final DbFileSources.Data fileSourceData;
-    private final String lineHashes;
+    private final List<String> lineHashes;
     private final String srcHash;
 
-    private Data(DbFileSources.Data fileSourceData, String lineHashes, String srcHash) {
+    private Data(DbFileSources.Data fileSourceData, List<String> lineHashes, String srcHash) {
       this.fileSourceData = fileSourceData;
       this.lineHashes = lineHashes;
       this.srcHash = srcHash;
@@ -83,7 +83,7 @@ public class ComputeFileSourceData {
       return srcHash;
     }
 
-    public String getLineHashes() {
+    public List<String> getLineHashes() {
       return lineHashes;
     }
 
index 749d48cf510c216f9352bed67e1da1a4e8b00d9f..7ed6759b6ac04c53109eef3fea676c296fe829d3 100644 (file)
@@ -135,9 +135,9 @@ public class PersistFileSourcesStep implements ComputationStep {
       byte[] data = FileSourceDto.encodeSourceData(fileData);
       String dataHash = DigestUtils.md5Hex(data);
       String srcHash = fileSourceData.getSrcHash();
-      String lineHashes = fileSourceData.getLineHashes();
+      List<String> lineHashes = fileSourceData.getLineHashes();
+      Integer lineHashesVersion = sourceLinesHash.getLineHashesVersion(file);
       FileSourceDto previousDto = previousFileSourcesByUuid.get(file.getUuid());
-      int lineHashesVersion = sourceLinesHash.getLineHashesVersion(file);
 
       if (previousDto == null) {
         FileSourceDto dto = new FileSourceDto()
index ea8f2be347ec5ea553989e73b4df4ba29b4f3423..5444d7ad49eab9f207da1b1f08977788e236eadb 100644 (file)
@@ -45,6 +45,7 @@ import org.sonar.server.exceptions.ForbiddenException;
 import org.sonar.server.exceptions.NotFoundException;
 import org.sonar.server.tester.UserSessionRule;
 
+import static com.google.common.collect.ImmutableList.of;
 import static java.lang.String.format;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.entry;
@@ -728,7 +729,7 @@ public class ProjectDataLoaderTest {
       .setFileUuid(file.uuid())
       .setProjectUuid(file.projectUuid())
       .setDataHash("0263047cd758c68c27683625f072f010")
-      .setLineHashes("8d7b3d6b83c0a517eac07e1aac94b773")
+      .setLineHashes(of("8d7b3d6b83c0a517eac07e1aac94b773"))
       .setCreatedAt(System.currentTimeMillis())
       .setUpdatedAt(System.currentTimeMillis())
       .setDataType(FileSourceDto.Type.SOURCE)
index 9de1df73da27e65ce51ef783d9cf89732bd9190c..0c844c8dbd400da0f92670158c49345851c18ff7 100644 (file)
@@ -46,7 +46,6 @@ import org.sonar.server.computation.task.projectanalysis.component.Component;
 import org.sonar.server.computation.task.projectanalysis.component.TreeRootHolderRule;
 import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashRepository;
 
-import static com.google.common.base.Joiner.on;
 import static java.util.Arrays.stream;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.mockito.Mockito.mock;
@@ -497,7 +496,7 @@ public class FileMoveDetectionStepTest {
         FileSourceDto fileSourceDto = new FileSourceDto()
           .setFileUuid(file.uuid())
           .setProjectUuid(file.projectUuid())
-          .setLineHashes(on('\n').join(linesHashesComputer.getLineHashes()))
+          .setLineHashes(linesHashesComputer.getLineHashes())
           .setDataType(FileSourceDto.Type.SOURCE);
         dbTester.getDbClient().fileSourceDao().insert(dbTester.getSession(), fileSourceDto);
         dbTester.commit();
index b3436685c3403ef5b63ab2f82a76786a7442b272..c66d1606c8901e60514f4e6d6d8dfdffc18bd295 100644 (file)
@@ -45,7 +45,7 @@ public class ComputeFileSourceDataTest {
       lineHashesComputer);
 
     ComputeFileSourceData.Data data = computeFileSourceData.compute();
-    assertThat(data.getLineHashes()).isEqualTo("137f72c3708c6bd0de00a0e5a69c699b");
+    assertThat(data.getLineHashes()).containsOnly("137f72c3708c6bd0de00a0e5a69c699b");
     assertThat(data.getSrcHash()).isEqualTo("137f72c3708c6bd0de00a0e5a69c699b");
     assertThat(data.getFileSourceData().getLinesList()).hasSize(1);
     assertThat(data.getFileSourceData().getLines(0).getHighlighting()).isEqualTo("h-1");
@@ -65,7 +65,7 @@ public class ComputeFileSourceDataTest {
       lineHashesComputer);
 
     ComputeFileSourceData.Data data = computeFileSourceData.compute();
-    assertThat(data.getLineHashes()).isEqualTo("137f72c3708c6bd0de00a0e5a69c699b\ne6251bcf1a7dc3ba5e7933e325bbe605");
+    assertThat(data.getLineHashes()).containsOnly("137f72c3708c6bd0de00a0e5a69c699b", "e6251bcf1a7dc3ba5e7933e325bbe605");
     assertThat(data.getSrcHash()).isEqualTo("ee5a58024a155466b43bc559d953e018");
     assertThat(data.getFileSourceData().getLinesList()).hasSize(2);
     assertThat(data.getFileSourceData().getLines(0).getHighlighting()).isEqualTo("h-1");
index ab910b36ac2caea1412b5133323c1807ce46db6b..84793b8a0236e3b96942e1be65c8f8587080555b 100644 (file)
@@ -21,6 +21,7 @@ package org.sonar.server.computation.task.projectanalysis.step;
 
 import com.google.common.collect.Lists;
 import java.util.Arrays;
+import java.util.List;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -53,6 +54,7 @@ import org.sonar.server.computation.task.projectanalysis.source.SourceLinesHashR
 import org.sonar.server.computation.task.projectanalysis.source.SourceLinesRepositoryRule;
 import org.sonar.server.computation.task.step.ComputationStep;
 
+import static com.google.common.collect.ImmutableList.of;
 import static com.google.common.collect.Lists.newArrayList;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.mockito.Mockito.mock;
@@ -137,7 +139,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
 
     assertThat(dbTester.countRowsOfTable("file_sources")).isEqualTo(1);
     FileSourceDto fileSourceDto = dbClient.fileSourceDao().selectSourceByFileUuid(session, FILE1_UUID);
-    assertThat(fileSourceDto.getLineHashes()).isEqualTo("137f72c3708c6bd0de00a0e5a69c699b\ne6251bcf1a7dc3ba5e7933e325bbe605");
+    assertThat(fileSourceDto.getLineHashes()).containsExactly("137f72c3708c6bd0de00a0e5a69c699b", "e6251bcf1a7dc3ba5e7933e325bbe605");
     assertThat(fileSourceDto.getSrcHash()).isEqualTo("ee5a58024a155466b43bc559d953e018");
   }
 
@@ -326,7 +328,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
     // Existing sources
     long past = 150000L;
     String srcHash = "137f72c3708c6bd0de00a0e5a69c699b";
-    String lineHashes = "137f72c3708c6bd0de00a0e5a69c699b";
+    List<String> lineHashes = of("137f72c3708c6bd0de00a0e5a69c699b");
     String dataHash = "29f25900140c94db38035128cb6de6a2";
 
     dbClient.fileSourceDao().insert(dbTester.getSession(), new FileSourceDto()
@@ -369,7 +371,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
       .setFileUuid(FILE1_UUID)
       .setDataType(Type.SOURCE)
       .setSrcHash("5b4bd9815cdb17b8ceae19eb1810c34c")
-      .setLineHashes("6438c669e0d0de98e6929c2cc0fac474\n")
+      .setLineHashes(of("6438c669e0d0de98e6929c2cc0fac474", ""))
       .setDataHash("6cad150e3d065976c230cddc5a09efaa")
       .setSourceData(DbFileSources.Data.newBuilder()
         .addLines(DbFileSources.Line.newBuilder()
@@ -408,7 +410,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
       .setFileUuid(FILE1_UUID)
       .setDataType(Type.SOURCE)
       // Source hash is missing, update will be made
-      .setLineHashes("137f72c3708c6bd0de00a0e5a69c699b")
+      .setLineHashes(of("137f72c3708c6bd0de00a0e5a69c699b"))
       .setDataHash("29f25900140c94db38035128cb6de6a2")
       .setSourceData(DbFileSources.Data.newBuilder()
         .addLines(DbFileSources.Line.newBuilder()
@@ -440,7 +442,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
       .setFileUuid(FILE1_UUID)
       .setDataType(Type.SOURCE)
       .setSrcHash("137f72c3708c6bd0de00a0e5a69c699b")
-      .setLineHashes("137f72c3708c6bd0de00a0e5a69c699b")
+      .setLineHashes(of("137f72c3708c6bd0de00a0e5a69c699b"))
       .setDataHash("8e84c0d961cfe364e43833c4cc4ddef5")
       // Revision is missing, update will be made
       .setSourceData(DbFileSources.Data.newBuilder()
@@ -479,7 +481,7 @@ public class PersistFileSourcesStepTest extends BaseStepTest {
       .setFileUuid(FILE1_UUID)
       .setDataType(Type.SOURCE)
       .setSrcHash("137f72c3708c6bd0de00a0e5a69c699b")
-      .setLineHashes("137f72c3708c6bd0de00a0e5a69c699b")
+      .setLineHashes(of("137f72c3708c6bd0de00a0e5a69c699b"))
       .setDataHash("8e84c0d961cfe364e43833c4cc4ddef5")
       // Revision is missing, update will be made
       .setSourceData(DbFileSources.Data.newBuilder()
index be57e6c9b98d4792dabbfc62c88b4e954bcea24b..ae19ab0df346ddaaeb536a7df89c32a49a6f1448 100644 (file)
@@ -41,6 +41,7 @@
                 file_uuid="CDEF"
                 binary_data=""
                 data_hash="hash"
+                line_count="1"
                 line_hashes="987654"
                 src_hash="12345"
                 created_at="1414597442000"
index 71ce93166c6e114b64e77046d58eeed4a9c795e7..dd0619767d581a19f05e1ff231f4d5f949b8a531 100644 (file)
@@ -41,6 +41,7 @@
                 file_uuid="CDEF"
                 binary_data=""
                 data_hash="[null]"
+                line_count="0"
                 line_hashes="[null]"
                 src_hash="[null]"
                 created_at="1414597442000"
@@ -52,6 +53,7 @@
                 file_uuid="CDEF"
                 binary_data=""
                 data_hash="hash"
+                line_count="1"
                 line_hashes="987654"
                 src_hash="12345"
                 created_at="1414597442000"
index c2326dde5dc8ade1bb70358b86a5d2c48829d130..5aedd1c167801c3fb440e0b2802ef486bf300658 100644 (file)
@@ -1,6 +1,14 @@
 <dataset>
 
-  <file_sources id="1" project_uuid="PROJECT_UUID" file_uuid="FILE_UUID" created_at="1416238020000" updated_at="1416239042000"
-                binary_data="" data_hash=""  data_type="TEST" />
+  <file_sources id="1"
+                project_uuid="PROJECT_UUID"
+                file_uuid="FILE_UUID"
+                line_count="0"
+                created_at="1416238020000"
+                updated_at="1416239042000"
+                binary_data=""
+                data_hash=""
+                data_type="TEST"
+  />
 
 </dataset>
index c629e0e50a88d3827591d8e16b704a638f9f43ef..5db8861307cf1580450ecc7fac2ad354247f53ce 100644 (file)
@@ -1,9 +1,23 @@
 <dataset>
 
-  <file_sources id="1" project_uuid="P1" file_uuid="F1" created_at="1416238020000" updated_at="1416239042000"
-                binary_data="" data_hash="" data_type="TEST"/>
+  <file_sources id="1"
+                project_uuid="P1"
+                file_uuid="F1"
+                created_at="1416238020000"
+                updated_at="1416239042000"
+                line_count="0"
+                binary_data=""
+                data_hash=""
+                data_type="TEST"/>
 
-  <file_sources id="2" project_uuid="P2" file_uuid="F2" created_at="1416238020000" updated_at="1416239042000"
-                binary_data="" data_hash="" data_type="TEST"/>
+  <file_sources id="2"
+                project_uuid="P2"
+                file_uuid="F2"
+                created_at="1416238020000"
+                updated_at="1416239042000"
+                line_count="0"
+                binary_data=""
+                data_hash=""
+                data_type="TEST"/>
 
 </dataset>
index 2edb2eafec1cc8e76891fa1efd75942e0eaa9716..b40d33fae8ba9b087d6c01b1f6be2fe774643864 100644 (file)
@@ -1,9 +1,23 @@
 <dataset>
 
-  <file_sources id="1" project_uuid="P1" file_uuid="F1" created_at="1416238020000" updated_at="1416239042000"
-                binary_data="" data_hash="" data_type="TEST"/>
+  <file_sources id="1"
+                project_uuid="P1"
+                file_uuid="F1"
+                created_at="1416238020000"
+                updated_at="1416239042000"
+                line_count="0"
+                binary_data=""
+                data_hash=""
+                data_type="TEST"/>
 
-  <file_sources id="2" project_uuid="P1" file_uuid="F2" created_at="1416238020000" updated_at="1300000000000"
-                binary_data="" data_hash="" data_type="TEST"/>
+  <file_sources id="2"
+                project_uuid="P1"
+                file_uuid="F2"
+                created_at="1416238020000"
+                updated_at="1300000000000"
+                line_count="0"
+                binary_data=""
+                data_hash=""
+                data_type="TEST"/>
 
 </dataset>
index 3942d39e9f61cf729229d54f04ff602b18572132..ca4a6e5d88a466402d5cf4471992394d5d2a9404 100644 (file)
@@ -1,6 +1,13 @@
 <dataset>
 
-  <file_sources id="1" project_uuid="P1" file_uuid="F1" created_at="1416238020000" updated_at="1416239042000"
-                binary_data="" data_hash="" data_type="TEST"/>
+  <file_sources id="1"
+                project_uuid="P1"
+                file_uuid="F1"
+                created_at="1416238020000"
+                updated_at="1416239042000"
+                line_count="0"
+                binary_data=""
+                data_hash=""
+                data_type="TEST"/>
 
 </dataset>