source.dussan.org Git - sonarqube.git/commitdiff
SONAR-2570 compress db table FILE_SOURCES
authorSimon Brandhof <simon.brandhof@sonarsource.com>
Tue, 3 Feb 2015 22:06:45 +0000 (23:06 +0100)
committerSimon Brandhof <simon.brandhof@sonarsource.com>
Tue, 10 Feb 2015 13:05:59 +0000 (14:05 +0100)
59 files changed:
pom.xml
server/sonar-server-benchmarks/src/test/java/org/sonar/server/benchmark/SourceDbBenchmarkTest.java
server/sonar-server/src/main/java/org/sonar/server/computation/issue/IssueComputation.java
server/sonar-server/src/main/java/org/sonar/server/computation/issue/SourceLinesCache.java
server/sonar-server/src/main/java/org/sonar/server/db/migrations/DatabaseMigrations.java
server/sonar-server/src/main/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryData.java [new file with mode: 0644]
server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineDoc.java
server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineIndexer.java
server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineResultSetIterator.java
server/sonar-server/src/test/java/org/sonar/server/batch/ProjectRepositoryLoaderMediumTest.java
server/sonar-server/src/test/java/org/sonar/server/computation/issue/SourceLinesCacheTest.java
server/sonar-server/src/test/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest.java [new file with mode: 0644]
server/sonar-server/src/test/java/org/sonar/server/source/db/FileSourceTesting.java [new file with mode: 0644]
server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineIndexerTest.java
server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineResultSetIteratorTest.java
server/sonar-server/src/test/resources/org/sonar/server/component/db/ComponentDaoTest/select_module_files_tree.xml
server/sonar-server/src/test/resources/org/sonar/server/computation/issue/SourceLinesCacheTest/load_data.xml
server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/bad_data.xml [new file with mode: 0644]
server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/data.xml [new file with mode: 0644]
server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/schema.sql [new file with mode: 0644]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/db.xml
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/empty-file.xml [deleted file]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/null-file.xml [deleted file]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/schema.sql
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/shared.xml
server/sonar-server/src/test/resources/org/sonar/server/source/ws/HashActionTest/shared.xml
server/sonar-web/src/main/webapp/WEB-INF/db/migrate/781_add_file_sources_binary_data.rb [new file with mode: 0644]
server/sonar-web/src/main/webapp/WEB-INF/db/migrate/782_feed_file_sources_binary_data.rb [new file with mode: 0644]
server/sonar-web/src/main/webapp/WEB-INF/db/migrate/783_drop_file_sources_data.rb [new file with mode: 0644]
sonar-batch-protocol/pom.xml
sonar-batch-protocol/src/main/gen-java/org/sonar/server/source/db/FileSourceDb.java [new file with mode: 0644]
sonar-batch-protocol/src/main/protobuf/file_source_db.proto [new file with mode: 0644]
sonar-batch/src/main/java/org/sonar/batch/index/SourceDataFactory.java [new file with mode: 0644]
sonar-batch/src/main/java/org/sonar/batch/index/SourcePersister.java
sonar-batch/src/main/java/org/sonar/batch/scan/ProjectScanContainer.java
sonar-batch/src/test/java/org/sonar/batch/index/SourceDataFactoryTest.java [new file with mode: 0644]
sonar-batch/src/test/java/org/sonar/batch/index/SourcePersisterTest.java [deleted file]
sonar-batch/src/test/resources/org/sonar/batch/index/MeasurePersisterTest/shouldUpdateMeasure-result.xml [deleted file]
sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml [deleted file]
sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources_missing_src_hash.xml
sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/shared.xml [new file with mode: 0644]
sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistDontTouchUnchanged-result.xml
sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistEmptyFile-result.xml
sonar-core/pom.xml
sonar-core/src/main/java/org/sonar/core/persistence/DatabaseVersion.java
sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDao.java
sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDto.java
sonar-core/src/main/java/org/sonar/core/source/db/FileSourceMapper.java
sonar-core/src/main/resources/org/sonar/core/persistence/rows-h2.sql
sonar-core/src/main/resources/org/sonar/core/persistence/schema-h2.ddl
sonar-core/src/main/resources/org/sonar/core/source/db/FileSourceMapper.xml
sonar-core/src/test/java/org/sonar/core/source/db/FileSourceDaoTest.java
sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/delete_file_sources_of_disabled_resources-result.xml
sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/delete_file_sources_of_disabled_resources.xml
sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/select_purgeable_file_uuids.xml
sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/shouldDeleteProject.xml
sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/insert-result.xml
sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/shared.xml
sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/update-result.xml

diff --git a/pom.xml b/pom.xml
index 712d316e61af78af2f52459316766185157b1678..57b1e8ba2780d189df8e5786941945cf4e2586cc 100644 (file)
--- a/pom.xml
+++ b/pom.xml
         <artifactId>protobuf-java</artifactId>
         <version>2.6.1</version>
       </dependency>
+      <dependency>
+        <groupId>net.jpountz.lz4</groupId>
+        <artifactId>lz4</artifactId>
+        <version>1.3.0</version>
+      </dependency>
+
 
       <!-- tomcat -->
       <dependency>
index 843308ca2ea0dac7511ebaeccd85b994ae80d333..3d6dbf326450ef97cd4eb9cfdd99a68cb73d1729 100644 (file)
@@ -30,11 +30,13 @@ import org.sonar.core.persistence.DbTester;
 import org.sonar.core.source.db.FileSourceDao;
 import org.sonar.core.source.db.FileSourceDto;
 import org.sonar.server.db.DbClient;
+import org.sonar.server.source.db.FileSourceDb;
 import org.sonar.server.source.index.SourceLineResultSetIterator;
 
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.SQLException;
+import java.util.Arrays;
 import java.util.Timer;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -43,8 +45,9 @@ import static org.assertj.core.api.Assertions.assertThat;
 public class SourceDbBenchmarkTest {
 
   public static final Logger LOGGER = LoggerFactory.getLogger("benchmarkSourceDbScrolling");
-  // files are 3'220 lines long
+
   public static final int NUMBER_OF_FILES = 1000;
+  public static final int NUMBER_OF_LINES = 3220;
   public static final String PROJECT_UUID = Uuids.create();
 
   @Rule
@@ -52,7 +55,7 @@ public class SourceDbBenchmarkTest {
 
   @Test
   public void benchmark() throws Exception {
-    prepareFileSources();
+    prepareTable();
     scrollRows();
   }
 
@@ -70,12 +73,12 @@ public class SourceDbBenchmarkTest {
       SourceLineResultSetIterator it = SourceLineResultSetIterator.create(dbClient, connection, 0L);
       while (it.hasNext()) {
         SourceLineResultSetIterator.SourceFile row = it.next();
-        assertThat(row.getLines().size()).isEqualTo(3220);
+        assertThat(row.getLines().size()).isEqualTo(NUMBER_OF_LINES);
         assertThat(row.getFileUuid()).isNotEmpty();
         counter.incrementAndGet();
       }
       long end = System.currentTimeMillis();
-      long period = end-start;
+      long period = end - start;
       long throughputPerSecond = 1000L * counter.get() / period;
       LOGGER.info(String.format("%d FILE_SOURCES rows scrolled in %d ms (%d rows/second)", counter.get(), period, throughputPerSecond));
 
@@ -85,25 +88,50 @@ public class SourceDbBenchmarkTest {
     }
   }
 
-  private void prepareFileSources() throws IOException {
+  private void prepareTable() throws IOException {
     LOGGER.info("Populate table FILE_SOURCES");
     FileSourceDao dao = new FileSourceDao(dbTester.myBatis());
     for (int i = 0; i < NUMBER_OF_FILES; i++) {
-      dao.insert(newFileSourceDto());
+      dao.insert(generateDto());
     }
   }
 
-  private FileSourceDto newFileSourceDto() throws IOException {
+  private FileSourceDto generateDto() throws IOException {
     long now = System.currentTimeMillis();
+    byte[] data = generateData();
     FileSourceDto dto = new FileSourceDto();
     dto.setCreatedAt(now);
     dto.setUpdatedAt(now);
+    dto.setBinaryData(data);
+    dto.setDataHash("49d7230271f2bd24c759e54bcd66547d");
     dto.setProjectUuid(PROJECT_UUID);
     dto.setFileUuid(Uuids.create());
-    // this fake data is 3220 lines long
-    dto.setData(IOUtils.toString(getClass().getResourceAsStream("SourceDbBenchmarkTest/data.txt")));
-    dto.setDataHash("49d7230271f2bd24c759e54bcd66547d");
     dto.setLineHashes(IOUtils.toString(getClass().getResourceAsStream("SourceDbBenchmarkTest/line_hashes.txt")));
     return dto;
   }
+
+  private byte[] generateData() throws IOException {
+    FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+    for (int i = 1; i <= NUMBER_OF_LINES; i++) {
+      dataBuilder.addLinesBuilder()
+        .setLine(i)
+        .setScmRevision("REVISION_" + i)
+        .setScmAuthor("a_guy")
+        .setSource("this is not java code " + i)
+        .setUtLineHits(i)
+        .setUtConditions(i+1)
+        .setUtCoveredConditions(i)
+        .setItLineHits(i)
+        .setItConditions(i+1)
+        .setItCoveredConditions(i)
+        .setOverallLineHits(i)
+        .setOverallConditions(i+1)
+        .setOverallCoveredConditions(i)
+        .setScmDate(150000000L)
+        .setHighlighting("2,9,k;9,18,k")
+        .addAllDuplications(Arrays.asList(19,33,141))
+        .build();
+    }
+    return FileSourceDto.encodeData(dataBuilder.build());
+  }
 }
index c6288054115ae1e8f2ea95fb3191732056cf56d2..b503e2c74ac0305b0f01df24042f55d51f5fb64a 100644 (file)
@@ -55,7 +55,6 @@ public class IssueComputation {
         guessAuthor(issue);
         autoAssign(issue);
         copyRuleTags(issue);
-        // TODO execute extension points
       }
       diskIssuesAppender.append(issue);
     }
index 5621a7fe25fb19069fcba5c53bee22d3b812318c..25d38307c384ab0eaab2d9ad37c56f6779a324ad 100644 (file)
 package org.sonar.server.computation.issue;
 
 import com.google.common.base.Function;
-import org.apache.commons.csv.CSVFormat;
-import org.apache.commons.csv.CSVParser;
-import org.apache.commons.csv.CSVRecord;
-import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.sonar.api.utils.DateUtils;
 import org.sonar.core.source.db.FileSourceDto;
 import org.sonar.server.db.DbClient;
+import org.sonar.server.source.db.FileSourceDb;
 
 import javax.annotation.CheckForNull;
 import javax.annotation.Nullable;
 
-import java.io.Reader;
+import java.io.InputStream;
 import java.util.ArrayList;
-import java.util.Date;
 import java.util.List;
 
 /**
@@ -53,7 +48,11 @@ public class SourceLinesCache {
   private final List<String> authors = new ArrayList<>();
   private boolean loaded = false;
   private String currentFileUuid = null;
+
+  // date of the latest commit on the file
   private long lastCommitDate = 0L;
+
+  // author of the latest commit on the file
   private String lastCommitAuthor = null;
 
   public SourceLinesCache(DbClient dbClient) {
@@ -114,30 +113,25 @@ public class SourceLinesCache {
     return authors.size();
   }
 
-  class FileDataParser implements Function<Reader, Void> {
+  /**
+   * Parse lines from db and collect SCM information
+   */
+  class FileDataParser implements Function<InputStream, Void> {
     @Override
-    public Void apply(Reader input) {
-      CSVParser csvParser = null;
-      try {
-        csvParser = new CSVParser(input, CSVFormat.DEFAULT);
-        for (CSVRecord csvRecord : csvParser) {
-          Date revisionDate = DateUtils.parseDateTimeQuietly(csvRecord.get(FileSourceDto.CSV_INDEX_SCM_DATE));
-
-          // do not keep all fields in memory
-          String author = csvRecord.get(FileSourceDto.CSV_INDEX_SCM_AUTHOR);
-          authors.add(author);
-
-          if (revisionDate != null && revisionDate.getTime() > lastCommitDate) {
-            lastCommitDate = revisionDate.getTime();
-            lastCommitAuthor = author;
-          }
+    public Void apply(InputStream input) {
+      FileSourceDb.Data data = FileSourceDto.decodeData(input);
+      for (FileSourceDb.Line line : data.getLinesList()) {
+        String author = null;
+        if (line.hasScmAuthor()) {
+          author = line.getScmAuthor();
+        }
+        authors.add(author);
+        if (line.hasScmDate() && line.getScmDate() > lastCommitDate && author != null) {
+          lastCommitDate = line.getScmDate();
+          lastCommitAuthor = author;
         }
-        return null;
-      } catch (Exception e) {
-        throw new IllegalStateException("Fail to parse CSV data", e);
-      } finally {
-        IOUtils.closeQuietly(csvParser);
       }
+      return null;
     }
   }
 }
index d61f389ca5ea3b20ff750c0fcb1b54e714359655..43d9dfbdb9c016fa5d62e89d3d1c4bd099624495 100644 (file)
@@ -86,6 +86,7 @@ public interface DatabaseMigrations {
     UpdateProjectsModuleUuidPath.class,
     FeedIssueComponentUuids.class,
     FeedSnapshotsLongDates.class,
-    FeedIssuesLongDates.class
+    FeedIssuesLongDates.class,
+    FeedFileSourcesBinaryData.class
     );
 }
diff --git a/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryData.java b/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryData.java
new file mode 100644 (file)
index 0000000..b80abfd
--- /dev/null
@@ -0,0 +1,164 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.db.migrations.v51;
+
+import com.google.common.base.Function;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVRecord;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.sonar.api.utils.DateUtils;
+import org.sonar.core.persistence.Database;
+import org.sonar.core.source.db.FileSourceDto;
+import org.sonar.server.db.migrations.BaseDataChange;
+import org.sonar.server.db.migrations.MassUpdate;
+import org.sonar.server.db.migrations.Select;
+import org.sonar.server.db.migrations.SqlStatement;
+import org.sonar.server.source.db.FileSourceDb;
+
+import javax.annotation.Nullable;
+
+import java.sql.SQLException;
+import java.util.Iterator;
+
+public class FeedFileSourcesBinaryData extends BaseDataChange {
+
+  public FeedFileSourcesBinaryData(Database db) {
+    super(db);
+  }
+
+  @Override
+  public void execute(Context context) throws SQLException {
+    MassUpdate update = context.prepareMassUpdate().rowPluralName("issues");
+    update.select("SELECT id,data FROM file_sources WHERE binary_data is null");
+    update.update("UPDATE file_sources SET binary_data=? WHERE id=?");
+    update.execute(new MassUpdate.Handler() {
+      @Override
+      public boolean handle(Select.Row row, SqlStatement update) throws SQLException {
+        Long fileSourceId = row.getLong(1);
+        update.setBytes(1, toBinary(fileSourceId, row.getString(2)));
+        update.setLong(2, fileSourceId);
+        return true;
+      }
+    });
+  }
+
+  private byte[] toBinary(Long fileSourceId, @Nullable String data) {
+    FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+    CSVParser parser = null;
+    try {
+      if (data != null) {
+        parser = CSVParser.parse(data, CSVFormat.DEFAULT);
+        Iterator<CSVRecord> rows = parser.iterator();
+        int line = 1;
+        while (rows.hasNext()) {
+          CSVRecord row = rows.next();
+          if (row.size() == 16) {
+
+            FileSourceDb.Line.Builder lineBuilder = dataBuilder.addLinesBuilder();
+            lineBuilder.setLine(line);
+            String s = row.get(0);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setScmRevision(s);
+            }
+            s = row.get(1);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setScmAuthor(s);
+            }
+            s = row.get(2);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setScmDate(DateUtils.parseDateTimeQuietly(s).getTime());
+            }
+            s = row.get(3);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setUtLineHits(Integer.parseInt(s));
+            }
+            s = row.get(4);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setUtConditions(Integer.parseInt(s));
+            }
+            s = row.get(5);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setUtCoveredConditions(Integer.parseInt(s));
+            }
+            s = row.get(6);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setItLineHits(Integer.parseInt(s));
+            }
+            s = row.get(7);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setItConditions(Integer.parseInt(s));
+            }
+            s = row.get(8);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setItCoveredConditions(Integer.parseInt(s));
+            }
+            s = row.get(9);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setOverallLineHits(Integer.parseInt(s));
+            }
+            s = row.get(10);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setOverallConditions(Integer.parseInt(s));
+            }
+            s = row.get(11);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setOverallCoveredConditions(Integer.parseInt(s));
+            }
+            s = row.get(12);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setHighlighting(s);
+            }
+            s = row.get(13);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.setSymbols(s);
+            }
+            s = row.get(14);
+            if (StringUtils.isNotEmpty(s)) {
+              lineBuilder.addAllDuplications(splitIntegers(s));
+            }
+            s = row.get(15);
+            if (s != null) {
+              lineBuilder.setSource(s);
+            }
+          }
+          line++;
+        }
+      }
+      return FileSourceDto.encodeData(dataBuilder.build());
+    } catch (Exception e) {
+      throw new IllegalStateException("Invalid FILE_SOURCES.DATA on row with ID " + fileSourceId + ": " + data, e);
+    } finally {
+      IOUtils.closeQuietly(parser);
+    }
+  }
+
+  private static Iterable<Integer> splitIntegers(String s) {
+    return Iterables.transform(Splitter.on(',').split(s), new Function<String, Integer>() {
+      @Override
+      public Integer apply(String input) {
+        return Integer.parseInt(input);
+      }
+    });
+  }
+}
index 822f1d3297ba7cbdfeb369468480789905bc179c..45fb14630c439386257b2b90282e45a1a5a00d81 100644 (file)
@@ -20,7 +20,6 @@
 package org.sonar.server.source.index;
 
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
 import org.sonar.server.search.BaseDoc;
 import org.sonar.server.search.BaseNormalizer;
 import org.sonar.server.search.IndexUtils;
@@ -30,6 +29,7 @@ import javax.annotation.Nullable;
 
 import java.util.Collection;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.Map;
 
 public class SourceLineDoc extends BaseDoc {
@@ -38,9 +38,8 @@ public class SourceLineDoc extends BaseDoc {
     super(fields);
   }
 
-  // For testing purpose
   public SourceLineDoc() {
-    this(Maps.<String, Object>newHashMap());
+    this(new HashMap<String, Object>(20));
   }
 
   public String projectUuid() {
index cf00b9dd7fdced1cc1caf09d4675edd19a0e4de7..3e7ce485a159cf26cbd453613d8536364eb0a142 100644 (file)
@@ -34,6 +34,10 @@ import java.util.Iterator;
 import static org.sonar.server.source.index.SourceLineIndexDefinition.FIELD_FILE_UUID;
 import static org.sonar.server.source.index.SourceLineIndexDefinition.FIELD_PROJECT_UUID;
 
+/**
+ * Add to Elasticsearch index {@link SourceLineIndexDefinition} the rows of
+ * db table FILE_SOURCES that are not indexed yet
+ */
 public class SourceLineIndexer extends BaseIndexer {
 
   private final DbClient dbClient;
index 5f44a4013fc2f9f8fa6e410731f7db0b8ecb000b..e51d31268193f73a1ab913bff186a137db601b14 100644 (file)
 package org.sonar.server.source.index;
 
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import org.apache.commons.csv.CSVFormat;
-import org.apache.commons.csv.CSVParser;
-import org.apache.commons.csv.CSVRecord;
-import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.math.NumberUtils;
-import org.sonar.api.utils.DateUtils;
 import org.sonar.core.source.db.FileSourceDto;
 import org.sonar.server.db.DbClient;
 import org.sonar.server.db.ResultSetIterator;
-import org.sonar.server.db.migrations.SqlUtil;
+import org.sonar.server.source.db.FileSourceDb;
 
-import javax.annotation.CheckForNull;
-import javax.annotation.Nullable;
-
-import java.io.IOException;
-import java.io.Reader;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.Date;
 import java.util.List;
-import java.util.StringTokenizer;
 
 /**
  * Scroll over table FILE_SOURCES and directly parse CSV field required to
@@ -84,7 +71,7 @@ public class SourceLineResultSetIterator extends ResultSetIterator<SourceLineRes
     "project_uuid",
     "file_uuid",
     "updated_at",
-    "data"
+    "binary_data"
   };
 
   private static final String SQL_ALL = "select " + StringUtils.join(FIELDS, ",") + " from file_sources";
@@ -113,87 +100,42 @@ public class SourceLineResultSetIterator extends ResultSetIterator<SourceLineRes
   protected SourceFile read(ResultSet rs) throws SQLException {
     String projectUuid = rs.getString(1);
     String fileUuid = rs.getString(2);
-    Long updatedAt = SqlUtil.getLong(rs, 3);
-    if (updatedAt == null) {
-      updatedAt = System.currentTimeMillis();
-    }
+    long updatedAt = rs.getLong(3);
     Date updatedDate = new Date(updatedAt);
-    SourceFile result = new SourceFile(fileUuid, updatedAt);
-
-    Reader csv = rs.getCharacterStream(4);
-    if (csv == null) {
-      return result;
-    }
 
-    int line = 1;
-    CSVParser csvParser = null;
-    try {
-      csvParser = new CSVParser(csv, CSVFormat.DEFAULT);
-
-      for (CSVRecord csvRecord : csvParser) {
-        SourceLineDoc doc = new SourceLineDoc(Maps.<String, Object>newHashMap());
-
-        doc.setProjectUuid(projectUuid);
-        doc.setFileUuid(fileUuid);
-        doc.setLine(line);
-        doc.setUpdateDate(updatedDate);
-        doc.setScmRevision(csvRecord.get(FileSourceDto.CSV_INDEX_SCM_REVISION));
-        doc.setScmAuthor(csvRecord.get(FileSourceDto.CSV_INDEX_SCM_AUTHOR));
-        doc.setScmDate(DateUtils.parseDateTimeQuietly(csvRecord.get(FileSourceDto.CSV_INDEX_SCM_DATE)));
-        // UT
-        doc.setUtLineHits(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_UT_LINE_HITS)));
-        doc.setUtConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_UT_CONDITIONS)));
-        doc.setUtCoveredConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_UT_COVERED_CONDITIONS)));
-        // IT
-        doc.setItLineHits(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_IT_LINE_HITS)));
-        doc.setItConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_IT_CONDITIONS)));
-        doc.setItCoveredConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_IT_COVERED_CONDITIONS)));
-        // OVERALL
-        doc.setOverallLineHits(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_OVERALL_LINE_HITS)));
-        doc.setOverallConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_OVERALL_CONDITIONS)));
-        doc.setOverallCoveredConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_OVERALL_COVERED_CONDITIONS)));
-
-        doc.setHighlighting(csvRecord.get(FileSourceDto.CSV_INDEX_HIGHLIGHTING));
-        doc.setSymbols(csvRecord.get(FileSourceDto.CSV_INDEX_SYMBOLS));
-        doc.setDuplications(parseDuplications(csvRecord.get(FileSourceDto.CSV_INDEX_DUPLICATIONS)));
-
-        // source is always the latest field. All future fields will be added between duplications (14) and source.
-        doc.setSource(csvRecord.get(csvRecord.size()-1));
-
-        result.addLine(doc);
-
-        line++;
-      }
-    } catch (IOException ioError) {
-      throw new IllegalStateException("Impossible to open stream for file_sources.data with file_uuid " + fileUuid, ioError);
-    } catch (ArrayIndexOutOfBoundsException lineError) {
-      throw new IllegalStateException(
-        String.format("Impossible to parse source line data, stuck at line %d", line), lineError);
-    } finally {
-      IOUtils.closeQuietly(csv);
-      IOUtils.closeQuietly(csvParser);
+    SourceFile result = new SourceFile(fileUuid, updatedAt);
+    FileSourceDb.Data data = FileSourceDto.decodeData(rs.getBinaryStream(4));
+    for (FileSourceDb.Line line : data.getLinesList()) {
+      SourceLineDoc doc = new SourceLineDoc();
+      doc.setProjectUuid(projectUuid);
+      doc.setFileUuid(fileUuid);
+      doc.setLine(line.getLine());
+      doc.setUpdateDate(updatedDate);
+      doc.setScmRevision(line.getScmRevision());
+      doc.setScmAuthor(line.getScmAuthor());
+      doc.setScmDate(line.hasScmDate() ? new Date(line.getScmDate()) : null);
+      // UT
+      doc.setUtLineHits(line.hasUtLineHits() ? line.getUtLineHits() : null);
+      doc.setUtConditions(line.hasUtConditions() ? line.getUtConditions() : null);
+      doc.setUtCoveredConditions(line.hasUtCoveredConditions() ? line.getUtCoveredConditions() : null);
+      // IT
+      doc.setItLineHits(line.hasItLineHits() ? line.getItLineHits() : null);
+      doc.setItConditions(line.hasItConditions() ? line.getItConditions() : null);
+      doc.setItCoveredConditions(line.hasItCoveredConditions() ? line.getItCoveredConditions() : null);
+      // OVERALL
+      doc.setOverallLineHits(line.hasOverallLineHits() ? line.getOverallLineHits() : null);
+      doc.setOverallConditions(line.hasOverallConditions() ? line.getOverallConditions() : null);
+      doc.setOverallCoveredConditions(line.hasOverallCoveredConditions() ? line.getOverallCoveredConditions() : null);
+
+      doc.setHighlighting(line.hasHighlighting() ? line.getHighlighting() : null);
+      doc.setSymbols(line.hasSymbols() ? line.getSymbols() : null);
+      doc.setDuplications(line.getDuplicationsList());
+
+      // source is always the latest field. All future fields will be added between duplications (14) and source.
+      doc.setSource(line.hasSource() ? line.getSource() : null);
+
+      result.addLine(doc);
     }
-
     return result;
   }
-
-  private List<Integer> parseDuplications(@Nullable String duplications) {
-    List<Integer> dups = Lists.newArrayList();
-    if (StringUtils.isNotEmpty(duplications)) {
-      StringTokenizer tokenizer = new StringTokenizer(duplications, ",", false);
-      while (tokenizer.hasMoreTokens()) {
-        dups.add(NumberUtils.toInt(tokenizer.nextToken(), -1));
-      }
-    }
-    return dups;
-  }
-
-  @CheckForNull
-  private Integer parseIntegerFromRecord(@Nullable String cellValue) {
-    if (cellValue == null || cellValue.isEmpty()) {
-      return null;
-    } else {
-      return Integer.parseInt(cellValue);
-    }
-  }
 }
index 6bee60a5d28397d2f97b3374b7ee83d360fa0adf..1a1f7a620039b01277b9529d6d3f88210374c547 100644 (file)
@@ -785,7 +785,7 @@ public class ProjectRepositoryLoaderMediumTest {
     return new FileSourceDto()
       .setFileUuid(file.uuid())
       .setProjectUuid(file.projectUuid())
-      .setData(",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;")
+      //.setData(",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;")
       .setDataHash("0263047cd758c68c27683625f072f010")
       .setLineHashes("8d7b3d6b83c0a517eac07e1aac94b773")
       .setCreatedAt(System.currentTimeMillis())
index b7c559e5a17a21f93046fc12aa96a0b5f2fcbccb..aed64514efbc624a0ca36de5ae72801db6e009a2 100644 (file)
@@ -25,8 +25,12 @@ import org.junit.experimental.categories.Category;
 import org.sonar.core.persistence.DbTester;
 import org.sonar.core.source.db.FileSourceDao;
 import org.sonar.server.db.DbClient;
+import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.server.source.db.FileSourceTesting;
 import org.sonar.test.DbTests;
 
+import java.sql.Connection;
+
 import static org.assertj.core.api.Assertions.assertThat;
 
 @Category(DbTests.class)
@@ -38,6 +42,15 @@ public class SourceLinesCacheTest {
   @Test
   public void line_author() throws Exception {
     dbTester.prepareDbUnit(getClass(), "load_data.xml");
+    FileSourceDb.Data.Builder data = FileSourceDb.Data.newBuilder();
+    data.addLinesBuilder().setLine(1).setScmAuthor("charb").setScmDate(1_400_000_000_000L);
+    data.addLinesBuilder().setLine(2).setScmAuthor("cabu").setScmDate(1_500_000_000_000L);
+    data.addLinesBuilder().setLine(3).setScmAuthor("wolinski").setScmDate(1_300_000_000_000L);
+    data.addLinesBuilder().setLine(4);
+    try (Connection connection = dbTester.openConnection()) {
+      FileSourceTesting.updateDataColumn(connection, "FILE_A", data.build());
+    }
+
     DbClient dbClient = new DbClient(dbTester.database(), dbTester.myBatis(), new FileSourceDao(dbTester.myBatis()));
     SourceLinesCache cache = new SourceLinesCache(dbClient);
     cache.init("FILE_A");
@@ -55,12 +68,10 @@ public class SourceLinesCacheTest {
     // only 4 lines in the file -> return last committer on file
     assertThat(cache.lineAuthor(100)).isEqualTo("cabu");
 
-
     assertThat(cache.countLines()).isEqualTo(4);
 
     cache.clear();
     assertThat(cache.countLines()).isEqualTo(0);
   }
 
-
 }
diff --git a/server/sonar-server/src/test/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest.java b/server/sonar-server/src/test/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest.java
new file mode 100644 (file)
index 0000000..80fc3c0
--- /dev/null
@@ -0,0 +1,95 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.db.migrations.v51;
+
+import org.apache.commons.dbutils.DbUtils;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.sonar.core.persistence.DbTester;
+import org.sonar.core.source.db.FileSourceDto;
+import org.sonar.server.db.migrations.DatabaseMigration;
+import org.sonar.server.source.db.FileSourceDb;
+
+import java.io.InputStream;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.fail;
+
+public class FeedFileSourcesBinaryDataTest {
+
+  @ClassRule
+  public static DbTester db = new DbTester().schema(FeedFileSourcesBinaryDataTest.class, "schema.sql");
+
+  @Test
+  public void convert_csv_to_protobuf() throws Exception {
+    db.prepareDbUnit(getClass(), "data.xml");
+
+    DatabaseMigration migration = new FeedFileSourcesBinaryData(db.database());
+    migration.execute();
+
+    int count = db.countSql("select count(*) from file_sources where binary_data is not null");
+    assertThat(count).isEqualTo(3);
+
+    try(Connection connection = db.openConnection()) {
+      FileSourceDb.Data data = selectData(connection, 1L);
+      assertThat(data.getLinesCount()).isEqualTo(4);
+      assertThat(data.getLines(0).getScmRevision()).isEqualTo("aef12a");
+
+      data = selectData(connection, 2L);
+      assertThat(data.getLinesCount()).isEqualTo(4);
+      assertThat(data.getLines(0).hasScmRevision()).isFalse();
+
+      data = selectData(connection, 3L);
+      assertThat(data.getLinesCount()).isEqualTo(0);
+    }
+  }
+
+  @Test
+  public void fail_to_parse_csv() throws Exception {
+    db.prepareDbUnit(getClass(), "bad_data.xml");
+
+    DatabaseMigration migration = new FeedFileSourcesBinaryData(db.database());
+    try {
+      migration.execute();
+      fail();
+    } catch (IllegalStateException e) {
+      assertThat(e).hasMessageContaining("Invalid FILE_SOURCES.DATA on row with ID 1:");
+    }
+  }
+
+  private FileSourceDb.Data selectData(Connection connection, long fileSourceId) throws SQLException {
+    PreparedStatement pstmt = connection.prepareStatement("select binary_data from file_sources where id=?");
+    ResultSet rs = null;
+    try {
+      pstmt.setLong(1, fileSourceId);
+      rs = pstmt.executeQuery();
+      rs.next();
+      InputStream data = rs.getBinaryStream(1);
+      return FileSourceDto.decodeData(data);
+    } finally {
+      DbUtils.closeQuietly(rs);
+      DbUtils.closeQuietly(pstmt);
+    }
+  }
+}
diff --git a/server/sonar-server/src/test/java/org/sonar/server/source/db/FileSourceTesting.java b/server/sonar-server/src/test/java/org/sonar/server/source/db/FileSourceTesting.java
new file mode 100644 (file)
index 0000000..10c44e5
--- /dev/null
@@ -0,0 +1,108 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.source.db;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang.math.RandomUtils;
+import org.sonar.core.source.db.FileSourceDto;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.Arrays;
+
+public class FileSourceTesting {
+
+  private FileSourceTesting() {
+    // only static stuff
+  }
+
+  public static void updateDataColumn(Connection connection, String fileUuid, FileSourceDb.Data data) throws SQLException {
+    updateDataColumn(connection, fileUuid, FileSourceDto.encodeData(data));
+  }
+
+  public static void updateDataColumn(Connection connection, String fileUuid, byte[] data) throws SQLException {
+    PreparedStatement stmt = connection.prepareStatement("UPDATE file_sources SET binary_data = ? WHERE file_uuid=?");
+    stmt.setBytes(1, data);
+    stmt.setString(2, fileUuid);
+    stmt.executeUpdate();
+    stmt.close();
+    connection.commit();
+  }
+
+  /**
+   * Generate predefined fake data. Result is mutable.
+   */
+  public static FileSourceDb.Data.Builder newFakeData(int numberOfLines) throws IOException {
+    FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+    for (int i = 1; i <= numberOfLines; i++) {
+      dataBuilder.addLinesBuilder()
+        .setLine(i)
+        .setScmRevision("REVISION_" + i)
+        .setScmAuthor("AUTHOR_" + i)
+        .setScmDate(1_500_000_000_000L + i)
+        .setSource("SOURCE_" + i)
+        .setUtLineHits(i)
+        .setUtConditions(i + 1)
+        .setUtCoveredConditions(i + 2)
+        .setItLineHits(i + 3)
+        .setItConditions(i + 4)
+        .setItCoveredConditions(i + 5)
+        .setOverallLineHits(i + 6)
+        .setOverallConditions(i + 7)
+        .setOverallCoveredConditions(i + 8)
+        .setHighlighting("HIGHLIGHTING_" + i)
+        .setSymbols("SYMBOLS_" + i)
+        .addAllDuplications(Arrays.asList(i))
+        .build();
+    }
+    return dataBuilder;
+  }
+
+  /**
+   * Generate random data. Result is mutable.
+   */
+  public static FileSourceDb.Data.Builder newRandomData(int numberOfLines) throws IOException {
+    FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+    for (int i = 1; i <= numberOfLines; i++) {
+      dataBuilder.addLinesBuilder()
+        .setLine(i)
+        .setScmRevision(RandomStringUtils.randomAlphanumeric(15))
+        .setScmAuthor(RandomStringUtils.randomAlphanumeric(10))
+        .setScmDate(RandomUtils.nextLong())
+        .setSource(RandomStringUtils.randomAlphanumeric(20))
+        .setUtLineHits(RandomUtils.nextInt(4))
+        .setUtConditions(RandomUtils.nextInt(4))
+        .setUtCoveredConditions(RandomUtils.nextInt(4))
+        .setItLineHits(RandomUtils.nextInt(4))
+        .setItConditions(RandomUtils.nextInt(4))
+        .setItCoveredConditions(RandomUtils.nextInt(4))
+        .setOverallLineHits(RandomUtils.nextInt(4))
+        .setOverallConditions(RandomUtils.nextInt(4))
+        .setOverallCoveredConditions(RandomUtils.nextInt(4))
+        .setHighlighting(RandomStringUtils.randomAlphanumeric(40))
+        .setSymbols(RandomStringUtils.randomAlphanumeric(30))
+        .addAllDuplications(Arrays.asList(RandomUtils.nextInt(200), RandomUtils.nextInt(200)))
+        .build();
+    }
+    return dataBuilder;
+  }
+}
index ed3a5dc49c2fde85f141fb6699a5ba0bbadd0959..35ae780b0ba12681185ccfc65bea1baf057f4ddd 100644 (file)
@@ -38,10 +38,12 @@ import org.sonar.core.persistence.DbTester;
 import org.sonar.server.db.DbClient;
 import org.sonar.server.es.EsTester;
 import org.sonar.server.search.BaseNormalizer;
+import org.sonar.server.source.db.FileSourceTesting;
 import org.sonar.test.DbTests;
 import org.sonar.test.TestUtils;
 
 import java.io.FileInputStream;
+import java.sql.Connection;
 import java.util.Date;
 import java.util.List;
 import java.util.Map;
@@ -69,8 +71,13 @@ public class SourceLineIndexerTest {
   @Test
   public void index_source_lines_from_db() throws Exception {
     db.prepareDbUnit(getClass(), "db.xml");
+
+    try (Connection connection = db.openConnection()) {
+      FileSourceTesting.updateDataColumn(connection, "FILE_UUID", FileSourceTesting.newRandomData(3).build());
+    }
+
     indexer.index();
-    assertThat(countDocuments()).isEqualTo(2);
+    assertThat(countDocuments()).isEqualTo(3);
   }
 
   @Test
index 5d19f58a2b124dca850fc2a4456f17f1134ada85..aaa10a7ae739c5a4336bb60298d103242f5dce99 100644 (file)
@@ -26,10 +26,10 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.sonar.core.persistence.DbTester;
 import org.sonar.server.db.DbClient;
+import org.sonar.server.source.db.FileSourceTesting;
 import org.sonar.test.DbTests;
 
 import java.sql.Connection;
-import java.sql.PreparedStatement;
 
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.fail;
@@ -56,39 +56,32 @@ public class SourceLineResultSetIteratorTest {
   }
 
   @Test
-  public void should_generate_source_line_documents() throws Exception {
+  public void parse_db_and_generate_source_line_documents() throws Exception {
     db.prepareDbUnit(getClass(), "shared.xml");
-    PreparedStatement stmt = connection.prepareStatement("UPDATE file_sources SET data = ? WHERE id=1");
-    stmt.setString(1, "aef12a,alice,2014-04-25T12:34:56+0100,1,0,0,2,0,0,3,0,0,polop,palap,,class Foo {\r\n" +
-      "abe465,bob,2014-07-25T12:34:56+0100,,,,,,,,,,,,,  // Empty\r\n" +
-      "afb789,carol,2014-03-23T12:34:56+0100,,,,,,,,,,,,,}\r\n" +
-      "afb789,carol,2014-03-23T12:34:56+0100,,,,,,,,,,,,,\r\n");
-    stmt.executeUpdate();
+    FileSourceTesting.updateDataColumn(connection, "FILE_UUID", FileSourceTesting.newFakeData(3).build());
 
     SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L);
     assertThat(iterator.hasNext()).isTrue();
     SourceLineResultSetIterator.SourceFile file = iterator.next();
-    assertThat(file.getLines()).hasSize(4);
+    assertThat(file.getLines()).hasSize(3);
     SourceLineDoc firstLine = file.getLines().get(0);
-    assertThat(firstLine.projectUuid()).isEqualTo("uuid-MyProject");
-    assertThat(firstLine.fileUuid()).isEqualTo("uuid-MyFile.xoo");
+    assertThat(firstLine.projectUuid()).isEqualTo("PROJECT_UUID");
+    assertThat(firstLine.fileUuid()).isEqualTo("FILE_UUID");
     assertThat(firstLine.line()).isEqualTo(1);
-    assertThat(firstLine.scmRevision()).isEqualTo("aef12a");
-    assertThat(firstLine.scmAuthor()).isEqualTo("alice");
-    // TODO Sanitize usage of fscking dates
-    // assertThat(firstLine.scmDate()).isEqualTo(DateUtils.parseDateTime("2014-04-25T12:34:56+0100"));
-    assertThat(firstLine.highlighting()).isEqualTo("polop");
-    assertThat(firstLine.symbols()).isEqualTo("palap");
-    assertThat(firstLine.source()).isEqualTo("class Foo {");
+    assertThat(firstLine.scmRevision()).isEqualTo("REVISION_1");
+    assertThat(firstLine.scmAuthor()).isEqualTo("AUTHOR_1");
+    assertThat(firstLine.highlighting()).isEqualTo("HIGHLIGHTING_1");
+    assertThat(firstLine.symbols()).isEqualTo("SYMBOLS_1");
+    assertThat(firstLine.source()).isEqualTo("SOURCE_1");
     assertThat(firstLine.utLineHits()).isEqualTo(1);
-    assertThat(firstLine.utConditions()).isEqualTo(0);
-    assertThat(firstLine.utCoveredConditions()).isEqualTo(0);
-    assertThat(firstLine.itLineHits()).isEqualTo(2);
-    assertThat(firstLine.itConditions()).isEqualTo(0);
-    assertThat(firstLine.itCoveredConditions()).isEqualTo(0);
-    assertThat(firstLine.overallLineHits()).isEqualTo(3);
-    assertThat(firstLine.overallConditions()).isEqualTo(0);
-    assertThat(firstLine.overallCoveredConditions()).isEqualTo(0);
+    assertThat(firstLine.utConditions()).isEqualTo(2);
+    assertThat(firstLine.utCoveredConditions()).isEqualTo(3);
+    assertThat(firstLine.itLineHits()).isEqualTo(4);
+    assertThat(firstLine.itConditions()).isEqualTo(5);
+    assertThat(firstLine.itCoveredConditions()).isEqualTo(6);
+    assertThat(firstLine.overallLineHits()).isEqualTo(7);
+    assertThat(firstLine.overallConditions()).isEqualTo(8);
+    assertThat(firstLine.overallCoveredConditions()).isEqualTo(9);
     iterator.close();
   }
 
@@ -102,36 +95,10 @@ public class SourceLineResultSetIteratorTest {
   }
 
   @Test
-  public void parse_empty_file() throws Exception {
-    db.prepareDbUnit(getClass(), "empty-file.xml");
-
-    SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L);
-    assertThat(iterator.hasNext()).isTrue();
-    SourceLineResultSetIterator.SourceFile file = iterator.next();
-    assertThat(file.getFileUuid()).isEqualTo("uuid-MyFile.xoo");
-    assertThat(file.getLines()).isEmpty();
-    iterator.close();
-  }
-
-  @Test
-  public void parse_null_file() throws Exception {
-    db.prepareDbUnit(getClass(), "null-file.xml");
-
-    SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L);
-    assertThat(iterator.hasNext()).isTrue();
-    SourceLineResultSetIterator.SourceFile file = iterator.next();
-    assertThat(file.getFileUuid()).isEqualTo("uuid-MyFile.xoo");
-    assertThat(file.getLines()).isEmpty();
-    iterator.close();
-  }
-
-  @Test
-  public void should_fail_on_bad_csv() throws Exception {
+  public void should_fail_on_bad_data_format() throws Exception {
     db.prepareDbUnit(getClass(), "shared.xml");
-    PreparedStatement stmt = connection.prepareStatement("UPDATE file_sources SET data = ? WHERE id=1");
-    stmt.setString(1, "plouf");
-    stmt.executeUpdate();
-    stmt.close();
+
+    FileSourceTesting.updateDataColumn(connection, "FILE_UUID", "THIS_IS_NOT_PROTOBUF".getBytes());
 
     SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L);
     try {
index e1ea8f31504cc535e251d0116faa03599228c7a0..443b1d0a5aea42a1a6ee40921443c8d15b4ea4f3 100644 (file)
@@ -20,7 +20,7 @@
             enabled="[true]" language="java" copy_resource_id="[null]" person_id="[null]" path="src/org/struts/pom.xml" authorization_updated_at="[null]"/>
 
   <file_sources id="101" project_uuid="ABCD" file_uuid="EFGHI"
-                data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;"
+                binary_data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;"
                 line_hashes="lineEFGHI"
                 data_hash="dataEFGHI"
                 src_hash="srcEFGHI"
@@ -47,7 +47,7 @@
             enabled="[true]" language="java" copy_resource_id="[null]" person_id="[null]" path="src/org/struts/RequestContext.java" authorization_updated_at="[null]"/>
 
   <file_sources id="102" project_uuid="ABCD" file_uuid="HIJK"
-                data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;"
+                binary_data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;"
                 line_hashes="lineHIJK"
                 data_hash="dataHIJK"
                 src_hash="srcHIJK"
index 6fb2b2b541fb736ba509a283944ce06082c7e2b9..24e4d0c14d030fc488692315554e9952aa860128 100644 (file)
@@ -5,7 +5,7 @@
   - line 4 has no author
   -->
   <file_sources id="1" file_uuid="FILE_A" project_uuid="PROJECT_A"
-                data=",charb,2015-01-01T15:50:45+0100,,,,,,,,,,,,,first line&#13;&#10;,cabu,2015-01-08T15:50:45+0100,,,,,,,,,,,,,second line&#13;&#10;,wolinski,2015-01-02T12:00:00+0100,,,,,,,,,,,,,third line&#13;&#10;,,,,,,,,,,,,,,,fourth line&#13;&#10;"
+                binary_data=""
                 line_hashes="8d7b3d6b83c0a517eac07e1aac94b773&#10;9a0364b9e99bb480dd25e1f0284c8555"
                 data_hash="0263047cd758c68c27683625f072f010"
                 src_hash="123456"
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/bad_data.xml b/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/bad_data.xml
new file mode 100644 (file)
index 0000000..1c70024
--- /dev/null
@@ -0,0 +1,7 @@
+<dataset>
+  <file_sources id="1" project_uuid="PROJECT_UUID" file_uuid="FILE1_UUID" created_at="1416238020000" updated_at="1414770242000"
+                data="&quot;missing_escape_end"
+                binary_data="[null]"
+                line_hashes=""
+                data_hash="" />
+</dataset>
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/data.xml b/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/data.xml
new file mode 100644 (file)
index 0000000..fe6020a
--- /dev/null
@@ -0,0 +1,22 @@
+<dataset>
+  <!-- has data -->
+  <file_sources id="1" project_uuid="PROJECT_UUID" file_uuid="FILE1_UUID" created_at="1416238020000" updated_at="1414770242000"
+                data="aef12a,alice,2014-04-25T12:34:56+0100,1,4,2,2,5,3,3,6,4,syntax_highlighting,symbol_refs,22,class Foo {&#13;&#10;abe465,bob,2014-07-25T12:34:56+0100,,,,,,,,,,,,2,  // Empty&#13;&#10;afb789,carol,2014-03-23T12:34:56+0100,0,,,0,,,0,,,,,,}&#13;&#10;afb789,carol,2014-03-23T12:34:56+0100,,,,,,,,,,,,,&#13;&#10;"
+                binary_data="[null]"
+                line_hashes=""
+                data_hash="" />
+
+  <!-- empty fields in CSV -->
+  <file_sources id="2" project_uuid="PROJECT_UUID" file_uuid="FILE2_UUID" created_at="1416238020000" updated_at="1414770242000"
+                data=",,,,,,,,,,,,,,,&#10;,,,,,,,,,,,,,,,&#10;,,,,,,,,,,,,,,,&#10;,,,,,,,,,,,,,,,&#10;"
+                binary_data="[null]"
+                line_hashes=""
+                data_hash="" />
+
+  <!-- null CSV -->
+  <file_sources id="3" project_uuid="PROJECT_UUID" file_uuid="FILE2_UUID" created_at="1416238020000" updated_at="1414770242000"
+                data="[null]"
+                binary_data="[null]"
+                line_hashes=""
+                data_hash="" />
+</dataset>
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/schema.sql b/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/schema.sql
new file mode 100644 (file)
index 0000000..5649f79
--- /dev/null
@@ -0,0 +1,12 @@
+CREATE TABLE "FILE_SOURCES" (
+  "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
+  "PROJECT_UUID" VARCHAR(50) NOT NULL,
+  "FILE_UUID" VARCHAR(50) NOT NULL,
+  "LINE_HASHES" CLOB(2147483647),
+  "DATA" CLOB(2147483647),
+  "DATA_HASH" VARCHAR(50) NOT NULL,
+  "SRC_HASH" VARCHAR(50) NULL,
+  "CREATED_AT" BIGINT NOT NULL,
+  "UPDATED_AT" BIGINT NOT NULL,
+  "BINARY_DATA" BINARY(167772150)
+);
index d8d7eed7662179f080baf977c7bc4aee803f2ea8..48e58478ab6ccdf2f251783200bb7f100964c2a1 100644 (file)
@@ -1,6 +1,6 @@
 <dataset>
 
-  <file_sources id="1" project_uuid="uuid-MyProject" file_uuid="uuid-MyFile.xoo" created_at="1416238020000" updated_at="1416239042000"
-                data="aef12a,alice,2014-04-25T12:34:56+0100,,,,,,,,,,polop,palap,1,class Foo {&#10;aef12a,alice,2014-04-25T12:34:56+0100,,,,,,,,,,polop,palap,&quot;1,2&quot;,}" data_hash="THE_HASH" />
+  <file_sources id="1" project_uuid="PROJECT_UUID" file_uuid="FILE_UUID" created_at="1416238020000" updated_at="1416239042000"
+                binary_data="" data_hash="DATA_HASH" />
 
 </dataset>
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/empty-file.xml b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/empty-file.xml
deleted file mode 100644 (file)
index cf6e015..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-<dataset>
-
-  <file_sources id="1" project_uuid="uuid-MyProject" file_uuid="uuid-MyFile.xoo" created_at="1416238020000" updated_at="1416239042000"
-                data="" data_hash="" />
-
-</dataset>
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/null-file.xml b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/null-file.xml
deleted file mode 100644 (file)
index 91dafde..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-<dataset>
-
-  <file_sources id="1" project_uuid="uuid-MyProject" file_uuid="uuid-MyFile.xoo" created_at="1416238020000" updated_at="1416239042000"
-                data="[null]" data_hash="" />
-
-</dataset>
index 3032e93f25636cae6aac37abd2c7f28a072bc30b..859eefe362544adce633ef7e82cd207a7863944c 100644 (file)
@@ -3,7 +3,7 @@ CREATE TABLE "FILE_SOURCES" (
   "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
   "PROJECT_UUID" VARCHAR(50) NOT NULL,
   "FILE_UUID" VARCHAR(50) NOT NULL,
-  "DATA" CLOB(2147483647),
+  "BINARY_DATA" BINARY(167772150),
   "DATA_HASH" VARCHAR(50) NOT NULL,
   "CREATED_AT" BIGINT NOT NULL,
   "UPDATED_AT" BIGINT NOT NULL
index f1498ba9c5600d79eaeb558a89cb4629e6d05ca8..521e0db9ca5e5dcce88628823907d7c3a2d3d6f0 100644 (file)
@@ -1,6 +1,6 @@
 <dataset>
 
-  <file_sources id="1" project_uuid="uuid-MyProject" file_uuid="uuid-MyFile.xoo" created_at="1416238020000" updated_at="1416239042000"
-    data="" data_hash="" />
+  <file_sources id="1" project_uuid="PROJECT_UUID" file_uuid="FILE_UUID" created_at="1416238020000" updated_at="1416239042000"
+    binary_data="" data_hash="" />
 
 </dataset>
index 756dbacf84eb208a0053ed0d00d553098f06afe9..4dcc5c21ee0bf1a75248b98a11d4f9a3dc8080a7 100644 (file)
@@ -5,7 +5,7 @@
             path="src/main/java/Action.java"/>
 
   <file_sources id="101" project_uuid="ABCD" file_uuid="CDEF"
-                data="aef12a,alice,2014-04-25T12:34:56+0100,,class Foo" data_hash="hash"
+                binary_data="" data_hash="hash"
                 line_hashes="987654"
                 src_hash="12345"
                 created_at="1414597442000" updated_at="1414683842000"/>
diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/781_add_file_sources_binary_data.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/781_add_file_sources_binary_data.rb
new file mode 100644 (file)
index 0000000..3b969e3
--- /dev/null
@@ -0,0 +1,29 @@
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+#
+
+#
+# SonarQube 5.1
+#
+class AddFileSourcesBinaryData < ActiveRecord::Migration
+
+  def self.up
+    add_column 'file_sources', :binary_data, :binary, :null => true
+  end
+end
diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/782_feed_file_sources_binary_data.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/782_feed_file_sources_binary_data.rb
new file mode 100644 (file)
index 0000000..dea83cf
--- /dev/null
@@ -0,0 +1,29 @@
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+#
+
+#
+# SonarQube 5.1
+#
+class FeedFileSourcesBinaryData < ActiveRecord::Migration
+
+  def self.up
+    execute_java_migration('org.sonar.server.db.migrations.v51.FeedFileSourcesBinaryData')
+  end
+end
diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/783_drop_file_sources_data.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/783_drop_file_sources_data.rb
new file mode 100644 (file)
index 0000000..06d5fbb
--- /dev/null
@@ -0,0 +1,30 @@
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+#
+
+#
+# SonarQube 5.1
+#
+class DropFileSourcesData < ActiveRecord::Migration
+
+  def self.up
+    remove_column 'file_sources', 'data'
+  end
+
+end
index 399c4a242e027fc441e2ca52cb533ddd98f2de8d..343d707f094084f3b978b35f14c74687bb0b988a 100644 (file)
   <description>Classes used for communication between batch and server</description>
 
   <dependencies>
+    <dependency>
+      <groupId>net.jpountz.lz4</groupId>
+      <artifactId>lz4</artifactId>
+    </dependency>
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
diff --git a/sonar-batch-protocol/src/main/gen-java/org/sonar/server/source/db/FileSourceDb.java b/sonar-batch-protocol/src/main/gen-java/org/sonar/server/source/db/FileSourceDb.java
new file mode 100644 (file)
index 0000000..f3f9db8
--- /dev/null
@@ -0,0 +1,3008 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: file_source_db.proto
+
+package org.sonar.server.source.db;
+
+public final class FileSourceDb {
+  private FileSourceDb() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface LineOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:org.sonar.server.source.db.Line)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>optional int32 line = 1;</code>
+     */
+    boolean hasLine();
+    /**
+     * <code>optional int32 line = 1;</code>
+     */
+    int getLine();
+
+    /**
+     * <code>optional string source = 2;</code>
+     */
+    boolean hasSource();
+    /**
+     * <code>optional string source = 2;</code>
+     */
+    java.lang.String getSource();
+    /**
+     * <code>optional string source = 2;</code>
+     */
+    com.google.protobuf.ByteString
+        getSourceBytes();
+
+    /**
+     * <code>optional string scm_revision = 3;</code>
+     *
+     * <pre>
+     * SCM
+     * </pre>
+     */
+    boolean hasScmRevision();
+    /**
+     * <code>optional string scm_revision = 3;</code>
+     *
+     * <pre>
+     * SCM
+     * </pre>
+     */
+    java.lang.String getScmRevision();
+    /**
+     * <code>optional string scm_revision = 3;</code>
+     *
+     * <pre>
+     * SCM
+     * </pre>
+     */
+    com.google.protobuf.ByteString
+        getScmRevisionBytes();
+
+    /**
+     * <code>optional string scm_author = 4;</code>
+     */
+    boolean hasScmAuthor();
+    /**
+     * <code>optional string scm_author = 4;</code>
+     */
+    java.lang.String getScmAuthor();
+    /**
+     * <code>optional string scm_author = 4;</code>
+     */
+    com.google.protobuf.ByteString
+        getScmAuthorBytes();
+
+    /**
+     * <code>optional int64 scm_date = 5;</code>
+     */
+    boolean hasScmDate();
+    /**
+     * <code>optional int64 scm_date = 5;</code>
+     */
+    long getScmDate();
+
+    /**
+     * <code>optional int32 ut_line_hits = 6;</code>
+     *
+     * <pre>
+     * unit testing
+     * </pre>
+     */
+    boolean hasUtLineHits();
+    /**
+     * <code>optional int32 ut_line_hits = 6;</code>
+     *
+     * <pre>
+     * unit testing
+     * </pre>
+     */
+    int getUtLineHits();
+
+    /**
+     * <code>optional int32 ut_conditions = 7;</code>
+     */
+    boolean hasUtConditions();
+    /**
+     * <code>optional int32 ut_conditions = 7;</code>
+     */
+    int getUtConditions();
+
+    /**
+     * <code>optional int32 ut_covered_conditions = 8;</code>
+     */
+    boolean hasUtCoveredConditions();
+    /**
+     * <code>optional int32 ut_covered_conditions = 8;</code>
+     */
+    int getUtCoveredConditions();
+
+    /**
+     * <code>optional int32 it_line_hits = 9;</code>
+     *
+     * <pre>
+     * integration testing
+     * </pre>
+     */
+    boolean hasItLineHits();
+    /**
+     * <code>optional int32 it_line_hits = 9;</code>
+     *
+     * <pre>
+     * integration testing
+     * </pre>
+     */
+    int getItLineHits();
+
+    /**
+     * <code>optional int32 it_conditions = 10;</code>
+     */
+    boolean hasItConditions();
+    /**
+     * <code>optional int32 it_conditions = 10;</code>
+     */
+    int getItConditions();
+
+    /**
+     * <code>optional int32 it_covered_conditions = 11;</code>
+     */
+    boolean hasItCoveredConditions();
+    /**
+     * <code>optional int32 it_covered_conditions = 11;</code>
+     */
+    int getItCoveredConditions();
+
+    /**
+     * <code>optional int32 overall_line_hits = 12;</code>
+     *
+     * <pre>
+     * overall testing
+     * </pre>
+     */
+    boolean hasOverallLineHits();
+    /**
+     * <code>optional int32 overall_line_hits = 12;</code>
+     *
+     * <pre>
+     * overall testing
+     * </pre>
+     */
+    int getOverallLineHits();
+
+    /**
+     * <code>optional int32 overall_conditions = 13;</code>
+     */
+    boolean hasOverallConditions();
+    /**
+     * <code>optional int32 overall_conditions = 13;</code>
+     */
+    int getOverallConditions();
+
+    /**
+     * <code>optional int32 overall_covered_conditions = 14;</code>
+     */
+    boolean hasOverallCoveredConditions();
+    /**
+     * <code>optional int32 overall_covered_conditions = 14;</code>
+     */
+    int getOverallCoveredConditions();
+
+    /**
+     * <code>optional string highlighting = 15;</code>
+     */
+    boolean hasHighlighting();
+    /**
+     * <code>optional string highlighting = 15;</code>
+     */
+    java.lang.String getHighlighting();
+    /**
+     * <code>optional string highlighting = 15;</code>
+     */
+    com.google.protobuf.ByteString
+        getHighlightingBytes();
+
+    /**
+     * <code>optional string symbols = 16;</code>
+     */
+    boolean hasSymbols();
+    /**
+     * <code>optional string symbols = 16;</code>
+     */
+    java.lang.String getSymbols();
+    /**
+     * <code>optional string symbols = 16;</code>
+     */
+    com.google.protobuf.ByteString
+        getSymbolsBytes();
+
+    /**
+     * <code>repeated int32 duplications = 17;</code>
+     */
+    java.util.List<java.lang.Integer> getDuplicationsList();
+    /**
+     * <code>repeated int32 duplications = 17;</code>
+     */
+    int getDuplicationsCount();
+    /**
+     * <code>repeated int32 duplications = 17;</code>
+     */
+    int getDuplications(int index);
+  }
+  /**
+   * Protobuf type {@code org.sonar.server.source.db.Line}
+   */
+  public static final class Line extends
+      com.google.protobuf.GeneratedMessage implements
+      // @@protoc_insertion_point(message_implements:org.sonar.server.source.db.Line)
+      LineOrBuilder {
+    // Use Line.newBuilder() to construct.
+    private Line(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private Line(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final Line defaultInstance;
+    public static Line getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public Line getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    // Wire-format parsing constructor: decodes one serialized Line message
+    // from the stream. NOTE(review): protoc-generated code — do not edit by
+    // hand; change the .proto definition and regenerate instead.
+    private Line(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            // Tag 0 marks end of the message/stream.
+            case 0:
+              done = true;
+              break;
+            // The default label precedes the field cases here; this is legal
+            // Java (generated layout). Unrecognized tags are preserved in
+            // unknownFields for round-tripping.
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            // Each case below sets the matching presence bit in bitField0_
+            // and stores the raw value. String fields are kept as ByteString
+            // and lazily converted to String by their getters.
+            case 8: {
+              bitField0_ |= 0x00000001;
+              line_ = input.readInt32();
+              break;
+            }
+            case 18: {
+              com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00000002;
+              source_ = bs;
+              break;
+            }
+            case 26: {
+              com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00000004;
+              scmRevision_ = bs;
+              break;
+            }
+            case 34: {
+              com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00000008;
+              scmAuthor_ = bs;
+              break;
+            }
+            case 40: {
+              bitField0_ |= 0x00000010;
+              scmDate_ = input.readInt64();
+              break;
+            }
+            case 48: {
+              bitField0_ |= 0x00000020;
+              utLineHits_ = input.readInt32();
+              break;
+            }
+            case 56: {
+              bitField0_ |= 0x00000040;
+              utConditions_ = input.readInt32();
+              break;
+            }
+            case 64: {
+              bitField0_ |= 0x00000080;
+              utCoveredConditions_ = input.readInt32();
+              break;
+            }
+            case 72: {
+              bitField0_ |= 0x00000100;
+              itLineHits_ = input.readInt32();
+              break;
+            }
+            case 80: {
+              bitField0_ |= 0x00000200;
+              itConditions_ = input.readInt32();
+              break;
+            }
+            case 88: {
+              bitField0_ |= 0x00000400;
+              itCoveredConditions_ = input.readInt32();
+              break;
+            }
+            case 96: {
+              bitField0_ |= 0x00000800;
+              overallLineHits_ = input.readInt32();
+              break;
+            }
+            case 104: {
+              bitField0_ |= 0x00001000;
+              overallConditions_ = input.readInt32();
+              break;
+            }
+            case 112: {
+              bitField0_ |= 0x00002000;
+              overallCoveredConditions_ = input.readInt32();
+              break;
+            }
+            case 122: {
+              com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00004000;
+              highlighting_ = bs;
+              break;
+            }
+            case 130: {
+              com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00008000;
+              symbols_ = bs;
+              break;
+            }
+            // Field 17 (duplications), unpacked encoding: one tag per element.
+            case 136: {
+              if (!((mutable_bitField0_ & 0x00010000) == 0x00010000)) {
+                duplications_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00010000;
+              }
+              duplications_.add(input.readInt32());
+              break;
+            }
+            // Field 17 (duplications), packed encoding: length-delimited run
+            // of varints; both encodings are accepted for the same field.
+            case 138: {
+              int length = input.readRawVarint32();
+              int limit = input.pushLimit(length);
+              if (!((mutable_bitField0_ & 0x00010000) == 0x00010000) && input.getBytesUntilLimit() > 0) {
+                duplications_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00010000;
+              }
+              while (input.getBytesUntilLimit() > 0) {
+                duplications_.add(input.readInt32());
+              }
+              input.popLimit(limit);
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        // Freeze the repeated field and attach whatever unknown fields were
+        // collected, even when parsing failed partway through.
+        if (((mutable_bitField0_ & 0x00010000) == 0x00010000)) {
+          duplications_ = java.util.Collections.unmodifiableList(duplications_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Line_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Line_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.sonar.server.source.db.FileSourceDb.Line.class, org.sonar.server.source.db.FileSourceDb.Line.Builder.class);
+    }
+
+    // Stream parser used by the parseFrom(...) helpers; delegates to the
+    // wire-format parsing constructor above. Generated code.
+    public static com.google.protobuf.Parser<Line> PARSER =
+        new com.google.protobuf.AbstractParser<Line>() {
+      public Line parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new Line(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<Line> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    public static final int LINE_FIELD_NUMBER = 1;
+    private int line_;
+    /**
+     * <code>optional int32 line = 1;</code>
+     */
+    public boolean hasLine() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional int32 line = 1;</code>
+     */
+    public int getLine() {
+      return line_;
+    }
+
+    public static final int SOURCE_FIELD_NUMBER = 2;
+    private java.lang.Object source_;
+    /**
+     * <code>optional string source = 2;</code>
+     */
+    public boolean hasSource() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>optional string source = 2;</code>
+     */
+    public java.lang.String getSource() {
+      java.lang.Object ref = source_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          source_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string source = 2;</code>
+     */
+    public com.google.protobuf.ByteString
+        getSourceBytes() {
+      java.lang.Object ref = source_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        source_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int SCM_REVISION_FIELD_NUMBER = 3;
+    private java.lang.Object scmRevision_;
+    /**
+     * <code>optional string scm_revision = 3;</code>
+     *
+     * <pre>
+     * SCM
+     * </pre>
+     */
+    public boolean hasScmRevision() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>optional string scm_revision = 3;</code>
+     *
+     * <pre>
+     * SCM
+     * </pre>
+     */
+    public java.lang.String getScmRevision() {
+      java.lang.Object ref = scmRevision_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          scmRevision_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string scm_revision = 3;</code>
+     *
+     * <pre>
+     * SCM
+     * </pre>
+     */
+    public com.google.protobuf.ByteString
+        getScmRevisionBytes() {
+      java.lang.Object ref = scmRevision_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        scmRevision_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int SCM_AUTHOR_FIELD_NUMBER = 4;
+    private java.lang.Object scmAuthor_;
+    /**
+     * <code>optional string scm_author = 4;</code>
+     */
+    public boolean hasScmAuthor() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>optional string scm_author = 4;</code>
+     */
+    public java.lang.String getScmAuthor() {
+      java.lang.Object ref = scmAuthor_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          scmAuthor_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string scm_author = 4;</code>
+     */
+    public com.google.protobuf.ByteString
+        getScmAuthorBytes() {
+      java.lang.Object ref = scmAuthor_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        scmAuthor_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int SCM_DATE_FIELD_NUMBER = 5;
+    private long scmDate_;
+    /**
+     * <code>optional int64 scm_date = 5;</code>
+     */
+    public boolean hasScmDate() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    /**
+     * <code>optional int64 scm_date = 5;</code>
+     */
+    public long getScmDate() {
+      return scmDate_;
+    }
+
+    public static final int UT_LINE_HITS_FIELD_NUMBER = 6;
+    private int utLineHits_;
+    /**
+     * <code>optional int32 ut_line_hits = 6;</code>
+     *
+     * <pre>
+     * unit testing
+     * </pre>
+     */
+    public boolean hasUtLineHits() {
+      return ((bitField0_ & 0x00000020) == 0x00000020);
+    }
+    /**
+     * <code>optional int32 ut_line_hits = 6;</code>
+     *
+     * <pre>
+     * unit testing
+     * </pre>
+     */
+    public int getUtLineHits() {
+      return utLineHits_;
+    }
+
+    public static final int UT_CONDITIONS_FIELD_NUMBER = 7;
+    private int utConditions_;
+    /**
+     * <code>optional int32 ut_conditions = 7;</code>
+     */
+    public boolean hasUtConditions() {
+      return ((bitField0_ & 0x00000040) == 0x00000040);
+    }
+    /**
+     * <code>optional int32 ut_conditions = 7;</code>
+     */
+    public int getUtConditions() {
+      return utConditions_;
+    }
+
+    public static final int UT_COVERED_CONDITIONS_FIELD_NUMBER = 8;
+    private int utCoveredConditions_;
+    /**
+     * <code>optional int32 ut_covered_conditions = 8;</code>
+     */
+    public boolean hasUtCoveredConditions() {
+      return ((bitField0_ & 0x00000080) == 0x00000080);
+    }
+    /**
+     * <code>optional int32 ut_covered_conditions = 8;</code>
+     */
+    public int getUtCoveredConditions() {
+      return utCoveredConditions_;
+    }
+
+    public static final int IT_LINE_HITS_FIELD_NUMBER = 9;
+    private int itLineHits_;
+    /**
+     * <code>optional int32 it_line_hits = 9;</code>
+     *
+     * <pre>
+     * integration testing
+     * </pre>
+     */
+    public boolean hasItLineHits() {
+      return ((bitField0_ & 0x00000100) == 0x00000100);
+    }
+    /**
+     * <code>optional int32 it_line_hits = 9;</code>
+     *
+     * <pre>
+     * integration testing
+     * </pre>
+     */
+    public int getItLineHits() {
+      return itLineHits_;
+    }
+
+    public static final int IT_CONDITIONS_FIELD_NUMBER = 10;
+    private int itConditions_;
+    /**
+     * <code>optional int32 it_conditions = 10;</code>
+     */
+    public boolean hasItConditions() {
+      return ((bitField0_ & 0x00000200) == 0x00000200);
+    }
+    /**
+     * <code>optional int32 it_conditions = 10;</code>
+     */
+    public int getItConditions() {
+      return itConditions_;
+    }
+
+    public static final int IT_COVERED_CONDITIONS_FIELD_NUMBER = 11;
+    private int itCoveredConditions_;
+    /**
+     * <code>optional int32 it_covered_conditions = 11;</code>
+     */
+    public boolean hasItCoveredConditions() {
+      return ((bitField0_ & 0x00000400) == 0x00000400);
+    }
+    /**
+     * <code>optional int32 it_covered_conditions = 11;</code>
+     */
+    public int getItCoveredConditions() {
+      return itCoveredConditions_;
+    }
+
+    public static final int OVERALL_LINE_HITS_FIELD_NUMBER = 12;
+    private int overallLineHits_;
+    /**
+     * <code>optional int32 overall_line_hits = 12;</code>
+     *
+     * <pre>
+     * overall testing
+     * </pre>
+     */
+    public boolean hasOverallLineHits() {
+      return ((bitField0_ & 0x00000800) == 0x00000800);
+    }
+    /**
+     * <code>optional int32 overall_line_hits = 12;</code>
+     *
+     * <pre>
+     * overall testing
+     * </pre>
+     */
+    public int getOverallLineHits() {
+      return overallLineHits_;
+    }
+
+    public static final int OVERALL_CONDITIONS_FIELD_NUMBER = 13;
+    private int overallConditions_;
+    /**
+     * <code>optional int32 overall_conditions = 13;</code>
+     */
+    public boolean hasOverallConditions() {
+      return ((bitField0_ & 0x00001000) == 0x00001000);
+    }
+    /**
+     * <code>optional int32 overall_conditions = 13;</code>
+     */
+    public int getOverallConditions() {
+      return overallConditions_;
+    }
+
+    public static final int OVERALL_COVERED_CONDITIONS_FIELD_NUMBER = 14;
+    private int overallCoveredConditions_;
+    /**
+     * <code>optional int32 overall_covered_conditions = 14;</code>
+     */
+    public boolean hasOverallCoveredConditions() {
+      return ((bitField0_ & 0x00002000) == 0x00002000);
+    }
+    /**
+     * <code>optional int32 overall_covered_conditions = 14;</code>
+     */
+    public int getOverallCoveredConditions() {
+      return overallCoveredConditions_;
+    }
+
+    public static final int HIGHLIGHTING_FIELD_NUMBER = 15;
+    private java.lang.Object highlighting_;
+    /**
+     * <code>optional string highlighting = 15;</code>
+     */
+    public boolean hasHighlighting() {
+      return ((bitField0_ & 0x00004000) == 0x00004000);
+    }
+    /**
+     * <code>optional string highlighting = 15;</code>
+     */
+    public java.lang.String getHighlighting() {
+      java.lang.Object ref = highlighting_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          highlighting_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string highlighting = 15;</code>
+     */
+    public com.google.protobuf.ByteString
+        getHighlightingBytes() {
+      java.lang.Object ref = highlighting_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        highlighting_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int SYMBOLS_FIELD_NUMBER = 16;
+    private java.lang.Object symbols_;
+    /**
+     * <code>optional string symbols = 16;</code>
+     */
+    public boolean hasSymbols() {
+      return ((bitField0_ & 0x00008000) == 0x00008000);
+    }
+    /**
+     * <code>optional string symbols = 16;</code>
+     */
+    public java.lang.String getSymbols() {
+      java.lang.Object ref = symbols_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          symbols_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string symbols = 16;</code>
+     */
+    public com.google.protobuf.ByteString
+        getSymbolsBytes() {
+      java.lang.Object ref = symbols_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        symbols_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int DUPLICATIONS_FIELD_NUMBER = 17;
+    private java.util.List<java.lang.Integer> duplications_;
+    /**
+     * <code>repeated int32 duplications = 17;</code>
+     */
+    public java.util.List<java.lang.Integer>
+        getDuplicationsList() {
+      return duplications_;
+    }
+    /**
+     * <code>repeated int32 duplications = 17;</code>
+     */
+    public int getDuplicationsCount() {
+      return duplications_.size();
+    }
+    /**
+     * <code>repeated int32 duplications = 17;</code>
+     */
+    public int getDuplications(int index) {
+      return duplications_.get(index);
+    }
+
+    // Resets every field to its proto2 default (0 / "" / empty list) before
+    // parsing populates the message.
+    private void initFields() {
+      line_ = 0;
+      source_ = "";
+      scmRevision_ = "";
+      scmAuthor_ = "";
+      scmDate_ = 0L;
+      utLineHits_ = 0;
+      utConditions_ = 0;
+      utCoveredConditions_ = 0;
+      itLineHits_ = 0;
+      itConditions_ = 0;
+      itCoveredConditions_ = 0;
+      overallLineHits_ = 0;
+      overallConditions_ = 0;
+      overallCoveredConditions_ = 0;
+      highlighting_ = "";
+      symbols_ = "";
+      duplications_ = java.util.Collections.emptyList();
+    }
+    // Memoized tri-state: -1 unknown, 0 false, 1 true.
+    private byte memoizedIsInitialized = -1;
+    // Always true once computed: this message declares no required fields.
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    // Serializes the message: each optional field is written only when its
+    // presence bit in bitField0_ is set, in ascending field-number order.
+    // The repeated field 17 is written unpacked (one tag per element), and
+    // unknown fields captured at parse time are re-emitted at the end.
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt32(1, line_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, getSourceBytes());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeBytes(3, getScmRevisionBytes());
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeBytes(4, getScmAuthorBytes());
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        output.writeInt64(5, scmDate_);
+      }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        output.writeInt32(6, utLineHits_);
+      }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        output.writeInt32(7, utConditions_);
+      }
+      if (((bitField0_ & 0x00000080) == 0x00000080)) {
+        output.writeInt32(8, utCoveredConditions_);
+      }
+      if (((bitField0_ & 0x00000100) == 0x00000100)) {
+        output.writeInt32(9, itLineHits_);
+      }
+      if (((bitField0_ & 0x00000200) == 0x00000200)) {
+        output.writeInt32(10, itConditions_);
+      }
+      if (((bitField0_ & 0x00000400) == 0x00000400)) {
+        output.writeInt32(11, itCoveredConditions_);
+      }
+      if (((bitField0_ & 0x00000800) == 0x00000800)) {
+        output.writeInt32(12, overallLineHits_);
+      }
+      if (((bitField0_ & 0x00001000) == 0x00001000)) {
+        output.writeInt32(13, overallConditions_);
+      }
+      if (((bitField0_ & 0x00002000) == 0x00002000)) {
+        output.writeInt32(14, overallCoveredConditions_);
+      }
+      if (((bitField0_ & 0x00004000) == 0x00004000)) {
+        output.writeBytes(15, getHighlightingBytes());
+      }
+      if (((bitField0_ & 0x00008000) == 0x00008000)) {
+        output.writeBytes(16, getSymbolsBytes());
+      }
+      for (int i = 0; i < duplications_.size(); i++) {
+        output.writeInt32(17, duplications_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    // Memoized byte size of the serialized form; -1 means "not computed yet".
+    private int memoizedSerializedSize = -1;
+    // Computes (once) the exact serialized size, mirroring writeTo: only
+    // fields whose presence bit is set contribute.
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(1, line_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, getSourceBytes());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(3, getScmRevisionBytes());
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(4, getScmAuthorBytes());
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(5, scmDate_);
+      }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(6, utLineHits_);
+      }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(7, utConditions_);
+      }
+      if (((bitField0_ & 0x00000080) == 0x00000080)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(8, utCoveredConditions_);
+      }
+      if (((bitField0_ & 0x00000100) == 0x00000100)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(9, itLineHits_);
+      }
+      if (((bitField0_ & 0x00000200) == 0x00000200)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(10, itConditions_);
+      }
+      if (((bitField0_ & 0x00000400) == 0x00000400)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(11, itCoveredConditions_);
+      }
+      if (((bitField0_ & 0x00000800) == 0x00000800)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(12, overallLineHits_);
+      }
+      if (((bitField0_ & 0x00001000) == 0x00001000)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(13, overallConditions_);
+      }
+      if (((bitField0_ & 0x00002000) == 0x00002000)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(14, overallCoveredConditions_);
+      }
+      if (((bitField0_ & 0x00004000) == 0x00004000)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(15, getHighlightingBytes());
+      }
+      if (((bitField0_ & 0x00008000) == 0x00008000)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(16, getSymbolsBytes());
+      }
+      {
+        int dataSize = 0;
+        for (int i = 0; i < duplications_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeInt32SizeNoTag(duplications_.get(i));
+        }
+        size += dataSize;
+        // 2 bytes of tag per element: field number 17 needs a two-byte
+        // varint tag, matching the unpacked encoding used in writeTo.
+        size += 2 * getDuplicationsList().size();
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    public static org.sonar.server.source.db.FileSourceDb.Line parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Line parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Line parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Line parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Line parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Line parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Line parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Line parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Line parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Line parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.sonar.server.source.db.FileSourceDb.Line prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code org.sonar.server.source.db.Line}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:org.sonar.server.source.db.Line)
+        org.sonar.server.source.db.FileSourceDb.LineOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Line_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Line_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.sonar.server.source.db.FileSourceDb.Line.class, org.sonar.server.source.db.FileSourceDb.Line.Builder.class);
+      }
+
+      // Construct using org.sonar.server.source.db.FileSourceDb.Line.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      // Resets every field to its default value and clears its presence bit
+      // (each ~mask matches the bit set by the corresponding field setter /
+      // parse case). Returns this builder for chaining. Generated code.
+      public Builder clear() {
+        super.clear();
+        line_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        source_ = "";
+        bitField0_ = (bitField0_ & ~0x00000002);
+        scmRevision_ = "";
+        bitField0_ = (bitField0_ & ~0x00000004);
+        scmAuthor_ = "";
+        bitField0_ = (bitField0_ & ~0x00000008);
+        scmDate_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000010);
+        utLineHits_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000020);
+        utConditions_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000040);
+        utCoveredConditions_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000080);
+        itLineHits_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000100);
+        itConditions_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000200);
+        itCoveredConditions_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000400);
+        overallLineHits_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000800);
+        overallConditions_ = 0;
+        bitField0_ = (bitField0_ & ~0x00001000);
+        overallCoveredConditions_ = 0;
+        bitField0_ = (bitField0_ & ~0x00002000);
+        highlighting_ = "";
+        bitField0_ = (bitField0_ & ~0x00004000);
+        symbols_ = "";
+        bitField0_ = (bitField0_ & ~0x00008000);
+        duplications_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00010000);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Line_descriptor;
+      }
+
+      public org.sonar.server.source.db.FileSourceDb.Line getDefaultInstanceForType() {
+        return org.sonar.server.source.db.FileSourceDb.Line.getDefaultInstance();
+      }
+
+      public org.sonar.server.source.db.FileSourceDb.Line build() {
+        org.sonar.server.source.db.FileSourceDb.Line result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.sonar.server.source.db.FileSourceDb.Line buildPartial() {
+        org.sonar.server.source.db.FileSourceDb.Line result = new org.sonar.server.source.db.FileSourceDb.Line(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.line_ = line_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.source_ = source_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.scmRevision_ = scmRevision_;
+        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.scmAuthor_ = scmAuthor_;
+        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+          to_bitField0_ |= 0x00000010;
+        }
+        result.scmDate_ = scmDate_;
+        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+          to_bitField0_ |= 0x00000020;
+        }
+        result.utLineHits_ = utLineHits_;
+        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+          to_bitField0_ |= 0x00000040;
+        }
+        result.utConditions_ = utConditions_;
+        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
+          to_bitField0_ |= 0x00000080;
+        }
+        result.utCoveredConditions_ = utCoveredConditions_;
+        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
+          to_bitField0_ |= 0x00000100;
+        }
+        result.itLineHits_ = itLineHits_;
+        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
+          to_bitField0_ |= 0x00000200;
+        }
+        result.itConditions_ = itConditions_;
+        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
+          to_bitField0_ |= 0x00000400;
+        }
+        result.itCoveredConditions_ = itCoveredConditions_;
+        if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
+          to_bitField0_ |= 0x00000800;
+        }
+        result.overallLineHits_ = overallLineHits_;
+        if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
+          to_bitField0_ |= 0x00001000;
+        }
+        result.overallConditions_ = overallConditions_;
+        if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
+          to_bitField0_ |= 0x00002000;
+        }
+        result.overallCoveredConditions_ = overallCoveredConditions_;
+        if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
+          to_bitField0_ |= 0x00004000;
+        }
+        result.highlighting_ = highlighting_;
+        if (((from_bitField0_ & 0x00008000) == 0x00008000)) {
+          to_bitField0_ |= 0x00008000;
+        }
+        result.symbols_ = symbols_;
+        if (((bitField0_ & 0x00010000) == 0x00010000)) {
+          duplications_ = java.util.Collections.unmodifiableList(duplications_);
+          bitField0_ = (bitField0_ & ~0x00010000);
+        }
+        result.duplications_ = duplications_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.sonar.server.source.db.FileSourceDb.Line) {
+          return mergeFrom((org.sonar.server.source.db.FileSourceDb.Line)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.sonar.server.source.db.FileSourceDb.Line other) {
+        if (other == org.sonar.server.source.db.FileSourceDb.Line.getDefaultInstance()) return this;
+        if (other.hasLine()) {
+          setLine(other.getLine());
+        }
+        if (other.hasSource()) {
+          bitField0_ |= 0x00000002;
+          source_ = other.source_;
+          onChanged();
+        }
+        if (other.hasScmRevision()) {
+          bitField0_ |= 0x00000004;
+          scmRevision_ = other.scmRevision_;
+          onChanged();
+        }
+        if (other.hasScmAuthor()) {
+          bitField0_ |= 0x00000008;
+          scmAuthor_ = other.scmAuthor_;
+          onChanged();
+        }
+        if (other.hasScmDate()) {
+          setScmDate(other.getScmDate());
+        }
+        if (other.hasUtLineHits()) {
+          setUtLineHits(other.getUtLineHits());
+        }
+        if (other.hasUtConditions()) {
+          setUtConditions(other.getUtConditions());
+        }
+        if (other.hasUtCoveredConditions()) {
+          setUtCoveredConditions(other.getUtCoveredConditions());
+        }
+        if (other.hasItLineHits()) {
+          setItLineHits(other.getItLineHits());
+        }
+        if (other.hasItConditions()) {
+          setItConditions(other.getItConditions());
+        }
+        if (other.hasItCoveredConditions()) {
+          setItCoveredConditions(other.getItCoveredConditions());
+        }
+        if (other.hasOverallLineHits()) {
+          setOverallLineHits(other.getOverallLineHits());
+        }
+        if (other.hasOverallConditions()) {
+          setOverallConditions(other.getOverallConditions());
+        }
+        if (other.hasOverallCoveredConditions()) {
+          setOverallCoveredConditions(other.getOverallCoveredConditions());
+        }
+        if (other.hasHighlighting()) {
+          bitField0_ |= 0x00004000;
+          highlighting_ = other.highlighting_;
+          onChanged();
+        }
+        if (other.hasSymbols()) {
+          bitField0_ |= 0x00008000;
+          symbols_ = other.symbols_;
+          onChanged();
+        }
+        if (!other.duplications_.isEmpty()) {
+          if (duplications_.isEmpty()) {
+            duplications_ = other.duplications_;
+            bitField0_ = (bitField0_ & ~0x00010000);
+          } else {
+            ensureDuplicationsIsMutable();
+            duplications_.addAll(other.duplications_);
+          }
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.sonar.server.source.db.FileSourceDb.Line parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.sonar.server.source.db.FileSourceDb.Line) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      private int line_ ;
+      /**
+       * <code>optional int32 line = 1;</code>
+       */
+      public boolean hasLine() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>optional int32 line = 1;</code>
+       */
+      public int getLine() {
+        return line_;
+      }
+      /**
+       * <code>optional int32 line = 1;</code>
+       */
+      public Builder setLine(int value) {
+        bitField0_ |= 0x00000001;
+        line_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 line = 1;</code>
+       */
+      public Builder clearLine() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        line_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private java.lang.Object source_ = "";
+      /**
+       * <code>optional string source = 2;</code>
+       */
+      public boolean hasSource() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>optional string source = 2;</code>
+       */
+      public java.lang.String getSource() {
+        java.lang.Object ref = source_;
+        if (!(ref instanceof java.lang.String)) {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            source_ = s;
+          }
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string source = 2;</code>
+       */
+      public com.google.protobuf.ByteString
+          getSourceBytes() {
+        java.lang.Object ref = source_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          source_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string source = 2;</code>
+       */
+      public Builder setSource(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000002;
+        source_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string source = 2;</code>
+       */
+      public Builder clearSource() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        source_ = getDefaultInstance().getSource();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string source = 2;</code>
+       */
+      public Builder setSourceBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000002;
+        source_ = value;
+        onChanged();
+        return this;
+      }
+
+      private java.lang.Object scmRevision_ = "";
+      /**
+       * <code>optional string scm_revision = 3;</code>
+       *
+       * <pre>
+       * SCM
+       * </pre>
+       */
+      public boolean hasScmRevision() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * <code>optional string scm_revision = 3;</code>
+       *
+       * <pre>
+       * SCM
+       * </pre>
+       */
+      public java.lang.String getScmRevision() {
+        java.lang.Object ref = scmRevision_;
+        if (!(ref instanceof java.lang.String)) {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            scmRevision_ = s;
+          }
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string scm_revision = 3;</code>
+       *
+       * <pre>
+       * SCM
+       * </pre>
+       */
+      public com.google.protobuf.ByteString
+          getScmRevisionBytes() {
+        java.lang.Object ref = scmRevision_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          scmRevision_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string scm_revision = 3;</code>
+       *
+       * <pre>
+       * SCM
+       * </pre>
+       */
+      public Builder setScmRevision(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000004;
+        scmRevision_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string scm_revision = 3;</code>
+       *
+       * <pre>
+       * SCM
+       * </pre>
+       */
+      public Builder clearScmRevision() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        scmRevision_ = getDefaultInstance().getScmRevision();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string scm_revision = 3;</code>
+       *
+       * <pre>
+       * SCM
+       * </pre>
+       */
+      public Builder setScmRevisionBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000004;
+        scmRevision_ = value;
+        onChanged();
+        return this;
+      }
+
+      private java.lang.Object scmAuthor_ = "";
+      /**
+       * <code>optional string scm_author = 4;</code>
+       */
+      public boolean hasScmAuthor() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * <code>optional string scm_author = 4;</code>
+       */
+      public java.lang.String getScmAuthor() {
+        java.lang.Object ref = scmAuthor_;
+        if (!(ref instanceof java.lang.String)) {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            scmAuthor_ = s;
+          }
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string scm_author = 4;</code>
+       */
+      public com.google.protobuf.ByteString
+          getScmAuthorBytes() {
+        java.lang.Object ref = scmAuthor_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          scmAuthor_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string scm_author = 4;</code>
+       */
+      public Builder setScmAuthor(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000008;
+        scmAuthor_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string scm_author = 4;</code>
+       */
+      public Builder clearScmAuthor() {
+        bitField0_ = (bitField0_ & ~0x00000008);
+        scmAuthor_ = getDefaultInstance().getScmAuthor();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string scm_author = 4;</code>
+       */
+      public Builder setScmAuthorBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000008;
+        scmAuthor_ = value;
+        onChanged();
+        return this;
+      }
+
+      private long scmDate_ ;
+      /**
+       * <code>optional int64 scm_date = 5;</code>
+       */
+      public boolean hasScmDate() {
+        return ((bitField0_ & 0x00000010) == 0x00000010);
+      }
+      /**
+       * <code>optional int64 scm_date = 5;</code>
+       */
+      public long getScmDate() {
+        return scmDate_;
+      }
+      /**
+       * <code>optional int64 scm_date = 5;</code>
+       */
+      public Builder setScmDate(long value) {
+        bitField0_ |= 0x00000010;
+        scmDate_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int64 scm_date = 5;</code>
+       */
+      public Builder clearScmDate() {
+        bitField0_ = (bitField0_ & ~0x00000010);
+        scmDate_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      private int utLineHits_ ;
+      /**
+       * <code>optional int32 ut_line_hits = 6;</code>
+       *
+       * <pre>
+       * unit testing
+       * </pre>
+       */
+      public boolean hasUtLineHits() {
+        return ((bitField0_ & 0x00000020) == 0x00000020);
+      }
+      /**
+       * <code>optional int32 ut_line_hits = 6;</code>
+       *
+       * <pre>
+       * unit testing
+       * </pre>
+       */
+      public int getUtLineHits() {
+        return utLineHits_;
+      }
+      /**
+       * <code>optional int32 ut_line_hits = 6;</code>
+       *
+       * <pre>
+       * unit testing
+       * </pre>
+       */
+      public Builder setUtLineHits(int value) {
+        bitField0_ |= 0x00000020;
+        utLineHits_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 ut_line_hits = 6;</code>
+       *
+       * <pre>
+       * unit testing
+       * </pre>
+       */
+      public Builder clearUtLineHits() {
+        bitField0_ = (bitField0_ & ~0x00000020);
+        utLineHits_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private int utConditions_ ;
+      /**
+       * <code>optional int32 ut_conditions = 7;</code>
+       */
+      public boolean hasUtConditions() {
+        return ((bitField0_ & 0x00000040) == 0x00000040);
+      }
+      /**
+       * <code>optional int32 ut_conditions = 7;</code>
+       */
+      public int getUtConditions() {
+        return utConditions_;
+      }
+      /**
+       * <code>optional int32 ut_conditions = 7;</code>
+       */
+      public Builder setUtConditions(int value) {
+        bitField0_ |= 0x00000040;
+        utConditions_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 ut_conditions = 7;</code>
+       */
+      public Builder clearUtConditions() {
+        bitField0_ = (bitField0_ & ~0x00000040);
+        utConditions_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private int utCoveredConditions_ ;
+      /**
+       * <code>optional int32 ut_covered_conditions = 8;</code>
+       */
+      public boolean hasUtCoveredConditions() {
+        return ((bitField0_ & 0x00000080) == 0x00000080);
+      }
+      /**
+       * <code>optional int32 ut_covered_conditions = 8;</code>
+       */
+      public int getUtCoveredConditions() {
+        return utCoveredConditions_;
+      }
+      /**
+       * <code>optional int32 ut_covered_conditions = 8;</code>
+       */
+      public Builder setUtCoveredConditions(int value) {
+        bitField0_ |= 0x00000080;
+        utCoveredConditions_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 ut_covered_conditions = 8;</code>
+       */
+      public Builder clearUtCoveredConditions() {
+        bitField0_ = (bitField0_ & ~0x00000080);
+        utCoveredConditions_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private int itLineHits_ ;
+      /**
+       * <code>optional int32 it_line_hits = 9;</code>
+       *
+       * <pre>
+       * integration testing
+       * </pre>
+       */
+      public boolean hasItLineHits() {
+        return ((bitField0_ & 0x00000100) == 0x00000100);
+      }
+      /**
+       * <code>optional int32 it_line_hits = 9;</code>
+       *
+       * <pre>
+       * integration testing
+       * </pre>
+       */
+      public int getItLineHits() {
+        return itLineHits_;
+      }
+      /**
+       * <code>optional int32 it_line_hits = 9;</code>
+       *
+       * <pre>
+       * integration testing
+       * </pre>
+       */
+      public Builder setItLineHits(int value) {
+        bitField0_ |= 0x00000100;
+        itLineHits_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 it_line_hits = 9;</code>
+       *
+       * <pre>
+       * integration testing
+       * </pre>
+       */
+      public Builder clearItLineHits() {
+        bitField0_ = (bitField0_ & ~0x00000100);
+        itLineHits_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private int itConditions_ ;
+      /**
+       * <code>optional int32 it_conditions = 10;</code>
+       */
+      public boolean hasItConditions() {
+        return ((bitField0_ & 0x00000200) == 0x00000200);
+      }
+      /**
+       * <code>optional int32 it_conditions = 10;</code>
+       */
+      public int getItConditions() {
+        return itConditions_;
+      }
+      /**
+       * <code>optional int32 it_conditions = 10;</code>
+       */
+      public Builder setItConditions(int value) {
+        bitField0_ |= 0x00000200;
+        itConditions_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 it_conditions = 10;</code>
+       */
+      public Builder clearItConditions() {
+        bitField0_ = (bitField0_ & ~0x00000200);
+        itConditions_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private int itCoveredConditions_ ;
+      /**
+       * <code>optional int32 it_covered_conditions = 11;</code>
+       */
+      public boolean hasItCoveredConditions() {
+        return ((bitField0_ & 0x00000400) == 0x00000400);
+      }
+      /**
+       * <code>optional int32 it_covered_conditions = 11;</code>
+       */
+      public int getItCoveredConditions() {
+        return itCoveredConditions_;
+      }
+      /**
+       * <code>optional int32 it_covered_conditions = 11;</code>
+       */
+      public Builder setItCoveredConditions(int value) {
+        bitField0_ |= 0x00000400;
+        itCoveredConditions_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 it_covered_conditions = 11;</code>
+       */
+      public Builder clearItCoveredConditions() {
+        bitField0_ = (bitField0_ & ~0x00000400);
+        itCoveredConditions_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private int overallLineHits_ ;
+      /**
+       * <code>optional int32 overall_line_hits = 12;</code>
+       *
+       * <pre>
+       * overall testing
+       * </pre>
+       */
+      public boolean hasOverallLineHits() {
+        return ((bitField0_ & 0x00000800) == 0x00000800);
+      }
+      /**
+       * <code>optional int32 overall_line_hits = 12;</code>
+       *
+       * <pre>
+       * overall testing
+       * </pre>
+       */
+      public int getOverallLineHits() {
+        return overallLineHits_;
+      }
+      /**
+       * <code>optional int32 overall_line_hits = 12;</code>
+       *
+       * <pre>
+       * overall testing
+       * </pre>
+       */
+      public Builder setOverallLineHits(int value) {
+        bitField0_ |= 0x00000800;
+        overallLineHits_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 overall_line_hits = 12;</code>
+       *
+       * <pre>
+       * overall testing
+       * </pre>
+       */
+      public Builder clearOverallLineHits() {
+        bitField0_ = (bitField0_ & ~0x00000800);
+        overallLineHits_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private int overallConditions_ ;
+      /**
+       * <code>optional int32 overall_conditions = 13;</code>
+       */
+      public boolean hasOverallConditions() {
+        return ((bitField0_ & 0x00001000) == 0x00001000);
+      }
+      /**
+       * <code>optional int32 overall_conditions = 13;</code>
+       */
+      public int getOverallConditions() {
+        return overallConditions_;
+      }
+      /**
+       * <code>optional int32 overall_conditions = 13;</code>
+       */
+      public Builder setOverallConditions(int value) {
+        bitField0_ |= 0x00001000;
+        overallConditions_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 overall_conditions = 13;</code>
+       */
+      public Builder clearOverallConditions() {
+        bitField0_ = (bitField0_ & ~0x00001000);
+        overallConditions_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private int overallCoveredConditions_ ;
+      /**
+       * <code>optional int32 overall_covered_conditions = 14;</code>
+       */
+      public boolean hasOverallCoveredConditions() {
+        return ((bitField0_ & 0x00002000) == 0x00002000);
+      }
+      /**
+       * <code>optional int32 overall_covered_conditions = 14;</code>
+       */
+      public int getOverallCoveredConditions() {
+        return overallCoveredConditions_;
+      }
+      /**
+       * <code>optional int32 overall_covered_conditions = 14;</code>
+       */
+      public Builder setOverallCoveredConditions(int value) {
+        bitField0_ |= 0x00002000;
+        overallCoveredConditions_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 overall_covered_conditions = 14;</code>
+       */
+      public Builder clearOverallCoveredConditions() {
+        bitField0_ = (bitField0_ & ~0x00002000);
+        overallCoveredConditions_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private java.lang.Object highlighting_ = "";
+      /**
+       * <code>optional string highlighting = 15;</code>
+       */
+      public boolean hasHighlighting() {
+        return ((bitField0_ & 0x00004000) == 0x00004000);
+      }
+      /**
+       * <code>optional string highlighting = 15;</code>
+       */
+      public java.lang.String getHighlighting() {
+        java.lang.Object ref = highlighting_;
+        if (!(ref instanceof java.lang.String)) {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            highlighting_ = s;
+          }
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string highlighting = 15;</code>
+       */
+      public com.google.protobuf.ByteString
+          getHighlightingBytes() {
+        java.lang.Object ref = highlighting_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          highlighting_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string highlighting = 15;</code>
+       */
+      public Builder setHighlighting(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00004000;
+        highlighting_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string highlighting = 15;</code>
+       */
+      public Builder clearHighlighting() {
+        bitField0_ = (bitField0_ & ~0x00004000);
+        highlighting_ = getDefaultInstance().getHighlighting();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string highlighting = 15;</code>
+       */
+      public Builder setHighlightingBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00004000;
+        highlighting_ = value;
+        onChanged();
+        return this;
+      }
+
+      private java.lang.Object symbols_ = "";
+      /**
+       * <code>optional string symbols = 16;</code>
+       */
+      public boolean hasSymbols() {
+        return ((bitField0_ & 0x00008000) == 0x00008000);
+      }
+      /**
+       * <code>optional string symbols = 16;</code>
+       */
+      public java.lang.String getSymbols() {
+        java.lang.Object ref = symbols_;
+        if (!(ref instanceof java.lang.String)) {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            symbols_ = s;
+          }
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string symbols = 16;</code>
+       */
+      public com.google.protobuf.ByteString
+          getSymbolsBytes() {
+        java.lang.Object ref = symbols_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          symbols_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string symbols = 16;</code>
+       */
+      public Builder setSymbols(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00008000;
+        symbols_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string symbols = 16;</code>
+       */
+      public Builder clearSymbols() {
+        bitField0_ = (bitField0_ & ~0x00008000);
+        symbols_ = getDefaultInstance().getSymbols();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string symbols = 16;</code>
+       */
+      public Builder setSymbolsBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00008000;
+        symbols_ = value;
+        onChanged();
+        return this;
+      }
+
+      private java.util.List<java.lang.Integer> duplications_ = java.util.Collections.emptyList();
+      private void ensureDuplicationsIsMutable() {
+        if (!((bitField0_ & 0x00010000) == 0x00010000)) {
+          duplications_ = new java.util.ArrayList<java.lang.Integer>(duplications_);
+          bitField0_ |= 0x00010000;
+         }
+      }
+      /**
+       * <code>repeated int32 duplications = 17;</code>
+       *
+       * <p>Returns an unmodifiable view of the current values.</p>
+       */
+      public java.util.List<java.lang.Integer>
+          getDuplicationsList() {
+        return java.util.Collections.unmodifiableList(duplications_);
+      }
+      /**
+       * <code>repeated int32 duplications = 17;</code>
+       *
+       * <p>Number of values currently in the repeated field.</p>
+       */
+      public int getDuplicationsCount() {
+        return duplications_.size();
+      }
+      /**
+       * <code>repeated int32 duplications = 17;</code>
+       *
+       * <p>Returns the value at {@code index}; auto-unboxed from the backing
+       * {@code List&lt;Integer&gt;}.</p>
+       */
+      public int getDuplications(int index) {
+        return duplications_.get(index);
+      }
+      /**
+       * <code>repeated int32 duplications = 17;</code>
+       *
+       * <p>Replaces the value at {@code index} after forcing the backing
+       * list to be mutable.</p>
+       */
+      public Builder setDuplications(
+          int index, int value) {
+        ensureDuplicationsIsMutable();
+        duplications_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 duplications = 17;</code>
+       *
+       * <p>Appends a single value to the repeated field.</p>
+       */
+      public Builder addDuplications(int value) {
+        ensureDuplicationsIsMutable();
+        duplications_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 duplications = 17;</code>
+       *
+       * <p>Appends every value from {@code values}, using protobuf's bulk
+       * helper which optimizes for Collection inputs.</p>
+       */
+      public Builder addAllDuplications(
+          java.lang.Iterable<? extends java.lang.Integer> values) {
+        ensureDuplicationsIsMutable();
+        com.google.protobuf.AbstractMessageLite.Builder.addAll(
+            values, duplications_);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 duplications = 17;</code>
+       *
+       * <p>Resets the field to the shared empty list and clears the
+       * mutability bit so a later mutation re-copies.</p>
+       */
+      public Builder clearDuplications() {
+        duplications_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00010000);
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:org.sonar.server.source.db.Line)
+    }
+
+    static {
+      defaultInstance = new Line(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:org.sonar.server.source.db.Line)
+  }
+
+  /**
+   * Read-only accessor interface implemented by both {@code Data} and
+   * {@code Data.Builder}, exposing the repeated {@code lines} field.
+   * Generated by the protocol buffer compiler — do not edit by hand.
+   */
+  public interface DataOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:org.sonar.server.source.db.Data)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+     */
+    java.util.List<org.sonar.server.source.db.FileSourceDb.Line> 
+        getLinesList();
+    /**
+     * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+     */
+    org.sonar.server.source.db.FileSourceDb.Line getLines(int index);
+    /**
+     * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+     */
+    int getLinesCount();
+    /**
+     * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+     */
+    java.util.List<? extends org.sonar.server.source.db.FileSourceDb.LineOrBuilder> 
+        getLinesOrBuilderList();
+    /**
+     * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+     */
+    org.sonar.server.source.db.FileSourceDb.LineOrBuilder getLinesOrBuilder(
+        int index);
+  }
+  /**
+   * Protobuf type {@code org.sonar.server.source.db.Data}
+   *
+   * <p>Root message serialized into the FILE_SOURCES.BINARY_DATA db column:
+   * a repeated list of per-source-line {@code Line} messages. Generated by
+   * the protocol buffer compiler from file_source_db.proto — do not edit by
+   * hand; regenerate from the .proto instead.</p>
+   */
+  public static final class Data extends
+      com.google.protobuf.GeneratedMessage implements
+      // @@protoc_insertion_point(message_implements:org.sonar.server.source.db.Data)
+      DataOrBuilder {
+    // Use Data.newBuilder() to construct.
+    private Data(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private Data(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final Data defaultInstance;
+    public static Data getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public Data getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    // Wire-format parsing constructor; invoked via PARSER. Unrecognized
+    // fields are preserved in unknownFields for forward compatibility.
+    private Data(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                lines_ = new java.util.ArrayList<org.sonar.server.source.db.FileSourceDb.Line>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              lines_.add(input.readMessage(org.sonar.server.source.db.FileSourceDb.Line.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          lines_ = java.util.Collections.unmodifiableList(lines_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Data_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Data_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.sonar.server.source.db.FileSourceDb.Data.class, org.sonar.server.source.db.FileSourceDb.Data.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<Data> PARSER =
+        new com.google.protobuf.AbstractParser<Data>() {
+      public Data parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new Data(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<Data> getParserForType() {
+      return PARSER;
+    }
+
+    public static final int LINES_FIELD_NUMBER = 1;
+    private java.util.List<org.sonar.server.source.db.FileSourceDb.Line> lines_;
+    /**
+     * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+     */
+    public java.util.List<org.sonar.server.source.db.FileSourceDb.Line> getLinesList() {
+      return lines_;
+    }
+    /**
+     * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+     */
+    public java.util.List<? extends org.sonar.server.source.db.FileSourceDb.LineOrBuilder> 
+        getLinesOrBuilderList() {
+      return lines_;
+    }
+    /**
+     * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+     */
+    public int getLinesCount() {
+      return lines_.size();
+    }
+    /**
+     * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+     */
+    public org.sonar.server.source.db.FileSourceDb.Line getLines(int index) {
+      return lines_.get(index);
+    }
+    /**
+     * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+     */
+    public org.sonar.server.source.db.FileSourceDb.LineOrBuilder getLinesOrBuilder(
+        int index) {
+      return lines_.get(index);
+    }
+
+    private void initFields() {
+      lines_ = java.util.Collections.emptyList();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < lines_.size(); i++) {
+        output.writeMessage(1, lines_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      for (int i = 0; i < lines_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, lines_.get(i));
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    public static org.sonar.server.source.db.FileSourceDb.Data parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Data parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Data parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Data parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Data parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Data parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Data parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Data parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Data parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.sonar.server.source.db.FileSourceDb.Data parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.sonar.server.source.db.FileSourceDb.Data prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code org.sonar.server.source.db.Data}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:org.sonar.server.source.db.Data)
+        org.sonar.server.source.db.FileSourceDb.DataOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Data_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Data_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.sonar.server.source.db.FileSourceDb.Data.class, org.sonar.server.source.db.FileSourceDb.Data.Builder.class);
+      }
+
+      // Construct using org.sonar.server.source.db.FileSourceDb.Data.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getLinesFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        if (linesBuilder_ == null) {
+          lines_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+        } else {
+          linesBuilder_.clear();
+        }
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Data_descriptor;
+      }
+
+      public org.sonar.server.source.db.FileSourceDb.Data getDefaultInstanceForType() {
+        return org.sonar.server.source.db.FileSourceDb.Data.getDefaultInstance();
+      }
+
+      public org.sonar.server.source.db.FileSourceDb.Data build() {
+        org.sonar.server.source.db.FileSourceDb.Data result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.sonar.server.source.db.FileSourceDb.Data buildPartial() {
+        org.sonar.server.source.db.FileSourceDb.Data result = new org.sonar.server.source.db.FileSourceDb.Data(this);
+        int from_bitField0_ = bitField0_;
+        if (linesBuilder_ == null) {
+          if (((bitField0_ & 0x00000001) == 0x00000001)) {
+            lines_ = java.util.Collections.unmodifiableList(lines_);
+            bitField0_ = (bitField0_ & ~0x00000001);
+          }
+          result.lines_ = lines_;
+        } else {
+          result.lines_ = linesBuilder_.build();
+        }
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.sonar.server.source.db.FileSourceDb.Data) {
+          return mergeFrom((org.sonar.server.source.db.FileSourceDb.Data)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.sonar.server.source.db.FileSourceDb.Data other) {
+        if (other == org.sonar.server.source.db.FileSourceDb.Data.getDefaultInstance()) return this;
+        if (linesBuilder_ == null) {
+          if (!other.lines_.isEmpty()) {
+            if (lines_.isEmpty()) {
+              lines_ = other.lines_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+            } else {
+              ensureLinesIsMutable();
+              lines_.addAll(other.lines_);
+            }
+            onChanged();
+          }
+        } else {
+          if (!other.lines_.isEmpty()) {
+            if (linesBuilder_.isEmpty()) {
+              linesBuilder_.dispose();
+              linesBuilder_ = null;
+              lines_ = other.lines_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+              linesBuilder_ = 
+                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                   getLinesFieldBuilder() : null;
+            } else {
+              linesBuilder_.addAllMessages(other.lines_);
+            }
+          }
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.sonar.server.source.db.FileSourceDb.Data parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.sonar.server.source.db.FileSourceDb.Data) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      private java.util.List<org.sonar.server.source.db.FileSourceDb.Line> lines_ =
+        java.util.Collections.emptyList();
+      private void ensureLinesIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          lines_ = new java.util.ArrayList<org.sonar.server.source.db.FileSourceDb.Line>(lines_);
+          bitField0_ |= 0x00000001;
+         }
+      }
+
+      // Lazily-initialized nested-builder machinery for the repeated 'lines'
+      // field; while non-null it owns the data instead of lines_.
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.sonar.server.source.db.FileSourceDb.Line, org.sonar.server.source.db.FileSourceDb.Line.Builder, org.sonar.server.source.db.FileSourceDb.LineOrBuilder> linesBuilder_;
+
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public java.util.List<org.sonar.server.source.db.FileSourceDb.Line> getLinesList() {
+        if (linesBuilder_ == null) {
+          return java.util.Collections.unmodifiableList(lines_);
+        } else {
+          return linesBuilder_.getMessageList();
+        }
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public int getLinesCount() {
+        if (linesBuilder_ == null) {
+          return lines_.size();
+        } else {
+          return linesBuilder_.getCount();
+        }
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public org.sonar.server.source.db.FileSourceDb.Line getLines(int index) {
+        if (linesBuilder_ == null) {
+          return lines_.get(index);
+        } else {
+          return linesBuilder_.getMessage(index);
+        }
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public Builder setLines(
+          int index, org.sonar.server.source.db.FileSourceDb.Line value) {
+        if (linesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureLinesIsMutable();
+          lines_.set(index, value);
+          onChanged();
+        } else {
+          linesBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public Builder setLines(
+          int index, org.sonar.server.source.db.FileSourceDb.Line.Builder builderForValue) {
+        if (linesBuilder_ == null) {
+          ensureLinesIsMutable();
+          lines_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          linesBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public Builder addLines(org.sonar.server.source.db.FileSourceDb.Line value) {
+        if (linesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureLinesIsMutable();
+          lines_.add(value);
+          onChanged();
+        } else {
+          linesBuilder_.addMessage(value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public Builder addLines(
+          int index, org.sonar.server.source.db.FileSourceDb.Line value) {
+        if (linesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureLinesIsMutable();
+          lines_.add(index, value);
+          onChanged();
+        } else {
+          linesBuilder_.addMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public Builder addLines(
+          org.sonar.server.source.db.FileSourceDb.Line.Builder builderForValue) {
+        if (linesBuilder_ == null) {
+          ensureLinesIsMutable();
+          lines_.add(builderForValue.build());
+          onChanged();
+        } else {
+          linesBuilder_.addMessage(builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public Builder addLines(
+          int index, org.sonar.server.source.db.FileSourceDb.Line.Builder builderForValue) {
+        if (linesBuilder_ == null) {
+          ensureLinesIsMutable();
+          lines_.add(index, builderForValue.build());
+          onChanged();
+        } else {
+          linesBuilder_.addMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public Builder addAllLines(
+          java.lang.Iterable<? extends org.sonar.server.source.db.FileSourceDb.Line> values) {
+        if (linesBuilder_ == null) {
+          ensureLinesIsMutable();
+          com.google.protobuf.AbstractMessageLite.Builder.addAll(
+              values, lines_);
+          onChanged();
+        } else {
+          linesBuilder_.addAllMessages(values);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public Builder clearLines() {
+        if (linesBuilder_ == null) {
+          lines_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+          onChanged();
+        } else {
+          linesBuilder_.clear();
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public Builder removeLines(int index) {
+        if (linesBuilder_ == null) {
+          ensureLinesIsMutable();
+          lines_.remove(index);
+          onChanged();
+        } else {
+          linesBuilder_.remove(index);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public org.sonar.server.source.db.FileSourceDb.Line.Builder getLinesBuilder(
+          int index) {
+        return getLinesFieldBuilder().getBuilder(index);
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public org.sonar.server.source.db.FileSourceDb.LineOrBuilder getLinesOrBuilder(
+          int index) {
+        if (linesBuilder_ == null) {
+          return lines_.get(index);  } else {
+          return linesBuilder_.getMessageOrBuilder(index);
+        }
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public java.util.List<? extends org.sonar.server.source.db.FileSourceDb.LineOrBuilder> 
+           getLinesOrBuilderList() {
+        if (linesBuilder_ != null) {
+          return linesBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(lines_);
+        }
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public org.sonar.server.source.db.FileSourceDb.Line.Builder addLinesBuilder() {
+        return getLinesFieldBuilder().addBuilder(
+            org.sonar.server.source.db.FileSourceDb.Line.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public org.sonar.server.source.db.FileSourceDb.Line.Builder addLinesBuilder(
+          int index) {
+        return getLinesFieldBuilder().addBuilder(
+            index, org.sonar.server.source.db.FileSourceDb.Line.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .org.sonar.server.source.db.Line lines = 1;</code>
+       */
+      public java.util.List<org.sonar.server.source.db.FileSourceDb.Line.Builder> 
+           getLinesBuilderList() {
+        return getLinesFieldBuilder().getBuilderList();
+      }
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.sonar.server.source.db.FileSourceDb.Line, org.sonar.server.source.db.FileSourceDb.Line.Builder, org.sonar.server.source.db.FileSourceDb.LineOrBuilder> 
+          getLinesFieldBuilder() {
+        if (linesBuilder_ == null) {
+          linesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+              org.sonar.server.source.db.FileSourceDb.Line, org.sonar.server.source.db.FileSourceDb.Line.Builder, org.sonar.server.source.db.FileSourceDb.LineOrBuilder>(
+                  lines_,
+                  ((bitField0_ & 0x00000001) == 0x00000001),
+                  getParentForChildren(),
+                  isClean());
+          lines_ = null;
+        }
+        return linesBuilder_;
+      }
+
+      // @@protoc_insertion_point(builder_scope:org.sonar.server.source.db.Data)
+    }
+
+    static {
+      defaultInstance = new Data(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:org.sonar.server.source.db.Data)
+  }
+
+  // Reflection metadata for the Line and Data message types; assigned by the
+  // static initializer at the bottom of this generated file.
+  private static final com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_sonar_server_source_db_Line_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_sonar_server_source_db_Line_fieldAccessorTable;
+  private static final com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_sonar_server_source_db_Data_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_sonar_server_source_db_Data_fieldAccessorTable;
+
+  /** Returns the file-level descriptor for file_source_db.proto (generated). */
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    // Serialized FileDescriptorProto of file_source_db.proto, embedded by
+    // protoc as escaped string data and decoded at class-load time.
+    java.lang.String[] descriptorData = {
+      "\n\024file_source_db.proto\022\032org.sonar.server" +
+      ".source.db\"\220\003\n\004Line\022\014\n\004line\030\001 \001(\005\022\016\n\006sou" +
+      "rce\030\002 \001(\t\022\024\n\014scm_revision\030\003 \001(\t\022\022\n\nscm_a" +
+      "uthor\030\004 \001(\t\022\020\n\010scm_date\030\005 \001(\003\022\024\n\014ut_line" +
+      "_hits\030\006 \001(\005\022\025\n\rut_conditions\030\007 \001(\005\022\035\n\025ut" +
+      "_covered_conditions\030\010 \001(\005\022\024\n\014it_line_hit" +
+      "s\030\t \001(\005\022\025\n\rit_conditions\030\n \001(\005\022\035\n\025it_cov" +
+      "ered_conditions\030\013 \001(\005\022\031\n\021overall_line_hi" +
+      "ts\030\014 \001(\005\022\032\n\022overall_conditions\030\r \001(\005\022\"\n\032" +
+      "overall_covered_conditions\030\016 \001(\005\022\024\n\014high",
+      "lighting\030\017 \001(\t\022\017\n\007symbols\030\020 \001(\t\022\024\n\014dupli" +
+      "cations\030\021 \003(\005\"7\n\004Data\022/\n\005lines\030\001 \003(\0132 .o" +
+      "rg.sonar.server.source.db.LineB\002H\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+        new com.google.protobuf.Descriptors.FileDescriptor.    InternalDescriptorAssigner() {
+          public com.google.protobuf.ExtensionRegistry assignDescriptors(
+              com.google.protobuf.Descriptors.FileDescriptor root) {
+            descriptor = root;
+            return null;
+          }
+        };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+    // Wire the per-message descriptors and field accessor tables used by
+    // internalGetFieldAccessorTable() in Line and Data.
+    internal_static_org_sonar_server_source_db_Line_descriptor =
+      getDescriptor().getMessageTypes().get(0);
+    internal_static_org_sonar_server_source_db_Line_fieldAccessorTable = new
+      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+        internal_static_org_sonar_server_source_db_Line_descriptor,
+        new java.lang.String[] { "Line", "Source", "ScmRevision", "ScmAuthor", "ScmDate", "UtLineHits", "UtConditions", "UtCoveredConditions", "ItLineHits", "ItConditions", "ItCoveredConditions", "OverallLineHits", "OverallConditions", "OverallCoveredConditions", "Highlighting", "Symbols", "Duplications", });
+    internal_static_org_sonar_server_source_db_Data_descriptor =
+      getDescriptor().getMessageTypes().get(1);
+    internal_static_org_sonar_server_source_db_Data_fieldAccessorTable = new
+      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+        internal_static_org_sonar_server_source_db_Data_descriptor,
+        new java.lang.String[] { "Lines", });
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
diff --git a/sonar-batch-protocol/src/main/protobuf/file_source_db.proto b/sonar-batch-protocol/src/main/protobuf/file_source_db.proto
new file mode 100644 (file)
index 0000000..bdb0b87
--- /dev/null
@@ -0,0 +1,72 @@
+/*
+    SonarQube, open source software quality management tool.
+    Copyright (C) 2008-2015 SonarSource
+    mailto:contact AT sonarsource DOT com
+
+    SonarQube is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 3 of the License, or (at your option) any later version.
+
+    SonarQube is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public License
+    along with this program; if not, write to the Free Software Foundation,
+    Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+*/
+
+/*
+Notes
+
+  - "required" fields are not used as recommended by Google to keep forward-compatibility:
+    https://developers.google.com/protocol-buffers/docs/proto#simple
+
+  - the related Java files are not generated during build. Indeed the existing protoc maven
+    plugins require protobuf to be installed on boxes. That means that generated Java files
+    are updated and committed for each change (see src/main/gen-java).
+*/
+
+// structure of db column FILE_SOURCES.BINARY_DATA
+
+// Temporarily in sonar-batch-protocol as this db table
+// is still fed on batch-side. However generated sources
+// are already in correct package
+
+package org.sonar.server.source.db;
+option optimize_for = SPEED;
+
+// One line of a source file together with its per-line metadata.
+// Field numbers are part of the wire format and must never be reused.
+message Line {
+  // 1-based line number within the file
+  optional int32 line = 1;
+  // raw source of the line (BOM stripped, no line terminator)
+  optional string source = 2;
+
+  // SCM
+  optional string scm_revision = 3;
+  optional string scm_author = 4;
+  // last-commit datetime as epoch milliseconds
+  optional int64 scm_date = 5;
+
+  // unit tests
+  optional int32 ut_line_hits = 6;
+  optional int32 ut_conditions = 7;
+  optional int32 ut_covered_conditions = 8;
+
+  // integration tests
+  optional int32 it_line_hits = 9;
+  optional int32 it_conditions = 10;
+  optional int32 it_covered_conditions = 11;
+
+  // overall tests
+  optional int32 overall_line_hits = 12;
+  optional int32 overall_conditions = 13;
+  optional int32 overall_covered_conditions = 14;
+
+  // serialized syntax-highlighting ranges for this line
+  optional string highlighting = 15;
+  // serialized symbol-reference ranges for this line
+  optional string symbols = 16;
+  // ids of the duplication blocks that cover this line
+  repeated int32 duplications = 17;
+}
+
+// Root message stored in FILE_SOURCES.BINARY_DATA: all lines of one file, in order.
+message Data {
+  repeated Line lines = 1;
+}
diff --git a/sonar-batch/src/main/java/org/sonar/batch/index/SourceDataFactory.java b/sonar-batch/src/main/java/org/sonar/batch/index/SourceDataFactory.java
new file mode 100644 (file)
index 0000000..e3e9f33
--- /dev/null
@@ -0,0 +1,376 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.batch.index;
+
+import com.google.common.base.CharMatcher;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Multimap;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.sonar.api.BatchComponent;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
+import org.sonar.api.batch.sensor.symbol.Symbol;
+import org.sonar.api.measures.CoreMetrics;
+import org.sonar.api.measures.Measure;
+import org.sonar.api.utils.DateUtils;
+import org.sonar.api.utils.KeyValueFormat;
+import org.sonar.batch.duplication.DuplicationCache;
+import org.sonar.batch.highlighting.SyntaxHighlightingData;
+import org.sonar.batch.highlighting.SyntaxHighlightingRule;
+import org.sonar.batch.scan.measure.MeasureCache;
+import org.sonar.batch.source.CodeColorizers;
+import org.sonar.batch.symbol.SymbolData;
+import org.sonar.core.source.SnapshotDataTypes;
+import org.sonar.core.source.db.FileSourceDto;
+import org.sonar.server.source.db.FileSourceDb;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Consolidate different caches for the export of report to server.
+ * @see org.sonar.server.source.db.FileSourceDb
+ */
+/**
+ * Consolidates the batch-side caches (line measures, syntax highlighting, symbol
+ * references and duplications) into the protobuf structure that is serialized
+ * into the FILE_SOURCES.BINARY_DATA column.
+ */
+public class SourceDataFactory implements BatchComponent {
+
+  // Byte-order mark, stripped from each source line before storage
+  private static final String BOM = "\uFEFF";
+
+  private final MeasureCache measureCache;
+  private final ComponentDataCache componentDataCache;
+  private final DuplicationCache duplicationCache;
+  private final CodeColorizers codeColorizers;
+
+  public SourceDataFactory(MeasureCache measureCache, ComponentDataCache componentDataCache,
+    DuplicationCache duplicationCache, CodeColorizers codeColorizers) {
+    this.measureCache = measureCache;
+    this.componentDataCache = componentDataCache;
+    this.duplicationCache = duplicationCache;
+    this.codeColorizers = codeColorizers;
+  }
+
+  /**
+   * Builds the per-line data for the given file and serializes it through
+   * {@link FileSourceDto#encodeData(FileSourceDb.Data)}.
+   *
+   * @throws IOException if the file cannot be read from disk
+   */
+  public byte[] consolidateData(DefaultInputFile inputFile) throws IOException {
+    FileSourceDb.Data.Builder dataBuilder = createForSource(inputFile);
+    applyLineMeasures(inputFile, dataBuilder);
+    applyDuplications(inputFile.key(), dataBuilder);
+    applyHighlighting(inputFile, dataBuilder);
+    applySymbolReferences(inputFile, dataBuilder);
+    return FileSourceDto.encodeData(dataBuilder.build());
+  }
+
+  /**
+   * Reads the file from disk and creates one Line entry per physical line,
+   * numbered from 1, with the BOM removed from each line.
+   */
+  FileSourceDb.Data.Builder createForSource(DefaultInputFile inputFile) throws IOException {
+    FileSourceDb.Data.Builder result = FileSourceDb.Data.newBuilder();
+    List<String> lines = FileUtils.readLines(inputFile.file(), inputFile.encoding());
+    // Missing empty last line
+    if (lines.size() == inputFile.lines() - 1) {
+      lines.add("");
+    }
+    for (int lineIdx = 1; lineIdx <= lines.size(); lineIdx++) {
+      String s = CharMatcher.anyOf(BOM).removeFrom(lines.get(lineIdx - 1));
+      FileSourceDb.Line.Builder linesBuilder = result.addLinesBuilder();
+      linesBuilder.setLine(lineIdx).setSource(s);
+    }
+    return result;
+  }
+
+  // Maps every supported line-oriented measure (SCM data, UT/IT/overall coverage)
+  // onto the corresponding field of the protobuf Line builder.
+  void applyLineMeasures(DefaultInputFile file, FileSourceDb.Data.Builder dataBuilder) {
+    applyLineMeasure(file.key(), CoreMetrics.SCM_AUTHORS_BY_LINE_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setScmAuthor(value);
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.SCM_REVISIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setScmRevision(value);
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        // NOTE(review): parseDateTimeQuietly may return null on malformed input,
+        // which would throw an NPE here — assumes upstream always stores valid datetimes
+        lineBuilder.setScmDate(DateUtils.parseDateTimeQuietly(value).getTime());
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setUtLineHits(Integer.parseInt(value));
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setUtConditions(Integer.parseInt(value));
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setUtCoveredConditions(Integer.parseInt(value));
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.IT_COVERAGE_LINE_HITS_DATA_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setItLineHits(Integer.parseInt(value));
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.IT_CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setItConditions(Integer.parseInt(value));
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setItCoveredConditions(Integer.parseInt(value));
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setOverallLineHits(Integer.parseInt(value));
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.OVERALL_CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setOverallConditions(Integer.parseInt(value));
+      }
+    });
+    applyLineMeasure(file.key(), CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setOverallCoveredConditions(Integer.parseInt(value));
+      }
+    });
+  }
+
+  /**
+   * Parses one "line=value" measure (KeyValueFormat) and applies each non-empty
+   * value to the matching line builder. Measure line numbers are 1-based and are
+   * converted to the builder's 0-based index.
+   */
+  void applyLineMeasure(String inputFileKey, String metricKey, FileSourceDb.Data.Builder to, MeasureOperation op) {
+    Iterable<Measure> measures = measureCache.byMetric(inputFileKey, metricKey);
+    if (measures != null) {
+      for (Measure measure : measures) {
+        Map<Integer, String> lineMeasures = KeyValueFormat.parseIntString((String) measure.value());
+        for (Map.Entry<Integer, String> lineMeasure : lineMeasures.entrySet()) {
+          String value = lineMeasure.getValue();
+          if (StringUtils.isNotEmpty(value)) {
+            FileSourceDb.Line.Builder lineBuilder = to.getLinesBuilder(lineMeasure.getKey() - 1);
+            op.apply(value, lineBuilder);
+          }
+        }
+      }
+    }
+  }
+
+  // Callback applying one parsed measure value to a line builder.
+  static interface MeasureOperation {
+    void apply(String value, FileSourceDb.Line.Builder lineBuilder);
+  }
+
+  /**
+   * Serializes syntax highlighting ranges per line. Falls back to CodeColorizers
+   * when the sensor did not provide highlighting data for a file with a language.
+   */
+  void applyHighlighting(DefaultInputFile inputFile, FileSourceDb.Data.Builder to) {
+    SyntaxHighlightingData highlighting = componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING);
+    String language = inputFile.language();
+    if (highlighting == null && language != null) {
+      highlighting = codeColorizers.toSyntaxHighlighting(inputFile.file(), inputFile.encoding(), language);
+    }
+    if (highlighting == null) {
+      return;
+    }
+    // One accumulator per line; entries stay null for lines without highlighting
+    StringBuilder[] highlightingPerLine = new StringBuilder[inputFile.lines()];
+    RuleItemWriter ruleItemWriter = new RuleItemWriter();
+    int currentLineIdx = 1;
+    for (SyntaxHighlightingRule rule : highlighting.syntaxHighlightingRuleSet()) {
+      while (currentLineIdx < inputFile.lines() && rule.getStartPosition() >= inputFile.originalLineOffsets()[currentLineIdx]) {
+        // This rule starts on another line so advance
+        currentLineIdx++;
+      }
+      // Now we know current rule starts on current line
+      writeDataPerLine(inputFile.originalLineOffsets(), rule, rule.getStartPosition(), rule.getEndPosition(), highlightingPerLine, currentLineIdx, ruleItemWriter);
+    }
+    for (int i = 0; i < highlightingPerLine.length; i++) {
+      StringBuilder sb = highlightingPerLine[i];
+      if (sb != null) {
+        // i is 0-based, matching the builder index of line i+1
+        to.getLinesBuilder(i).setHighlighting(sb.toString());
+      }
+    }
+  }
+
+  /**
+   * Serializes symbol declarations and references per line. Symbol ids are assigned
+   * in order of declaration start offset so equal input always yields equal output.
+   */
+  void applySymbolReferences(DefaultInputFile file, FileSourceDb.Data.Builder to) {
+    SymbolData symbolRefs = componentDataCache.getData(file.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING);
+    if (symbolRefs != null) {
+      StringBuilder[] refsPerLine = new StringBuilder[file.lines()];
+      int symbolId = 1;
+      List<Symbol> symbols = new ArrayList<Symbol>(symbolRefs.referencesBySymbol().keySet());
+      // Sort symbols to avoid false variation that would lead to an unnecessary update
+      Collections.sort(symbols, new Comparator<Symbol>() {
+        @Override
+        public int compare(Symbol o1, Symbol o2) {
+          return o1.getDeclarationStartOffset() - o2.getDeclarationStartOffset();
+        }
+      });
+      for (Symbol symbol : symbols) {
+        int declarationStartOffset = symbol.getDeclarationStartOffset();
+        int declarationEndOffset = symbol.getDeclarationEndOffset();
+        int length = declarationEndOffset - declarationStartOffset;
+        addSymbol(symbolId, declarationStartOffset, declarationEndOffset, file.originalLineOffsets(), refsPerLine);
+        for (Integer referenceStartOffset : symbolRefs.referencesBySymbol().get(symbol)) {
+          if (referenceStartOffset == declarationStartOffset) {
+            // Ignore old API that used to store reference as first declaration
+            continue;
+          }
+          // References are assumed to have the same length as the declaration
+          addSymbol(symbolId, referenceStartOffset, referenceStartOffset + length, file.originalLineOffsets(), refsPerLine);
+        }
+        symbolId++;
+      }
+      for (int i = 0; i < refsPerLine.length; i++) {
+        StringBuilder sb = refsPerLine[i];
+        if (sb != null) {
+          to.getLinesBuilder(i).setSymbols(sb.toString());
+        }
+      }
+    }
+  }
+
+  // Writes one symbol occurrence, splitting it across lines when it spans several.
+  private void addSymbol(int symbolId, int startOffset, int endOffset, long[] originalLineOffsets, StringBuilder[] result) {
+    int startLine = binarySearchLine(startOffset, originalLineOffsets);
+    writeDataPerLine(originalLineOffsets, symbolId, startOffset, endOffset, result, startLine, new SymbolItemWriter());
+  }
+
+  /**
+   * Returns the 1-based line containing the given global offset.
+   * Rounding the midpoint up guarantees progress when begin==end-1, since the
+   * "else" branch keeps begin at mid.
+   */
+  private int binarySearchLine(int declarationStartOffset, long[] originalLineOffsets) {
+    int begin = 0;
+    int end = originalLineOffsets.length - 1;
+    while (begin < end) {
+      int mid = (int) Math.round((begin + end) / 2D);
+      if (declarationStartOffset < originalLineOffsets[mid]) {
+        end = mid - 1;
+      } else {
+        begin = mid;
+      }
+    }
+    return begin + 1;
+  }
+
+  /**
+   * Writes one item (highlighting rule or symbol id) that starts on startLine,
+   * emitting one range per line it spans; offsets are rebased to each line start.
+   */
+  private <G> void writeDataPerLine(long[] originalLineOffsets, G item, int globalStartOffset, int globalEndOffset, StringBuilder[] dataPerLine, int startLine,
+    RangeItemWriter<G> writer) {
+    int currentLineIdx = startLine;
+    // We know current item starts on current line
+    long ruleStartOffsetCurrentLine = globalStartOffset;
+    while (currentLineIdx < originalLineOffsets.length && globalEndOffset >= originalLineOffsets[currentLineIdx]) {
+      // item continue on next line so write current line and continue on next line with same item
+      writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine - originalLineOffsets[currentLineIdx - 1], originalLineOffsets[currentLineIdx]
+        - originalLineOffsets[currentLineIdx - 1], writer);
+      currentLineIdx++;
+      ruleStartOffsetCurrentLine = originalLineOffsets[currentLineIdx - 1];
+    }
+    // item ends on current line
+    writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine - originalLineOffsets[currentLineIdx - 1], globalEndOffset
+      - originalLineOffsets[currentLineIdx - 1], writer);
+  }
+
+  // Appends one line-local range to the line's accumulator, lazily creating it.
+  private <G> void writeItem(G item, StringBuilder[] dataPerLine, int currentLineIdx, long startLineOffset, long endLineOffset, RangeItemWriter<G> writer) {
+    if (startLineOffset == endLineOffset) {
+      // Do not store empty items
+      return;
+    }
+    if (dataPerLine[currentLineIdx - 1] == null) {
+      dataPerLine[currentLineIdx - 1] = new StringBuilder();
+    }
+    StringBuilder currentLineSb = dataPerLine[currentLineIdx - 1];
+    writer.writeItem(currentLineSb, startLineOffset, endLineOffset, item);
+  }
+
+  private static interface RangeItemWriter<G> {
+    /**
+     * Write item on a single line
+     */
+    void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, G item);
+  }
+
+  // Serializes a highlighting rule as "start,end,cssClass" ranges separated by RULE_SEPARATOR.
+  private static class RuleItemWriter implements RangeItemWriter<SyntaxHighlightingRule> {
+    @Override
+    public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, SyntaxHighlightingRule item) {
+      if (currentLineSb.length() > 0) {
+        currentLineSb.append(SyntaxHighlightingData.RULE_SEPARATOR);
+      }
+      currentLineSb.append(startLineOffset)
+        .append(SyntaxHighlightingData.FIELD_SEPARATOR)
+        .append(endLineOffset)
+        .append(SyntaxHighlightingData.FIELD_SEPARATOR)
+        .append(item.getTextType().cssClass());
+    }
+
+  }
+
+  // Serializes a symbol occurrence as "start,end,symbolId" ranges separated by SYMBOL_SEPARATOR.
+  private static class SymbolItemWriter implements RangeItemWriter<Integer> {
+    @Override
+    public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, Integer symbolId) {
+      if (currentLineSb.length() > 0) {
+        currentLineSb.append(SymbolData.SYMBOL_SEPARATOR);
+      }
+      currentLineSb.append(startLineOffset)
+        .append(SymbolData.FIELD_SEPARATOR)
+        .append(endLineOffset)
+        .append(SymbolData.FIELD_SEPARATOR)
+        .append(symbolId);
+    }
+  }
+
+  /**
+   * Records, for each line, the ids of the duplication blocks covering it.
+   * Block ids are assigned sequentially: origin block first, then duplicates
+   * located in the same file. Iterated entries are removed to save memory.
+   */
+  void applyDuplications(String inputFileKey, FileSourceDb.Data.Builder to) {
+    List<DuplicationGroup> groups = duplicationCache.byComponent(inputFileKey);
+    if (groups != null) {
+      Multimap<Integer, Integer> duplicationsPerLine = ArrayListMultimap.create();
+      int blockId = 1;
+      for (Iterator<DuplicationGroup> it = groups.iterator(); it.hasNext();) {
+        DuplicationGroup group = it.next();
+        addBlock(blockId, group.originBlock(), duplicationsPerLine);
+        blockId++;
+        for (Iterator<DuplicationGroup.Block> dupsIt = group.duplicates().iterator(); dupsIt.hasNext();) {
+          DuplicationGroup.Block dups = dupsIt.next();
+          if (inputFileKey.equals(dups.resourceKey())) {
+            addBlock(blockId, dups, duplicationsPerLine);
+            blockId++;
+          }
+          // Save memory
+          dupsIt.remove();
+        }
+        // Save memory
+        it.remove();
+      }
+      for (Map.Entry<Integer, Collection<Integer>> entry : duplicationsPerLine.asMap().entrySet()) {
+        to.getLinesBuilder(entry.getKey() - 1).addAllDuplications(entry.getValue());
+      }
+    }
+  }
+
+  // Marks every line of the block (startLine, length) as covered by blockId.
+  private void addBlock(int blockId, DuplicationGroup.Block block, Multimap<Integer, Integer> dupPerLine) {
+    int currentLine = block.startLine();
+    for (int i = 0; i < block.length(); i++) {
+      dupPerLine.put(currentLine, blockId);
+      currentLine++;
+    }
+  }
+}
index 76fdd9015f5c8c403e578bacfff3db2a13406cb6..a7a18548418a5cf34eec187313806813a6ab0677 100644 (file)
  */
 package org.sonar.batch.index;
 
-import com.google.common.base.CharMatcher;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
 import org.apache.ibatis.session.ResultContext;
 import org.apache.ibatis.session.ResultHandler;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.sonar.api.batch.fs.InputFile;
 import org.sonar.api.batch.fs.InputPath;
 import org.sonar.api.batch.fs.internal.DefaultInputFile;
-import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
-import org.sonar.api.batch.sensor.duplication.DuplicationGroup.Block;
-import org.sonar.api.batch.sensor.symbol.Symbol;
-import org.sonar.api.measures.CoreMetrics;
-import org.sonar.api.measures.Measure;
-import org.sonar.api.utils.KeyValueFormat;
 import org.sonar.api.utils.System2;
-import org.sonar.api.utils.text.CsvWriter;
 import org.sonar.batch.ProjectTree;
-import org.sonar.batch.duplication.DuplicationCache;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
-import org.sonar.batch.highlighting.SyntaxHighlightingRule;
 import org.sonar.batch.scan.filesystem.InputPathCache;
-import org.sonar.batch.scan.measure.MeasureCache;
-import org.sonar.batch.source.CodeColorizers;
-import org.sonar.batch.symbol.SymbolData;
 import org.sonar.core.persistence.DbSession;
 import org.sonar.core.persistence.MyBatis;
-import org.sonar.core.source.SnapshotDataTypes;
 import org.sonar.core.source.db.FileSourceDto;
 import org.sonar.core.source.db.FileSourceMapper;
 
 import javax.annotation.CheckForNull;
-import javax.annotation.Nullable;
 
 import java.io.IOException;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
 import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
 import java.util.Map;
 
 public class SourcePersister implements ScanPersister {
 
-  private static final Logger LOG = LoggerFactory.getLogger(SourcePersister.class);
-
-  private static final String BOM = "\uFEFF";
-  private final InputPathCache inputPathCache;
   private final MyBatis mybatis;
-  private final MeasureCache measureCache;
-  private final ComponentDataCache componentDataCache;
   private final System2 system2;
   private final ProjectTree projectTree;
   private final ResourceCache resourceCache;
-  private CodeColorizers codeColorizers;
-  private DuplicationCache duplicationCache;
+  private final InputPathCache inputPathCache;
+  private final SourceDataFactory dataFactory;
 
-  public SourcePersister(InputPathCache inputPathCache,
-    MyBatis mybatis, MeasureCache measureCache, ComponentDataCache componentDataCache, ProjectTree projectTree, System2 system2,
-    ResourceCache resourceCache, CodeColorizers codeColorizers, DuplicationCache duplicationCache) {
+  public SourcePersister(InputPathCache inputPathCache, MyBatis mybatis, System2 system2,
+    ProjectTree projectTree, ResourceCache resourceCache, SourceDataFactory dataFactory) {
     this.inputPathCache = inputPathCache;
     this.mybatis = mybatis;
-    this.measureCache = measureCache;
-    this.componentDataCache = componentDataCache;
-    this.projectTree = projectTree;
     this.system2 = system2;
+    this.projectTree = projectTree;
     this.resourceCache = resourceCache;
-    this.codeColorizers = codeColorizers;
-    this.duplicationCache = duplicationCache;
+    this.dataFactory = dataFactory;
   }
 
   @Override
@@ -102,22 +63,19 @@ public class SourcePersister implements ScanPersister {
     // Don't use batch insert for file_sources since keeping all data in memory can produce OOM for big files
     try (DbSession session = mybatis.openSession(false)) {
 
-      final Map<String, FileSourceDto> fileSourceDtoByFileUuid = new HashMap<String, FileSourceDto>();
-
-      session.select("org.sonar.core.source.db.FileSourceMapper.selectAllFileDataHashByProject", projectTree.getRootProject().getUuid(), new ResultHandler() {
-
+      final Map<String, FileSourceDto> previousDtosByUuid = new HashMap<>();
+      session.select("org.sonar.core.source.db.FileSourceMapper.selectHashesForProject", projectTree.getRootProject().getUuid(), new ResultHandler() {
         @Override
         public void handleResult(ResultContext context) {
           FileSourceDto dto = (FileSourceDto) context.getResultObject();
-          fileSourceDtoByFileUuid.put(dto.getFileUuid(), dto);
+          previousDtosByUuid.put(dto.getFileUuid(), dto);
         }
       });
 
       FileSourceMapper mapper = session.getMapper(FileSourceMapper.class);
-
       for (InputPath inputPath : inputPathCache.all()) {
-        if (inputPath instanceof InputFile) {
-          persist(session, mapper, inputPath, fileSourceDtoByFileUuid);
+        if (inputPath instanceof DefaultInputFile) {
+          persist(session, mapper, (DefaultInputFile) inputPath, previousDtosByUuid);
         }
       }
     } catch (Exception e) {
@@ -126,43 +84,36 @@ public class SourcePersister implements ScanPersister {
 
   }
 
-  private void persist(DbSession session, FileSourceMapper mapper, InputPath inputPath, Map<String, FileSourceDto> fileSourceDtoByFileUuid) {
-    DefaultInputFile inputFile = (DefaultInputFile) inputPath;
-    LOG.debug("Processing {}", inputFile.absolutePath());
-    org.sonar.api.resources.File file = (org.sonar.api.resources.File) resourceCache.get(inputFile.key()).resource();
-    String fileUuid = file.getUuid();
-    FileSourceDto previous = fileSourceDtoByFileUuid.get(fileUuid);
-    String newData = getSourceData(inputFile);
-    String newDataHash = newData != null ? DigestUtils.md5Hex(newData) : "0";
-    Date now = system2.newDate();
-    try {
-      if (previous == null) {
-        FileSourceDto newFileSource = new FileSourceDto()
-          .setProjectUuid(projectTree.getRootProject().getUuid())
-          .setFileUuid(fileUuid)
-          .setData(newData)
-          .setDataHash(newDataHash)
+  private void persist(DbSession session, FileSourceMapper mapper, DefaultInputFile inputFile, Map<String, FileSourceDto> previousDtosByUuid) {
+    String fileUuid = resourceCache.get(inputFile.key()).resource().getUuid();
+
+    byte[] data = computeData(inputFile);
+    String dataHash = DigestUtils.md5Hex(data);
+    FileSourceDto previousDto = previousDtosByUuid.get(fileUuid);
+    if (previousDto == null) {
+      FileSourceDto dto = new FileSourceDto()
+        .setProjectUuid(projectTree.getRootProject().getUuid())
+        .setFileUuid(fileUuid)
+        .setBinaryData(data)
+        .setDataHash(dataHash)
+        .setSrcHash(inputFile.hash())
+        .setLineHashes(lineHashesAsMd5Hex(inputFile))
+        .setCreatedAt(system2.now())
+        .setUpdatedAt(system2.now());
+      mapper.insert(dto);
+      session.commit();
+    } else {
+      // Update only if data_hash has changed or if src_hash is missing (progressive migration)
+      if (!dataHash.equals(previousDto.getDataHash()) || !inputFile.hash().equals(previousDto.getSrcHash())) {
+        previousDto
+          .setBinaryData(data)
+          .setDataHash(dataHash)
           .setSrcHash(inputFile.hash())
           .setLineHashes(lineHashesAsMd5Hex(inputFile))
-          .setCreatedAt(now.getTime())
-          .setUpdatedAt(now.getTime());
-        mapper.insert(newFileSource);
+          .setUpdatedAt(system2.now());
+        mapper.update(previousDto);
         session.commit();
-      } else {
-        // Update only if data_hash has changed or if src_hash is missing (progressive migration)
-        if (!newDataHash.equals(previous.getDataHash()) || !inputFile.hash().equals(previous.getSrcHash())) {
-          previous
-            .setData(newData)
-            .setLineHashes(lineHashesAsMd5Hex(inputFile))
-            .setDataHash(newDataHash)
-            .setSrcHash(inputFile.hash())
-            .setUpdatedAt(now.getTime());
-          mapper.update(previous);
-          session.commit();
-        }
       }
-    } catch (Exception e) {
-      throw new IllegalStateException("Unable to save file sources for " + inputPath.absolutePath(), e);
     }
   }
 
@@ -182,284 +133,11 @@ public class SourcePersister implements ScanPersister {
     return result.toString();
   }
 
-  @CheckForNull
-  String getSourceData(DefaultInputFile file) {
-    if (file.lines() == 0) {
-      return null;
-    }
-    List<String> lines;
+  private byte[] computeData(DefaultInputFile inputFile) {
     try {
-      lines = FileUtils.readLines(file.file(), file.encoding());
+      return dataFactory.consolidateData(inputFile);
     } catch (IOException e) {
-      throw new IllegalStateException("Unable to read file", e);
-    }
-    // Missing empty last line
-    if (lines.size() == file.lines() - 1) {
-      lines.add("");
-    }
-    Map<Integer, String> authorsByLine = getLineMetric(file, CoreMetrics.SCM_AUTHORS_BY_LINE_KEY);
-    Map<Integer, String> revisionsByLine = getLineMetric(file, CoreMetrics.SCM_REVISIONS_BY_LINE_KEY);
-    Map<Integer, String> datesByLine = getLineMetric(file, CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY);
-    Map<Integer, String> utHitsByLine = getLineMetric(file, CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY);
-    Map<Integer, String> utCondByLine = getLineMetric(file, CoreMetrics.CONDITIONS_BY_LINE_KEY);
-    Map<Integer, String> utCoveredCondByLine = getLineMetric(file, CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY);
-    Map<Integer, String> itHitsByLine = getLineMetric(file, CoreMetrics.IT_COVERAGE_LINE_HITS_DATA_KEY);
-    Map<Integer, String> itCondByLine = getLineMetric(file, CoreMetrics.IT_CONDITIONS_BY_LINE_KEY);
-    Map<Integer, String> itCoveredCondByLine = getLineMetric(file, CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE_KEY);
-    Map<Integer, String> overallHitsByLine = getLineMetric(file, CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA_KEY);
-    Map<Integer, String> overallCondByLine = getLineMetric(file, CoreMetrics.OVERALL_CONDITIONS_BY_LINE_KEY);
-    Map<Integer, String> overallCoveredCondByLine = getLineMetric(file, CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE_KEY);
-    SyntaxHighlightingData highlighting = loadHighlighting(file);
-    String[] highlightingPerLine = computeHighlightingPerLine(file, highlighting);
-    String[] symbolReferencesPerLine = computeSymbolReferencesPerLine(file, loadSymbolReferences(file));
-    String[] duplicationsPerLine = computeDuplicationsPerLine(file, duplicationCache.byComponent(file.key()));
-
-    StringWriter writer = new StringWriter(file.lines() * 16);
-    CsvWriter csv = CsvWriter.of(writer);
-    for (int lineIdx = 1; lineIdx <= file.lines(); lineIdx++) {
-      csv.values(revisionsByLine.get(lineIdx), authorsByLine.get(lineIdx), datesByLine.get(lineIdx),
-        utHitsByLine.get(lineIdx), utCondByLine.get(lineIdx), utCoveredCondByLine.get(lineIdx),
-        itHitsByLine.get(lineIdx), itCondByLine.get(lineIdx), itCoveredCondByLine.get(lineIdx),
-        overallHitsByLine.get(lineIdx), overallCondByLine.get(lineIdx), overallCoveredCondByLine.get(lineIdx),
-        highlightingPerLine[lineIdx - 1], symbolReferencesPerLine[lineIdx - 1], duplicationsPerLine[lineIdx - 1],
-        CharMatcher.anyOf(BOM).removeFrom(lines.get(lineIdx - 1)));
-      // Free memory
-      revisionsByLine.remove(lineIdx);
-      authorsByLine.remove(lineIdx);
-      datesByLine.remove(lineIdx);
-      utHitsByLine.remove(lineIdx);
-      utCondByLine.remove(lineIdx);
-      utCoveredCondByLine.remove(lineIdx);
-      itHitsByLine.remove(lineIdx);
-      itCondByLine.remove(lineIdx);
-      itCoveredCondByLine.remove(lineIdx);
-      overallHitsByLine.remove(lineIdx);
-      overallCondByLine.remove(lineIdx);
-      overallCoveredCondByLine.remove(lineIdx);
-      highlightingPerLine[lineIdx - 1] = null;
-      symbolReferencesPerLine[lineIdx - 1] = null;
-      duplicationsPerLine[lineIdx - 1] = null;
-      lines.set(lineIdx - 1, null);
-    }
-    csv.close();
-    return StringUtils.defaultIfEmpty(writer.toString(), null);
-  }
-
-  private String[] computeDuplicationsPerLine(DefaultInputFile file, List<DuplicationGroup> duplicationGroups) {
-    String[] result = new String[file.lines()];
-    if (duplicationGroups == null) {
-      return result;
-    }
-    List<DuplicationGroup> groups = new LinkedList<DuplicationGroup>(duplicationGroups);
-    StringBuilder[] dupPerLine = new StringBuilder[file.lines()];
-    int blockId = 1;
-    for (Iterator<DuplicationGroup> it = groups.iterator(); it.hasNext();) {
-      DuplicationGroup group = it.next();
-      addBlock(blockId, group.originBlock(), dupPerLine);
-      blockId++;
-      for (Iterator<Block> dupsIt = group.duplicates().iterator(); dupsIt.hasNext();) {
-        Block dups = dupsIt.next();
-        if (dups.resourceKey().equals(file.key())) {
-          addBlock(blockId, dups, dupPerLine);
-          blockId++;
-        }
-        // Save memory
-        dupsIt.remove();
-      }
-      // Save memory
-      it.remove();
-    }
-    for (int i = 0; i < file.lines(); i++) {
-      result[i] = dupPerLine[i] != null ? dupPerLine[i].toString() : null;
-      // Save memory
-      dupPerLine[i] = null;
-    }
-    return result;
-  }
-
-  private void addBlock(int blockId, Block block, StringBuilder[] dupPerLine) {
-    int currentLine = block.startLine();
-    for (int i = 0; i < block.length(); i++) {
-      if (dupPerLine[currentLine - 1] == null) {
-        dupPerLine[currentLine - 1] = new StringBuilder();
-      }
-      if (dupPerLine[currentLine - 1].length() > 0) {
-        dupPerLine[currentLine - 1].append(',');
-      }
-      dupPerLine[currentLine - 1].append(blockId);
-      currentLine++;
-    }
-
-  }
-
-  @CheckForNull
-  private SyntaxHighlightingData loadHighlighting(DefaultInputFile file) {
-    SyntaxHighlightingData highlighting = componentDataCache.getData(file.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING);
-    String language = file.language();
-    if (highlighting == null && language != null) {
-      highlighting = codeColorizers.toSyntaxHighlighting(file.file(), file.encoding(), language);
-    }
-    return highlighting;
-  }
-
-  @CheckForNull
-  private SymbolData loadSymbolReferences(DefaultInputFile file) {
-    return componentDataCache.getData(file.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING);
-  }
-
-  String[] computeHighlightingPerLine(DefaultInputFile file, @Nullable SyntaxHighlightingData highlighting) {
-    String[] result = new String[file.lines()];
-    if (highlighting == null) {
-      return result;
-    }
-    Iterable<SyntaxHighlightingRule> rules = highlighting.syntaxHighlightingRuleSet();
-    int currentLineIdx = 1;
-    StringBuilder[] highlightingPerLine = new StringBuilder[file.lines()];
-    for (SyntaxHighlightingRule rule : rules) {
-      while (currentLineIdx < file.lines() && rule.getStartPosition() >= file.originalLineOffsets()[currentLineIdx]) {
-        // This rule starts on another line so advance
-        currentLineIdx++;
-      }
-      // Now we know current rule starts on current line
-      writeDataPerLine(file.originalLineOffsets(), rule, rule.getStartPosition(), rule.getEndPosition(), highlightingPerLine, currentLineIdx, new RuleItemWriter());
-    }
-    for (int i = 0; i < file.lines(); i++) {
-      result[i] = highlightingPerLine[i] != null ? highlightingPerLine[i].toString() : null;
-    }
-    return result;
-  }
-
-  String[] computeSymbolReferencesPerLine(DefaultInputFile file, @Nullable SymbolData symbolRefs) {
-    String[] result = new String[file.lines()];
-    if (symbolRefs == null) {
-      return result;
-    }
-    StringBuilder[] symbolRefsPerLine = new StringBuilder[file.lines()];
-    long[] originalLineOffsets = file.originalLineOffsets();
-    int symbolId = 1;
-    List<Symbol> symbols = new ArrayList<Symbol>(symbolRefs.referencesBySymbol().keySet());
-    // Sort symbols to avoid false variation that would lead to an unnecessary update
-    Collections.sort(symbols, new Comparator<Symbol>() {
-      @Override
-      public int compare(Symbol o1, Symbol o2) {
-        return o1.getDeclarationStartOffset() - o2.getDeclarationStartOffset();
-      }
-    });
-    for (Symbol symbol : symbols) {
-      int declarationStartOffset = symbol.getDeclarationStartOffset();
-      int declarationEndOffset = symbol.getDeclarationEndOffset();
-      int length = declarationEndOffset - declarationStartOffset;
-      addSymbol(symbolId, declarationStartOffset, declarationEndOffset, originalLineOffsets, symbolRefsPerLine);
-      for (Integer referenceStartOffset : symbolRefs.referencesBySymbol().get(symbol)) {
-        if (referenceStartOffset == declarationStartOffset) {
-          // Ignore old API that used to store reference as first declaration
-          continue;
-        }
-        addSymbol(symbolId, referenceStartOffset, referenceStartOffset + length, originalLineOffsets, symbolRefsPerLine);
-      }
-      symbolId++;
-    }
-    for (int i = 0; i < file.lines(); i++) {
-      result[i] = symbolRefsPerLine[i] != null ? symbolRefsPerLine[i].toString() : null;
-    }
-    return result;
-  }
-
-  private void addSymbol(int symbolId, int startOffset, int endOffset, long[] originalLineOffsets, StringBuilder[] result) {
-    int startLine = binarySearchLine(startOffset, originalLineOffsets);
-    writeDataPerLine(originalLineOffsets, symbolId, startOffset, endOffset, result, startLine, new SymbolItemWriter());
-  }
-
-  private int binarySearchLine(int declarationStartOffset, long[] originalLineOffsets) {
-    int begin = 0;
-    int end = originalLineOffsets.length - 1;
-    while (begin < end) {
-      int mid = (int) Math.round((begin + end) / 2D);
-      if (declarationStartOffset < originalLineOffsets[mid]) {
-        end = mid - 1;
-      } else {
-        begin = mid;
-      }
-    }
-    return begin + 1;
-  }
-
-  private <G> void writeDataPerLine(long[] originalLineOffsets, G item, int globalStartOffset, int globalEndOffset, StringBuilder[] dataPerLine, int startLine,
-    RangeItemWriter<G> writer) {
-    int currentLineIdx = startLine;
-    // We know current item starts on current line
-    long ruleStartOffsetCurrentLine = globalStartOffset;
-    while (currentLineIdx < originalLineOffsets.length && globalEndOffset >= originalLineOffsets[currentLineIdx]) {
-      // item continue on next line so write current line and continue on next line with same item
-      writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine - originalLineOffsets[currentLineIdx - 1], originalLineOffsets[currentLineIdx]
-        - originalLineOffsets[currentLineIdx - 1], writer);
-      currentLineIdx++;
-      ruleStartOffsetCurrentLine = originalLineOffsets[currentLineIdx - 1];
-    }
-    // item ends on current line
-    writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine - originalLineOffsets[currentLineIdx - 1], globalEndOffset
-      - originalLineOffsets[currentLineIdx - 1], writer);
-  }
-
-  private <G> void writeItem(G item, StringBuilder[] dataPerLine, int currentLineIdx, long startLineOffset, long endLineOffset, RangeItemWriter<G> writer) {
-    if (startLineOffset == endLineOffset) {
-      // Do not store empty items
-      return;
-    }
-    if (dataPerLine[currentLineIdx - 1] == null) {
-      dataPerLine[currentLineIdx - 1] = new StringBuilder();
-    }
-    StringBuilder currentLineSb = dataPerLine[currentLineIdx - 1];
-    writer.writeItem(currentLineSb, startLineOffset, endLineOffset, item);
-  }
-
-  private static interface RangeItemWriter<G> {
-    /**
-     * Write item on a single line
-     */
-    void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, G item);
-  }
-
-  private static class RuleItemWriter implements RangeItemWriter<SyntaxHighlightingRule> {
-
-    @Override
-    public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, SyntaxHighlightingRule item) {
-      if (currentLineSb.length() > 0) {
-        currentLineSb.append(SyntaxHighlightingData.RULE_SEPARATOR);
-      }
-      currentLineSb.append(startLineOffset)
-        .append(SyntaxHighlightingData.FIELD_SEPARATOR)
-        .append(endLineOffset)
-        .append(SyntaxHighlightingData.FIELD_SEPARATOR)
-        .append(item.getTextType().cssClass());
-    }
-
-  }
-
-  private static class SymbolItemWriter implements RangeItemWriter<Integer> {
-
-    @Override
-    public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, Integer symbolId) {
-      if (currentLineSb.length() > 0) {
-        currentLineSb.append(SymbolData.SYMBOL_SEPARATOR);
-      }
-      currentLineSb.append(startLineOffset)
-        .append(SymbolData.FIELD_SEPARATOR)
-        .append(endLineOffset)
-        .append(SymbolData.FIELD_SEPARATOR)
-        .append(symbolId);
-    }
-
-  }
-
-  private Map<Integer, String> getLineMetric(DefaultInputFile file, String metricKey) {
-    Map<Integer, String> authorsByLine;
-    Iterator<Measure> authorsIt = measureCache.byMetric(file.key(), metricKey).iterator();
-    if (authorsIt.hasNext()) {
-      authorsByLine = KeyValueFormat.parseIntString((String) authorsIt.next().value());
-    } else {
-      authorsByLine = Collections.emptyMap();
+      throw new IllegalStateException("Fail to read file " + inputFile, e);
     }
-    return authorsByLine;
   }
 }
index 8edc824b1fdce2255d56e25035b822fe901f6232..30ef5615488ddb98b77fa5e885d9936356f6bcf5 100644 (file)
@@ -56,6 +56,7 @@ import org.sonar.batch.index.MeasurePersister;
 import org.sonar.batch.index.ResourceCache;
 import org.sonar.batch.index.ResourceKeyMigration;
 import org.sonar.batch.index.ResourcePersister;
+import org.sonar.batch.index.SourceDataFactory;
 import org.sonar.batch.index.SourcePersister;
 import org.sonar.batch.issue.DefaultProjectIssues;
 import org.sonar.batch.issue.IssueCache;
@@ -147,6 +148,7 @@ public class ProjectScanContainer extends ComponentContainer {
       Caches.class,
       ResourceCache.class,
       ComponentDataCache.class,
+      SourceDataFactory.class,
 
       // file system
       InputPathCache.class,
diff --git a/sonar-batch/src/test/java/org/sonar/batch/index/SourceDataFactoryTest.java b/sonar-batch/src/test/java/org/sonar/batch/index/SourceDataFactoryTest.java
new file mode 100644 (file)
index 0000000..821cb73
--- /dev/null
@@ -0,0 +1,305 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.batch.index;
+
+import com.google.common.base.Charsets;
+import com.google.common.collect.Lists;
+import org.apache.commons.io.FileUtils;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
+import org.sonar.api.batch.sensor.highlighting.TypeOfText;
+import org.sonar.api.measures.CoreMetrics;
+import org.sonar.api.measures.Measure;
+import org.sonar.api.measures.Metric;
+import org.sonar.batch.duplication.DuplicationCache;
+import org.sonar.batch.highlighting.SyntaxHighlightingData;
+import org.sonar.batch.highlighting.SyntaxHighlightingDataBuilder;
+import org.sonar.batch.scan.measure.MeasureCache;
+import org.sonar.batch.source.CodeColorizers;
+import org.sonar.batch.symbol.DefaultSymbolTableBuilder;
+import org.sonar.core.source.SnapshotDataTypes;
+import org.sonar.server.source.db.FileSourceDb;
+
+import java.io.File;
+import java.util.Arrays;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class SourceDataFactoryTest {
+
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
+
+  MeasureCache measureCache = mock(MeasureCache.class);
+  ComponentDataCache componentDataCache = mock(ComponentDataCache.class);
+  DuplicationCache duplicationCache = mock(DuplicationCache.class);
+  CodeColorizers colorizers = mock(CodeColorizers.class);
+  DefaultInputFile inputFile;
+  SourceDataFactory sut = new SourceDataFactory(measureCache, componentDataCache, duplicationCache, colorizers);
+  FileSourceDb.Data.Builder output;
+
+  @Before
+  public void setUp() throws Exception {
+    // generate a file with 3 lines
+    File file = temp.newFile();
+    inputFile = new DefaultInputFile("module_key", "src/Foo.java")
+      .setLines(3)
+      .setEncoding(Charsets.UTF_8.name())
+      .setFile(file);
+    FileUtils.write(file, "one\ntwo\nthree\n");
+    output = sut.createForSource(inputFile);
+  }
+
+  @Test
+  public void createForSource() throws Exception {
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLinesCount()).isEqualTo(3);
+    for (int index = 1; index <= 3; index++) {
+      assertThat(data.getLines(index - 1).getLine()).isEqualTo(index);
+    }
+  }
+
+  @Test
+  public void consolidateData() throws Exception {
+    byte[] bytes = sut.consolidateData(inputFile);
+    assertThat(bytes).isNotEmpty();
+  }
+
+  @Test
+  public void applyLineMeasure() throws Exception {
+    Metric metric = CoreMetrics.COVERAGE_LINE_HITS_DATA;
+    when(measureCache.byMetric("component_key", metric.key())).thenReturn(
+      // line 1 has 10 hits, ...
+      Arrays.asList(new Measure().setData("1=10;3=4").setMetric(metric)));
+
+    sut.applyLineMeasure("component_key", metric.key(), output, new SourceDataFactory.MeasureOperation() {
+      @Override
+      public void apply(String value, FileSourceDb.Line.Builder lineBuilder) {
+        lineBuilder.setUtLineHits(Integer.parseInt(value));
+      }
+    });
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).getUtLineHits()).isEqualTo(10);
+    assertThat(data.getLines(1).hasUtLineHits()).isFalse();
+    assertThat(data.getLines(2).getUtLineHits()).isEqualTo(4);
+  }
+
+  @Test
+  public void applyLineMeasures() throws Exception {
+    setupLineMeasure(CoreMetrics.SCM_AUTHORS_BY_LINE, "1=him;2=her");
+    setupLineMeasure(CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE, "1=2014-10-11T16:44:02+0100;2=2014-10-12T16:44:02+0100;3=2014-10-13T16:44:02+0100");
+    setupLineMeasure(CoreMetrics.SCM_REVISIONS_BY_LINE, "1=ABC;2=234;3=345");
+    setupLineMeasure(CoreMetrics.COVERAGE_LINE_HITS_DATA, "1=10;3=4");
+    setupLineMeasure(CoreMetrics.CONDITIONS_BY_LINE, "1=10;3=4");
+    setupLineMeasure(CoreMetrics.CONDITIONS_BY_LINE, "1=10;3=4");
+    setupLineMeasure(CoreMetrics.COVERED_CONDITIONS_BY_LINE, "1=10;3=4");
+    setupLineMeasure(CoreMetrics.IT_COVERAGE_LINE_HITS_DATA, "1=11;2=4");
+    setupLineMeasure(CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE, "1=10;3=4");
+    setupLineMeasure(CoreMetrics.IT_CONDITIONS_BY_LINE, "1=10;3=4");
+    setupLineMeasure(CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA, "1=10;3=4");
+    setupLineMeasure(CoreMetrics.OVERALL_CONDITIONS_BY_LINE, "1=10;3=4");
+    setupLineMeasure(CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE, "1=10;3=4");
+
+    sut.applyLineMeasures(inputFile, output);
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).getUtLineHits()).isEqualTo(10);
+    assertThat(data.getLines(0).getItLineHits()).isEqualTo(11);
+    assertThat(data.getLines(0).getScmRevision()).isEqualTo("ABC");
+    assertThat(data.getLines(0).getScmAuthor()).isEqualTo("him");
+
+    assertThat(data.getLines(1).hasUtLineHits()).isFalse();
+    assertThat(data.getLines(1).getItLineHits()).isEqualTo(4);
+    assertThat(data.getLines(1).getScmAuthor()).isEqualTo("her");
+
+    assertThat(data.getLines(2).getUtLineHits()).isEqualTo(4);
+    assertThat(data.getLines(2).hasScmAuthor()).isFalse();
+  }
+
+  private void setupLineMeasure(Metric metric, String dataPerLine) {
+    when(measureCache.byMetric(inputFile.key(), metric.key())).thenReturn(
+      Arrays.asList(new Measure().setData(dataPerLine).setMetric(metric)));
+  }
+
+
+  @Test
+  public void applyDuplications() throws Exception {
+    DuplicationGroup group1 = new DuplicationGroup(new DuplicationGroup.Block(inputFile.key(), 1, 1))
+      .addDuplicate(new DuplicationGroup.Block(inputFile.key(), 3, 1))
+      .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 1))
+      .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 1));
+    DuplicationGroup group2 = new DuplicationGroup(new DuplicationGroup.Block(inputFile.key(), 1, 2))
+      .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 2))
+      .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 2));
+    when(duplicationCache.byComponent(inputFile.key())).thenReturn(Lists.newArrayList(group1, group2));
+
+    sut.applyDuplications(inputFile.key(), output);
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).getDuplicationsList()).containsExactly(1, 3);
+    assertThat(data.getLines(1).getDuplicationsList()).containsExactly(3);
+    assertThat(data.getLines(2).getDuplicationsList()).containsExactly(2);
+  }
+
+  @Test
+  public void applyHighlighting_missing() throws Exception {
+    when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(null);
+
+    sut.applyHighlighting(inputFile, output);
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).hasHighlighting()).isFalse();
+    assertThat(data.getLines(1).hasHighlighting()).isFalse();
+    assertThat(data.getLines(2).hasHighlighting()).isFalse();
+  }
+
+  @Test
+  public void applyHighlighting() throws Exception {
+    SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
+      .registerHighlightingRule(0, 4, TypeOfText.ANNOTATION)
+      .registerHighlightingRule(4, 5, TypeOfText.COMMENT)
+      .registerHighlightingRule(7, 16, TypeOfText.CONSTANT)
+      .build();
+    when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting);
+    inputFile.setOriginalLineOffsets(new long[] {0, 4, 7});
+
+    sut.applyHighlighting(inputFile, output);
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,4,a");
+    assertThat(data.getLines(1).getHighlighting()).isEqualTo("0,1,cd");
+    assertThat(data.getLines(2).getHighlighting()).isEqualTo("0,9,c");
+  }
+
+  @Test
+  public void applyHighlighting_multiple_lines() throws Exception {
+    SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
+      .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
+      .registerHighlightingRule(4, 9, TypeOfText.COMMENT)
+      .registerHighlightingRule(10, 16, TypeOfText.CONSTANT)
+      .build();
+    when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting);
+    inputFile.setOriginalLineOffsets(new long[] {0, 4, 7});
+
+    sut.applyHighlighting(inputFile, output);
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,3,a");
+    assertThat(data.getLines(1).getHighlighting()).isEqualTo("0,3,cd");
+    assertThat(data.getLines(2).getHighlighting()).isEqualTo("0,2,cd;3,9,c");
+  }
+
+  @Test
+  public void applyHighlighting_nested_rules() throws Exception {
+    SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
+      .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
+      .registerHighlightingRule(4, 6, TypeOfText.COMMENT)
+      .registerHighlightingRule(7, 16, TypeOfText.CONSTANT)
+      .registerHighlightingRule(8, 15, TypeOfText.KEYWORD)
+      .build();
+    when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting);
+    inputFile.setOriginalLineOffsets(new long[] {0, 4, 7});
+
+    sut.applyHighlighting(inputFile, output);
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,3,a");
+    assertThat(data.getLines(1).getHighlighting()).isEqualTo("0,2,cd");
+    assertThat(data.getLines(2).getHighlighting()).isEqualTo("0,9,c;1,8,k");
+  }
+
+  @Test
+  public void applyHighlighting_nested_rules_and_multiple_lines() throws Exception {
+    SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
+      .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
+      .registerHighlightingRule(4, 6, TypeOfText.COMMENT)
+      .registerHighlightingRule(4, 16, TypeOfText.CONSTANT)
+      .registerHighlightingRule(8, 15, TypeOfText.KEYWORD)
+      .build();
+    when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting);
+    inputFile.setOriginalLineOffsets(new long[] {0, 4, 7});
+
+    sut.applyHighlighting(inputFile, output);
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,3,a");
+    assertThat(data.getLines(1).getHighlighting()).isEqualTo("0,3,c;0,2,cd");
+    assertThat(data.getLines(2).getHighlighting()).isEqualTo("0,9,c;1,8,k");
+  }
+
+  @Test
+  public void applySymbolReferences_missing() throws Exception {
+    when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING)).thenReturn(null);
+
+    sut.applySymbolReferences(inputFile, output);
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).hasSymbols()).isFalse();
+    assertThat(data.getLines(1).hasSymbols()).isFalse();
+    assertThat(data.getLines(2).hasSymbols()).isFalse();
+  }
+
+  @Test
+  public void applySymbolReferences() throws Exception {
+    DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(inputFile.key(), null);
+    org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2);
+    symbolBuilder.newReference(s1, 4);
+    symbolBuilder.newReference(s1, 11);
+    org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6);
+    symbolBuilder.newReference(s2, 0);
+    symbolBuilder.newReference(s2, 7);
+    when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING)).thenReturn(symbolBuilder.build());
+    inputFile.setOriginalLineOffsets(new long[] {0, 4, 7});
+
+    sut.applySymbolReferences(inputFile, output);
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).getSymbols()).isEqualTo("1,2,1;0,2,2");
+    assertThat(data.getLines(1).getSymbols()).isEqualTo("0,1,1;0,2,2");
+    assertThat(data.getLines(2).getSymbols()).isEqualTo("4,5,1;0,2,2");
+  }
+
+  @Test
+  public void applySymbolReferences_declaration_order_is_not_important() throws Exception {
+    DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(inputFile.key(), null);
+    org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6);
+    symbolBuilder.newReference(s2, 7);
+    symbolBuilder.newReference(s2, 0);
+    org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2);
+    symbolBuilder.newReference(s1, 11);
+    symbolBuilder.newReference(s1, 4);
+    when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING)).thenReturn(symbolBuilder.build());
+    inputFile.setOriginalLineOffsets(new long[] {0, 4, 7});
+
+    sut.applySymbolReferences(inputFile, output);
+
+    FileSourceDb.Data data = output.build();
+    assertThat(data.getLines(0).getSymbols()).isEqualTo("1,2,1;0,2,2");
+    assertThat(data.getLines(1).getSymbols()).isEqualTo("0,1,1;0,2,2");
+    assertThat(data.getLines(2).getSymbols()).isEqualTo("4,5,1;0,2,2");
+  }
+}
diff --git a/sonar-batch/src/test/java/org/sonar/batch/index/SourcePersisterTest.java b/sonar-batch/src/test/java/org/sonar/batch/index/SourcePersisterTest.java
deleted file mode 100644 (file)
index b23b40a..0000000
+++ /dev/null
@@ -1,428 +0,0 @@
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
- */
-package org.sonar.batch.index;
-
-import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.io.FileUtils;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-import org.sonar.api.batch.fs.InputPath;
-import org.sonar.api.batch.fs.internal.DefaultInputFile;
-import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
-import org.sonar.api.batch.sensor.highlighting.TypeOfText;
-import org.sonar.api.database.model.Snapshot;
-import org.sonar.api.measures.CoreMetrics;
-import org.sonar.api.measures.Measure;
-import org.sonar.api.resources.File;
-import org.sonar.api.resources.Project;
-import org.sonar.api.utils.DateUtils;
-import org.sonar.api.utils.System2;
-import org.sonar.batch.ProjectTree;
-import org.sonar.batch.duplication.DuplicationCache;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
-import org.sonar.batch.highlighting.SyntaxHighlightingDataBuilder;
-import org.sonar.batch.scan.filesystem.InputPathCache;
-import org.sonar.batch.scan.measure.MeasureCache;
-import org.sonar.batch.source.CodeColorizers;
-import org.sonar.batch.symbol.DefaultSymbolTableBuilder;
-import org.sonar.core.persistence.AbstractDaoTestCase;
-import org.sonar.core.source.SnapshotDataTypes;
-import org.sonar.core.source.db.FileSourceDao;
-import org.sonar.core.source.db.FileSourceDto;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Date;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-public class SourcePersisterTest extends AbstractDaoTestCase {
-
-  @Rule
-  public TemporaryFolder temp = new TemporaryFolder();
-
-  private SourcePersister sourcePersister;
-  private InputPathCache inputPathCache;
-  private ResourceCache resourceCache;
-  private ProjectTree projectTree;
-  private System2 system2;
-  private MeasureCache measureCache;
-  private ComponentDataCache componentDataCache;
-  private DuplicationCache duplicationCache;
-
-  private static final String PROJECT_KEY = "foo";
-
-  private java.io.File basedir;
-
-  @Before
-  public void before() throws IOException {
-    Snapshot snapshot = new Snapshot();
-    snapshot.setId(1000);
-    inputPathCache = mock(InputPathCache.class);
-    resourceCache = mock(ResourceCache.class);
-    projectTree = mock(ProjectTree.class);
-    system2 = mock(System2.class);
-    measureCache = mock(MeasureCache.class);
-    when(measureCache.byMetric(anyString(), anyString())).thenReturn(Collections.<org.sonar.api.measures.Measure>emptyList());
-    componentDataCache = mock(ComponentDataCache.class);
-    duplicationCache = mock(DuplicationCache.class);
-    sourcePersister = new SourcePersister(inputPathCache,
-      getMyBatis(), measureCache, componentDataCache, projectTree, system2,
-      resourceCache, mock(CodeColorizers.class), duplicationCache);
-    Project project = new Project(PROJECT_KEY);
-    project.setUuid("projectUuid");
-    when(projectTree.getRootProject()).thenReturn(project);
-    basedir = temp.newFolder();
-  }
-
-  @Test
-  public void testPersistUpdateWhenSrcHashIsMissing() throws Exception {
-    setupData("file_sources_missing_src_hash");
-    Date now = DateUtils.parseDateTime("2014-10-29T16:44:02+0100");
-    when(system2.newDate()).thenReturn(now);
-
-    String relativePathSame = "src/same.java";
-    java.io.File sameFile = new java.io.File(basedir, relativePathSame);
-    FileUtils.write(sameFile, "unchanged\ncontent");
-    DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame)
-      .setLines(2)
-      .setAbsolutePath(sameFile.getAbsolutePath())
-      .setHash("123456")
-      .setLineHashes(new byte[][] {md5("unchanged"), md5("content")});
-    when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew));
-
-    mockResourceCache(relativePathSame, PROJECT_KEY, "uuidsame");
-
-    sourcePersister.persist();
-    FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidsame");
-    assertThat(fileSourceDto.getCreatedAt()).isEqualTo(DateUtils.parseDateTime("2014-10-10T16:44:02+0200").getTime());
-    assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
-    assertThat(fileSourceDto.getSrcHash()).isEqualTo("123456");
-  }
-
-  @Test
-  public void testPersistDontTouchUnchanged() throws Exception {
-    setupData("file_sources");
-    when(system2.newDate()).thenReturn(DateUtils.parseDateTime("2014-10-29T16:44:02+0100"));
-
-    String relativePathSame = "src/same.java";
-    java.io.File sameFile = new java.io.File(basedir, relativePathSame);
-    FileUtils.write(sameFile, "unchanged\ncontent");
-    DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame).setLines(2)
-      .setAbsolutePath(sameFile.getAbsolutePath())
-      .setHash("123456")
-      .setLineHashes(new byte[][] {md5("unchanged"), md5("ncontent")});
-    when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew));
-
-    mockResourceCache(relativePathSame, PROJECT_KEY, "uuidsame");
-
-    sourcePersister.persist();
-    checkTables("testPersistDontTouchUnchanged", "file_sources");
-  }
-
-  @Test
-  public void testPersistUpdateChanged() throws Exception {
-    setupData("file_sources");
-    Date now = DateUtils.parseDateTime("2014-10-29T16:44:02+0100");
-    when(system2.newDate()).thenReturn(now);
-
-    String relativePathSame = "src/changed.java";
-    java.io.File sameFile = new java.io.File(basedir, relativePathSame);
-    FileUtils.write(sameFile, "changed\ncontent");
-    DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame).setLines(2)
-      .setAbsolutePath(sameFile.getAbsolutePath())
-      .setHash("123456")
-      .setLineHashes(new byte[][] {md5("changed"), md5("content")});
-    when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew));
-
-    mockResourceCache(relativePathSame, PROJECT_KEY, "uuidsame");
-
-    sourcePersister.persist();
-
-    FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidsame");
-    assertThat(fileSourceDto.getCreatedAt()).isEqualTo(DateUtils.parseDateTime("2014-10-10T16:44:02+0200").getTime());
-    assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
-    assertThat(fileSourceDto.getData()).isEqualTo(
-      ",,,,,,,,,,,,,,,changed\r\n,,,,,,,,,,,,,,,content\r\n");
-    assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("changed") + "\n" + md5Hex("content"));
-    assertThat(fileSourceDto.getDataHash()).isEqualTo("d1a4dd62422639f665a8d80b37c59f8d");
-    assertThat(fileSourceDto.getSrcHash()).isEqualTo("123456");
-  }
-
-  @Test
-  public void testPersistEmptyFile() throws Exception {
-    setupData("file_sources");
-    when(system2.newDate()).thenReturn(DateUtils.parseDateTime("2014-10-29T16:44:02+0100"));
-
-    String relativePathEmpty = "src/empty.java";
-    DefaultInputFile inputFileEmpty = new DefaultInputFile(PROJECT_KEY, relativePathEmpty)
-      .setLines(0)
-      .setHash("abcd")
-      .setLineHashes(new byte[][] {});
-    when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileEmpty));
-
-    mockResourceCache(relativePathEmpty, PROJECT_KEY, "uuidempty");
-
-    sourcePersister.persist();
-    checkTables("testPersistEmptyFile", "file_sources");
-  }
-
-  @Test
-  public void testPersistNewFileNoScmNoHighlighting() throws Exception {
-    setupData("file_sources");
-    Date now = DateUtils.parseDateTime("2014-10-29T16:44:02+0100");
-    when(system2.newDate()).thenReturn(now);
-
-    String relativePathNew = "src/new.java";
-    java.io.File newFile = new java.io.File(basedir, relativePathNew);
-    FileUtils.write(newFile, "foo\nbar\nbiz");
-    DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathNew)
-      .setLines(3)
-      .setAbsolutePath(newFile.getAbsolutePath())
-      .setLineHashes(new byte[][] {md5("foo"), md5("bar"), md5("biz")});
-    when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew));
-
-    mockResourceCache(relativePathNew, PROJECT_KEY, "uuidnew");
-
-    sourcePersister.persist();
-    FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidnew");
-    assertThat(fileSourceDto.getCreatedAt()).isEqualTo(now.getTime());
-    assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
-    assertThat(fileSourceDto.getData()).isEqualTo(
-      ",,,,,,,,,,,,,,,foo\r\n,,,,,,,,,,,,,,,bar\r\n,,,,,,,,,,,,,,,biz\r\n");
-    assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("foo") + "\n" + md5Hex("bar") + "\n" + md5Hex("biz"));
-    assertThat(fileSourceDto.getDataHash()).isEqualTo("a34ed99cc7d27150c82f5cba2b22b665");
-
-  }
-
-  @Test
-  public void testPersistNewFileWithScmAndCoverageAndHighlighting() throws Exception {
-    setupData("file_sources");
-    Date now = DateUtils.parseDateTime("2014-10-29T16:44:02+0100");
-    when(system2.newDate()).thenReturn(now);
-
-    String relativePathNew = "src/new.java";
-    java.io.File newFile = new java.io.File(basedir, relativePathNew);
-    FileUtils.write(newFile, "foo\nbar\nbiz");
-    DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathNew)
-      .setLines(3)
-      .setAbsolutePath(newFile.getAbsolutePath())
-      .setOriginalLineOffsets(new long[] {0, 4, 7})
-      .setLineHashes(new byte[][] {md5("foo"), md5("bar"), md5("biz")});
-    when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew));
-
-    mockResourceCache(relativePathNew, PROJECT_KEY, "uuidnew");
-
-    String fileKey = PROJECT_KEY + ":" + relativePathNew;
-    when(measureCache.byMetric(fileKey, CoreMetrics.SCM_AUTHORS_BY_LINE_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_AUTHORS_BY_LINE, "1=julien;2=simon;3=julien")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE, "1=2014-10-11T16:44:02+0100;2=2014-10-12T16:44:02+0100;3=2014-10-13T16:44:02+0100")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.SCM_REVISIONS_BY_LINE_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_REVISIONS_BY_LINE, "1=123;2=234;3=345")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.COVERAGE_LINE_HITS_DATA, "1=1;3=0")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.CONDITIONS_BY_LINE_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.CONDITIONS_BY_LINE, "1=4")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.COVERED_CONDITIONS_BY_LINE, "1=2")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.IT_COVERAGE_LINE_HITS_DATA_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_COVERAGE_LINE_HITS_DATA, "1=2;3=0")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.IT_CONDITIONS_BY_LINE_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_CONDITIONS_BY_LINE, "1=5")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE, "1=3")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA, "1=3;3=0")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_CONDITIONS_BY_LINE_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_CONDITIONS_BY_LINE, "1=6")));
-    when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE_KEY))
-      .thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE, "1=4")));
-
-    SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
-      .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
-      .registerHighlightingRule(4, 5, TypeOfText.COMMENT)
-      .registerHighlightingRule(7, 16, TypeOfText.CONSTANT)
-      .build();
-    when(componentDataCache.getData(fileKey, SnapshotDataTypes.SYNTAX_HIGHLIGHTING))
-      .thenReturn(highlighting);
-
-    DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(fileKey, null);
-    org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2);
-    symbolBuilder.newReference(s1, 4);
-    symbolBuilder.newReference(s1, 11);
-    org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6);
-    symbolBuilder.newReference(s2, 0);
-    symbolBuilder.newReference(s2, 7);
-    when(componentDataCache.getData(fileKey, SnapshotDataTypes.SYMBOL_HIGHLIGHTING))
-      .thenReturn(symbolBuilder.build());
-
-    DuplicationGroup group1 = new DuplicationGroup(new DuplicationGroup.Block(fileKey, 1, 1))
-      .addDuplicate(new DuplicationGroup.Block(fileKey, 3, 1))
-      .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 1))
-      .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 1));
-
-    DuplicationGroup group2 = new DuplicationGroup(new DuplicationGroup.Block(fileKey, 1, 2))
-      .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 2))
-      .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 2));
-    when(duplicationCache.byComponent(fileKey)).thenReturn(Arrays.asList(group1, group2));
-
-    sourcePersister.persist();
-
-    FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidnew");
-    assertThat(fileSourceDto.getCreatedAt()).isEqualTo(now.getTime());
-    assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
-    assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("foo") + "\n" + md5Hex("bar") + "\n" + md5Hex("biz"));
-    assertThat(fileSourceDto.getData()).isEqualTo(
-      "123,julien,2014-10-11T16:44:02+0100,1,4,2,2,5,3,3,6,4,\"0,3,a\",\"1,2,1;0,2,2\",\"1,3\",foo\r\n"
-        + "234,simon,2014-10-12T16:44:02+0100,,,,,,,,,,\"0,1,cd\",\"0,1,1;0,2,2\",3,bar\r\n"
-        + "345,julien,2014-10-13T16:44:02+0100,0,,,0,,,0,,,\"0,9,c\",\"4,5,1;0,2,2\",2,biz\r\n");
-    assertThat(fileSourceDto.getDataHash()).isEqualTo("26930cf0250d525b04083185ff24a046");
-  }
-
-  @Test
-  public void testSimpleConversionOfHighlightingOffset() {
-    DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java")
-      .setLines(3)
-      .setOriginalLineOffsets(new long[] {0, 4, 7});
-
-    SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
-      .registerHighlightingRule(0, 4, TypeOfText.ANNOTATION)
-      .registerHighlightingRule(4, 5, TypeOfText.COMMENT)
-      .registerHighlightingRule(7, 16, TypeOfText.CONSTANT)
-      .build();
-
-    String[] highlightingPerLine = sourcePersister.computeHighlightingPerLine(file, highlighting);
-
-    assertThat(highlightingPerLine).containsOnly("0,4,a", "0,1,cd", "0,9,c");
-  }
-
-  @Test
-  public void testConversionOfHighlightingOffsetMultiLine() {
-    DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java")
-      .setLines(3)
-      .setOriginalLineOffsets(new long[] {0, 4, 7});
-
-    SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
-      .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
-      .registerHighlightingRule(4, 9, TypeOfText.COMMENT)
-      .registerHighlightingRule(10, 16, TypeOfText.CONSTANT)
-      .build();
-
-    String[] highlightingPerLine = sourcePersister.computeHighlightingPerLine(file, highlighting);
-
-    assertThat(highlightingPerLine).containsOnly("0,3,a", "0,3,cd", "0,2,cd;3,9,c");
-  }
-
-  @Test
-  public void testConversionOfHighlightingNestedRules() {
-    DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java")
-      .setLines(3)
-      .setOriginalLineOffsets(new long[] {0, 4, 7});
-
-    SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
-      .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
-      .registerHighlightingRule(4, 6, TypeOfText.COMMENT)
-      .registerHighlightingRule(7, 16, TypeOfText.CONSTANT)
-      .registerHighlightingRule(8, 15, TypeOfText.KEYWORD)
-      .build();
-
-    String[] highlightingPerLine = sourcePersister.computeHighlightingPerLine(file, highlighting);
-
-    assertThat(highlightingPerLine).containsOnly("0,3,a", "0,2,cd", "0,9,c;1,8,k");
-  }
-
-  @Test
-  public void testConversionOfHighlightingNestedRulesMultiLine() {
-    DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java")
-      .setLines(3)
-      .setOriginalLineOffsets(new long[] {0, 4, 7});
-
-    SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
-      .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
-      .registerHighlightingRule(4, 6, TypeOfText.COMMENT)
-      .registerHighlightingRule(4, 16, TypeOfText.CONSTANT)
-      .registerHighlightingRule(8, 15, TypeOfText.KEYWORD)
-      .build();
-
-    String[] highlightingPerLine = sourcePersister.computeHighlightingPerLine(file, highlighting);
-
-    assertThat(highlightingPerLine).containsOnly("0,3,a", "0,3,c;0,2,cd", "0,9,c;1,8,k");
-  }
-
-  @Test
-  public void testSimpleConversionOfSymbolOffset() {
-    DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java")
-      .setLines(3)
-      .setOriginalLineOffsets(new long[] {0, 4, 7});
-
-    DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(PROJECT_KEY + ":" + "src/foo.java", null);
-    org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2);
-    symbolBuilder.newReference(s1, 4);
-    symbolBuilder.newReference(s1, 11);
-    org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6);
-    symbolBuilder.newReference(s2, 0);
-    symbolBuilder.newReference(s2, 7);
-
-    String[] symbolsPerLine = sourcePersister.computeSymbolReferencesPerLine(file, symbolBuilder.build());
-
-    assertThat(symbolsPerLine).containsOnly("1,2,1;0,2,2", "0,1,1;0,2,2", "4,5,1;0,2,2");
-  }
-
-  @Test
-  public void verifyDeclarationOrderOfSymbolHasNoImpact() {
-    DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java")
-      .setLines(3)
-      .setOriginalLineOffsets(new long[] {0, 4, 7});
-
-    DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(PROJECT_KEY + ":" + "src/foo.java", null);
-    org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6);
-    symbolBuilder.newReference(s2, 7);
-    symbolBuilder.newReference(s2, 0);
-    org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2);
-    symbolBuilder.newReference(s1, 11);
-    symbolBuilder.newReference(s1, 4);
-
-    String[] symbolsPerLine = sourcePersister.computeSymbolReferencesPerLine(file, symbolBuilder.build());
-
-    assertThat(symbolsPerLine).containsOnly("1,2,1;0,2,2", "0,1,1;0,2,2", "4,5,1;0,2,2");
-  }
-
-  private void mockResourceCache(String relativePathEmpty, String projectKey, String uuid) {
-    File sonarFile = File.create(relativePathEmpty);
-    sonarFile.setUuid(uuid);
-    when(resourceCache.get(projectKey + ":" + relativePathEmpty)).thenReturn(new BatchResource(1, sonarFile, null));
-  }
-
-  private byte[] md5(String string) {
-    return DigestUtils.md5(string);
-  }
-
-  private String md5Hex(String string) {
-    return DigestUtils.md5Hex(string);
-  }
-
-}
diff --git a/sonar-batch/src/test/resources/org/sonar/batch/index/MeasurePersisterTest/shouldUpdateMeasure-result.xml b/sonar-batch/src/test/resources/org/sonar/batch/index/MeasurePersisterTest/shouldUpdateMeasure-result.xml
deleted file mode 100644 (file)
index 07f08fb..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-<dataset>
-
-  <project_measures id="1" VALUE="12.5" METRIC_ID="2" SNAPSHOT_ID="3001" alert_text="[null]" RULES_CATEGORY_ID="[null]"
-                    RULE_ID="[null]" text_value="[null]" tendency="[null]" measure_date="[null]" project_id="[null]"
-                    alert_status="[null]" description="[null]" rule_priority="[null]" characteristic_id="[null]" url="[null]"
-                    person_id="[null]"
-                    variation_value_1="[null]" variation_value_2="[null]" variation_value_3="[null]" variation_value_4="[null]" variation_value_5="[null]"
-                    measure_data="[null]"/>
-
-  <project_measures id="2" VALUE="[null]" METRIC_ID="2" SNAPSHOT_ID="3001" alert_text="[null]" RULES_CATEGORY_ID="[null]"
-                    RULE_ID="[null]" text_value="SHORT" tendency="[null]" measure_date="[null]" project_id="[null]"
-                    alert_status="[null]" description="[null]" rule_priority="[null]" characteristic_id="[null]" url="[null]"
-                    person_id="[null]"
-                    variation_value_1="[null]" variation_value_2="[null]" variation_value_3="[null]" variation_value_4="[null]" variation_value_5="[null]"
-                    measure_data="[null]"/>
-
-  <project_measures id="3" VALUE="[null]" METRIC_ID="2" SNAPSHOT_ID="3002" alert_text="[null]" RULES_CATEGORY_ID="[null]"
-                    RULE_ID="[null]" text_value="[null]" tendency="[null]" measure_date="[null]" project_id="[null]"
-                    alert_status="[null]" description="[null]" rule_priority="[null]" characteristic_id="[null]" url="[null]"
-                    person_id="[null]"
-                    variation_value_1="[null]" variation_value_2="[null]" variation_value_3="[null]" variation_value_4="[null]" variation_value_5="[null]"
-                    measure_data="MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MDEyMzQ1Njc4OQ=="/>
-
-</dataset>
diff --git a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml
deleted file mode 100644 (file)
index 61fa8fc..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-<dataset>
-  <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" 
-      data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;" 
-      line_hashes="8d7b3d6b83c0a517eac07e1aac94b773&#10;9a0364b9e99bb480dd25e1f0284c8555" 
-      data_hash="0263047cd758c68c27683625f072f010" 
-      src_hash="123456"
-      created_at="1412952242000" updated_at="1412952242000" />
-      
-</dataset>
index 4a26ff98e6a5e36e8633d54b6338a12f1d6fc3e3..2fc32f5845b0e1c25100b30ca2f4e99478d1f5d5 100644 (file)
@@ -1,6 +1,6 @@
 <dataset>
   <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" 
-      data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;" 
+      binary_data="[null]"
       line_hashes="8d7b3d6b83c0a517eac07e1aac94b773&#10;9a0364b9e99bb480dd25e1f0284c8555" 
       data_hash="0263047cd758c68c27683625f072f010" 
       src_hash="[null]"
diff --git a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/shared.xml b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/shared.xml
new file mode 100644 (file)
index 0000000..14fded3
--- /dev/null
@@ -0,0 +1,9 @@
+<dataset>
+  <file_sources id="1" project_uuid="PROJECT_UUID" file_uuid="FILE_UUID"
+      binary_data="[null]"
+      line_hashes="8d7b3d6b83c0a517eac07e1aac94b773&#10;9a0364b9e99bb480dd25e1f0284c8555" 
+      data_hash="0263047cd758c68c27683625f072f010" 
+      src_hash="123456"
+      created_at="1412952242000" updated_at="1412952242000" />
+      
+</dataset>
index 940080dc041f5771258bd997a0e2782053ec80ba..0942d062ede600f67e51455b938c58a36ae8632b 100644 (file)
@@ -1,7 +1,7 @@
 <dataset>
 
-  <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" 
-      data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;" 
+  <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
+                binary_data="[null]"
       line_hashes="8d7b3d6b83c0a517eac07e1aac94b773&#10;9a0364b9e99bb480dd25e1f0284c8555" 
       data_hash="0263047cd758c68c27683625f072f010" 
       src_hash="123456"
index 184e155cb024f350c1e39543b38d5a8dfa584d4a..0bdadc4baccb95e4317a49077134114b6c216738 100644 (file)
@@ -1,12 +1,12 @@
 <dataset>
-    <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" 
-      data=",,,,,,,,,,,,,,,unchanged&#13;&#10;,,,,,,,,,,,,,,,content&#13;&#10;" 
+    <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
+                  binary_data="[null]"
       line_hashes="8d7b3d6b83c0a517eac07e1aac94b773&#10;9a0364b9e99bb480dd25e1f0284c8555" 
       data_hash="0263047cd758c68c27683625f072f010" 
       src_hash="123456"
       created_at="1412952242000" updated_at="1412952242000" />
       
-    <file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" data="[null]"
+    <file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" binary_data="[null]"
        line_hashes="[null]"
        src_hash="abcd"
        data_hash="0" created_at="1414597442000" updated_at="1414597442000" />
index b1d6b8696e9b02343b2f0b9ca0a16f1aeb6acd89..3682bc1fb644a3f98b159efe3ecf8838bdc5d78d 100644 (file)
       <groupId>org.codehaus.sonar</groupId>
       <artifactId>sonar-update-center-common</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.codehaus.sonar</groupId>
+      <artifactId>sonar-batch-protocol</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
     <dependency>
       <groupId>org.codehaus.sonar</groupId>
       <artifactId>sonar-home</artifactId>
 
   <build>
     <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>src/main/gen-java</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-jar-plugin</artifactId>
index b2d44f173e422d5ba0dffaaec889dc358ee2d088..80de635be56a5bcfb30ade25fa0ae2766d2893b9 100644 (file)
@@ -33,7 +33,7 @@ import java.util.List;
  */
 public class DatabaseVersion implements BatchComponent, ServerComponent {
 
-  public static final int LAST_VERSION = 780;
+  public static final int LAST_VERSION = 783;
 
   /**
    * List of all the tables.n
index c0a75f7a4122fa7cffc1054ee58c8bb79de7ef22..eced8499a4d4ec414d1dc9629499050eb9a64880 100644 (file)
@@ -31,6 +31,7 @@ import org.sonar.core.persistence.MyBatis;
 
 import javax.annotation.CheckForNull;
 
+import java.io.InputStream;
 import java.io.Reader;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
@@ -56,17 +57,48 @@ public class FileSourceDao implements BatchComponent, ServerComponent, DaoCompon
     }
   }
 
-  public <T> void readDataStream(String fileUuid, Function<Reader, T> function) {
+  public <T> void readDataStream(String fileUuid, Function<InputStream, T> function) {
     DbSession dbSession = mybatis.openSession(false);
+    Connection connection = dbSession.getConnection();
+    PreparedStatement pstmt = null;
+    ResultSet rs = null;
+    InputStream input = null;
     try {
-      readColumnStream(dbSession, fileUuid, function, "data");
+      pstmt = connection.prepareStatement("SELECT binary_data FROM file_sources WHERE file_uuid=?");
+      pstmt.setString(1, fileUuid);
+      rs = pstmt.executeQuery();
+      if (rs.next()) {
+        input = rs.getBinaryStream(1);
+        function.apply(input);
+      }
+    } catch (SQLException e) {
+      throw new IllegalStateException("Fail to read FILE_SOURCES.BINARY_DATA of file " + fileUuid, e);
     } finally {
+      IOUtils.closeQuietly(input);
+      DbUtils.closeQuietly(connection, pstmt, rs);
       MyBatis.closeQuietly(dbSession);
     }
   }
 
   public <T> void readLineHashesStream(DbSession dbSession, String fileUuid, Function<Reader, T> function) {
-    readColumnStream(dbSession, fileUuid, function, "line_hashes");
+    Connection connection = dbSession.getConnection();
+    PreparedStatement pstmt = null;
+    ResultSet rs = null;
+    Reader reader = null;
+    try {
+      pstmt = connection.prepareStatement("SELECT line_hashes FROM file_sources WHERE file_uuid=?");
+      pstmt.setString(1, fileUuid);
+      rs = pstmt.executeQuery();
+      if (rs.next()) {
+        reader = rs.getCharacterStream(1);
+        function.apply(reader);
+      }
+    } catch (SQLException e) {
+      throw new IllegalStateException("Fail to read FILE_SOURCES.LINE_HASHES of file " + fileUuid, e);
+    } finally {
+      IOUtils.closeQuietly(reader);
+      DbUtils.closeQuietly(connection, pstmt, rs);
+    }
   }
 
   public void insert(FileSourceDto dto) {
@@ -89,24 +121,4 @@ public class FileSourceDao implements BatchComponent, ServerComponent, DaoCompon
     }
   }
 
-  private <T> void readColumnStream(DbSession dbSession, String fileUuid, Function<Reader, T> function, String column) {
-    Connection connection = dbSession.getConnection();
-    PreparedStatement pstmt = null;
-    ResultSet rs = null;
-    Reader reader = null;
-    try {
-      pstmt = connection.prepareStatement("SELECT " + column + " FROM file_sources WHERE file_uuid = ?");
-      pstmt.setString(1, fileUuid);
-      rs = pstmt.executeQuery();
-      if (rs.next()) {
-        reader = rs.getCharacterStream(1);
-        function.apply(reader);
-      }
-    } catch (SQLException e) {
-      throw new IllegalStateException("Fail to read FILE_SOURCES." + column.toUpperCase() + " of file " + fileUuid, e);
-    } finally {
-      IOUtils.closeQuietly(reader);
-      DbUtils.closeQuietly(connection, pstmt, rs);
-    }
-  }
 }
index 999ee5f2dd53fe039e602e601016f21daee7e0d5..7840d213df61757d61a4723766bb511b0bd1be79 100644 (file)
  */
 package org.sonar.core.source.db;
 
+import net.jpountz.lz4.LZ4BlockInputStream;
+import net.jpountz.lz4.LZ4BlockOutputStream;
+import org.apache.commons.io.IOUtils;
+import org.sonar.server.source.db.FileSourceDb;
+
 import javax.annotation.CheckForNull;
 import javax.annotation.Nullable;
 
-public class FileSourceDto {
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
 
-  public static final int CSV_INDEX_SCM_REVISION = 0;
-  public static final int CSV_INDEX_SCM_AUTHOR = 1;
-  public static final int CSV_INDEX_SCM_DATE = 2;
-  public static final int CSV_INDEX_UT_LINE_HITS = 3;
-  public static final int CSV_INDEX_UT_CONDITIONS = 4;
-  public static final int CSV_INDEX_UT_COVERED_CONDITIONS = 5;
-  public static final int CSV_INDEX_IT_LINE_HITS = 6;
-  public static final int CSV_INDEX_IT_CONDITIONS = 7;
-  public static final int CSV_INDEX_IT_COVERED_CONDITIONS = 8;
-  public static final int CSV_INDEX_OVERALL_LINE_HITS = 9;
-  public static final int CSV_INDEX_OVERALL_CONDITIONS = 10;
-  public static final int CSV_INDEX_OVERALL_COVERED_CONDITIONS = 11;
-  public static final int CSV_INDEX_HIGHLIGHTING = 12;
-  public static final int CSV_INDEX_SYMBOLS = 13;
-  public static final int CSV_INDEX_DUPLICATIONS = 14;
+public class FileSourceDto {
 
   private Long id;
   private String projectUuid;
   private String fileUuid;
   private long createdAt;
   private long updatedAt;
-  private String data;
   private String lineHashes;
-  private String dataHash;
   private String srcHash;
+  private byte[] binaryData;
+  private String dataHash;
 
   public Long getId() {
     return id;
@@ -77,32 +71,91 @@ public class FileSourceDto {
     return this;
   }
 
-  @CheckForNull
-  public String getData() {
-    return data;
+  public String getDataHash() {
+    return dataHash;
   }
 
-  public FileSourceDto setData(@Nullable String data) {
-    this.data = data;
+  /**
+   * MD5 of column BINARY_DATA. Used to detect data changes and the need for an update.
+   */
+  public FileSourceDto setDataHash(String s) {
+    this.dataHash = s;
     return this;
   }
 
-  @CheckForNull
-  public String getLineHashes() {
-    return lineHashes;
+  /**
+   * Compressed value of serialized protobuf message {@link org.sonar.server.source.db.FileSourceDb.Data}
+   */
+  public byte[] getBinaryData() {
+    return binaryData;
   }
 
-  public FileSourceDto setLineHashes(@Nullable String lineHashes) {
-    this.lineHashes = lineHashes;
+  /**
+   * Compressed value of serialized protobuf message {@link org.sonar.server.source.db.FileSourceDb.Data}
+   */
+  public FileSourceDb.Data getData() {
+    return decodeData(binaryData);
+  }
+
+  public static FileSourceDb.Data decodeData(byte[] binaryData) {
+    // stream is always closed
+    return decodeData(new ByteArrayInputStream(binaryData));
+  }
+
+  /**
+   * Decompress and deserialize content of column FILE_SOURCES.BINARY_DATA.
+   * The parameter "binaryInput" is always closed by this method.
+   */
+  public static FileSourceDb.Data decodeData(InputStream binaryInput) {
+    LZ4BlockInputStream lz4Input = null;
+    try {
+      lz4Input = new LZ4BlockInputStream(binaryInput);
+      return FileSourceDb.Data.parseFrom(lz4Input);
+    } catch (IOException e) {
+      throw new IllegalStateException("Fail to decompress and deserialize source data", e);
+    } finally {
+      IOUtils.closeQuietly(lz4Input);
+    }
+  }
+
+  /**
+   * Set compressed value of the protobuf message {@link org.sonar.server.source.db.FileSourceDb.Data}
+   */
+  public FileSourceDto setBinaryData(byte[] data) {
+    this.binaryData = data;
     return this;
   }
 
-  public String getDataHash() {
-    return dataHash;
+  public FileSourceDto setData(FileSourceDb.Data data) {
+    this.binaryData = encodeData(data);
+    return this;
+  }
+
+  /**
+   * Serialize and compress protobuf message {@link org.sonar.server.source.db.FileSourceDb.Data}
+   * in the column BINARY_DATA.
+   */
+  public static byte[] encodeData(FileSourceDb.Data data) {
+    ByteArrayOutputStream byteOutput = new ByteArrayOutputStream();
+    LZ4BlockOutputStream compressedOutput = new LZ4BlockOutputStream(byteOutput);
+    try {
+      data.writeTo(compressedOutput);
+      compressedOutput.close();
+      return byteOutput.toByteArray();
+    } catch (IOException e) {
+      throw new IllegalStateException("Fail to serialize and compress source data", e);
+    } finally {
+      IOUtils.closeQuietly(compressedOutput);
+    }
   }
 
-  public FileSourceDto setDataHash(String dataHash) {
-    this.dataHash = dataHash;
+  @CheckForNull
+  public String getLineHashes() {
+    return lineHashes;
+  }
+
+  public FileSourceDto setLineHashes(@Nullable String lineHashes) {
+    this.lineHashes = lineHashes;
     return this;
   }
 
@@ -110,6 +163,9 @@ public class FileSourceDto {
     return srcHash;
   }
 
+  /**
+   * Hash of file content. Value is computed by batch.
+   */
   public FileSourceDto setSrcHash(String srcHash) {
     this.srcHash = srcHash;
     return this;
index 2fd054897ebe1f25f6700ec6f02ebf70b0dde6fc..e887689235796d948e705c192fb40e3b01ee6d94 100644 (file)
@@ -26,7 +26,7 @@ import java.util.List;
 
 public interface FileSourceMapper {
 
-  List<FileSourceDto> selectAllFileDataHashByProject(String projectUuid);
+  List<FileSourceDto> selectHashesForProject(String projectUuid);
 
   @CheckForNull
   FileSourceDto select(String fileUuid);
index 92e9f0aff0cc7c20f5e96ab0c48ee3f63f38be89..fd2663d75e5969be160d3b7a3e2dd6e52f2ad6d5 100644 (file)
@@ -308,6 +308,9 @@ INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('777');
 INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('778');
 INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('779');
 INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('780');
+INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('781');
+INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('782');
+INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('783');
 
 INSERT INTO USERS(ID, LOGIN, NAME, EMAIL, CRYPTED_PASSWORD, SALT, CREATED_AT, UPDATED_AT, REMEMBER_TOKEN, REMEMBER_TOKEN_EXPIRES_AT) VALUES (1, 'admin', 'Administrator', '', 'a373a0e667abb2604c1fd571eb4ad47fe8cc0878', '48bc4b0d93179b5103fd3885ea9119498e9d161b', '1418215735482', '1418215735482', null, null);
 ALTER TABLE USERS ALTER COLUMN ID RESTART WITH 2;
index 202f896611e3bb26856bb97215097118b8ab847c..fadf4581aa951368070d2524bc1e7be8a5351e38 100644 (file)
@@ -554,8 +554,8 @@ CREATE TABLE "FILE_SOURCES" (
   "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
   "PROJECT_UUID" VARCHAR(50) NOT NULL,
   "FILE_UUID" VARCHAR(50) NOT NULL,
-  "DATA" CLOB(2147483647),
   "LINE_HASHES" CLOB(2147483647),
+  "BINARY_DATA" BINARY(167772150),
   "DATA_HASH" VARCHAR(50) NOT NULL,
   "SRC_HASH" VARCHAR(50) NULL,
   "CREATED_AT" BIGINT NOT NULL,
index 009f36d1451f52a566797960403a2da762c9e12e..2edae7f055c1ca3eef7c541b28f47b054152c2d3 100644 (file)
@@ -5,29 +5,32 @@
 <mapper namespace="org.sonar.core.source.db.FileSourceMapper">
 
   <select id="select" parameterType="string" resultType="org.sonar.core.source.db.FileSourceDto">
-    SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt, data, line_hashes as lineHashes, data_hash as dataHash, src_hash as srcHash
+    SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt,
+    binary_data as binaryData, line_hashes as lineHashes, data_hash as dataHash, src_hash as srcHash
     FROM file_sources
     WHERE file_uuid = #{fileUuid}
   </select>
   
-  <select id="selectAllFileDataHashByProject" parameterType="string" resultType="org.sonar.core.source.db.FileSourceDto">
+  <select id="selectHashesForProject" parameterType="string" resultType="org.sonar.core.source.db.FileSourceDto">
     SELECT id, file_uuid as fileUuid, data_hash as dataHash, src_hash as srcHash
     FROM file_sources
     WHERE project_uuid = #{projectUuid}
   </select>
   
   <insert id="insert" parameterType="org.sonar.core.source.db.FileSourceDto" useGeneratedKeys="false">
-    insert into file_sources (project_uuid, file_uuid, created_at, updated_at, data, line_hashes, data_hash, src_hash) 
-    values (#{projectUuid,jdbcType=VARCHAR}, #{fileUuid,jdbcType=VARCHAR}, #{createdAt,jdbcType=BIGINT}, #{updatedAt,jdbcType=BIGINT}, #{data,jdbcType=CLOB}, #{lineHashes,jdbcType=CLOB}, #{dataHash,jdbcType=VARCHAR}, #{srcHash,jdbcType=VARCHAR})
+    insert into file_sources (project_uuid, file_uuid, created_at, updated_at, binary_data, line_hashes, data_hash, src_hash)
+    values (#{projectUuid,jdbcType=VARCHAR}, #{fileUuid,jdbcType=VARCHAR}, #{createdAt,jdbcType=BIGINT},
+    #{updatedAt,jdbcType=BIGINT}, #{binaryData,jdbcType=BLOB}, #{lineHashes,jdbcType=CLOB},
+    #{dataHash,jdbcType=VARCHAR}, #{srcHash,jdbcType=VARCHAR})
   </insert>
   
   <update id="update" parameterType="org.sonar.core.source.db.FileSourceDto" useGeneratedKeys="false">
     update file_sources set
-      updated_at = #{updatedAt},
-      data = #{data},
-      line_hashes = #{lineHashes},
-      data_hash = #{dataHash},
-      src_hash = #{srcHash}
+      updated_at = #{updatedAt,jdbcType=BIGINT},
+      binary_data = #{binaryData,jdbcType=BLOB},
+      line_hashes = #{lineHashes,jdbcType=CLOB},
+      data_hash = #{dataHash,jdbcType=VARCHAR},
+      src_hash = #{srcHash,jdbcType=VARCHAR}
     where id = #{id}
   </update>
 
index dc8dc262db2c2dfaedb2333b8a2a9c409c8a7639..aed0f04fd71549511876b6050970dcc9f07a4050 100644 (file)
@@ -25,13 +25,12 @@ import org.apache.commons.io.IOUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import org.sonar.api.utils.DateUtils;
 import org.sonar.core.persistence.AbstractDaoTestCase;
 import org.sonar.core.persistence.DbSession;
 
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.Reader;
-import java.util.Date;
 
 import static org.assertj.core.api.Assertions.assertThat;
 
@@ -56,77 +55,82 @@ public class FileSourceDaoTest extends AbstractDaoTestCase {
   public void select() throws Exception {
     setupData("shared");
 
-    FileSourceDto fileSourceDto = dao.select("ab12");
+    FileSourceDto fileSourceDto = dao.select("FILE1_UUID");
 
-    assertThat(fileSourceDto.getData()).isEqualTo("aef12a,alice,2014-04-25T12:34:56+0100,,class Foo");
+    assertThat(fileSourceDto.getBinaryData()).isNotEmpty();
     assertThat(fileSourceDto.getDataHash()).isEqualTo("hash");
-    assertThat(fileSourceDto.getProjectUuid()).isEqualTo("abcd");
-    assertThat(fileSourceDto.getFileUuid()).isEqualTo("ab12");
-    assertThat(new Date(fileSourceDto.getCreatedAt())).isEqualTo(DateUtils.parseDateTime("2014-10-29T16:44:02+0100"));
-    assertThat(new Date(fileSourceDto.getUpdatedAt())).isEqualTo(DateUtils.parseDateTime("2014-10-30T16:44:02+0100"));
+    assertThat(fileSourceDto.getProjectUuid()).isEqualTo("PRJ_UUID");
+    assertThat(fileSourceDto.getFileUuid()).isEqualTo("FILE1_UUID");
+    assertThat(fileSourceDto.getCreatedAt()).isEqualTo(1500000000000L);
+    assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(1500000000000L);
   }
 
   @Test
   public void select_data() throws Exception {
     setupData("shared");
 
-    StringParser stringParser = new StringParser();
-    dao.readDataStream("ab12", stringParser);
+    InputStreamToStringFunction fn = new InputStreamToStringFunction();
+    dao.readDataStream("FILE1_UUID", fn);
 
-    assertThat(stringParser.getResult()).isEqualTo("aef12a,alice,2014-04-25T12:34:56+0100,,class Foo");
+    assertThat(fn.result).isNotEmpty();
   }
 
   @Test
   public void select_line_hashes() throws Exception {
     setupData("shared");
 
-    StringParser stringParser = new StringParser();
-    dao.readLineHashesStream(session, "ab12", stringParser);
+    ReaderToStringFunction fn = new ReaderToStringFunction();
+    dao.readLineHashesStream(session, "FILE1_UUID", fn);
 
-    assertThat(stringParser.getResult()).isEqualTo("truc");
+    assertThat(fn.result).isEqualTo("ABC\\nDEF\\nGHI");
   }
 
   @Test
   public void no_line_hashes_on_unknown_file() throws Exception {
     setupData("shared");
 
-    StringParser stringParser = new StringParser();
-    dao.readLineHashesStream(session, "unknown", stringParser);
+    ReaderToStringFunction fn = new ReaderToStringFunction();
+    dao.readLineHashesStream(session, "unknown", fn);
 
-    assertThat(stringParser.getResult()).isEmpty();
+    assertThat(fn.result).isNull();
   }
 
   @Test
   public void insert() throws Exception {
     setupData("shared");
 
-    dao.insert(new FileSourceDto().setProjectUuid("prj").setFileUuid("file").setData("bla bla")
-      .setDataHash("hash2")
-      .setLineHashes("foo\nbar")
-      .setSrcHash("hache")
-      .setCreatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime())
-      .setUpdatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime()));
-
-    checkTable("insert", "file_sources");
+    dao.insert(new FileSourceDto()
+      .setProjectUuid("PRJ_UUID")
+      .setFileUuid("FILE2_UUID")
+      .setBinaryData("FILE2_BINARY_DATA".getBytes())
+      .setDataHash("FILE2_DATA_HASH")
+      .setLineHashes("LINE1_HASH\\nLINE2_HASH")
+      .setSrcHash("FILE2_HASH")
+      .setCreatedAt(1500000000000L)
+      .setUpdatedAt(1500000000001L));
+
+    checkTable("insert", "file_sources", "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at");
   }
 
   @Test
   public void update() throws Exception {
     setupData("shared");
 
-    dao.update(new FileSourceDto().setId(101L).setProjectUuid("prj").setFileUuid("file")
-      .setData("updated data")
-      .setDataHash("hash2")
-      .setSrcHash("123456")
-      .setLineHashes("foo2\nbar2")
-      .setUpdatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime()));
+    dao.update(new FileSourceDto().setId(101L)
+      .setProjectUuid("PRJ_UUID")
+      .setFileUuid("FILE1_UUID")
+      .setBinaryData("updated data".getBytes())
+      .setDataHash("NEW_DATA_HASH")
+      .setSrcHash("NEW_FILE_HASH")
+      .setLineHashes("NEW_LINE_HASHES")
+      .setUpdatedAt(1500000000002L));
 
-    checkTable("update", "file_sources");
+    checkTable("update", "file_sources", "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at");
   }
 
-  class StringParser implements Function<Reader, String> {
+  private static class ReaderToStringFunction implements Function<Reader, String> {
 
-    String result = "";
+    String result = null;
 
     @Override
     public String apply(Reader input) {
@@ -137,9 +141,20 @@ public class FileSourceDaoTest extends AbstractDaoTestCase {
         throw new RuntimeException(e);
       }
     }
+  }
+
+  private static class InputStreamToStringFunction implements Function<InputStream, String> {
 
-    public String getResult() {
-      return result;
+    String result = null;
+
+    @Override
+    public String apply(InputStream input) {
+      try {
+        result = IOUtils.toString(input);
+        return IOUtils.toString(input);
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
     }
   }
 }
index 13b1c270dc4d4ec9882f712415df56c9febdd4a8..7caaa52651310aee0fe28d2ef680d24ce1d3b0b8 100644 (file)
@@ -1,5 +1,5 @@
 <dataset>
 
-  <file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" data="[null]" line_hashes="[null]" data_hash="321654988"
+  <file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" binary_data="[null]" line_hashes="[null]" data_hash="321654988"
                 created_at="123456789" updated_at="123456789" src_hash="123456"/>
 </dataset>
index fa5e4a7828d33b20245bb472614ac50ac9b33299..aaae9915d98ca140221fbcb3450c2268a910c5ed 100644 (file)
@@ -72,8 +72,8 @@
              depth="[null]" scope="PRJ" qualifier="TRK" created_at="1228222680000"
              build_date="1228222680000" version="[null]" path="[null]"/>
 
-  <file_sources id="1" project_uuid="ABCD" file_uuid="GHIJ" data="[null]" line_hashes="[null]" data_hash="321654987"
+  <file_sources id="1" project_uuid="ABCD" file_uuid="GHIJ" binary_data="[null]" line_hashes="[null]" data_hash="321654987"
                 created_at="123456789" updated_at="123456789" src_hash="12345"/>
-  <file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" data="[null]" line_hashes="[null]" data_hash="321654988"
+  <file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" binary_data="[null]" line_hashes="[null]" data_hash="321654988"
                 created_at="123456789" updated_at="123456789" src_hash="123456"/>
 </dataset>
index cfc39014c9ddf05d1cf7ee3fee986dc195b39ade..5bbb5aa58913b23ab27a03f09f5c9a324baa185f 100644 (file)
@@ -79,8 +79,8 @@
              depth="[null]" scope="PRJ" qualifier="TRK" created_at="1228222680000"
              build_date="1228222680000" version="[null]" path="[null]"/>
 
-  <file_sources id="1" project_uuid="ABCD" file_uuid="GHIJ" data="[null]" line_hashes="[null]" data_hash="321654987"
+  <file_sources id="1" project_uuid="ABCD" file_uuid="GHIJ" binary_data="[null]" line_hashes="[null]" data_hash="321654987"
                 created_at="123456789" updated_at="123456789"/>
-  <file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" data="[null]" line_hashes="[null]" data_hash="321654988"
+  <file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" binary_data="[null]" line_hashes="[null]" data_hash="321654988"
                 created_at="123456789" updated_at="123456789"/>
 </dataset>
index 1f164a2692e5973a77326f21311b70a93c16d6e9..d50cc3926f5b062ec00f0accf80c5fbdf7b36a21 100644 (file)
              depth="[null]" scope="FIL" qualifier="FIL" created_at="1228222680000"
              build_date="1228222680000"
              version="[null]" path="[null]"/>
-  <file_sources id="1" project_uuid="A" file_uuid="D" data="[null]" line_hashes="[null]" data_hash="321654987"
+  <file_sources id="1" project_uuid="A" file_uuid="D" binary_data="[null]" line_hashes="[null]" data_hash="321654987"
                 created_at="123456789" updated_at="123456789"/>
 </dataset>
index f43f9f06564dbcf711ac08c832f65b895d951f3c..74bca5ec788c7024e24fefbb0636bb1039243c78 100644 (file)
@@ -1,16 +1,17 @@
 <dataset>
 
-    <file_sources id="101" project_uuid="abcd" file_uuid="ab12"
-                  data="aef12a,alice,2014-04-25T12:34:56+0100,,class Foo" data_hash="hash"
-                  line_hashes="truc"
-                  src_hash="12345"
-                  created_at="1414597442000" updated_at="1414683842000" />
+  <file_sources id="101" project_uuid="PRJ_UUID" file_uuid="FILE1_UUID"
+                binary_data="abcde" data_hash="hash"
+                line_hashes="ABC\nDEF\nGHI"
+                src_hash="FILE_HASH"
+                created_at="1500000000000" updated_at="1500000000000" />
 
 
-    <file_sources id="102" project_uuid="prj" file_uuid="file"
-                  data="bla bla" data_hash="hash2"
-                  line_hashes="foo&#10;bar"
-                  src_hash="hache"
-                  created_at="1414770242000" updated_at="1414770242000" />
+  <file_sources id="102" project_uuid="PRJ_UUID" file_uuid="FILE2_UUID"
+                binary_data="[ignore]"
+                data_hash="FILE2_DATA_HASH"
+                line_hashes="LINE1_HASH\nLINE2_HASH"
+                src_hash="FILE2_HASH"
+                created_at="1500000000000" updated_at="1500000000001" />
 
 </dataset>
index fbfa94a6ddd7006143aa5c38c4f9468725831557..79a340f841d4f0034a8fdceb1a3834c925930cf8 100644 (file)
@@ -1,9 +1,9 @@
 <dataset>
 
-    <file_sources id="101" project_uuid="abcd" file_uuid="ab12"
-                  data="aef12a,alice,2014-04-25T12:34:56+0100,,class Foo" data_hash="hash"
-                  line_hashes="truc"
-                  src_hash="12345"
-                  created_at="1414597442000" updated_at="1414683842000" />
+    <file_sources id="101" project_uuid="PRJ_UUID" file_uuid="FILE1_UUID"
+                  binary_data="abcde" data_hash="hash"
+                  line_hashes="ABC\nDEF\nGHI"
+                  src_hash="FILE_HASH"
+                  created_at="1500000000000" updated_at="1500000000000" />
 
 </dataset>
index 64ff997152f2ab7a6ca2bcdc403cc9e042396143..40cbfa91a43b6f6db4fd62e1932f6e7360af152d 100644 (file)
@@ -1,10 +1,11 @@
 <dataset>
 
-    <file_sources id="101" project_uuid="abcd" file_uuid="ab12"
-                  data="updated data" data_hash="hash2"
-                  line_hashes="foo2&#10;bar2"
-                  src_hash="123456"
-                  created_at="1414597442000" updated_at="1414770242000" />
+  <file_sources id="101" project_uuid="PRJ_UUID" file_uuid="FILE1_UUID"
+                binary_data="[ignore]"
+                data_hash="NEW_DATA_HASH"
+                line_hashes="NEW_LINE_HASHES"
+                src_hash="NEW_FILE_HASH"
+                created_at="1500000000000" updated_at="1500000000002" />
 
 
 </dataset>