From: Simon Brandhof Date: Tue, 3 Feb 2015 22:06:45 +0000 (+0100) Subject: SONAR-2570 compress db table FILE_SOURCES X-Git-Tag: 5.1-RC1~348 X-Git-Url: https://source.dussan.org/?a=commitdiff_plain;h=605a62f9e0342e63b55b50e76db3260dee1ef748;p=sonarqube.git SONAR-2570 compress db table FILE_SOURCES --- diff --git a/pom.xml b/pom.xml index 712d316e61a..57b1e8ba278 100644 --- a/pom.xml +++ b/pom.xml @@ -1088,6 +1088,12 @@ protobuf-java 2.6.1 + + net.jpountz.lz4 + lz4 + 1.3.0 + + diff --git a/server/sonar-server-benchmarks/src/test/java/org/sonar/server/benchmark/SourceDbBenchmarkTest.java b/server/sonar-server-benchmarks/src/test/java/org/sonar/server/benchmark/SourceDbBenchmarkTest.java index 843308ca2ea..3d6dbf32645 100644 --- a/server/sonar-server-benchmarks/src/test/java/org/sonar/server/benchmark/SourceDbBenchmarkTest.java +++ b/server/sonar-server-benchmarks/src/test/java/org/sonar/server/benchmark/SourceDbBenchmarkTest.java @@ -30,11 +30,13 @@ import org.sonar.core.persistence.DbTester; import org.sonar.core.source.db.FileSourceDao; import org.sonar.core.source.db.FileSourceDto; import org.sonar.server.db.DbClient; +import org.sonar.server.source.db.FileSourceDb; import org.sonar.server.source.index.SourceLineResultSetIterator; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; +import java.util.Arrays; import java.util.Timer; import java.util.concurrent.atomic.AtomicLong; @@ -43,8 +45,9 @@ import static org.assertj.core.api.Assertions.assertThat; public class SourceDbBenchmarkTest { public static final Logger LOGGER = LoggerFactory.getLogger("benchmarkSourceDbScrolling"); - // files are 3'220 lines long + public static final int NUMBER_OF_FILES = 1000; + public static final int NUMBER_OF_LINES = 3220; public static final String PROJECT_UUID = Uuids.create(); @Rule @@ -52,7 +55,7 @@ public class SourceDbBenchmarkTest { @Test public void benchmark() throws Exception { - prepareFileSources(); + prepareTable(); scrollRows(); } @@ -70,12 +73,12 @@ public class SourceDbBenchmarkTest { SourceLineResultSetIterator it = SourceLineResultSetIterator.create(dbClient, connection, 0L); while (it.hasNext()) { SourceLineResultSetIterator.SourceFile row = it.next(); - assertThat(row.getLines().size()).isEqualTo(3220); + assertThat(row.getLines().size()).isEqualTo(NUMBER_OF_LINES); assertThat(row.getFileUuid()).isNotEmpty(); counter.incrementAndGet(); } long end = System.currentTimeMillis(); - long period = end-start; + long period = end - start; long throughputPerSecond = 1000L * counter.get() / period; LOGGER.info(String.format("%d FILE_SOURCES rows scrolled in %d ms (%d rows/second)", counter.get(), period, throughputPerSecond)); @@ -85,25 +88,50 @@ public class SourceDbBenchmarkTest { } } - private void prepareFileSources() throws IOException { + private void prepareTable() throws IOException { LOGGER.info("Populate table FILE_SOURCES"); FileSourceDao dao = new FileSourceDao(dbTester.myBatis()); for (int i = 0; i < NUMBER_OF_FILES; i++) { - dao.insert(newFileSourceDto()); + dao.insert(generateDto()); } } - private FileSourceDto newFileSourceDto() throws IOException { + private FileSourceDto generateDto() throws IOException { long now = System.currentTimeMillis(); + byte[] data = generateData(); FileSourceDto dto = new FileSourceDto(); dto.setCreatedAt(now); dto.setUpdatedAt(now); + dto.setBinaryData(data); + dto.setDataHash("49d7230271f2bd24c759e54bcd66547d"); dto.setProjectUuid(PROJECT_UUID); dto.setFileUuid(Uuids.create()); - // this fake data is 3220 lines long 
- dto.setData(IOUtils.toString(getClass().getResourceAsStream("SourceDbBenchmarkTest/data.txt"))); - dto.setDataHash("49d7230271f2bd24c759e54bcd66547d"); dto.setLineHashes(IOUtils.toString(getClass().getResourceAsStream("SourceDbBenchmarkTest/line_hashes.txt"))); return dto; } + + private byte[] generateData() throws IOException { + FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder(); + for (int i = 1; i <= NUMBER_OF_LINES; i++) { + dataBuilder.addLinesBuilder() + .setLine(i) + .setScmRevision("REVISION_" + i) + .setScmAuthor("a_guy") + .setSource("this is not java code " + i) + .setUtLineHits(i) + .setUtConditions(i+1) + .setUtCoveredConditions(i) + .setItLineHits(i) + .setItConditions(i+1) + .setItCoveredConditions(i) + .setOverallLineHits(i) + .setOverallConditions(i+1) + .setOverallCoveredConditions(i) + .setScmDate(150000000L) + .setHighlighting("2,9,k;9,18,k") + .addAllDuplications(Arrays.asList(19,33,141)) + .build(); + } + return FileSourceDto.encodeData(dataBuilder.build()); + } } diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/issue/IssueComputation.java b/server/sonar-server/src/main/java/org/sonar/server/computation/issue/IssueComputation.java index c6288054115..b503e2c74ac 100644 --- a/server/sonar-server/src/main/java/org/sonar/server/computation/issue/IssueComputation.java +++ b/server/sonar-server/src/main/java/org/sonar/server/computation/issue/IssueComputation.java @@ -55,7 +55,6 @@ public class IssueComputation { guessAuthor(issue); autoAssign(issue); copyRuleTags(issue); - // TODO execute extension points } diskIssuesAppender.append(issue); } diff --git a/server/sonar-server/src/main/java/org/sonar/server/computation/issue/SourceLinesCache.java b/server/sonar-server/src/main/java/org/sonar/server/computation/issue/SourceLinesCache.java index 5621a7fe25f..25d38307c38 100644 --- a/server/sonar-server/src/main/java/org/sonar/server/computation/issue/SourceLinesCache.java +++ b/server/sonar-server/src/main/java/org/sonar/server/computation/issue/SourceLinesCache.java @@ -20,21 +20,16 @@ package org.sonar.server.computation.issue; import com.google.common.base.Function; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVParser; -import org.apache.commons.csv.CSVRecord; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; -import org.sonar.api.utils.DateUtils; import org.sonar.core.source.db.FileSourceDto; import org.sonar.server.db.DbClient; +import org.sonar.server.source.db.FileSourceDb; import javax.annotation.CheckForNull; import javax.annotation.Nullable; -import java.io.Reader; +import java.io.InputStream; import java.util.ArrayList; -import java.util.Date; import java.util.List; /** @@ -53,7 +48,11 @@ public class SourceLinesCache { private final List authors = new ArrayList<>(); private boolean loaded = false; private String currentFileUuid = null; + + // date of the latest commit on the file private long lastCommitDate = 0L; + + // author of the latest commit on the file private String lastCommitAuthor = null; public SourceLinesCache(DbClient dbClient) { @@ -114,30 +113,25 @@ public class SourceLinesCache { return authors.size(); } - class FileDataParser implements Function { + /** + * Parse lines from db and collect SCM information + */ + class FileDataParser implements Function { @Override - public Void apply(Reader input) { - CSVParser csvParser = null; - try { - csvParser = new CSVParser(input, CSVFormat.DEFAULT); - for (CSVRecord csvRecord : csvParser) { - Date 
revisionDate = DateUtils.parseDateTimeQuietly(csvRecord.get(FileSourceDto.CSV_INDEX_SCM_DATE)); - - // do not keep all fields in memory - String author = csvRecord.get(FileSourceDto.CSV_INDEX_SCM_AUTHOR); - authors.add(author); - - if (revisionDate != null && revisionDate.getTime() > lastCommitDate) { - lastCommitDate = revisionDate.getTime(); - lastCommitAuthor = author; - } + public Void apply(InputStream input) { + FileSourceDb.Data data = FileSourceDto.decodeData(input); + for (FileSourceDb.Line line : data.getLinesList()) { + String author = null; + if (line.hasScmAuthor()) { + author = line.getScmAuthor(); + } + authors.add(author); + if (line.hasScmDate() && line.getScmDate() > lastCommitDate && author != null) { + lastCommitDate = line.getScmDate(); + lastCommitAuthor = author; } - return null; - } catch (Exception e) { - throw new IllegalStateException("Fail to parse CSV data", e); - } finally { - IOUtils.closeQuietly(csvParser); } + return null; } } } diff --git a/server/sonar-server/src/main/java/org/sonar/server/db/migrations/DatabaseMigrations.java b/server/sonar-server/src/main/java/org/sonar/server/db/migrations/DatabaseMigrations.java index d61f389ca5e..43d9dfbdb9c 100644 --- a/server/sonar-server/src/main/java/org/sonar/server/db/migrations/DatabaseMigrations.java +++ b/server/sonar-server/src/main/java/org/sonar/server/db/migrations/DatabaseMigrations.java @@ -86,6 +86,7 @@ public interface DatabaseMigrations { UpdateProjectsModuleUuidPath.class, FeedIssueComponentUuids.class, FeedSnapshotsLongDates.class, - FeedIssuesLongDates.class + FeedIssuesLongDates.class, + FeedFileSourcesBinaryData.class ); } diff --git a/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryData.java b/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryData.java new file mode 100644 index 00000000000..b80abfd3789 --- /dev/null +++ b/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryData.java @@ -0,0 +1,164 @@ +/* + * SonarQube, open source software quality management tool. + * Copyright (C) 2008-2014 SonarSource + * mailto:contact AT sonarsource DOT com + * + * SonarQube is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 3 of the License, or (at your option) any later version. + * + * SonarQube is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation, + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ */ +package org.sonar.server.db.migrations.v51; + +import com.google.common.base.Function; +import com.google.common.base.Splitter; +import com.google.common.collect.Iterables; +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVParser; +import org.apache.commons.csv.CSVRecord; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.sonar.api.utils.DateUtils; +import org.sonar.core.persistence.Database; +import org.sonar.core.source.db.FileSourceDto; +import org.sonar.server.db.migrations.BaseDataChange; +import org.sonar.server.db.migrations.MassUpdate; +import org.sonar.server.db.migrations.Select; +import org.sonar.server.db.migrations.SqlStatement; +import org.sonar.server.source.db.FileSourceDb; + +import javax.annotation.Nullable; + +import java.sql.SQLException; +import java.util.Iterator; + +public class FeedFileSourcesBinaryData extends BaseDataChange { + + public FeedFileSourcesBinaryData(Database db) { + super(db); + } + + @Override + public void execute(Context context) throws SQLException { + MassUpdate update = context.prepareMassUpdate().rowPluralName("issues"); + update.select("SELECT id,data FROM file_sources WHERE binary_data is null"); + update.update("UPDATE file_sources SET binary_data=? WHERE id=?"); + update.execute(new MassUpdate.Handler() { + @Override + public boolean handle(Select.Row row, SqlStatement update) throws SQLException { + Long fileSourceId = row.getLong(1); + update.setBytes(1, toBinary(fileSourceId, row.getString(2))); + update.setLong(2, fileSourceId); + return true; + } + }); + } + + private byte[] toBinary(Long fileSourceId, @Nullable String data) { + FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder(); + CSVParser parser = null; + try { + if (data != null) { + parser = CSVParser.parse(data, CSVFormat.DEFAULT); + Iterator rows = parser.iterator(); + int line = 1; + while (rows.hasNext()) { + CSVRecord row = rows.next(); + if (row.size() == 16) { + + FileSourceDb.Line.Builder lineBuilder = dataBuilder.addLinesBuilder(); + lineBuilder.setLine(line); + String s = row.get(0); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setScmRevision(s); + } + s = row.get(1); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setScmAuthor(s); + } + s = row.get(2); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setScmDate(DateUtils.parseDateTimeQuietly(s).getTime()); + } + s = row.get(3); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setUtLineHits(Integer.parseInt(s)); + } + s = row.get(4); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setUtConditions(Integer.parseInt(s)); + } + s = row.get(5); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setUtCoveredConditions(Integer.parseInt(s)); + } + s = row.get(6); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setItLineHits(Integer.parseInt(s)); + } + s = row.get(7); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setItConditions(Integer.parseInt(s)); + } + s = row.get(8); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setItCoveredConditions(Integer.parseInt(s)); + } + s = row.get(9); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setOverallLineHits(Integer.parseInt(s)); + } + s = row.get(10); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setOverallConditions(Integer.parseInt(s)); + } + s = row.get(11); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setOverallCoveredConditions(Integer.parseInt(s)); + } + s = row.get(12); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.setHighlighting(s); + } + s = row.get(13); + if 
(StringUtils.isNotEmpty(s)) { + lineBuilder.setSymbols(s); + } + s = row.get(14); + if (StringUtils.isNotEmpty(s)) { + lineBuilder.addAllDuplications(splitIntegers(s)); + } + s = row.get(15); + if (s != null) { + lineBuilder.setSource(s); + } + } + line++; + } + } + return FileSourceDto.encodeData(dataBuilder.build()); + } catch (Exception e) { + throw new IllegalStateException("Invalid FILE_SOURCES.DATA on row with ID " + fileSourceId + ": " + data, e); + } finally { + IOUtils.closeQuietly(parser); + } + } + + private static Iterable splitIntegers(String s) { + return Iterables.transform(Splitter.on(',').split(s), new Function() { + @Override + public Integer apply(String input) { + return Integer.parseInt(input); + } + }); + } +} diff --git a/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineDoc.java b/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineDoc.java index 822f1d3297b..45fb14630c4 100644 --- a/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineDoc.java +++ b/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineDoc.java @@ -20,7 +20,6 @@ package org.sonar.server.source.index; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Maps; import org.sonar.server.search.BaseDoc; import org.sonar.server.search.BaseNormalizer; import org.sonar.server.search.IndexUtils; @@ -30,6 +29,7 @@ import javax.annotation.Nullable; import java.util.Collection; import java.util.Date; +import java.util.HashMap; import java.util.Map; public class SourceLineDoc extends BaseDoc { @@ -38,9 +38,8 @@ public class SourceLineDoc extends BaseDoc { super(fields); } - // For testing purpose public SourceLineDoc() { - this(Maps.newHashMap()); + this(new HashMap(20)); } public String projectUuid() { diff --git a/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineIndexer.java b/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineIndexer.java index cf00b9dd7fd..3e7ce485a15 100644 --- a/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineIndexer.java +++ b/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineIndexer.java @@ -34,6 +34,10 @@ import java.util.Iterator; import static org.sonar.server.source.index.SourceLineIndexDefinition.FIELD_FILE_UUID; import static org.sonar.server.source.index.SourceLineIndexDefinition.FIELD_PROJECT_UUID; +/** + * Add to Elasticsearch index {@link SourceLineIndexDefinition} the rows of + * db table FILE_SOURCES that are not indexed yet + */ public class SourceLineIndexer extends BaseIndexer { private final DbClient dbClient; diff --git a/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineResultSetIterator.java b/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineResultSetIterator.java index 5f44a4013fc..e51d3126819 100644 --- a/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineResultSetIterator.java +++ b/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineResultSetIterator.java @@ -20,31 +20,18 @@ package org.sonar.server.source.index; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVParser; -import org.apache.commons.csv.CSVRecord; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.math.NumberUtils; -import 
org.sonar.api.utils.DateUtils; import org.sonar.core.source.db.FileSourceDto; import org.sonar.server.db.DbClient; import org.sonar.server.db.ResultSetIterator; -import org.sonar.server.db.migrations.SqlUtil; +import org.sonar.server.source.db.FileSourceDb; -import javax.annotation.CheckForNull; -import javax.annotation.Nullable; - -import java.io.IOException; -import java.io.Reader; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Date; import java.util.List; -import java.util.StringTokenizer; /** * Scroll over table FILE_SOURCES and directly parse CSV field required to @@ -84,7 +71,7 @@ public class SourceLineResultSetIterator extends ResultSetIteratornewHashMap()); - - doc.setProjectUuid(projectUuid); - doc.setFileUuid(fileUuid); - doc.setLine(line); - doc.setUpdateDate(updatedDate); - doc.setScmRevision(csvRecord.get(FileSourceDto.CSV_INDEX_SCM_REVISION)); - doc.setScmAuthor(csvRecord.get(FileSourceDto.CSV_INDEX_SCM_AUTHOR)); - doc.setScmDate(DateUtils.parseDateTimeQuietly(csvRecord.get(FileSourceDto.CSV_INDEX_SCM_DATE))); - // UT - doc.setUtLineHits(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_UT_LINE_HITS))); - doc.setUtConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_UT_CONDITIONS))); - doc.setUtCoveredConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_UT_COVERED_CONDITIONS))); - // IT - doc.setItLineHits(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_IT_LINE_HITS))); - doc.setItConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_IT_CONDITIONS))); - doc.setItCoveredConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_IT_COVERED_CONDITIONS))); - // OVERALL - doc.setOverallLineHits(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_OVERALL_LINE_HITS))); - doc.setOverallConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_OVERALL_CONDITIONS))); - doc.setOverallCoveredConditions(parseIntegerFromRecord(csvRecord.get(FileSourceDto.CSV_INDEX_OVERALL_COVERED_CONDITIONS))); - - doc.setHighlighting(csvRecord.get(FileSourceDto.CSV_INDEX_HIGHLIGHTING)); - doc.setSymbols(csvRecord.get(FileSourceDto.CSV_INDEX_SYMBOLS)); - doc.setDuplications(parseDuplications(csvRecord.get(FileSourceDto.CSV_INDEX_DUPLICATIONS))); - - // source is always the latest field. All future fields will be added between duplications (14) and source. - doc.setSource(csvRecord.get(csvRecord.size()-1)); - - result.addLine(doc); - - line++; - } - } catch (IOException ioError) { - throw new IllegalStateException("Impossible to open stream for file_sources.data with file_uuid " + fileUuid, ioError); - } catch (ArrayIndexOutOfBoundsException lineError) { - throw new IllegalStateException( - String.format("Impossible to parse source line data, stuck at line %d", line), lineError); - } finally { - IOUtils.closeQuietly(csv); - IOUtils.closeQuietly(csvParser); + SourceFile result = new SourceFile(fileUuid, updatedAt); + FileSourceDb.Data data = FileSourceDto.decodeData(rs.getBinaryStream(4)); + for (FileSourceDb.Line line : data.getLinesList()) { + SourceLineDoc doc = new SourceLineDoc(); + doc.setProjectUuid(projectUuid); + doc.setFileUuid(fileUuid); + doc.setLine(line.getLine()); + doc.setUpdateDate(updatedDate); + doc.setScmRevision(line.getScmRevision()); + doc.setScmAuthor(line.getScmAuthor()); + doc.setScmDate(line.hasScmDate() ? 
new Date(line.getScmDate()) : null); + // UT + doc.setUtLineHits(line.hasUtLineHits() ? line.getUtLineHits() : null); + doc.setUtConditions(line.hasUtConditions() ? line.getUtConditions() : null); + doc.setUtCoveredConditions(line.hasUtCoveredConditions() ? line.getUtCoveredConditions() : null); + // IT + doc.setItLineHits(line.hasItLineHits() ? line.getItLineHits() : null); + doc.setItConditions(line.hasItConditions() ? line.getItConditions() : null); + doc.setItCoveredConditions(line.hasItCoveredConditions() ? line.getItCoveredConditions() : null); + // OVERALL + doc.setOverallLineHits(line.hasOverallLineHits() ? line.getOverallLineHits() : null); + doc.setOverallConditions(line.hasOverallConditions() ? line.getOverallConditions() : null); + doc.setOverallCoveredConditions(line.hasOverallCoveredConditions() ? line.getOverallCoveredConditions() : null); + + doc.setHighlighting(line.hasHighlighting() ? line.getHighlighting() : null); + doc.setSymbols(line.hasSymbols() ? line.getSymbols() : null); + doc.setDuplications(line.getDuplicationsList()); + + // source is always the latest field. All future fields will be added between duplications (14) and source. + doc.setSource(line.hasSource() ? line.getSource() : null); + + result.addLine(doc); } - return result; } - - private List parseDuplications(@Nullable String duplications) { - List dups = Lists.newArrayList(); - if (StringUtils.isNotEmpty(duplications)) { - StringTokenizer tokenizer = new StringTokenizer(duplications, ",", false); - while (tokenizer.hasMoreTokens()) { - dups.add(NumberUtils.toInt(tokenizer.nextToken(), -1)); - } - } - return dups; - } - - @CheckForNull - private Integer parseIntegerFromRecord(@Nullable String cellValue) { - if (cellValue == null || cellValue.isEmpty()) { - return null; - } else { - return Integer.parseInt(cellValue); - } - } } diff --git a/server/sonar-server/src/test/java/org/sonar/server/batch/ProjectRepositoryLoaderMediumTest.java b/server/sonar-server/src/test/java/org/sonar/server/batch/ProjectRepositoryLoaderMediumTest.java index 6bee60a5d28..1a1f7a62003 100644 --- a/server/sonar-server/src/test/java/org/sonar/server/batch/ProjectRepositoryLoaderMediumTest.java +++ b/server/sonar-server/src/test/java/org/sonar/server/batch/ProjectRepositoryLoaderMediumTest.java @@ -785,7 +785,7 @@ public class ProjectRepositoryLoaderMediumTest { return new FileSourceDto() .setFileUuid(file.uuid()) .setProjectUuid(file.projectUuid()) - .setData(",,,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,,content ") + //.setData(",,,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,,content ") .setDataHash("0263047cd758c68c27683625f072f010") .setLineHashes("8d7b3d6b83c0a517eac07e1aac94b773") .setCreatedAt(System.currentTimeMillis()) diff --git a/server/sonar-server/src/test/java/org/sonar/server/computation/issue/SourceLinesCacheTest.java b/server/sonar-server/src/test/java/org/sonar/server/computation/issue/SourceLinesCacheTest.java index b7c559e5a17..aed64514efb 100644 --- a/server/sonar-server/src/test/java/org/sonar/server/computation/issue/SourceLinesCacheTest.java +++ b/server/sonar-server/src/test/java/org/sonar/server/computation/issue/SourceLinesCacheTest.java @@ -25,8 +25,12 @@ import org.junit.experimental.categories.Category; import org.sonar.core.persistence.DbTester; import org.sonar.core.source.db.FileSourceDao; import org.sonar.server.db.DbClient; +import org.sonar.server.source.db.FileSourceDb; +import org.sonar.server.source.db.FileSourceTesting; import org.sonar.test.DbTests; +import java.sql.Connection; + import static 
org.assertj.core.api.Assertions.assertThat; @Category(DbTests.class) @@ -38,6 +42,15 @@ public class SourceLinesCacheTest { @Test public void line_author() throws Exception { dbTester.prepareDbUnit(getClass(), "load_data.xml"); + FileSourceDb.Data.Builder data = FileSourceDb.Data.newBuilder(); + data.addLinesBuilder().setLine(1).setScmAuthor("charb").setScmDate(1_400_000_000_000L); + data.addLinesBuilder().setLine(2).setScmAuthor("cabu").setScmDate(1_500_000_000_000L); + data.addLinesBuilder().setLine(3).setScmAuthor("wolinski").setScmDate(1_300_000_000_000L); + data.addLinesBuilder().setLine(4); + try (Connection connection = dbTester.openConnection()) { + FileSourceTesting.updateDataColumn(connection, "FILE_A", data.build()); + } + DbClient dbClient = new DbClient(dbTester.database(), dbTester.myBatis(), new FileSourceDao(dbTester.myBatis())); SourceLinesCache cache = new SourceLinesCache(dbClient); cache.init("FILE_A"); @@ -55,12 +68,10 @@ public class SourceLinesCacheTest { // only 4 lines in the file -> return last committer on file assertThat(cache.lineAuthor(100)).isEqualTo("cabu"); - assertThat(cache.countLines()).isEqualTo(4); cache.clear(); assertThat(cache.countLines()).isEqualTo(0); } - } diff --git a/server/sonar-server/src/test/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest.java b/server/sonar-server/src/test/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest.java new file mode 100644 index 00000000000..80fc3c07db9 --- /dev/null +++ b/server/sonar-server/src/test/java/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest.java @@ -0,0 +1,95 @@ +/* + * SonarQube, open source software quality management tool. + * Copyright (C) 2008-2014 SonarSource + * mailto:contact AT sonarsource DOT com + * + * SonarQube is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 3 of the License, or (at your option) any later version. + * + * SonarQube is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation, + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ */ +package org.sonar.server.db.migrations.v51; + +import org.apache.commons.dbutils.DbUtils; +import org.junit.ClassRule; +import org.junit.Test; +import org.sonar.core.persistence.DbTester; +import org.sonar.core.source.db.FileSourceDto; +import org.sonar.server.db.migrations.DatabaseMigration; +import org.sonar.server.source.db.FileSourceDb; + +import java.io.InputStream; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.Assert.fail; + +public class FeedFileSourcesBinaryDataTest { + + @ClassRule + public static DbTester db = new DbTester().schema(FeedFileSourcesBinaryDataTest.class, "schema.sql"); + + @Test + public void convert_csv_to_protobuf() throws Exception { + db.prepareDbUnit(getClass(), "data.xml"); + + DatabaseMigration migration = new FeedFileSourcesBinaryData(db.database()); + migration.execute(); + + int count = db.countSql("select count(*) from file_sources where binary_data is not null"); + assertThat(count).isEqualTo(3); + + try(Connection connection = db.openConnection()) { + FileSourceDb.Data data = selectData(connection, 1L); + assertThat(data.getLinesCount()).isEqualTo(4); + assertThat(data.getLines(0).getScmRevision()).isEqualTo("aef12a"); + + data = selectData(connection, 2L); + assertThat(data.getLinesCount()).isEqualTo(4); + assertThat(data.getLines(0).hasScmRevision()).isFalse(); + + data = selectData(connection, 3L); + assertThat(data.getLinesCount()).isEqualTo(0); + } + } + + @Test + public void fail_to_parse_csv() throws Exception { + db.prepareDbUnit(getClass(), "bad_data.xml"); + + DatabaseMigration migration = new FeedFileSourcesBinaryData(db.database()); + try { + migration.execute(); + fail(); + } catch (IllegalStateException e) { + assertThat(e).hasMessageContaining("Invalid FILE_SOURCES.DATA on row with ID 1:"); + } + } + + private FileSourceDb.Data selectData(Connection connection, long fileSourceId) throws SQLException { + PreparedStatement pstmt = connection.prepareStatement("select binary_data from file_sources where id=?"); + ResultSet rs = null; + try { + pstmt.setLong(1, fileSourceId); + rs = pstmt.executeQuery(); + rs.next(); + InputStream data = rs.getBinaryStream(1); + return FileSourceDto.decodeData(data); + } finally { + DbUtils.closeQuietly(rs); + DbUtils.closeQuietly(pstmt); + } + } +} diff --git a/server/sonar-server/src/test/java/org/sonar/server/source/db/FileSourceTesting.java b/server/sonar-server/src/test/java/org/sonar/server/source/db/FileSourceTesting.java new file mode 100644 index 00000000000..10c44e5d259 --- /dev/null +++ b/server/sonar-server/src/test/java/org/sonar/server/source/db/FileSourceTesting.java @@ -0,0 +1,108 @@ +/* + * SonarQube, open source software quality management tool. + * Copyright (C) 2008-2014 SonarSource + * mailto:contact AT sonarsource DOT com + * + * SonarQube is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 3 of the License, or (at your option) any later version. + * + * SonarQube is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation, + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ +package org.sonar.server.source.db; + +import org.apache.commons.lang.RandomStringUtils; +import org.apache.commons.lang.math.RandomUtils; +import org.sonar.core.source.db.FileSourceDto; + +import java.io.IOException; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.Arrays; + +public class FileSourceTesting { + + private FileSourceTesting() { + // only static stuff + } + + public static void updateDataColumn(Connection connection, String fileUuid, FileSourceDb.Data data) throws SQLException { + updateDataColumn(connection, fileUuid, FileSourceDto.encodeData(data)); + } + + public static void updateDataColumn(Connection connection, String fileUuid, byte[] data) throws SQLException { + PreparedStatement stmt = connection.prepareStatement("UPDATE file_sources SET binary_data = ? WHERE file_uuid=?"); + stmt.setBytes(1, data); + stmt.setString(2, fileUuid); + stmt.executeUpdate(); + stmt.close(); + connection.commit(); + } + + /** + * Generate predefined fake data. Result is mutable. + */ + public static FileSourceDb.Data.Builder newFakeData(int numberOfLines) throws IOException { + FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder(); + for (int i = 1; i <= numberOfLines; i++) { + dataBuilder.addLinesBuilder() + .setLine(i) + .setScmRevision("REVISION_" + i) + .setScmAuthor("AUTHOR_" + i) + .setScmDate(1_500_000_000_00L + i) + .setSource("SOURCE_" + i) + .setUtLineHits(i) + .setUtConditions(i + 1) + .setUtCoveredConditions(i + 2) + .setItLineHits(i + 3) + .setItConditions(i + 4) + .setItCoveredConditions(i + 5) + .setOverallLineHits(i + 6) + .setOverallConditions(i + 7) + .setOverallCoveredConditions(i + 8) + .setHighlighting("HIGHLIGHTING_" + i) + .setSymbols("SYMBOLS_" + i) + .addAllDuplications(Arrays.asList(i)) + .build(); + } + return dataBuilder; + } + + /** + * Generate random data. Result is mutable. 
+ */ + public static FileSourceDb.Data.Builder newRandomData(int numberOfLines) throws IOException { + FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder(); + for (int i = 1; i <= numberOfLines; i++) { + dataBuilder.addLinesBuilder() + .setLine(i) + .setScmRevision(RandomStringUtils.randomAlphanumeric(15)) + .setScmAuthor(RandomStringUtils.randomAlphanumeric(10)) + .setScmDate(RandomUtils.nextLong()) + .setSource(RandomStringUtils.randomAlphanumeric(20)) + .setUtLineHits(RandomUtils.nextInt(4)) + .setUtConditions(RandomUtils.nextInt(4)) + .setUtCoveredConditions(RandomUtils.nextInt(4)) + .setItLineHits(RandomUtils.nextInt(4)) + .setItConditions(RandomUtils.nextInt(4)) + .setItCoveredConditions(RandomUtils.nextInt(4)) + .setOverallLineHits(RandomUtils.nextInt(4)) + .setOverallConditions(RandomUtils.nextInt(4)) + .setOverallCoveredConditions(RandomUtils.nextInt(4)) + .setHighlighting(RandomStringUtils.randomAlphanumeric(40)) + .setSymbols(RandomStringUtils.randomAlphanumeric(30)) + .addAllDuplications(Arrays.asList(RandomUtils.nextInt(200), RandomUtils.nextInt(200))) + .build(); + } + return dataBuilder; + } +} diff --git a/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineIndexerTest.java b/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineIndexerTest.java index ed3a5dc49c2..35ae780b0ba 100644 --- a/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineIndexerTest.java +++ b/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineIndexerTest.java @@ -38,10 +38,12 @@ import org.sonar.core.persistence.DbTester; import org.sonar.server.db.DbClient; import org.sonar.server.es.EsTester; import org.sonar.server.search.BaseNormalizer; +import org.sonar.server.source.db.FileSourceTesting; import org.sonar.test.DbTests; import org.sonar.test.TestUtils; import java.io.FileInputStream; +import java.sql.Connection; import java.util.Date; import java.util.List; import java.util.Map; @@ -69,8 +71,13 @@ public class SourceLineIndexerTest { @Test public void index_source_lines_from_db() throws Exception { db.prepareDbUnit(getClass(), "db.xml"); + + Connection connection = db.openConnection(); + FileSourceTesting.updateDataColumn(connection, "FILE_UUID", FileSourceTesting.newRandomData(3).build()); + connection.close(); + indexer.index(); - assertThat(countDocuments()).isEqualTo(2); + assertThat(countDocuments()).isEqualTo(3); } @Test diff --git a/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineResultSetIteratorTest.java b/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineResultSetIteratorTest.java index 5d19f58a2b1..aaa10a7ae73 100644 --- a/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineResultSetIteratorTest.java +++ b/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineResultSetIteratorTest.java @@ -26,10 +26,10 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.sonar.core.persistence.DbTester; import org.sonar.server.db.DbClient; +import org.sonar.server.source.db.FileSourceTesting; import org.sonar.test.DbTests; import java.sql.Connection; -import java.sql.PreparedStatement; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.fail; @@ -56,39 +56,32 @@ public class SourceLineResultSetIteratorTest { } @Test - public void should_generate_source_line_documents() throws Exception { + public void 
parse_db_and_generate_source_line_documents() throws Exception { db.prepareDbUnit(getClass(), "shared.xml"); - PreparedStatement stmt = connection.prepareStatement("UPDATE file_sources SET data = ? WHERE id=1"); - stmt.setString(1, "aef12a,alice,2014-04-25T12:34:56+0100,1,0,0,2,0,0,3,0,0,polop,palap,,class Foo {\r\n" + - "abe465,bob,2014-07-25T12:34:56+0100,,,,,,,,,,,,, // Empty\r\n" + - "afb789,carol,2014-03-23T12:34:56+0100,,,,,,,,,,,,,}\r\n" + - "afb789,carol,2014-03-23T12:34:56+0100,,,,,,,,,,,,,\r\n"); - stmt.executeUpdate(); + FileSourceTesting.updateDataColumn(connection, "FILE_UUID", FileSourceTesting.newFakeData(3).build()); SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L); assertThat(iterator.hasNext()).isTrue(); SourceLineResultSetIterator.SourceFile file = iterator.next(); - assertThat(file.getLines()).hasSize(4); + assertThat(file.getLines()).hasSize(3); SourceLineDoc firstLine = file.getLines().get(0); - assertThat(firstLine.projectUuid()).isEqualTo("uuid-MyProject"); - assertThat(firstLine.fileUuid()).isEqualTo("uuid-MyFile.xoo"); + assertThat(firstLine.projectUuid()).isEqualTo("PROJECT_UUID"); + assertThat(firstLine.fileUuid()).isEqualTo("FILE_UUID"); assertThat(firstLine.line()).isEqualTo(1); - assertThat(firstLine.scmRevision()).isEqualTo("aef12a"); - assertThat(firstLine.scmAuthor()).isEqualTo("alice"); - // TODO Sanitize usage of fscking dates - // assertThat(firstLine.scmDate()).isEqualTo(DateUtils.parseDateTime("2014-04-25T12:34:56+0100")); - assertThat(firstLine.highlighting()).isEqualTo("polop"); - assertThat(firstLine.symbols()).isEqualTo("palap"); - assertThat(firstLine.source()).isEqualTo("class Foo {"); + assertThat(firstLine.scmRevision()).isEqualTo("REVISION_1"); + assertThat(firstLine.scmAuthor()).isEqualTo("AUTHOR_1"); + assertThat(firstLine.highlighting()).isEqualTo("HIGHLIGHTING_1"); + assertThat(firstLine.symbols()).isEqualTo("SYMBOLS_1"); + assertThat(firstLine.source()).isEqualTo("SOURCE_1"); assertThat(firstLine.utLineHits()).isEqualTo(1); - assertThat(firstLine.utConditions()).isEqualTo(0); - assertThat(firstLine.utCoveredConditions()).isEqualTo(0); - assertThat(firstLine.itLineHits()).isEqualTo(2); - assertThat(firstLine.itConditions()).isEqualTo(0); - assertThat(firstLine.itCoveredConditions()).isEqualTo(0); - assertThat(firstLine.overallLineHits()).isEqualTo(3); - assertThat(firstLine.overallConditions()).isEqualTo(0); - assertThat(firstLine.overallCoveredConditions()).isEqualTo(0); + assertThat(firstLine.utConditions()).isEqualTo(2); + assertThat(firstLine.utCoveredConditions()).isEqualTo(3); + assertThat(firstLine.itLineHits()).isEqualTo(4); + assertThat(firstLine.itConditions()).isEqualTo(5); + assertThat(firstLine.itCoveredConditions()).isEqualTo(6); + assertThat(firstLine.overallLineHits()).isEqualTo(7); + assertThat(firstLine.overallConditions()).isEqualTo(8); + assertThat(firstLine.overallCoveredConditions()).isEqualTo(9); iterator.close(); } @@ -102,36 +95,10 @@ public class SourceLineResultSetIteratorTest { } @Test - public void parse_empty_file() throws Exception { - db.prepareDbUnit(getClass(), "empty-file.xml"); - - SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L); - assertThat(iterator.hasNext()).isTrue(); - SourceLineResultSetIterator.SourceFile file = iterator.next(); - assertThat(file.getFileUuid()).isEqualTo("uuid-MyFile.xoo"); - assertThat(file.getLines()).isEmpty(); - iterator.close(); - } - - @Test - public void parse_null_file() 
throws Exception { - db.prepareDbUnit(getClass(), "null-file.xml"); - - SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L); - assertThat(iterator.hasNext()).isTrue(); - SourceLineResultSetIterator.SourceFile file = iterator.next(); - assertThat(file.getFileUuid()).isEqualTo("uuid-MyFile.xoo"); - assertThat(file.getLines()).isEmpty(); - iterator.close(); - } - - @Test - public void should_fail_on_bad_csv() throws Exception { + public void should_fail_on_bad_data_format() throws Exception { db.prepareDbUnit(getClass(), "shared.xml"); - PreparedStatement stmt = connection.prepareStatement("UPDATE file_sources SET data = ? WHERE id=1"); - stmt.setString(1, "plouf"); - stmt.executeUpdate(); - stmt.close(); + + FileSourceTesting.updateDataColumn(connection, "FILE_UUID", "THIS_IS_NOT_PROTOBUF".getBytes()); SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L); try { diff --git a/server/sonar-server/src/test/resources/org/sonar/server/component/db/ComponentDaoTest/select_module_files_tree.xml b/server/sonar-server/src/test/resources/org/sonar/server/component/db/ComponentDaoTest/select_module_files_tree.xml index e1ea8f31504..443b1d0a5ae 100644 --- a/server/sonar-server/src/test/resources/org/sonar/server/component/db/ComponentDaoTest/select_module_files_tree.xml +++ b/server/sonar-server/src/test/resources/org/sonar/server/component/db/ComponentDaoTest/select_module_files_tree.xml @@ -20,7 +20,7 @@ enabled="[true]" language="java" copy_resource_id="[null]" person_id="[null]" path="src/org/struts/pom.xml" authorization_updated_at="[null]"/> + + diff --git a/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/data.xml b/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/data.xml new file mode 100644 index 00000000000..fe6020ae9bb --- /dev/null +++ b/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/data.xml @@ -0,0 +1,22 @@ + + + + + + + + + + diff --git a/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/schema.sql b/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/schema.sql new file mode 100644 index 00000000000..5649f795864 --- /dev/null +++ b/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedFileSourcesBinaryDataTest/schema.sql @@ -0,0 +1,12 @@ +CREATE TABLE "FILE_SOURCES" ( + "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1), + "PROJECT_UUID" VARCHAR(50) NOT NULL, + "FILE_UUID" VARCHAR(50) NOT NULL, + "LINE_HASHES" CLOB(2147483647), + "DATA" CLOB(2147483647), + "DATA_HASH" VARCHAR(50) NOT NULL, + "SRC_HASH" VARCHAR(50) NULL, + "CREATED_AT" BIGINT NOT NULL, + "UPDATED_AT" BIGINT NOT NULL, + "BINARY_DATA" BINARY(167772150), +); diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/db.xml b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/db.xml index d8d7eed7662..48e58478ab6 100644 --- a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/db.xml +++ b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/db.xml @@ -1,6 +1,6 @@ - + diff --git 
a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/empty-file.xml b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/empty-file.xml deleted file mode 100644 index cf6e01583dd..00000000000 --- a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/empty-file.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/null-file.xml b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/null-file.xml deleted file mode 100644 index 91dafded3dc..00000000000 --- a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/null-file.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/schema.sql b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/schema.sql index 3032e93f256..859eefe3625 100644 --- a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/schema.sql +++ b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/schema.sql @@ -3,7 +3,7 @@ CREATE TABLE "FILE_SOURCES" ( "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1), "PROJECT_UUID" VARCHAR(50) NOT NULL, "FILE_UUID" VARCHAR(50) NOT NULL, - "DATA" CLOB(2147483647), + "BINARY_DATA" BINARY(167772150), "DATA_HASH" VARCHAR(50) NOT NULL, "CREATED_AT" BIGINT NOT NULL, "UPDATED_AT" BIGINT NOT NULL diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/shared.xml b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/shared.xml index f1498ba9c56..521e0db9ca5 100644 --- a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/shared.xml +++ b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/shared.xml @@ -1,6 +1,6 @@ - + diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/ws/HashActionTest/shared.xml b/server/sonar-server/src/test/resources/org/sonar/server/source/ws/HashActionTest/shared.xml index 756dbacf84e..4dcc5c21ee0 100644 --- a/server/sonar-server/src/test/resources/org/sonar/server/source/ws/HashActionTest/shared.xml +++ b/server/sonar-server/src/test/resources/org/sonar/server/source/ws/HashActionTest/shared.xml @@ -5,7 +5,7 @@ path="src/main/java/Action.java"/> diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/781_add_file_sources_binary_data.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/781_add_file_sources_binary_data.rb new file mode 100644 index 00000000000..3b969e3967e --- /dev/null +++ b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/781_add_file_sources_binary_data.rb @@ -0,0 +1,29 @@ +# +# SonarQube, open source software quality management tool. 
+# Copyright (C) 2008-2014 SonarSource +# mailto:contact AT sonarsource DOT com +# +# SonarQube is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 3 of the License, or (at your option) any later version. +# +# SonarQube is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# + +# +# SonarQube 5.1 +# +class AddFileSourcesBinaryData < ActiveRecord::Migration + + def self.up + add_column 'file_sources', :binary_data, :binary, :null => true + end +end diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/782_feed_file_sources_binary_data.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/782_feed_file_sources_binary_data.rb new file mode 100644 index 00000000000..dea83cfc92b --- /dev/null +++ b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/782_feed_file_sources_binary_data.rb @@ -0,0 +1,29 @@ +# +# SonarQube, open source software quality management tool. +# Copyright (C) 2008-2014 SonarSource +# mailto:contact AT sonarsource DOT com +# +# SonarQube is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 3 of the License, or (at your option) any later version. +# +# SonarQube is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# + +# +# SonarQube 5.1 +# +class FeedFileSourcesBinaryData < ActiveRecord::Migration + + def self.up + execute_java_migration('org.sonar.server.db.migrations.v51.FeedFileSourcesBinaryData') + end +end diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/783_drop_file_sources_data.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/783_drop_file_sources_data.rb new file mode 100644 index 00000000000..06d5fbb598e --- /dev/null +++ b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/783_drop_file_sources_data.rb @@ -0,0 +1,30 @@ +# +# SonarQube, open source software quality management tool. +# Copyright (C) 2008-2014 SonarSource +# mailto:contact AT sonarsource DOT com +# +# SonarQube is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 3 of the License, or (at your option) any later version. +# +# SonarQube is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# + +# +# SonarQube 5.1 +# +class DropFileSourcesData < ActiveRecord::Migration + + def self.up + remove_column 'file_sources', 'data' + end + +end diff --git a/sonar-batch-protocol/pom.xml b/sonar-batch-protocol/pom.xml index 399c4a242e0..343d707f094 100644 --- a/sonar-batch-protocol/pom.xml +++ b/sonar-batch-protocol/pom.xml @@ -13,6 +13,10 @@ Classes used for communication between batch and server + + net.jpountz.lz4 + lz4 + com.google.protobuf protobuf-java diff --git a/sonar-batch-protocol/src/main/gen-java/org/sonar/server/source/db/FileSourceDb.java b/sonar-batch-protocol/src/main/gen-java/org/sonar/server/source/db/FileSourceDb.java new file mode 100644 index 00000000000..f3f9db893b2 --- /dev/null +++ b/sonar-batch-protocol/src/main/gen-java/org/sonar/server/source/db/FileSourceDb.java @@ -0,0 +1,3008 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: file_source_db.proto + +package org.sonar.server.source.db; + +public final class FileSourceDb { + private FileSourceDb() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface LineOrBuilder extends + // @@protoc_insertion_point(interface_extends:org.sonar.server.source.db.Line) + com.google.protobuf.MessageOrBuilder { + + /** + * optional int32 line = 1; + */ + boolean hasLine(); + /** + * optional int32 line = 1; + */ + int getLine(); + + /** + * optional string source = 2; + */ + boolean hasSource(); + /** + * optional string source = 2; + */ + java.lang.String getSource(); + /** + * optional string source = 2; + */ + com.google.protobuf.ByteString + getSourceBytes(); + + /** + * optional string scm_revision = 3; + * + *
+     * SCM
+     * 
+ */ + boolean hasScmRevision(); + /** + * optional string scm_revision = 3; + * + *
+     * SCM
+     * 
+ */ + java.lang.String getScmRevision(); + /** + * optional string scm_revision = 3; + * + *
+     * SCM
+     * 
+ */ + com.google.protobuf.ByteString + getScmRevisionBytes(); + + /** + * optional string scm_author = 4; + */ + boolean hasScmAuthor(); + /** + * optional string scm_author = 4; + */ + java.lang.String getScmAuthor(); + /** + * optional string scm_author = 4; + */ + com.google.protobuf.ByteString + getScmAuthorBytes(); + + /** + * optional int64 scm_date = 5; + */ + boolean hasScmDate(); + /** + * optional int64 scm_date = 5; + */ + long getScmDate(); + + /** + * optional int32 ut_line_hits = 6; + * + *
+     * unit testing
+     * 
+ */ + boolean hasUtLineHits(); + /** + * optional int32 ut_line_hits = 6; + * + *
+     * unit testing
+     * 
+ */ + int getUtLineHits(); + + /** + * optional int32 ut_conditions = 7; + */ + boolean hasUtConditions(); + /** + * optional int32 ut_conditions = 7; + */ + int getUtConditions(); + + /** + * optional int32 ut_covered_conditions = 8; + */ + boolean hasUtCoveredConditions(); + /** + * optional int32 ut_covered_conditions = 8; + */ + int getUtCoveredConditions(); + + /** + * optional int32 it_line_hits = 9; + * + *
+     * integration testing
+     * 
+ */ + boolean hasItLineHits(); + /** + * optional int32 it_line_hits = 9; + * + *
+     * integration testing
+     * 
+ */ + int getItLineHits(); + + /** + * optional int32 it_conditions = 10; + */ + boolean hasItConditions(); + /** + * optional int32 it_conditions = 10; + */ + int getItConditions(); + + /** + * optional int32 it_covered_conditions = 11; + */ + boolean hasItCoveredConditions(); + /** + * optional int32 it_covered_conditions = 11; + */ + int getItCoveredConditions(); + + /** + * optional int32 overall_line_hits = 12; + * + *
+     * overall testing
+     * 
+ */ + boolean hasOverallLineHits(); + /** + * optional int32 overall_line_hits = 12; + * + *
+     * overall testing
+     * 
+ */ + int getOverallLineHits(); + + /** + * optional int32 overall_conditions = 13; + */ + boolean hasOverallConditions(); + /** + * optional int32 overall_conditions = 13; + */ + int getOverallConditions(); + + /** + * optional int32 overall_covered_conditions = 14; + */ + boolean hasOverallCoveredConditions(); + /** + * optional int32 overall_covered_conditions = 14; + */ + int getOverallCoveredConditions(); + + /** + * optional string highlighting = 15; + */ + boolean hasHighlighting(); + /** + * optional string highlighting = 15; + */ + java.lang.String getHighlighting(); + /** + * optional string highlighting = 15; + */ + com.google.protobuf.ByteString + getHighlightingBytes(); + + /** + * optional string symbols = 16; + */ + boolean hasSymbols(); + /** + * optional string symbols = 16; + */ + java.lang.String getSymbols(); + /** + * optional string symbols = 16; + */ + com.google.protobuf.ByteString + getSymbolsBytes(); + + /** + * repeated int32 duplications = 17; + */ + java.util.List getDuplicationsList(); + /** + * repeated int32 duplications = 17; + */ + int getDuplicationsCount(); + /** + * repeated int32 duplications = 17; + */ + int getDuplications(int index); + } + /** + * Protobuf type {@code org.sonar.server.source.db.Line} + */ + public static final class Line extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:org.sonar.server.source.db.Line) + LineOrBuilder { + // Use Line.newBuilder() to construct. + private Line(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private Line(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final Line defaultInstance; + public static Line getDefaultInstance() { + return defaultInstance; + } + + public Line getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Line( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + line_ = input.readInt32(); + break; + } + case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000002; + source_ = bs; + break; + } + case 26: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000004; + scmRevision_ = bs; + break; + } + case 34: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000008; + scmAuthor_ = bs; + break; + } + case 40: { + bitField0_ |= 0x00000010; + scmDate_ = input.readInt64(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + utLineHits_ = input.readInt32(); + break; + } + case 56: { + bitField0_ |= 0x00000040; + utConditions_ = input.readInt32(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + utCoveredConditions_ = 
input.readInt32(); + break; + } + case 72: { + bitField0_ |= 0x00000100; + itLineHits_ = input.readInt32(); + break; + } + case 80: { + bitField0_ |= 0x00000200; + itConditions_ = input.readInt32(); + break; + } + case 88: { + bitField0_ |= 0x00000400; + itCoveredConditions_ = input.readInt32(); + break; + } + case 96: { + bitField0_ |= 0x00000800; + overallLineHits_ = input.readInt32(); + break; + } + case 104: { + bitField0_ |= 0x00001000; + overallConditions_ = input.readInt32(); + break; + } + case 112: { + bitField0_ |= 0x00002000; + overallCoveredConditions_ = input.readInt32(); + break; + } + case 122: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00004000; + highlighting_ = bs; + break; + } + case 130: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00008000; + symbols_ = bs; + break; + } + case 136: { + if (!((mutable_bitField0_ & 0x00010000) == 0x00010000)) { + duplications_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00010000; + } + duplications_.add(input.readInt32()); + break; + } + case 138: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00010000) == 0x00010000) && input.getBytesUntilLimit() > 0) { + duplications_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00010000; + } + while (input.getBytesUntilLimit() > 0) { + duplications_.add(input.readInt32()); + } + input.popLimit(limit); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00010000) == 0x00010000)) { + duplications_ = java.util.Collections.unmodifiableList(duplications_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Line_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Line_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.sonar.server.source.db.FileSourceDb.Line.class, org.sonar.server.source.db.FileSourceDb.Line.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Line parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Line(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + public static final int LINE_FIELD_NUMBER = 1; + private int line_; + /** + * optional int32 line = 1; + */ + public boolean hasLine() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional int32 line = 1; + */ + public int getLine() { + return line_; + } + + public static final int SOURCE_FIELD_NUMBER = 2; + private java.lang.Object source_; + /** + * optional string source = 2; + */ + public boolean hasSource() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string 
source = 2; + */ + public java.lang.String getSource() { + java.lang.Object ref = source_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + source_ = s; + } + return s; + } + } + /** + * optional string source = 2; + */ + public com.google.protobuf.ByteString + getSourceBytes() { + java.lang.Object ref = source_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + source_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int SCM_REVISION_FIELD_NUMBER = 3; + private java.lang.Object scmRevision_; + /** + * optional string scm_revision = 3; + * + *
+     * SCM
+     * </pre>
+ */ + public boolean hasScmRevision() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string scm_revision = 3; + * + *
+     * SCM
+     * </pre>
+ */ + public java.lang.String getScmRevision() { + java.lang.Object ref = scmRevision_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + scmRevision_ = s; + } + return s; + } + } + /** + * optional string scm_revision = 3; + * + *
+     * SCM
+     * </pre>
+ */ + public com.google.protobuf.ByteString + getScmRevisionBytes() { + java.lang.Object ref = scmRevision_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + scmRevision_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int SCM_AUTHOR_FIELD_NUMBER = 4; + private java.lang.Object scmAuthor_; + /** + * optional string scm_author = 4; + */ + public boolean hasScmAuthor() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string scm_author = 4; + */ + public java.lang.String getScmAuthor() { + java.lang.Object ref = scmAuthor_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + scmAuthor_ = s; + } + return s; + } + } + /** + * optional string scm_author = 4; + */ + public com.google.protobuf.ByteString + getScmAuthorBytes() { + java.lang.Object ref = scmAuthor_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + scmAuthor_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int SCM_DATE_FIELD_NUMBER = 5; + private long scmDate_; + /** + * optional int64 scm_date = 5; + */ + public boolean hasScmDate() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional int64 scm_date = 5; + */ + public long getScmDate() { + return scmDate_; + } + + public static final int UT_LINE_HITS_FIELD_NUMBER = 6; + private int utLineHits_; + /** + * optional int32 ut_line_hits = 6; + * + *
+     * unit testing
+     * </pre>
+ */ + public boolean hasUtLineHits() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional int32 ut_line_hits = 6; + * + *
+     * unit testing
+     * </pre>
+ */ + public int getUtLineHits() { + return utLineHits_; + } + + public static final int UT_CONDITIONS_FIELD_NUMBER = 7; + private int utConditions_; + /** + * optional int32 ut_conditions = 7; + */ + public boolean hasUtConditions() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + /** + * optional int32 ut_conditions = 7; + */ + public int getUtConditions() { + return utConditions_; + } + + public static final int UT_COVERED_CONDITIONS_FIELD_NUMBER = 8; + private int utCoveredConditions_; + /** + * optional int32 ut_covered_conditions = 8; + */ + public boolean hasUtCoveredConditions() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + /** + * optional int32 ut_covered_conditions = 8; + */ + public int getUtCoveredConditions() { + return utCoveredConditions_; + } + + public static final int IT_LINE_HITS_FIELD_NUMBER = 9; + private int itLineHits_; + /** + * optional int32 it_line_hits = 9; + * + *
+     * integration testing
+     * </pre>
+ */ + public boolean hasItLineHits() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + /** + * optional int32 it_line_hits = 9; + * + *
+     * integration testing
+     * </pre>
+ */ + public int getItLineHits() { + return itLineHits_; + } + + public static final int IT_CONDITIONS_FIELD_NUMBER = 10; + private int itConditions_; + /** + * optional int32 it_conditions = 10; + */ + public boolean hasItConditions() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + /** + * optional int32 it_conditions = 10; + */ + public int getItConditions() { + return itConditions_; + } + + public static final int IT_COVERED_CONDITIONS_FIELD_NUMBER = 11; + private int itCoveredConditions_; + /** + * optional int32 it_covered_conditions = 11; + */ + public boolean hasItCoveredConditions() { + return ((bitField0_ & 0x00000400) == 0x00000400); + } + /** + * optional int32 it_covered_conditions = 11; + */ + public int getItCoveredConditions() { + return itCoveredConditions_; + } + + public static final int OVERALL_LINE_HITS_FIELD_NUMBER = 12; + private int overallLineHits_; + /** + * optional int32 overall_line_hits = 12; + * + *
+     * overall testing
+     * </pre>
+ */ + public boolean hasOverallLineHits() { + return ((bitField0_ & 0x00000800) == 0x00000800); + } + /** + * optional int32 overall_line_hits = 12; + * + *
+     * overall testing
+     * </pre>
+ */ + public int getOverallLineHits() { + return overallLineHits_; + } + + public static final int OVERALL_CONDITIONS_FIELD_NUMBER = 13; + private int overallConditions_; + /** + * optional int32 overall_conditions = 13; + */ + public boolean hasOverallConditions() { + return ((bitField0_ & 0x00001000) == 0x00001000); + } + /** + * optional int32 overall_conditions = 13; + */ + public int getOverallConditions() { + return overallConditions_; + } + + public static final int OVERALL_COVERED_CONDITIONS_FIELD_NUMBER = 14; + private int overallCoveredConditions_; + /** + * optional int32 overall_covered_conditions = 14; + */ + public boolean hasOverallCoveredConditions() { + return ((bitField0_ & 0x00002000) == 0x00002000); + } + /** + * optional int32 overall_covered_conditions = 14; + */ + public int getOverallCoveredConditions() { + return overallCoveredConditions_; + } + + public static final int HIGHLIGHTING_FIELD_NUMBER = 15; + private java.lang.Object highlighting_; + /** + * optional string highlighting = 15; + */ + public boolean hasHighlighting() { + return ((bitField0_ & 0x00004000) == 0x00004000); + } + /** + * optional string highlighting = 15; + */ + public java.lang.String getHighlighting() { + java.lang.Object ref = highlighting_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + highlighting_ = s; + } + return s; + } + } + /** + * optional string highlighting = 15; + */ + public com.google.protobuf.ByteString + getHighlightingBytes() { + java.lang.Object ref = highlighting_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + highlighting_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int SYMBOLS_FIELD_NUMBER = 16; + private java.lang.Object symbols_; + /** + * optional string symbols = 16; + */ + public boolean hasSymbols() { + return ((bitField0_ & 0x00008000) == 0x00008000); + } + /** + * optional string symbols = 16; + */ + public java.lang.String getSymbols() { + java.lang.Object ref = symbols_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + symbols_ = s; + } + return s; + } + } + /** + * optional string symbols = 16; + */ + public com.google.protobuf.ByteString + getSymbolsBytes() { + java.lang.Object ref = symbols_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + symbols_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DUPLICATIONS_FIELD_NUMBER = 17; + private java.util.List duplications_; + /** + * repeated int32 duplications = 17; + */ + public java.util.List + getDuplicationsList() { + return duplications_; + } + /** + * repeated int32 duplications = 17; + */ + public int getDuplicationsCount() { + return duplications_.size(); + } + /** + * repeated int32 duplications = 17; + */ + public int getDuplications(int index) { + return duplications_.get(index); + } + + private void initFields() { + line_ = 0; + source_ = ""; + scmRevision_ = ""; + scmAuthor_ = ""; + scmDate_ = 0L; + 
utLineHits_ = 0; + utConditions_ = 0; + utCoveredConditions_ = 0; + itLineHits_ = 0; + itConditions_ = 0; + itCoveredConditions_ = 0; + overallLineHits_ = 0; + overallConditions_ = 0; + overallCoveredConditions_ = 0; + highlighting_ = ""; + symbols_ = ""; + duplications_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt32(1, line_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getSourceBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getScmRevisionBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getScmAuthorBytes()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeInt64(5, scmDate_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeInt32(6, utLineHits_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeInt32(7, utConditions_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + output.writeInt32(8, utCoveredConditions_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + output.writeInt32(9, itLineHits_); + } + if (((bitField0_ & 0x00000200) == 0x00000200)) { + output.writeInt32(10, itConditions_); + } + if (((bitField0_ & 0x00000400) == 0x00000400)) { + output.writeInt32(11, itCoveredConditions_); + } + if (((bitField0_ & 0x00000800) == 0x00000800)) { + output.writeInt32(12, overallLineHits_); + } + if (((bitField0_ & 0x00001000) == 0x00001000)) { + output.writeInt32(13, overallConditions_); + } + if (((bitField0_ & 0x00002000) == 0x00002000)) { + output.writeInt32(14, overallCoveredConditions_); + } + if (((bitField0_ & 0x00004000) == 0x00004000)) { + output.writeBytes(15, getHighlightingBytes()); + } + if (((bitField0_ & 0x00008000) == 0x00008000)) { + output.writeBytes(16, getSymbolsBytes()); + } + for (int i = 0; i < duplications_.size(); i++) { + output.writeInt32(17, duplications_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, line_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getSourceBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getScmRevisionBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getScmAuthorBytes()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(5, scmDate_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(6, utLineHits_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(7, utConditions_); + 
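// (Illustrative aside, not part of the generated file.) A worked example of the size
// computation used throughout getSerializedSize(): computeInt32Size(fieldNumber, value)
// returns the size of the field tag plus the varint encoding of the value. For
// ut_conditions (field 7, wire type 0) the tag is (7 << 3) | 0 = 56, which fits in one
// byte, and a value such as 300 needs two varint bytes, so the field contributes 3
// bytes; negative int32 values always take the maximum 10 varint bytes.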
} + if (((bitField0_ & 0x00000080) == 0x00000080)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(8, utCoveredConditions_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(9, itLineHits_); + } + if (((bitField0_ & 0x00000200) == 0x00000200)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(10, itConditions_); + } + if (((bitField0_ & 0x00000400) == 0x00000400)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(11, itCoveredConditions_); + } + if (((bitField0_ & 0x00000800) == 0x00000800)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(12, overallLineHits_); + } + if (((bitField0_ & 0x00001000) == 0x00001000)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(13, overallConditions_); + } + if (((bitField0_ & 0x00002000) == 0x00002000)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(14, overallCoveredConditions_); + } + if (((bitField0_ & 0x00004000) == 0x00004000)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(15, getHighlightingBytes()); + } + if (((bitField0_ & 0x00008000) == 0x00008000)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(16, getSymbolsBytes()); + } + { + int dataSize = 0; + for (int i = 0; i < duplications_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(duplications_.get(i)); + } + size += dataSize; + size += 2 * getDuplicationsList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.sonar.server.source.db.FileSourceDb.Line parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.sonar.server.source.db.FileSourceDb.Line parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.sonar.server.source.db.FileSourceDb.Line parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.sonar.server.source.db.FileSourceDb.Line parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.sonar.server.source.db.FileSourceDb.Line parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.sonar.server.source.db.FileSourceDb.Line parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.sonar.server.source.db.FileSourceDb.Line parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.sonar.server.source.db.FileSourceDb.Line parseDelimitedFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.sonar.server.source.db.FileSourceDb.Line parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.sonar.server.source.db.FileSourceDb.Line parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.sonar.server.source.db.FileSourceDb.Line prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.sonar.server.source.db.Line} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:org.sonar.server.source.db.Line) + org.sonar.server.source.db.FileSourceDb.LineOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Line_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Line_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.sonar.server.source.db.FileSourceDb.Line.class, org.sonar.server.source.db.FileSourceDb.Line.Builder.class); + } + + // Construct using org.sonar.server.source.db.FileSourceDb.Line.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + line_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + source_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + scmRevision_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + scmAuthor_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); + scmDate_ = 0L; + bitField0_ = (bitField0_ & ~0x00000010); + utLineHits_ = 0; + bitField0_ = (bitField0_ & ~0x00000020); + utConditions_ = 0; + bitField0_ = (bitField0_ & ~0x00000040); + utCoveredConditions_ = 0; + bitField0_ = (bitField0_ & ~0x00000080); + itLineHits_ = 0; + bitField0_ = (bitField0_ & ~0x00000100); + itConditions_ = 0; + bitField0_ = (bitField0_ & ~0x00000200); + itCoveredConditions_ = 0; + bitField0_ = (bitField0_ & ~0x00000400); + overallLineHits_ = 0; + bitField0_ = (bitField0_ & ~0x00000800); + overallConditions_ = 0; + bitField0_ = (bitField0_ & ~0x00001000); + overallCoveredConditions_ = 0; + bitField0_ = (bitField0_ & ~0x00002000); + highlighting_ = ""; + bitField0_ = (bitField0_ & ~0x00004000); + symbols_ = ""; + bitField0_ = 
(bitField0_ & ~0x00008000); + duplications_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00010000); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Line_descriptor; + } + + public org.sonar.server.source.db.FileSourceDb.Line getDefaultInstanceForType() { + return org.sonar.server.source.db.FileSourceDb.Line.getDefaultInstance(); + } + + public org.sonar.server.source.db.FileSourceDb.Line build() { + org.sonar.server.source.db.FileSourceDb.Line result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.sonar.server.source.db.FileSourceDb.Line buildPartial() { + org.sonar.server.source.db.FileSourceDb.Line result = new org.sonar.server.source.db.FileSourceDb.Line(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.line_ = line_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.source_ = source_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.scmRevision_ = scmRevision_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.scmAuthor_ = scmAuthor_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.scmDate_ = scmDate_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.utLineHits_ = utLineHits_; + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000040; + } + result.utConditions_ = utConditions_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000080; + } + result.utCoveredConditions_ = utCoveredConditions_; + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000100; + } + result.itLineHits_ = itLineHits_; + if (((from_bitField0_ & 0x00000200) == 0x00000200)) { + to_bitField0_ |= 0x00000200; + } + result.itConditions_ = itConditions_; + if (((from_bitField0_ & 0x00000400) == 0x00000400)) { + to_bitField0_ |= 0x00000400; + } + result.itCoveredConditions_ = itCoveredConditions_; + if (((from_bitField0_ & 0x00000800) == 0x00000800)) { + to_bitField0_ |= 0x00000800; + } + result.overallLineHits_ = overallLineHits_; + if (((from_bitField0_ & 0x00001000) == 0x00001000)) { + to_bitField0_ |= 0x00001000; + } + result.overallConditions_ = overallConditions_; + if (((from_bitField0_ & 0x00002000) == 0x00002000)) { + to_bitField0_ |= 0x00002000; + } + result.overallCoveredConditions_ = overallCoveredConditions_; + if (((from_bitField0_ & 0x00004000) == 0x00004000)) { + to_bitField0_ |= 0x00004000; + } + result.highlighting_ = highlighting_; + if (((from_bitField0_ & 0x00008000) == 0x00008000)) { + to_bitField0_ |= 0x00008000; + } + result.symbols_ = symbols_; + if (((bitField0_ & 0x00010000) == 0x00010000)) { + duplications_ = java.util.Collections.unmodifiableList(duplications_); + bitField0_ = (bitField0_ & ~0x00010000); + } + result.duplications_ = duplications_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof 
org.sonar.server.source.db.FileSourceDb.Line) { + return mergeFrom((org.sonar.server.source.db.FileSourceDb.Line)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.sonar.server.source.db.FileSourceDb.Line other) { + if (other == org.sonar.server.source.db.FileSourceDb.Line.getDefaultInstance()) return this; + if (other.hasLine()) { + setLine(other.getLine()); + } + if (other.hasSource()) { + bitField0_ |= 0x00000002; + source_ = other.source_; + onChanged(); + } + if (other.hasScmRevision()) { + bitField0_ |= 0x00000004; + scmRevision_ = other.scmRevision_; + onChanged(); + } + if (other.hasScmAuthor()) { + bitField0_ |= 0x00000008; + scmAuthor_ = other.scmAuthor_; + onChanged(); + } + if (other.hasScmDate()) { + setScmDate(other.getScmDate()); + } + if (other.hasUtLineHits()) { + setUtLineHits(other.getUtLineHits()); + } + if (other.hasUtConditions()) { + setUtConditions(other.getUtConditions()); + } + if (other.hasUtCoveredConditions()) { + setUtCoveredConditions(other.getUtCoveredConditions()); + } + if (other.hasItLineHits()) { + setItLineHits(other.getItLineHits()); + } + if (other.hasItConditions()) { + setItConditions(other.getItConditions()); + } + if (other.hasItCoveredConditions()) { + setItCoveredConditions(other.getItCoveredConditions()); + } + if (other.hasOverallLineHits()) { + setOverallLineHits(other.getOverallLineHits()); + } + if (other.hasOverallConditions()) { + setOverallConditions(other.getOverallConditions()); + } + if (other.hasOverallCoveredConditions()) { + setOverallCoveredConditions(other.getOverallCoveredConditions()); + } + if (other.hasHighlighting()) { + bitField0_ |= 0x00004000; + highlighting_ = other.highlighting_; + onChanged(); + } + if (other.hasSymbols()) { + bitField0_ |= 0x00008000; + symbols_ = other.symbols_; + onChanged(); + } + if (!other.duplications_.isEmpty()) { + if (duplications_.isEmpty()) { + duplications_ = other.duplications_; + bitField0_ = (bitField0_ & ~0x00010000); + } else { + ensureDuplicationsIsMutable(); + duplications_.addAll(other.duplications_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.sonar.server.source.db.FileSourceDb.Line parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.sonar.server.source.db.FileSourceDb.Line) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private int line_ ; + /** + * optional int32 line = 1; + */ + public boolean hasLine() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional int32 line = 1; + */ + public int getLine() { + return line_; + } + /** + * optional int32 line = 1; + */ + public Builder setLine(int value) { + bitField0_ |= 0x00000001; + line_ = value; + onChanged(); + return this; + } + /** + * optional int32 line = 1; + */ + public Builder clearLine() { + bitField0_ = (bitField0_ & ~0x00000001); + line_ = 0; + onChanged(); + return this; + } + + private java.lang.Object source_ = ""; + /** + * optional string source = 2; + */ + public boolean hasSource() { + return ((bitField0_ 
& 0x00000002) == 0x00000002); + } + /** + * optional string source = 2; + */ + public java.lang.String getSource() { + java.lang.Object ref = source_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + source_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string source = 2; + */ + public com.google.protobuf.ByteString + getSourceBytes() { + java.lang.Object ref = source_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + source_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string source = 2; + */ + public Builder setSource( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + source_ = value; + onChanged(); + return this; + } + /** + * optional string source = 2; + */ + public Builder clearSource() { + bitField0_ = (bitField0_ & ~0x00000002); + source_ = getDefaultInstance().getSource(); + onChanged(); + return this; + } + /** + * optional string source = 2; + */ + public Builder setSourceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + source_ = value; + onChanged(); + return this; + } + + private java.lang.Object scmRevision_ = ""; + /** + * optional string scm_revision = 3; + * + *
+       * SCM
+       * </pre>
+ */ + public boolean hasScmRevision() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string scm_revision = 3; + * + *
+       * SCM
+       * </pre>
+ */ + public java.lang.String getScmRevision() { + java.lang.Object ref = scmRevision_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + scmRevision_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string scm_revision = 3; + * + *
+       * SCM
+       * </pre>
+ */ + public com.google.protobuf.ByteString + getScmRevisionBytes() { + java.lang.Object ref = scmRevision_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + scmRevision_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string scm_revision = 3; + * + *
+       * SCM
+       * </pre>
+ */ + public Builder setScmRevision( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + scmRevision_ = value; + onChanged(); + return this; + } + /** + * optional string scm_revision = 3; + * + *
+       * SCM
+       * </pre>
+ */ + public Builder clearScmRevision() { + bitField0_ = (bitField0_ & ~0x00000004); + scmRevision_ = getDefaultInstance().getScmRevision(); + onChanged(); + return this; + } + /** + * optional string scm_revision = 3; + * + *
+       * SCM
+       * </pre>
+ */ + public Builder setScmRevisionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + scmRevision_ = value; + onChanged(); + return this; + } + + private java.lang.Object scmAuthor_ = ""; + /** + * optional string scm_author = 4; + */ + public boolean hasScmAuthor() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string scm_author = 4; + */ + public java.lang.String getScmAuthor() { + java.lang.Object ref = scmAuthor_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + scmAuthor_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string scm_author = 4; + */ + public com.google.protobuf.ByteString + getScmAuthorBytes() { + java.lang.Object ref = scmAuthor_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + scmAuthor_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string scm_author = 4; + */ + public Builder setScmAuthor( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + scmAuthor_ = value; + onChanged(); + return this; + } + /** + * optional string scm_author = 4; + */ + public Builder clearScmAuthor() { + bitField0_ = (bitField0_ & ~0x00000008); + scmAuthor_ = getDefaultInstance().getScmAuthor(); + onChanged(); + return this; + } + /** + * optional string scm_author = 4; + */ + public Builder setScmAuthorBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + scmAuthor_ = value; + onChanged(); + return this; + } + + private long scmDate_ ; + /** + * optional int64 scm_date = 5; + */ + public boolean hasScmDate() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional int64 scm_date = 5; + */ + public long getScmDate() { + return scmDate_; + } + /** + * optional int64 scm_date = 5; + */ + public Builder setScmDate(long value) { + bitField0_ |= 0x00000010; + scmDate_ = value; + onChanged(); + return this; + } + /** + * optional int64 scm_date = 5; + */ + public Builder clearScmDate() { + bitField0_ = (bitField0_ & ~0x00000010); + scmDate_ = 0L; + onChanged(); + return this; + } + + private int utLineHits_ ; + /** + * optional int32 ut_line_hits = 6; + * + *
+       * unit testing
+       * </pre>
+ */ + public boolean hasUtLineHits() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional int32 ut_line_hits = 6; + * + *
+       * unit testing
+       * </pre>
+ */ + public int getUtLineHits() { + return utLineHits_; + } + /** + * optional int32 ut_line_hits = 6; + * + *
+       * unit testing
+       * </pre>
+ */ + public Builder setUtLineHits(int value) { + bitField0_ |= 0x00000020; + utLineHits_ = value; + onChanged(); + return this; + } + /** + * optional int32 ut_line_hits = 6; + * + *
+       * unit testing
+       * </pre>
+ */ + public Builder clearUtLineHits() { + bitField0_ = (bitField0_ & ~0x00000020); + utLineHits_ = 0; + onChanged(); + return this; + } + + private int utConditions_ ; + /** + * optional int32 ut_conditions = 7; + */ + public boolean hasUtConditions() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + /** + * optional int32 ut_conditions = 7; + */ + public int getUtConditions() { + return utConditions_; + } + /** + * optional int32 ut_conditions = 7; + */ + public Builder setUtConditions(int value) { + bitField0_ |= 0x00000040; + utConditions_ = value; + onChanged(); + return this; + } + /** + * optional int32 ut_conditions = 7; + */ + public Builder clearUtConditions() { + bitField0_ = (bitField0_ & ~0x00000040); + utConditions_ = 0; + onChanged(); + return this; + } + + private int utCoveredConditions_ ; + /** + * optional int32 ut_covered_conditions = 8; + */ + public boolean hasUtCoveredConditions() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + /** + * optional int32 ut_covered_conditions = 8; + */ + public int getUtCoveredConditions() { + return utCoveredConditions_; + } + /** + * optional int32 ut_covered_conditions = 8; + */ + public Builder setUtCoveredConditions(int value) { + bitField0_ |= 0x00000080; + utCoveredConditions_ = value; + onChanged(); + return this; + } + /** + * optional int32 ut_covered_conditions = 8; + */ + public Builder clearUtCoveredConditions() { + bitField0_ = (bitField0_ & ~0x00000080); + utCoveredConditions_ = 0; + onChanged(); + return this; + } + + private int itLineHits_ ; + /** + * optional int32 it_line_hits = 9; + * + *
+       * integration testing
+       * </pre>
+ */ + public boolean hasItLineHits() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + /** + * optional int32 it_line_hits = 9; + * + *
+       * integration testing
+       * </pre>
+ */ + public int getItLineHits() { + return itLineHits_; + } + /** + * optional int32 it_line_hits = 9; + * + *
+       * integration testing
+       * </pre>
+ */ + public Builder setItLineHits(int value) { + bitField0_ |= 0x00000100; + itLineHits_ = value; + onChanged(); + return this; + } + /** + * optional int32 it_line_hits = 9; + * + *
+       * integration testing
+       * </pre>
+ */ + public Builder clearItLineHits() { + bitField0_ = (bitField0_ & ~0x00000100); + itLineHits_ = 0; + onChanged(); + return this; + } + + private int itConditions_ ; + /** + * optional int32 it_conditions = 10; + */ + public boolean hasItConditions() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + /** + * optional int32 it_conditions = 10; + */ + public int getItConditions() { + return itConditions_; + } + /** + * optional int32 it_conditions = 10; + */ + public Builder setItConditions(int value) { + bitField0_ |= 0x00000200; + itConditions_ = value; + onChanged(); + return this; + } + /** + * optional int32 it_conditions = 10; + */ + public Builder clearItConditions() { + bitField0_ = (bitField0_ & ~0x00000200); + itConditions_ = 0; + onChanged(); + return this; + } + + private int itCoveredConditions_ ; + /** + * optional int32 it_covered_conditions = 11; + */ + public boolean hasItCoveredConditions() { + return ((bitField0_ & 0x00000400) == 0x00000400); + } + /** + * optional int32 it_covered_conditions = 11; + */ + public int getItCoveredConditions() { + return itCoveredConditions_; + } + /** + * optional int32 it_covered_conditions = 11; + */ + public Builder setItCoveredConditions(int value) { + bitField0_ |= 0x00000400; + itCoveredConditions_ = value; + onChanged(); + return this; + } + /** + * optional int32 it_covered_conditions = 11; + */ + public Builder clearItCoveredConditions() { + bitField0_ = (bitField0_ & ~0x00000400); + itCoveredConditions_ = 0; + onChanged(); + return this; + } + + private int overallLineHits_ ; + /** + * optional int32 overall_line_hits = 12; + * + *
+       * overall testing
+       * </pre>
+ */ + public boolean hasOverallLineHits() { + return ((bitField0_ & 0x00000800) == 0x00000800); + } + /** + * optional int32 overall_line_hits = 12; + * + *
+       * overall testing
+       * </pre>
+ */ + public int getOverallLineHits() { + return overallLineHits_; + } + /** + * optional int32 overall_line_hits = 12; + * + *
+       * overall testing
+       * </pre>
+ */ + public Builder setOverallLineHits(int value) { + bitField0_ |= 0x00000800; + overallLineHits_ = value; + onChanged(); + return this; + } + /** + * optional int32 overall_line_hits = 12; + * + *
+       * overall testing
+       * </pre>
+ */ + public Builder clearOverallLineHits() { + bitField0_ = (bitField0_ & ~0x00000800); + overallLineHits_ = 0; + onChanged(); + return this; + } + + private int overallConditions_ ; + /** + * optional int32 overall_conditions = 13; + */ + public boolean hasOverallConditions() { + return ((bitField0_ & 0x00001000) == 0x00001000); + } + /** + * optional int32 overall_conditions = 13; + */ + public int getOverallConditions() { + return overallConditions_; + } + /** + * optional int32 overall_conditions = 13; + */ + public Builder setOverallConditions(int value) { + bitField0_ |= 0x00001000; + overallConditions_ = value; + onChanged(); + return this; + } + /** + * optional int32 overall_conditions = 13; + */ + public Builder clearOverallConditions() { + bitField0_ = (bitField0_ & ~0x00001000); + overallConditions_ = 0; + onChanged(); + return this; + } + + private int overallCoveredConditions_ ; + /** + * optional int32 overall_covered_conditions = 14; + */ + public boolean hasOverallCoveredConditions() { + return ((bitField0_ & 0x00002000) == 0x00002000); + } + /** + * optional int32 overall_covered_conditions = 14; + */ + public int getOverallCoveredConditions() { + return overallCoveredConditions_; + } + /** + * optional int32 overall_covered_conditions = 14; + */ + public Builder setOverallCoveredConditions(int value) { + bitField0_ |= 0x00002000; + overallCoveredConditions_ = value; + onChanged(); + return this; + } + /** + * optional int32 overall_covered_conditions = 14; + */ + public Builder clearOverallCoveredConditions() { + bitField0_ = (bitField0_ & ~0x00002000); + overallCoveredConditions_ = 0; + onChanged(); + return this; + } + + private java.lang.Object highlighting_ = ""; + /** + * optional string highlighting = 15; + */ + public boolean hasHighlighting() { + return ((bitField0_ & 0x00004000) == 0x00004000); + } + /** + * optional string highlighting = 15; + */ + public java.lang.String getHighlighting() { + java.lang.Object ref = highlighting_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + highlighting_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string highlighting = 15; + */ + public com.google.protobuf.ByteString + getHighlightingBytes() { + java.lang.Object ref = highlighting_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + highlighting_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string highlighting = 15; + */ + public Builder setHighlighting( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00004000; + highlighting_ = value; + onChanged(); + return this; + } + /** + * optional string highlighting = 15; + */ + public Builder clearHighlighting() { + bitField0_ = (bitField0_ & ~0x00004000); + highlighting_ = getDefaultInstance().getHighlighting(); + onChanged(); + return this; + } + /** + * optional string highlighting = 15; + */ + public Builder setHighlightingBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00004000; + highlighting_ = value; + onChanged(); + return this; + } + + private java.lang.Object symbols_ = ""; + /** + * optional string symbols = 16; + */ + public boolean 
hasSymbols() { + return ((bitField0_ & 0x00008000) == 0x00008000); + } + /** + * optional string symbols = 16; + */ + public java.lang.String getSymbols() { + java.lang.Object ref = symbols_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + symbols_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string symbols = 16; + */ + public com.google.protobuf.ByteString + getSymbolsBytes() { + java.lang.Object ref = symbols_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + symbols_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string symbols = 16; + */ + public Builder setSymbols( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00008000; + symbols_ = value; + onChanged(); + return this; + } + /** + * optional string symbols = 16; + */ + public Builder clearSymbols() { + bitField0_ = (bitField0_ & ~0x00008000); + symbols_ = getDefaultInstance().getSymbols(); + onChanged(); + return this; + } + /** + * optional string symbols = 16; + */ + public Builder setSymbolsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00008000; + symbols_ = value; + onChanged(); + return this; + } + + private java.util.List duplications_ = java.util.Collections.emptyList(); + private void ensureDuplicationsIsMutable() { + if (!((bitField0_ & 0x00010000) == 0x00010000)) { + duplications_ = new java.util.ArrayList(duplications_); + bitField0_ |= 0x00010000; + } + } + /** + * repeated int32 duplications = 17; + */ + public java.util.List + getDuplicationsList() { + return java.util.Collections.unmodifiableList(duplications_); + } + /** + * repeated int32 duplications = 17; + */ + public int getDuplicationsCount() { + return duplications_.size(); + } + /** + * repeated int32 duplications = 17; + */ + public int getDuplications(int index) { + return duplications_.get(index); + } + /** + * repeated int32 duplications = 17; + */ + public Builder setDuplications( + int index, int value) { + ensureDuplicationsIsMutable(); + duplications_.set(index, value); + onChanged(); + return this; + } + /** + * repeated int32 duplications = 17; + */ + public Builder addDuplications(int value) { + ensureDuplicationsIsMutable(); + duplications_.add(value); + onChanged(); + return this; + } + /** + * repeated int32 duplications = 17; + */ + public Builder addAllDuplications( + java.lang.Iterable values) { + ensureDuplicationsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, duplications_); + onChanged(); + return this; + } + /** + * repeated int32 duplications = 17; + */ + public Builder clearDuplications() { + duplications_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00010000); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:org.sonar.server.source.db.Line) + } + + static { + defaultInstance = new Line(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.sonar.server.source.db.Line) + } + + public interface DataOrBuilder extends + // @@protoc_insertion_point(interface_extends:org.sonar.server.source.db.Data) + com.google.protobuf.MessageOrBuilder { + 
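For orientation, a minimal, illustrative sketch of how callers typically drive the generated builders shown above; it is not part of this patch, the field values are placeholders, and it assumes org.sonar.server.source.db.FileSourceDb is on the classpath:

  static FileSourceDb.Data roundTrip() throws com.google.protobuf.InvalidProtocolBufferException {
    // Build one entry of per-line data with the generated Line builder.
    FileSourceDb.Line line = FileSourceDb.Line.newBuilder()
      .setLine(1)
      .setSource("package org.example;") // placeholder source text
      .setScmRevision("c0ffee")          // placeholder revision
      .setUtLineHits(2)
      .build();

    // Wrap the lines in a Data message and serialize it; per this commit's intent, the
    // resulting byte[] is what gets compressed and stored in the FILE_SOURCES table.
    FileSourceDb.Data data = FileSourceDb.Data.newBuilder()
      .addLines(line)
      .build();
    byte[] bytes = data.toByteArray();
    return FileSourceDb.Data.parseFrom(bytes);
  }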
+ /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + java.util.List + getLinesList(); + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + org.sonar.server.source.db.FileSourceDb.Line getLines(int index); + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + int getLinesCount(); + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + java.util.List + getLinesOrBuilderList(); + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + org.sonar.server.source.db.FileSourceDb.LineOrBuilder getLinesOrBuilder( + int index); + } + /** + * Protobuf type {@code org.sonar.server.source.db.Data} + */ + public static final class Data extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:org.sonar.server.source.db.Data) + DataOrBuilder { + // Use Data.newBuilder() to construct. + private Data(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private Data(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final Data defaultInstance; + public static Data getDefaultInstance() { + return defaultInstance; + } + + public Data getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Data( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + lines_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + lines_.add(input.readMessage(org.sonar.server.source.db.FileSourceDb.Line.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + lines_ = java.util.Collections.unmodifiableList(lines_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Data_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Data_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.sonar.server.source.db.FileSourceDb.Data.class, org.sonar.server.source.db.FileSourceDb.Data.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new 
com.google.protobuf.AbstractParser() { + public Data parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Data(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public static final int LINES_FIELD_NUMBER = 1; + private java.util.List lines_; + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public java.util.List getLinesList() { + return lines_; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public java.util.List + getLinesOrBuilderList() { + return lines_; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public int getLinesCount() { + return lines_.size(); + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public org.sonar.server.source.db.FileSourceDb.Line getLines(int index) { + return lines_.get(index); + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public org.sonar.server.source.db.FileSourceDb.LineOrBuilder getLinesOrBuilder( + int index) { + return lines_.get(index); + } + + private void initFields() { + lines_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < lines_.size(); i++) { + output.writeMessage(1, lines_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < lines_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, lines_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.sonar.server.source.db.FileSourceDb.Data parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.sonar.server.source.db.FileSourceDb.Data parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.sonar.server.source.db.FileSourceDb.Data parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.sonar.server.source.db.FileSourceDb.Data parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.sonar.server.source.db.FileSourceDb.Data parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
PARSER.parseFrom(input); + } + public static org.sonar.server.source.db.FileSourceDb.Data parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.sonar.server.source.db.FileSourceDb.Data parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.sonar.server.source.db.FileSourceDb.Data parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.sonar.server.source.db.FileSourceDb.Data parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.sonar.server.source.db.FileSourceDb.Data parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.sonar.server.source.db.FileSourceDb.Data prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.sonar.server.source.db.Data} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:org.sonar.server.source.db.Data) + org.sonar.server.source.db.FileSourceDb.DataOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Data_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Data_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.sonar.server.source.db.FileSourceDb.Data.class, org.sonar.server.source.db.FileSourceDb.Data.Builder.class); + } + + // Construct using org.sonar.server.source.db.FileSourceDb.Data.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getLinesFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (linesBuilder_ == null) { + lines_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + linesBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.sonar.server.source.db.FileSourceDb.internal_static_org_sonar_server_source_db_Data_descriptor; + } + + public org.sonar.server.source.db.FileSourceDb.Data getDefaultInstanceForType() { + return org.sonar.server.source.db.FileSourceDb.Data.getDefaultInstance(); + } + + public org.sonar.server.source.db.FileSourceDb.Data build() { + org.sonar.server.source.db.FileSourceDb.Data result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.sonar.server.source.db.FileSourceDb.Data buildPartial() { + org.sonar.server.source.db.FileSourceDb.Data result = new org.sonar.server.source.db.FileSourceDb.Data(this); + int from_bitField0_ = bitField0_; + if (linesBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + lines_ = java.util.Collections.unmodifiableList(lines_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.lines_ = lines_; + } else { + result.lines_ = linesBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.sonar.server.source.db.FileSourceDb.Data) { + return mergeFrom((org.sonar.server.source.db.FileSourceDb.Data)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.sonar.server.source.db.FileSourceDb.Data other) { + if (other == org.sonar.server.source.db.FileSourceDb.Data.getDefaultInstance()) return this; + if (linesBuilder_ == null) { + if (!other.lines_.isEmpty()) { + if (lines_.isEmpty()) { + lines_ = other.lines_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureLinesIsMutable(); + lines_.addAll(other.lines_); + } + onChanged(); + } + } else { + if (!other.lines_.isEmpty()) { + if (linesBuilder_.isEmpty()) { + linesBuilder_.dispose(); + linesBuilder_ = null; + lines_ = other.lines_; + bitField0_ = (bitField0_ & ~0x00000001); + linesBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getLinesFieldBuilder() : null; + } else { + linesBuilder_.addAllMessages(other.lines_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.sonar.server.source.db.FileSourceDb.Data parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.sonar.server.source.db.FileSourceDb.Data) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List lines_ = + java.util.Collections.emptyList(); + private void ensureLinesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + lines_ = new java.util.ArrayList(lines_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.sonar.server.source.db.FileSourceDb.Line, org.sonar.server.source.db.FileSourceDb.Line.Builder, org.sonar.server.source.db.FileSourceDb.LineOrBuilder> linesBuilder_; + + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public java.util.List getLinesList() { + if (linesBuilder_ == null) { + return java.util.Collections.unmodifiableList(lines_); + } else { + return linesBuilder_.getMessageList(); + } + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public int getLinesCount() { + if (linesBuilder_ == null) { + return lines_.size(); + } else { + return linesBuilder_.getCount(); + } + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public org.sonar.server.source.db.FileSourceDb.Line getLines(int index) { + if (linesBuilder_ == null) { + return lines_.get(index); + } else { + return linesBuilder_.getMessage(index); + } + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public Builder setLines( + int index, org.sonar.server.source.db.FileSourceDb.Line value) { + if (linesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLinesIsMutable(); + lines_.set(index, value); + onChanged(); + } else { + linesBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public Builder setLines( + int index, org.sonar.server.source.db.FileSourceDb.Line.Builder builderForValue) { + if (linesBuilder_ == null) { + ensureLinesIsMutable(); + lines_.set(index, builderForValue.build()); + onChanged(); + } else { + linesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public Builder addLines(org.sonar.server.source.db.FileSourceDb.Line value) { + if (linesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLinesIsMutable(); + lines_.add(value); + onChanged(); + } else { + linesBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public Builder addLines( + int index, org.sonar.server.source.db.FileSourceDb.Line value) { + if (linesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLinesIsMutable(); + lines_.add(index, value); + onChanged(); + } else { + 
linesBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public Builder addLines( + org.sonar.server.source.db.FileSourceDb.Line.Builder builderForValue) { + if (linesBuilder_ == null) { + ensureLinesIsMutable(); + lines_.add(builderForValue.build()); + onChanged(); + } else { + linesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public Builder addLines( + int index, org.sonar.server.source.db.FileSourceDb.Line.Builder builderForValue) { + if (linesBuilder_ == null) { + ensureLinesIsMutable(); + lines_.add(index, builderForValue.build()); + onChanged(); + } else { + linesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public Builder addAllLines( + java.lang.Iterable values) { + if (linesBuilder_ == null) { + ensureLinesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, lines_); + onChanged(); + } else { + linesBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public Builder clearLines() { + if (linesBuilder_ == null) { + lines_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + linesBuilder_.clear(); + } + return this; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public Builder removeLines(int index) { + if (linesBuilder_ == null) { + ensureLinesIsMutable(); + lines_.remove(index); + onChanged(); + } else { + linesBuilder_.remove(index); + } + return this; + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public org.sonar.server.source.db.FileSourceDb.Line.Builder getLinesBuilder( + int index) { + return getLinesFieldBuilder().getBuilder(index); + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public org.sonar.server.source.db.FileSourceDb.LineOrBuilder getLinesOrBuilder( + int index) { + if (linesBuilder_ == null) { + return lines_.get(index); } else { + return linesBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public java.util.List + getLinesOrBuilderList() { + if (linesBuilder_ != null) { + return linesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(lines_); + } + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public org.sonar.server.source.db.FileSourceDb.Line.Builder addLinesBuilder() { + return getLinesFieldBuilder().addBuilder( + org.sonar.server.source.db.FileSourceDb.Line.getDefaultInstance()); + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public org.sonar.server.source.db.FileSourceDb.Line.Builder addLinesBuilder( + int index) { + return getLinesFieldBuilder().addBuilder( + index, org.sonar.server.source.db.FileSourceDb.Line.getDefaultInstance()); + } + /** + * repeated .org.sonar.server.source.db.Line lines = 1; + */ + public java.util.List + getLinesBuilderList() { + return getLinesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.sonar.server.source.db.FileSourceDb.Line, org.sonar.server.source.db.FileSourceDb.Line.Builder, org.sonar.server.source.db.FileSourceDb.LineOrBuilder> + getLinesFieldBuilder() { + if (linesBuilder_ == null) { + linesBuilder_ = new 
com.google.protobuf.RepeatedFieldBuilder< + org.sonar.server.source.db.FileSourceDb.Line, org.sonar.server.source.db.FileSourceDb.Line.Builder, org.sonar.server.source.db.FileSourceDb.LineOrBuilder>( + lines_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + lines_ = null; + } + return linesBuilder_; + } + + // @@protoc_insertion_point(builder_scope:org.sonar.server.source.db.Data) + } + + static { + defaultInstance = new Data(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.sonar.server.source.db.Data) + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_org_sonar_server_source_db_Line_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_sonar_server_source_db_Line_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_org_sonar_server_source_db_Data_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_sonar_server_source_db_Data_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\024file_source_db.proto\022\032org.sonar.server" + + ".source.db\"\220\003\n\004Line\022\014\n\004line\030\001 \001(\005\022\016\n\006sou" + + "rce\030\002 \001(\t\022\024\n\014scm_revision\030\003 \001(\t\022\022\n\nscm_a" + + "uthor\030\004 \001(\t\022\020\n\010scm_date\030\005 \001(\003\022\024\n\014ut_line" + + "_hits\030\006 \001(\005\022\025\n\rut_conditions\030\007 \001(\005\022\035\n\025ut" + + "_covered_conditions\030\010 \001(\005\022\024\n\014it_line_hit" + + "s\030\t \001(\005\022\025\n\rit_conditions\030\n \001(\005\022\035\n\025it_cov" + + "ered_conditions\030\013 \001(\005\022\031\n\021overall_line_hi" + + "ts\030\014 \001(\005\022\032\n\022overall_conditions\030\r \001(\005\022\"\n\032" + + "overall_covered_conditions\030\016 \001(\005\022\024\n\014high", + "lighting\030\017 \001(\t\022\017\n\007symbols\030\020 \001(\t\022\024\n\014dupli" + + "cations\030\021 \003(\005\"7\n\004Data\022/\n\005lines\030\001 \003(\0132 .o" + + "rg.sonar.server.source.db.LineB\002H\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + internal_static_org_sonar_server_source_db_Line_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_org_sonar_server_source_db_Line_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_sonar_server_source_db_Line_descriptor, + new java.lang.String[] { "Line", "Source", "ScmRevision", "ScmAuthor", "ScmDate", "UtLineHits", "UtConditions", "UtCoveredConditions", "ItLineHits", "ItConditions", "ItCoveredConditions", "OverallLineHits", "OverallConditions", "OverallCoveredConditions", "Highlighting", "Symbols", "Duplications", }); + internal_static_org_sonar_server_source_db_Data_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_org_sonar_server_source_db_Data_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_sonar_server_source_db_Data_descriptor, + new java.lang.String[] { "Lines", }); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/sonar-batch-protocol/src/main/protobuf/file_source_db.proto b/sonar-batch-protocol/src/main/protobuf/file_source_db.proto new file mode 100644 index 00000000000..bdb0b877949 --- /dev/null +++ b/sonar-batch-protocol/src/main/protobuf/file_source_db.proto @@ -0,0 +1,72 @@ +/* + SonarQube, open source software quality management tool. + Copyright (C) 2008-2015 SonarSource + mailto:contact AT sonarsource DOT com + + SonarQube is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 3 of the License, or (at your option) any later version. + + SonarQube is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public License + along with this program; if not, write to the Free Software Foundation, + Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +*/ + +/* +Notes + + - "required" fields are not used as recommended by Google to keep forward-compatibility: + https://developers.google.com/protocol-buffers/docs/proto#simple + + - the related Java files are not generated during build. Indeed the existing protoc maven + plugins require protobuf to be installed on boxes. That means that generated Java files + are updated and committed for each change (see src/main/gen-java). +*/ + +// structure of db column FILE_SOURCES.BINARY_DATA + +// Temporarily in sonar-batch-protocol as this db table +// is still fed on batch-side. 
However generated sources +// are already in correct package + +package org.sonar.server.source.db; +option optimize_for = SPEED; + +message Line { + optional int32 line = 1; + optional string source = 2; + + // SCM + optional string scm_revision = 3; + optional string scm_author = 4; + optional int64 scm_date = 5; + + // unit tests + optional int32 ut_line_hits = 6; + optional int32 ut_conditions = 7; + optional int32 ut_covered_conditions = 8; + + // integration tests + optional int32 it_line_hits = 9; + optional int32 it_conditions = 10; + optional int32 it_covered_conditions = 11; + + // overall tests + optional int32 overall_line_hits = 12; + optional int32 overall_conditions = 13; + optional int32 overall_covered_conditions = 14; + + optional string highlighting = 15; + optional string symbols = 16; + repeated int32 duplications = 17; +} + +message Data { + repeated Line lines = 1; +} diff --git a/sonar-batch/src/main/java/org/sonar/batch/index/SourceDataFactory.java b/sonar-batch/src/main/java/org/sonar/batch/index/SourceDataFactory.java new file mode 100644 index 00000000000..e3e9f33bc79 --- /dev/null +++ b/sonar-batch/src/main/java/org/sonar/batch/index/SourceDataFactory.java @@ -0,0 +1,376 @@ +/* + * SonarQube, open source software quality management tool. + * Copyright (C) 2008-2014 SonarSource + * mailto:contact AT sonarsource DOT com + * + * SonarQube is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 3 of the License, or (at your option) any later version. + * + * SonarQube is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation, + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ +package org.sonar.batch.index; + +import com.google.common.base.CharMatcher; +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.Multimap; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang.StringUtils; +import org.sonar.api.BatchComponent; +import org.sonar.api.batch.fs.internal.DefaultInputFile; +import org.sonar.api.batch.sensor.duplication.DuplicationGroup; +import org.sonar.api.batch.sensor.symbol.Symbol; +import org.sonar.api.measures.CoreMetrics; +import org.sonar.api.measures.Measure; +import org.sonar.api.utils.DateUtils; +import org.sonar.api.utils.KeyValueFormat; +import org.sonar.batch.duplication.DuplicationCache; +import org.sonar.batch.highlighting.SyntaxHighlightingData; +import org.sonar.batch.highlighting.SyntaxHighlightingRule; +import org.sonar.batch.scan.measure.MeasureCache; +import org.sonar.batch.source.CodeColorizers; +import org.sonar.batch.symbol.SymbolData; +import org.sonar.core.source.SnapshotDataTypes; +import org.sonar.core.source.db.FileSourceDto; +import org.sonar.server.source.db.FileSourceDb; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +/** + * Consolidate different caches for the export of report to server. 
+ * @see org.sonar.server.source.db.FileSourceDb + */ +public class SourceDataFactory implements BatchComponent { + + private static final String BOM = "\uFEFF"; + + private final MeasureCache measureCache; + private final ComponentDataCache componentDataCache; + private final DuplicationCache duplicationCache; + private final CodeColorizers codeColorizers; + + public SourceDataFactory(MeasureCache measureCache, ComponentDataCache componentDataCache, + DuplicationCache duplicationCache, CodeColorizers codeColorizers) { + this.measureCache = measureCache; + this.componentDataCache = componentDataCache; + this.duplicationCache = duplicationCache; + this.codeColorizers = codeColorizers; + } + + public byte[] consolidateData(DefaultInputFile inputFile) throws IOException { + FileSourceDb.Data.Builder dataBuilder = createForSource(inputFile); + applyLineMeasures(inputFile, dataBuilder); + applyDuplications(inputFile.key(), dataBuilder); + applyHighlighting(inputFile, dataBuilder); + applySymbolReferences(inputFile, dataBuilder); + return FileSourceDto.encodeData(dataBuilder.build()); + } + + FileSourceDb.Data.Builder createForSource(DefaultInputFile inputFile) throws IOException { + FileSourceDb.Data.Builder result = FileSourceDb.Data.newBuilder(); + List lines = FileUtils.readLines(inputFile.file(), inputFile.encoding()); + // Missing empty last line + if (lines.size() == inputFile.lines() - 1) { + lines.add(""); + } + for (int lineIdx = 1; lineIdx <= lines.size(); lineIdx++) { + String s = CharMatcher.anyOf(BOM).removeFrom(lines.get(lineIdx - 1)); + FileSourceDb.Line.Builder linesBuilder = result.addLinesBuilder(); + linesBuilder.setLine(lineIdx).setSource(s); + } + return result; + } + + void applyLineMeasures(DefaultInputFile file, FileSourceDb.Data.Builder dataBuilder) { + applyLineMeasure(file.key(), CoreMetrics.SCM_AUTHORS_BY_LINE_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setScmAuthor(value); + } + }); + applyLineMeasure(file.key(), CoreMetrics.SCM_REVISIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setScmRevision(value); + } + }); + applyLineMeasure(file.key(), CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setScmDate(DateUtils.parseDateTimeQuietly(value).getTime()); + } + }); + applyLineMeasure(file.key(), CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setUtLineHits(Integer.parseInt(value)); + } + }); + applyLineMeasure(file.key(), CoreMetrics.CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setUtConditions(Integer.parseInt(value)); + } + }); + applyLineMeasure(file.key(), CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setUtCoveredConditions(Integer.parseInt(value)); + } + }); + applyLineMeasure(file.key(), CoreMetrics.IT_COVERAGE_LINE_HITS_DATA_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder 
lineBuilder) { + lineBuilder.setItLineHits(Integer.parseInt(value)); + } + }); + applyLineMeasure(file.key(), CoreMetrics.IT_CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setItConditions(Integer.parseInt(value)); + } + }); + applyLineMeasure(file.key(), CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setItCoveredConditions(Integer.parseInt(value)); + } + }); + applyLineMeasure(file.key(), CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setOverallLineHits(Integer.parseInt(value)); + } + }); + applyLineMeasure(file.key(), CoreMetrics.OVERALL_CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setOverallConditions(Integer.parseInt(value)); + } + }); + applyLineMeasure(file.key(), CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE_KEY, dataBuilder, new MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setOverallCoveredConditions(Integer.parseInt(value)); + } + }); + } + + void applyLineMeasure(String inputFileKey, String metricKey, FileSourceDb.Data.Builder to, MeasureOperation op) { + Iterable measures = measureCache.byMetric(inputFileKey, metricKey); + if (measures != null) { + for (Measure measure : measures) { + Map lineMeasures = KeyValueFormat.parseIntString((String) measure.value()); + for (Map.Entry lineMeasure : lineMeasures.entrySet()) { + String value = lineMeasure.getValue(); + if (StringUtils.isNotEmpty(value)) { + FileSourceDb.Line.Builder lineBuilder = to.getLinesBuilder(lineMeasure.getKey() - 1); + op.apply(value, lineBuilder); + } + } + } + } + } + + static interface MeasureOperation { + void apply(String value, FileSourceDb.Line.Builder lineBuilder); + } + + void applyHighlighting(DefaultInputFile inputFile, FileSourceDb.Data.Builder to) { + SyntaxHighlightingData highlighting = componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING); + String language = inputFile.language(); + if (highlighting == null && language != null) { + highlighting = codeColorizers.toSyntaxHighlighting(inputFile.file(), inputFile.encoding(), language); + } + if (highlighting == null) { + return; + } + StringBuilder[] highlightingPerLine = new StringBuilder[inputFile.lines()]; + RuleItemWriter ruleItemWriter = new RuleItemWriter(); + int currentLineIdx = 1; + for (SyntaxHighlightingRule rule : highlighting.syntaxHighlightingRuleSet()) { + while (currentLineIdx < inputFile.lines() && rule.getStartPosition() >= inputFile.originalLineOffsets()[currentLineIdx]) { + // This rule starts on another line so advance + currentLineIdx++; + } + // Now we know current rule starts on current line + writeDataPerLine(inputFile.originalLineOffsets(), rule, rule.getStartPosition(), rule.getEndPosition(), highlightingPerLine, currentLineIdx, ruleItemWriter); + } + for (int i = 0; i < highlightingPerLine.length; i++) { + StringBuilder sb = highlightingPerLine[i]; + if (sb != null) { + to.getLinesBuilder(i).setHighlighting(sb.toString()); + } + } + } + + void applySymbolReferences(DefaultInputFile file, FileSourceDb.Data.Builder to) 
{ + SymbolData symbolRefs = componentDataCache.getData(file.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING); + if (symbolRefs != null) { + StringBuilder[] refsPerLine = new StringBuilder[file.lines()]; + int symbolId = 1; + List symbols = new ArrayList(symbolRefs.referencesBySymbol().keySet()); + // Sort symbols to avoid false variation that would lead to an unnecessary update + Collections.sort(symbols, new Comparator() { + @Override + public int compare(Symbol o1, Symbol o2) { + return o1.getDeclarationStartOffset() - o2.getDeclarationStartOffset(); + } + }); + for (Symbol symbol : symbols) { + int declarationStartOffset = symbol.getDeclarationStartOffset(); + int declarationEndOffset = symbol.getDeclarationEndOffset(); + int length = declarationEndOffset - declarationStartOffset; + addSymbol(symbolId, declarationStartOffset, declarationEndOffset, file.originalLineOffsets(), refsPerLine); + for (Integer referenceStartOffset : symbolRefs.referencesBySymbol().get(symbol)) { + if (referenceStartOffset == declarationStartOffset) { + // Ignore old API that used to store reference as first declaration + continue; + } + addSymbol(symbolId, referenceStartOffset, referenceStartOffset + length, file.originalLineOffsets(), refsPerLine); + } + symbolId++; + } + for (int i = 0; i < refsPerLine.length; i++) { + StringBuilder sb = refsPerLine[i]; + if (sb != null) { + to.getLinesBuilder(i).setSymbols(sb.toString()); + } + } + } + } + + private void addSymbol(int symbolId, int startOffset, int endOffset, long[] originalLineOffsets, StringBuilder[] result) { + int startLine = binarySearchLine(startOffset, originalLineOffsets); + writeDataPerLine(originalLineOffsets, symbolId, startOffset, endOffset, result, startLine, new SymbolItemWriter()); + } + + private int binarySearchLine(int declarationStartOffset, long[] originalLineOffsets) { + int begin = 0; + int end = originalLineOffsets.length - 1; + while (begin < end) { + int mid = (int) Math.round((begin + end) / 2D); + if (declarationStartOffset < originalLineOffsets[mid]) { + end = mid - 1; + } else { + begin = mid; + } + } + return begin + 1; + } + + private void writeDataPerLine(long[] originalLineOffsets, G item, int globalStartOffset, int globalEndOffset, StringBuilder[] dataPerLine, int startLine, + RangeItemWriter writer) { + int currentLineIdx = startLine; + // We know current item starts on current line + long ruleStartOffsetCurrentLine = globalStartOffset; + while (currentLineIdx < originalLineOffsets.length && globalEndOffset >= originalLineOffsets[currentLineIdx]) { + // item continue on next line so write current line and continue on next line with same item + writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine - originalLineOffsets[currentLineIdx - 1], originalLineOffsets[currentLineIdx] + - originalLineOffsets[currentLineIdx - 1], writer); + currentLineIdx++; + ruleStartOffsetCurrentLine = originalLineOffsets[currentLineIdx - 1]; + } + // item ends on current line + writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine - originalLineOffsets[currentLineIdx - 1], globalEndOffset + - originalLineOffsets[currentLineIdx - 1], writer); + } + + private void writeItem(G item, StringBuilder[] dataPerLine, int currentLineIdx, long startLineOffset, long endLineOffset, RangeItemWriter writer) { + if (startLineOffset == endLineOffset) { + // Do not store empty items + return; + } + if (dataPerLine[currentLineIdx - 1] == null) { + dataPerLine[currentLineIdx - 1] = new StringBuilder(); + } + StringBuilder 
currentLineSb = dataPerLine[currentLineIdx - 1]; + writer.writeItem(currentLineSb, startLineOffset, endLineOffset, item); + } + + private static interface RangeItemWriter { + /** + * Write item on a single line + */ + void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, G item); + } + + private static class RuleItemWriter implements RangeItemWriter { + @Override + public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, SyntaxHighlightingRule item) { + if (currentLineSb.length() > 0) { + currentLineSb.append(SyntaxHighlightingData.RULE_SEPARATOR); + } + currentLineSb.append(startLineOffset) + .append(SyntaxHighlightingData.FIELD_SEPARATOR) + .append(endLineOffset) + .append(SyntaxHighlightingData.FIELD_SEPARATOR) + .append(item.getTextType().cssClass()); + } + + } + + private static class SymbolItemWriter implements RangeItemWriter { + @Override + public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, Integer symbolId) { + if (currentLineSb.length() > 0) { + currentLineSb.append(SymbolData.SYMBOL_SEPARATOR); + } + currentLineSb.append(startLineOffset) + .append(SymbolData.FIELD_SEPARATOR) + .append(endLineOffset) + .append(SymbolData.FIELD_SEPARATOR) + .append(symbolId); + } + } + + void applyDuplications(String inputFileKey, FileSourceDb.Data.Builder to) { + List groups = duplicationCache.byComponent(inputFileKey); + if (groups != null) { + Multimap duplicationsPerLine = ArrayListMultimap.create(); + int blockId = 1; + for (Iterator it = groups.iterator(); it.hasNext();) { + DuplicationGroup group = it.next(); + addBlock(blockId, group.originBlock(), duplicationsPerLine); + blockId++; + for (Iterator dupsIt = group.duplicates().iterator(); dupsIt.hasNext();) { + DuplicationGroup.Block dups = dupsIt.next(); + if (inputFileKey.equals(dups.resourceKey())) { + addBlock(blockId, dups, duplicationsPerLine); + blockId++; + } + // Save memory + dupsIt.remove(); + } + // Save memory + it.remove(); + } + for (Map.Entry> entry : duplicationsPerLine.asMap().entrySet()) { + to.getLinesBuilder(entry.getKey() - 1).addAllDuplications(entry.getValue()); + } + } + } + + private void addBlock(int blockId, DuplicationGroup.Block block, Multimap dupPerLine) { + int currentLine = block.startLine(); + for (int i = 0; i < block.length(); i++) { + dupPerLine.put(currentLine, blockId); + currentLine++; + } + } +} diff --git a/sonar-batch/src/main/java/org/sonar/batch/index/SourcePersister.java b/sonar-batch/src/main/java/org/sonar/batch/index/SourcePersister.java index 76fdd9015f5..a7a18548418 100644 --- a/sonar-batch/src/main/java/org/sonar/batch/index/SourcePersister.java +++ b/sonar-batch/src/main/java/org/sonar/batch/index/SourcePersister.java @@ -19,82 +19,43 @@ */ package org.sonar.batch.index; -import com.google.common.base.CharMatcher; import org.apache.commons.codec.binary.Hex; import org.apache.commons.codec.digest.DigestUtils; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.StringUtils; import org.apache.ibatis.session.ResultContext; import org.apache.ibatis.session.ResultHandler; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.sonar.api.batch.fs.InputFile; import org.sonar.api.batch.fs.InputPath; import org.sonar.api.batch.fs.internal.DefaultInputFile; -import org.sonar.api.batch.sensor.duplication.DuplicationGroup; -import org.sonar.api.batch.sensor.duplication.DuplicationGroup.Block; -import org.sonar.api.batch.sensor.symbol.Symbol; -import 
org.sonar.api.measures.CoreMetrics; -import org.sonar.api.measures.Measure; -import org.sonar.api.utils.KeyValueFormat; import org.sonar.api.utils.System2; -import org.sonar.api.utils.text.CsvWriter; import org.sonar.batch.ProjectTree; -import org.sonar.batch.duplication.DuplicationCache; -import org.sonar.batch.highlighting.SyntaxHighlightingData; -import org.sonar.batch.highlighting.SyntaxHighlightingRule; import org.sonar.batch.scan.filesystem.InputPathCache; -import org.sonar.batch.scan.measure.MeasureCache; -import org.sonar.batch.source.CodeColorizers; -import org.sonar.batch.symbol.SymbolData; import org.sonar.core.persistence.DbSession; import org.sonar.core.persistence.MyBatis; -import org.sonar.core.source.SnapshotDataTypes; import org.sonar.core.source.db.FileSourceDto; import org.sonar.core.source.db.FileSourceMapper; import javax.annotation.CheckForNull; -import javax.annotation.Nullable; import java.io.IOException; -import java.io.StringWriter; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.Date; import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; import java.util.Map; public class SourcePersister implements ScanPersister { - private static final Logger LOG = LoggerFactory.getLogger(SourcePersister.class); - - private static final String BOM = "\uFEFF"; - private final InputPathCache inputPathCache; private final MyBatis mybatis; - private final MeasureCache measureCache; - private final ComponentDataCache componentDataCache; private final System2 system2; private final ProjectTree projectTree; private final ResourceCache resourceCache; - private CodeColorizers codeColorizers; - private DuplicationCache duplicationCache; + private final InputPathCache inputPathCache; + private final SourceDataFactory dataFactory; - public SourcePersister(InputPathCache inputPathCache, - MyBatis mybatis, MeasureCache measureCache, ComponentDataCache componentDataCache, ProjectTree projectTree, System2 system2, - ResourceCache resourceCache, CodeColorizers codeColorizers, DuplicationCache duplicationCache) { + public SourcePersister(InputPathCache inputPathCache, MyBatis mybatis, System2 system2, + ProjectTree projectTree, ResourceCache resourceCache, SourceDataFactory dataFactory) { this.inputPathCache = inputPathCache; this.mybatis = mybatis; - this.measureCache = measureCache; - this.componentDataCache = componentDataCache; - this.projectTree = projectTree; this.system2 = system2; + this.projectTree = projectTree; this.resourceCache = resourceCache; - this.codeColorizers = codeColorizers; - this.duplicationCache = duplicationCache; + this.dataFactory = dataFactory; } @Override @@ -102,22 +63,19 @@ public class SourcePersister implements ScanPersister { // Don't use batch insert for file_sources since keeping all data in memory can produce OOM for big files try (DbSession session = mybatis.openSession(false)) { - final Map fileSourceDtoByFileUuid = new HashMap(); - - session.select("org.sonar.core.source.db.FileSourceMapper.selectAllFileDataHashByProject", projectTree.getRootProject().getUuid(), new ResultHandler() { - + final Map previousDtosByUuid = new HashMap<>(); + session.select("org.sonar.core.source.db.FileSourceMapper.selectHashesForProject", projectTree.getRootProject().getUuid(), new ResultHandler() { @Override public void handleResult(ResultContext context) { FileSourceDto dto = (FileSourceDto) context.getResultObject(); - 
fileSourceDtoByFileUuid.put(dto.getFileUuid(), dto); + previousDtosByUuid.put(dto.getFileUuid(), dto); } }); FileSourceMapper mapper = session.getMapper(FileSourceMapper.class); - for (InputPath inputPath : inputPathCache.all()) { - if (inputPath instanceof InputFile) { - persist(session, mapper, inputPath, fileSourceDtoByFileUuid); + if (inputPath instanceof DefaultInputFile) { + persist(session, mapper, (DefaultInputFile) inputPath, previousDtosByUuid); } } } catch (Exception e) { @@ -126,43 +84,36 @@ public class SourcePersister implements ScanPersister { } - private void persist(DbSession session, FileSourceMapper mapper, InputPath inputPath, Map fileSourceDtoByFileUuid) { - DefaultInputFile inputFile = (DefaultInputFile) inputPath; - LOG.debug("Processing {}", inputFile.absolutePath()); - org.sonar.api.resources.File file = (org.sonar.api.resources.File) resourceCache.get(inputFile.key()).resource(); - String fileUuid = file.getUuid(); - FileSourceDto previous = fileSourceDtoByFileUuid.get(fileUuid); - String newData = getSourceData(inputFile); - String newDataHash = newData != null ? DigestUtils.md5Hex(newData) : "0"; - Date now = system2.newDate(); - try { - if (previous == null) { - FileSourceDto newFileSource = new FileSourceDto() - .setProjectUuid(projectTree.getRootProject().getUuid()) - .setFileUuid(fileUuid) - .setData(newData) - .setDataHash(newDataHash) + private void persist(DbSession session, FileSourceMapper mapper, DefaultInputFile inputFile, Map previousDtosByUuid) { + String fileUuid = resourceCache.get(inputFile.key()).resource().getUuid(); + + byte[] data = computeData(inputFile); + String dataHash = DigestUtils.md5Hex(data); + FileSourceDto previousDto = previousDtosByUuid.get(fileUuid); + if (previousDto == null) { + FileSourceDto dto = new FileSourceDto() + .setProjectUuid(projectTree.getRootProject().getUuid()) + .setFileUuid(fileUuid) + .setBinaryData(data) + .setDataHash(dataHash) + .setSrcHash(inputFile.hash()) + .setLineHashes(lineHashesAsMd5Hex(inputFile)) + .setCreatedAt(system2.now()) + .setUpdatedAt(system2.now()); + mapper.insert(dto); + session.commit(); + } else { + // Update only if data_hash has changed or if src_hash is missing (progressive migration) + if (!dataHash.equals(previousDto.getDataHash()) || !inputFile.hash().equals(previousDto.getSrcHash())) { + previousDto + .setBinaryData(data) + .setDataHash(dataHash) .setSrcHash(inputFile.hash()) .setLineHashes(lineHashesAsMd5Hex(inputFile)) - .setCreatedAt(now.getTime()) - .setUpdatedAt(now.getTime()); - mapper.insert(newFileSource); + .setUpdatedAt(system2.now()); + mapper.update(previousDto); session.commit(); - } else { - // Update only if data_hash has changed or if src_hash is missing (progressive migration) - if (!newDataHash.equals(previous.getDataHash()) || !inputFile.hash().equals(previous.getSrcHash())) { - previous - .setData(newData) - .setLineHashes(lineHashesAsMd5Hex(inputFile)) - .setDataHash(newDataHash) - .setSrcHash(inputFile.hash()) - .setUpdatedAt(now.getTime()); - mapper.update(previous); - session.commit(); - } } - } catch (Exception e) { - throw new IllegalStateException("Unable to save file sources for " + inputPath.absolutePath(), e); } } @@ -182,284 +133,11 @@ public class SourcePersister implements ScanPersister { return result.toString(); } - @CheckForNull - String getSourceData(DefaultInputFile file) { - if (file.lines() == 0) { - return null; - } - List lines; + private byte[] computeData(DefaultInputFile inputFile) { try { - lines = FileUtils.readLines(file.file(), 
file.encoding()); + return dataFactory.consolidateData(inputFile); } catch (IOException e) { - throw new IllegalStateException("Unable to read file", e); - } - // Missing empty last line - if (lines.size() == file.lines() - 1) { - lines.add(""); - } - Map authorsByLine = getLineMetric(file, CoreMetrics.SCM_AUTHORS_BY_LINE_KEY); - Map revisionsByLine = getLineMetric(file, CoreMetrics.SCM_REVISIONS_BY_LINE_KEY); - Map datesByLine = getLineMetric(file, CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY); - Map utHitsByLine = getLineMetric(file, CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY); - Map utCondByLine = getLineMetric(file, CoreMetrics.CONDITIONS_BY_LINE_KEY); - Map utCoveredCondByLine = getLineMetric(file, CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY); - Map itHitsByLine = getLineMetric(file, CoreMetrics.IT_COVERAGE_LINE_HITS_DATA_KEY); - Map itCondByLine = getLineMetric(file, CoreMetrics.IT_CONDITIONS_BY_LINE_KEY); - Map itCoveredCondByLine = getLineMetric(file, CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE_KEY); - Map overallHitsByLine = getLineMetric(file, CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA_KEY); - Map overallCondByLine = getLineMetric(file, CoreMetrics.OVERALL_CONDITIONS_BY_LINE_KEY); - Map overallCoveredCondByLine = getLineMetric(file, CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE_KEY); - SyntaxHighlightingData highlighting = loadHighlighting(file); - String[] highlightingPerLine = computeHighlightingPerLine(file, highlighting); - String[] symbolReferencesPerLine = computeSymbolReferencesPerLine(file, loadSymbolReferences(file)); - String[] duplicationsPerLine = computeDuplicationsPerLine(file, duplicationCache.byComponent(file.key())); - - StringWriter writer = new StringWriter(file.lines() * 16); - CsvWriter csv = CsvWriter.of(writer); - for (int lineIdx = 1; lineIdx <= file.lines(); lineIdx++) { - csv.values(revisionsByLine.get(lineIdx), authorsByLine.get(lineIdx), datesByLine.get(lineIdx), - utHitsByLine.get(lineIdx), utCondByLine.get(lineIdx), utCoveredCondByLine.get(lineIdx), - itHitsByLine.get(lineIdx), itCondByLine.get(lineIdx), itCoveredCondByLine.get(lineIdx), - overallHitsByLine.get(lineIdx), overallCondByLine.get(lineIdx), overallCoveredCondByLine.get(lineIdx), - highlightingPerLine[lineIdx - 1], symbolReferencesPerLine[lineIdx - 1], duplicationsPerLine[lineIdx - 1], - CharMatcher.anyOf(BOM).removeFrom(lines.get(lineIdx - 1))); - // Free memory - revisionsByLine.remove(lineIdx); - authorsByLine.remove(lineIdx); - datesByLine.remove(lineIdx); - utHitsByLine.remove(lineIdx); - utCondByLine.remove(lineIdx); - utCoveredCondByLine.remove(lineIdx); - itHitsByLine.remove(lineIdx); - itCondByLine.remove(lineIdx); - itCoveredCondByLine.remove(lineIdx); - overallHitsByLine.remove(lineIdx); - overallCondByLine.remove(lineIdx); - overallCoveredCondByLine.remove(lineIdx); - highlightingPerLine[lineIdx - 1] = null; - symbolReferencesPerLine[lineIdx - 1] = null; - duplicationsPerLine[lineIdx - 1] = null; - lines.set(lineIdx - 1, null); - } - csv.close(); - return StringUtils.defaultIfEmpty(writer.toString(), null); - } - - private String[] computeDuplicationsPerLine(DefaultInputFile file, List duplicationGroups) { - String[] result = new String[file.lines()]; - if (duplicationGroups == null) { - return result; - } - List groups = new LinkedList(duplicationGroups); - StringBuilder[] dupPerLine = new StringBuilder[file.lines()]; - int blockId = 1; - for (Iterator it = groups.iterator(); it.hasNext();) { - DuplicationGroup group = it.next(); - addBlock(blockId, group.originBlock(), 
dupPerLine); - blockId++; - for (Iterator dupsIt = group.duplicates().iterator(); dupsIt.hasNext();) { - Block dups = dupsIt.next(); - if (dups.resourceKey().equals(file.key())) { - addBlock(blockId, dups, dupPerLine); - blockId++; - } - // Save memory - dupsIt.remove(); - } - // Save memory - it.remove(); - } - for (int i = 0; i < file.lines(); i++) { - result[i] = dupPerLine[i] != null ? dupPerLine[i].toString() : null; - // Save memory - dupPerLine[i] = null; - } - return result; - } - - private void addBlock(int blockId, Block block, StringBuilder[] dupPerLine) { - int currentLine = block.startLine(); - for (int i = 0; i < block.length(); i++) { - if (dupPerLine[currentLine - 1] == null) { - dupPerLine[currentLine - 1] = new StringBuilder(); - } - if (dupPerLine[currentLine - 1].length() > 0) { - dupPerLine[currentLine - 1].append(','); - } - dupPerLine[currentLine - 1].append(blockId); - currentLine++; - } - - } - - @CheckForNull - private SyntaxHighlightingData loadHighlighting(DefaultInputFile file) { - SyntaxHighlightingData highlighting = componentDataCache.getData(file.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING); - String language = file.language(); - if (highlighting == null && language != null) { - highlighting = codeColorizers.toSyntaxHighlighting(file.file(), file.encoding(), language); - } - return highlighting; - } - - @CheckForNull - private SymbolData loadSymbolReferences(DefaultInputFile file) { - return componentDataCache.getData(file.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING); - } - - String[] computeHighlightingPerLine(DefaultInputFile file, @Nullable SyntaxHighlightingData highlighting) { - String[] result = new String[file.lines()]; - if (highlighting == null) { - return result; - } - Iterable rules = highlighting.syntaxHighlightingRuleSet(); - int currentLineIdx = 1; - StringBuilder[] highlightingPerLine = new StringBuilder[file.lines()]; - for (SyntaxHighlightingRule rule : rules) { - while (currentLineIdx < file.lines() && rule.getStartPosition() >= file.originalLineOffsets()[currentLineIdx]) { - // This rule starts on another line so advance - currentLineIdx++; - } - // Now we know current rule starts on current line - writeDataPerLine(file.originalLineOffsets(), rule, rule.getStartPosition(), rule.getEndPosition(), highlightingPerLine, currentLineIdx, new RuleItemWriter()); - } - for (int i = 0; i < file.lines(); i++) { - result[i] = highlightingPerLine[i] != null ? 
highlightingPerLine[i].toString() : null; - } - return result; - } - - String[] computeSymbolReferencesPerLine(DefaultInputFile file, @Nullable SymbolData symbolRefs) { - String[] result = new String[file.lines()]; - if (symbolRefs == null) { - return result; - } - StringBuilder[] symbolRefsPerLine = new StringBuilder[file.lines()]; - long[] originalLineOffsets = file.originalLineOffsets(); - int symbolId = 1; - List symbols = new ArrayList(symbolRefs.referencesBySymbol().keySet()); - // Sort symbols to avoid false variation that would lead to an unnecessary update - Collections.sort(symbols, new Comparator() { - @Override - public int compare(Symbol o1, Symbol o2) { - return o1.getDeclarationStartOffset() - o2.getDeclarationStartOffset(); - } - }); - for (Symbol symbol : symbols) { - int declarationStartOffset = symbol.getDeclarationStartOffset(); - int declarationEndOffset = symbol.getDeclarationEndOffset(); - int length = declarationEndOffset - declarationStartOffset; - addSymbol(symbolId, declarationStartOffset, declarationEndOffset, originalLineOffsets, symbolRefsPerLine); - for (Integer referenceStartOffset : symbolRefs.referencesBySymbol().get(symbol)) { - if (referenceStartOffset == declarationStartOffset) { - // Ignore old API that used to store reference as first declaration - continue; - } - addSymbol(symbolId, referenceStartOffset, referenceStartOffset + length, originalLineOffsets, symbolRefsPerLine); - } - symbolId++; - } - for (int i = 0; i < file.lines(); i++) { - result[i] = symbolRefsPerLine[i] != null ? symbolRefsPerLine[i].toString() : null; - } - return result; - } - - private void addSymbol(int symbolId, int startOffset, int endOffset, long[] originalLineOffsets, StringBuilder[] result) { - int startLine = binarySearchLine(startOffset, originalLineOffsets); - writeDataPerLine(originalLineOffsets, symbolId, startOffset, endOffset, result, startLine, new SymbolItemWriter()); - } - - private int binarySearchLine(int declarationStartOffset, long[] originalLineOffsets) { - int begin = 0; - int end = originalLineOffsets.length - 1; - while (begin < end) { - int mid = (int) Math.round((begin + end) / 2D); - if (declarationStartOffset < originalLineOffsets[mid]) { - end = mid - 1; - } else { - begin = mid; - } - } - return begin + 1; - } - - private void writeDataPerLine(long[] originalLineOffsets, G item, int globalStartOffset, int globalEndOffset, StringBuilder[] dataPerLine, int startLine, - RangeItemWriter writer) { - int currentLineIdx = startLine; - // We know current item starts on current line - long ruleStartOffsetCurrentLine = globalStartOffset; - while (currentLineIdx < originalLineOffsets.length && globalEndOffset >= originalLineOffsets[currentLineIdx]) { - // item continue on next line so write current line and continue on next line with same item - writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine - originalLineOffsets[currentLineIdx - 1], originalLineOffsets[currentLineIdx] - - originalLineOffsets[currentLineIdx - 1], writer); - currentLineIdx++; - ruleStartOffsetCurrentLine = originalLineOffsets[currentLineIdx - 1]; - } - // item ends on current line - writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine - originalLineOffsets[currentLineIdx - 1], globalEndOffset - - originalLineOffsets[currentLineIdx - 1], writer); - } - - private void writeItem(G item, StringBuilder[] dataPerLine, int currentLineIdx, long startLineOffset, long endLineOffset, RangeItemWriter writer) { - if (startLineOffset == endLineOffset) { - // Do 
not store empty items - return; - } - if (dataPerLine[currentLineIdx - 1] == null) { - dataPerLine[currentLineIdx - 1] = new StringBuilder(); - } - StringBuilder currentLineSb = dataPerLine[currentLineIdx - 1]; - writer.writeItem(currentLineSb, startLineOffset, endLineOffset, item); - } - - private static interface RangeItemWriter { - /** - * Write item on a single line - */ - void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, G item); - } - - private static class RuleItemWriter implements RangeItemWriter { - - @Override - public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, SyntaxHighlightingRule item) { - if (currentLineSb.length() > 0) { - currentLineSb.append(SyntaxHighlightingData.RULE_SEPARATOR); - } - currentLineSb.append(startLineOffset) - .append(SyntaxHighlightingData.FIELD_SEPARATOR) - .append(endLineOffset) - .append(SyntaxHighlightingData.FIELD_SEPARATOR) - .append(item.getTextType().cssClass()); - } - - } - - private static class SymbolItemWriter implements RangeItemWriter { - - @Override - public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, Integer symbolId) { - if (currentLineSb.length() > 0) { - currentLineSb.append(SymbolData.SYMBOL_SEPARATOR); - } - currentLineSb.append(startLineOffset) - .append(SymbolData.FIELD_SEPARATOR) - .append(endLineOffset) - .append(SymbolData.FIELD_SEPARATOR) - .append(symbolId); - } - - } - - private Map getLineMetric(DefaultInputFile file, String metricKey) { - Map authorsByLine; - Iterator authorsIt = measureCache.byMetric(file.key(), metricKey).iterator(); - if (authorsIt.hasNext()) { - authorsByLine = KeyValueFormat.parseIntString((String) authorsIt.next().value()); - } else { - authorsByLine = Collections.emptyMap(); + throw new IllegalStateException("Fail to read file " + inputFile, e); } - return authorsByLine; } } diff --git a/sonar-batch/src/main/java/org/sonar/batch/scan/ProjectScanContainer.java b/sonar-batch/src/main/java/org/sonar/batch/scan/ProjectScanContainer.java index 8edc824b1fd..30ef5615488 100644 --- a/sonar-batch/src/main/java/org/sonar/batch/scan/ProjectScanContainer.java +++ b/sonar-batch/src/main/java/org/sonar/batch/scan/ProjectScanContainer.java @@ -56,6 +56,7 @@ import org.sonar.batch.index.MeasurePersister; import org.sonar.batch.index.ResourceCache; import org.sonar.batch.index.ResourceKeyMigration; import org.sonar.batch.index.ResourcePersister; +import org.sonar.batch.index.SourceDataFactory; import org.sonar.batch.index.SourcePersister; import org.sonar.batch.issue.DefaultProjectIssues; import org.sonar.batch.issue.IssueCache; @@ -147,6 +148,7 @@ public class ProjectScanContainer extends ComponentContainer { Caches.class, ResourceCache.class, ComponentDataCache.class, + SourceDataFactory.class, // file system InputPathCache.class, diff --git a/sonar-batch/src/test/java/org/sonar/batch/index/SourceDataFactoryTest.java b/sonar-batch/src/test/java/org/sonar/batch/index/SourceDataFactoryTest.java new file mode 100644 index 00000000000..821cb735756 --- /dev/null +++ b/sonar-batch/src/test/java/org/sonar/batch/index/SourceDataFactoryTest.java @@ -0,0 +1,305 @@ +/* + * SonarQube, open source software quality management tool. 
+ * Copyright (C) 2008-2014 SonarSource + * mailto:contact AT sonarsource DOT com + * + * SonarQube is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 3 of the License, or (at your option) any later version. + * + * SonarQube is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation, + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ +package org.sonar.batch.index; + +import com.google.common.base.Charsets; +import com.google.common.collect.Lists; +import org.apache.commons.io.FileUtils; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.sonar.api.batch.fs.internal.DefaultInputFile; +import org.sonar.api.batch.sensor.duplication.DuplicationGroup; +import org.sonar.api.batch.sensor.highlighting.TypeOfText; +import org.sonar.api.measures.CoreMetrics; +import org.sonar.api.measures.Measure; +import org.sonar.api.measures.Metric; +import org.sonar.batch.duplication.DuplicationCache; +import org.sonar.batch.highlighting.SyntaxHighlightingData; +import org.sonar.batch.highlighting.SyntaxHighlightingDataBuilder; +import org.sonar.batch.scan.measure.MeasureCache; +import org.sonar.batch.source.CodeColorizers; +import org.sonar.batch.symbol.DefaultSymbolTableBuilder; +import org.sonar.core.source.SnapshotDataTypes; +import org.sonar.server.source.db.FileSourceDb; + +import java.io.File; +import java.util.Arrays; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class SourceDataFactoryTest { + + @Rule + public TemporaryFolder temp = new TemporaryFolder(); + + MeasureCache measureCache = mock(MeasureCache.class); + ComponentDataCache componentDataCache = mock(ComponentDataCache.class); + DuplicationCache duplicationCache = mock(DuplicationCache.class); + CodeColorizers colorizers = mock(CodeColorizers.class); + DefaultInputFile inputFile; + SourceDataFactory sut = new SourceDataFactory(measureCache, componentDataCache, duplicationCache, colorizers); + FileSourceDb.Data.Builder output; + + @Before + public void setUp() throws Exception { + // generate a file with 3 lines + File file = temp.newFile(); + inputFile = new DefaultInputFile("module_key", "src/Foo.java") + .setLines(3) + .setEncoding(Charsets.UTF_8.name()) + .setFile(file); + FileUtils.write(file, "one\ntwo\nthree\n"); + output = sut.createForSource(inputFile); + } + + @Test + public void createForSource() throws Exception { + FileSourceDb.Data data = output.build(); + assertThat(data.getLinesCount()).isEqualTo(3); + for (int index = 1; index <= 3; index++) { + assertThat(data.getLines(index - 1).getLine()).isEqualTo(index); + } + } + + @Test + public void consolidateData() throws Exception { + byte[] bytes = sut.consolidateData(inputFile); + assertThat(bytes).isNotEmpty(); + } + + @Test + public void applyLineMeasure() throws Exception { + Metric metric = CoreMetrics.COVERAGE_LINE_HITS_DATA; + when(measureCache.byMetric("component_key", metric.key())).thenReturn( + // line 1 
has 10 hits, ... + Arrays.asList(new Measure().setData("1=10;3=4").setMetric(metric))); + + sut.applyLineMeasure("component_key", metric.key(), output, new SourceDataFactory.MeasureOperation() { + @Override + public void apply(String value, FileSourceDb.Line.Builder lineBuilder) { + lineBuilder.setUtLineHits(Integer.parseInt(value)); + } + }); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).getUtLineHits()).isEqualTo(10); + assertThat(data.getLines(1).hasUtLineHits()).isFalse(); + assertThat(data.getLines(2).getUtLineHits()).isEqualTo(4); + } + + @Test + public void applyLineMeasures() throws Exception { + setupLineMeasure(CoreMetrics.SCM_AUTHORS_BY_LINE, "1=him;2=her"); + setupLineMeasure(CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE, "1=2014-10-11T16:44:02+0100;2=2014-10-12T16:44:02+0100;3=2014-10-13T16:44:02+0100"); + setupLineMeasure(CoreMetrics.SCM_REVISIONS_BY_LINE, "1=ABC;2=234;3=345"); + setupLineMeasure(CoreMetrics.COVERAGE_LINE_HITS_DATA, "1=10;3=4"); + setupLineMeasure(CoreMetrics.CONDITIONS_BY_LINE, "1=10;3=4"); + setupLineMeasure(CoreMetrics.CONDITIONS_BY_LINE, "1=10;3=4"); + setupLineMeasure(CoreMetrics.COVERED_CONDITIONS_BY_LINE, "1=10;3=4"); + setupLineMeasure(CoreMetrics.IT_COVERAGE_LINE_HITS_DATA, "1=11;2=4"); + setupLineMeasure(CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE, "1=10;3=4"); + setupLineMeasure(CoreMetrics.IT_CONDITIONS_BY_LINE, "1=10;3=4"); + setupLineMeasure(CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA, "1=10;3=4"); + setupLineMeasure(CoreMetrics.OVERALL_CONDITIONS_BY_LINE, "1=10;3=4"); + setupLineMeasure(CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE, "1=10;3=4"); + + sut.applyLineMeasures(inputFile, output); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).getUtLineHits()).isEqualTo(10); + assertThat(data.getLines(0).getItLineHits()).isEqualTo(11); + assertThat(data.getLines(0).getScmRevision()).isEqualTo("ABC"); + assertThat(data.getLines(0).getScmAuthor()).isEqualTo("him"); + + assertThat(data.getLines(1).hasUtLineHits()).isFalse(); + assertThat(data.getLines(1).getItLineHits()).isEqualTo(4); + assertThat(data.getLines(1).getScmAuthor()).isEqualTo("her"); + + assertThat(data.getLines(2).getUtLineHits()).isEqualTo(4); + assertThat(data.getLines(2).hasScmAuthor()).isFalse(); + } + + private void setupLineMeasure(Metric metric, String dataPerLine) { + when(measureCache.byMetric(inputFile.key(), metric.key())).thenReturn( + Arrays.asList(new Measure().setData(dataPerLine).setMetric(metric))); + } + + + @Test + public void applyDuplications() throws Exception { + DuplicationGroup group1 = new DuplicationGroup(new DuplicationGroup.Block(inputFile.key(), 1, 1)) + .addDuplicate(new DuplicationGroup.Block(inputFile.key(), 3, 1)) + .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 1)) + .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 1)); + DuplicationGroup group2 = new DuplicationGroup(new DuplicationGroup.Block(inputFile.key(), 1, 2)) + .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 2)) + .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 2)); + when(duplicationCache.byComponent(inputFile.key())).thenReturn(Lists.newArrayList(group1, group2)); + + sut.applyDuplications(inputFile.key(), output); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).getDuplicationsList()).containsExactly(1, 3); + assertThat(data.getLines(1).getDuplicationsList()).containsExactly(3); + assertThat(data.getLines(2).getDuplicationsList()).containsExactly(2); + } + + 
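/*
 * How to read the per-line highlighting and symbol expectations in the tests below (a sketch of the
 * encoding written by SourceDataFactory's RuleItemWriter/SymbolItemWriter; the separator characters
 * ',' and ';' are inferred from the expected values, i.e. FIELD_SEPARATOR and RULE_SEPARATOR/SYMBOL_SEPARATOR):
 *
 *   - each line carries a string of "startOffset,endOffset,value" items joined by ';'
 *   - offsets are relative to the start of the line, derived from originalLineOffsets
 *   - for highlighting, "value" appears to be the CSS class of the TypeOfText (e.g. "a" for ANNOTATION,
 *     "cd" for COMMENT, "c" for CONSTANT, "k" for KEYWORD); for symbol references it is the symbol id
 *
 * Example: with originalLineOffsets {0, 4, 7}, a CONSTANT rule over file offsets 7..16 falls entirely
 * on line 3 and is encoded as "0,9,c"; a KEYWORD rule over offsets 8..15 nested inside it appends ";1,8,k".
 */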
@Test + public void applyHighlighting_missing() throws Exception { + when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(null); + + sut.applyHighlighting(inputFile, output); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).hasHighlighting()).isFalse(); + assertThat(data.getLines(1).hasHighlighting()).isFalse(); + assertThat(data.getLines(2).hasHighlighting()).isFalse(); + } + + @Test + public void applyHighlighting() throws Exception { + SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder() + .registerHighlightingRule(0, 4, TypeOfText.ANNOTATION) + .registerHighlightingRule(4, 5, TypeOfText.COMMENT) + .registerHighlightingRule(7, 16, TypeOfText.CONSTANT) + .build(); + when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting); + inputFile.setOriginalLineOffsets(new long[] {0, 4, 7}); + + sut.applyHighlighting(inputFile, output); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,4,a"); + assertThat(data.getLines(1).getHighlighting()).isEqualTo("0,1,cd"); + assertThat(data.getLines(2).getHighlighting()).isEqualTo("0,9,c"); + } + + @Test + public void applyHighlighting_multiple_lines() throws Exception { + SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder() + .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION) + .registerHighlightingRule(4, 9, TypeOfText.COMMENT) + .registerHighlightingRule(10, 16, TypeOfText.CONSTANT) + .build(); + when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting); + inputFile.setOriginalLineOffsets(new long[] {0, 4, 7}); + + sut.applyHighlighting(inputFile, output); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,3,a"); + assertThat(data.getLines(1).getHighlighting()).isEqualTo("0,3,cd"); + assertThat(data.getLines(2).getHighlighting()).isEqualTo("0,2,cd;3,9,c"); + } + + @Test + public void applyHighlighting_nested_rules() throws Exception { + SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder() + .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION) + .registerHighlightingRule(4, 6, TypeOfText.COMMENT) + .registerHighlightingRule(7, 16, TypeOfText.CONSTANT) + .registerHighlightingRule(8, 15, TypeOfText.KEYWORD) + .build(); + when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting); + inputFile.setOriginalLineOffsets(new long[] {0, 4, 7}); + + sut.applyHighlighting(inputFile, output); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,3,a"); + assertThat(data.getLines(1).getHighlighting()).isEqualTo("0,2,cd"); + assertThat(data.getLines(2).getHighlighting()).isEqualTo("0,9,c;1,8,k"); + } + + @Test + public void applyHighlighting_nested_rules_and_multiple_lines() throws Exception { + SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder() + .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION) + .registerHighlightingRule(4, 6, TypeOfText.COMMENT) + .registerHighlightingRule(4, 16, TypeOfText.CONSTANT) + .registerHighlightingRule(8, 15, TypeOfText.KEYWORD) + .build(); + when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting); + inputFile.setOriginalLineOffsets(new long[] {0, 4, 7}); + + sut.applyHighlighting(inputFile, 
output); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,3,a"); + assertThat(data.getLines(1).getHighlighting()).isEqualTo("0,3,c;0,2,cd"); + assertThat(data.getLines(2).getHighlighting()).isEqualTo("0,9,c;1,8,k"); + } + + @Test + public void applySymbolReferences_missing() throws Exception { + when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING)).thenReturn(null); + + sut.applySymbolReferences(inputFile, output); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).hasSymbols()).isFalse(); + assertThat(data.getLines(1).hasSymbols()).isFalse(); + assertThat(data.getLines(2).hasSymbols()).isFalse(); + } + + @Test + public void applySymbolReferences() throws Exception { + DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(inputFile.key(), null); + org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2); + symbolBuilder.newReference(s1, 4); + symbolBuilder.newReference(s1, 11); + org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6); + symbolBuilder.newReference(s2, 0); + symbolBuilder.newReference(s2, 7); + when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING)).thenReturn(symbolBuilder.build()); + inputFile.setOriginalLineOffsets(new long[] {0, 4, 7}); + + sut.applySymbolReferences(inputFile, output); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).getSymbols()).isEqualTo("1,2,1;0,2,2"); + assertThat(data.getLines(1).getSymbols()).isEqualTo("0,1,1;0,2,2"); + assertThat(data.getLines(2).getSymbols()).isEqualTo("4,5,1;0,2,2"); + } + + @Test + public void applySymbolReferences_declaration_order_is_not_important() throws Exception { + DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(inputFile.key(), null); + org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6); + symbolBuilder.newReference(s2, 7); + symbolBuilder.newReference(s2, 0); + org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2); + symbolBuilder.newReference(s1, 11); + symbolBuilder.newReference(s1, 4); + when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING)).thenReturn(symbolBuilder.build()); + inputFile.setOriginalLineOffsets(new long[] {0, 4, 7}); + + sut.applySymbolReferences(inputFile, output); + + FileSourceDb.Data data = output.build(); + assertThat(data.getLines(0).getSymbols()).isEqualTo("1,2,1;0,2,2"); + assertThat(data.getLines(1).getSymbols()).isEqualTo("0,1,1;0,2,2"); + assertThat(data.getLines(2).getSymbols()).isEqualTo("4,5,1;0,2,2"); + } +} diff --git a/sonar-batch/src/test/java/org/sonar/batch/index/SourcePersisterTest.java b/sonar-batch/src/test/java/org/sonar/batch/index/SourcePersisterTest.java deleted file mode 100644 index b23b40aa541..00000000000 --- a/sonar-batch/src/test/java/org/sonar/batch/index/SourcePersisterTest.java +++ /dev/null @@ -1,428 +0,0 @@ -/* - * SonarQube, open source software quality management tool. - * Copyright (C) 2008-2014 SonarSource - * mailto:contact AT sonarsource DOT com - * - * SonarQube is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 3 of the License, or (at your option) any later version. 
- * - * SonarQube is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software Foundation, - * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - */ -package org.sonar.batch.index; - -import org.apache.commons.codec.digest.DigestUtils; -import org.apache.commons.io.FileUtils; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.sonar.api.batch.fs.InputPath; -import org.sonar.api.batch.fs.internal.DefaultInputFile; -import org.sonar.api.batch.sensor.duplication.DuplicationGroup; -import org.sonar.api.batch.sensor.highlighting.TypeOfText; -import org.sonar.api.database.model.Snapshot; -import org.sonar.api.measures.CoreMetrics; -import org.sonar.api.measures.Measure; -import org.sonar.api.resources.File; -import org.sonar.api.resources.Project; -import org.sonar.api.utils.DateUtils; -import org.sonar.api.utils.System2; -import org.sonar.batch.ProjectTree; -import org.sonar.batch.duplication.DuplicationCache; -import org.sonar.batch.highlighting.SyntaxHighlightingData; -import org.sonar.batch.highlighting.SyntaxHighlightingDataBuilder; -import org.sonar.batch.scan.filesystem.InputPathCache; -import org.sonar.batch.scan.measure.MeasureCache; -import org.sonar.batch.source.CodeColorizers; -import org.sonar.batch.symbol.DefaultSymbolTableBuilder; -import org.sonar.core.persistence.AbstractDaoTestCase; -import org.sonar.core.source.SnapshotDataTypes; -import org.sonar.core.source.db.FileSourceDao; -import org.sonar.core.source.db.FileSourceDto; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class SourcePersisterTest extends AbstractDaoTestCase { - - @Rule - public TemporaryFolder temp = new TemporaryFolder(); - - private SourcePersister sourcePersister; - private InputPathCache inputPathCache; - private ResourceCache resourceCache; - private ProjectTree projectTree; - private System2 system2; - private MeasureCache measureCache; - private ComponentDataCache componentDataCache; - private DuplicationCache duplicationCache; - - private static final String PROJECT_KEY = "foo"; - - private java.io.File basedir; - - @Before - public void before() throws IOException { - Snapshot snapshot = new Snapshot(); - snapshot.setId(1000); - inputPathCache = mock(InputPathCache.class); - resourceCache = mock(ResourceCache.class); - projectTree = mock(ProjectTree.class); - system2 = mock(System2.class); - measureCache = mock(MeasureCache.class); - when(measureCache.byMetric(anyString(), anyString())).thenReturn(Collections.emptyList()); - componentDataCache = mock(ComponentDataCache.class); - duplicationCache = mock(DuplicationCache.class); - sourcePersister = new SourcePersister(inputPathCache, - getMyBatis(), measureCache, componentDataCache, projectTree, system2, - resourceCache, mock(CodeColorizers.class), duplicationCache); - Project project = new Project(PROJECT_KEY); - project.setUuid("projectUuid"); - 
when(projectTree.getRootProject()).thenReturn(project); - basedir = temp.newFolder(); - } - - @Test - public void testPersistUpdateWhenSrcHashIsMissing() throws Exception { - setupData("file_sources_missing_src_hash"); - Date now = DateUtils.parseDateTime("2014-10-29T16:44:02+0100"); - when(system2.newDate()).thenReturn(now); - - String relativePathSame = "src/same.java"; - java.io.File sameFile = new java.io.File(basedir, relativePathSame); - FileUtils.write(sameFile, "unchanged\ncontent"); - DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame) - .setLines(2) - .setAbsolutePath(sameFile.getAbsolutePath()) - .setHash("123456") - .setLineHashes(new byte[][] {md5("unchanged"), md5("content")}); - when(inputPathCache.all()).thenReturn(Arrays.asList(inputFileNew)); - - mockResourceCache(relativePathSame, PROJECT_KEY, "uuidsame"); - - sourcePersister.persist(); - FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidsame"); - assertThat(fileSourceDto.getCreatedAt()).isEqualTo(DateUtils.parseDateTime("2014-10-10T16:44:02+0200").getTime()); - assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime()); - assertThat(fileSourceDto.getSrcHash()).isEqualTo("123456"); - } - - @Test - public void testPersistDontTouchUnchanged() throws Exception { - setupData("file_sources"); - when(system2.newDate()).thenReturn(DateUtils.parseDateTime("2014-10-29T16:44:02+0100")); - - String relativePathSame = "src/same.java"; - java.io.File sameFile = new java.io.File(basedir, relativePathSame); - FileUtils.write(sameFile, "unchanged\ncontent"); - DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame).setLines(2) - .setAbsolutePath(sameFile.getAbsolutePath()) - .setHash("123456") - .setLineHashes(new byte[][] {md5("unchanged"), md5("ncontent")}); - when(inputPathCache.all()).thenReturn(Arrays.asList(inputFileNew)); - - mockResourceCache(relativePathSame, PROJECT_KEY, "uuidsame"); - - sourcePersister.persist(); - checkTables("testPersistDontTouchUnchanged", "file_sources"); - } - - @Test - public void testPersistUpdateChanged() throws Exception { - setupData("file_sources"); - Date now = DateUtils.parseDateTime("2014-10-29T16:44:02+0100"); - when(system2.newDate()).thenReturn(now); - - String relativePathSame = "src/changed.java"; - java.io.File sameFile = new java.io.File(basedir, relativePathSame); - FileUtils.write(sameFile, "changed\ncontent"); - DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame).setLines(2) - .setAbsolutePath(sameFile.getAbsolutePath()) - .setHash("123456") - .setLineHashes(new byte[][] {md5("changed"), md5("content")}); - when(inputPathCache.all()).thenReturn(Arrays.asList(inputFileNew)); - - mockResourceCache(relativePathSame, PROJECT_KEY, "uuidsame"); - - sourcePersister.persist(); - - FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidsame"); - assertThat(fileSourceDto.getCreatedAt()).isEqualTo(DateUtils.parseDateTime("2014-10-10T16:44:02+0200").getTime()); - assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime()); - assertThat(fileSourceDto.getData()).isEqualTo( - ",,,,,,,,,,,,,,,changed\r\n,,,,,,,,,,,,,,,content\r\n"); - assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("changed") + "\n" + md5Hex("content")); - assertThat(fileSourceDto.getDataHash()).isEqualTo("d1a4dd62422639f665a8d80b37c59f8d"); - assertThat(fileSourceDto.getSrcHash()).isEqualTo("123456"); - } - - @Test - public void testPersistEmptyFile() throws Exception { - 
setupData("file_sources"); - when(system2.newDate()).thenReturn(DateUtils.parseDateTime("2014-10-29T16:44:02+0100")); - - String relativePathEmpty = "src/empty.java"; - DefaultInputFile inputFileEmpty = new DefaultInputFile(PROJECT_KEY, relativePathEmpty) - .setLines(0) - .setHash("abcd") - .setLineHashes(new byte[][] {}); - when(inputPathCache.all()).thenReturn(Arrays.asList(inputFileEmpty)); - - mockResourceCache(relativePathEmpty, PROJECT_KEY, "uuidempty"); - - sourcePersister.persist(); - checkTables("testPersistEmptyFile", "file_sources"); - } - - @Test - public void testPersistNewFileNoScmNoHighlighting() throws Exception { - setupData("file_sources"); - Date now = DateUtils.parseDateTime("2014-10-29T16:44:02+0100"); - when(system2.newDate()).thenReturn(now); - - String relativePathNew = "src/new.java"; - java.io.File newFile = new java.io.File(basedir, relativePathNew); - FileUtils.write(newFile, "foo\nbar\nbiz"); - DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathNew) - .setLines(3) - .setAbsolutePath(newFile.getAbsolutePath()) - .setLineHashes(new byte[][] {md5("foo"), md5("bar"), md5("biz")}); - when(inputPathCache.all()).thenReturn(Arrays.asList(inputFileNew)); - - mockResourceCache(relativePathNew, PROJECT_KEY, "uuidnew"); - - sourcePersister.persist(); - FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidnew"); - assertThat(fileSourceDto.getCreatedAt()).isEqualTo(now.getTime()); - assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime()); - assertThat(fileSourceDto.getData()).isEqualTo( - ",,,,,,,,,,,,,,,foo\r\n,,,,,,,,,,,,,,,bar\r\n,,,,,,,,,,,,,,,biz\r\n"); - assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("foo") + "\n" + md5Hex("bar") + "\n" + md5Hex("biz")); - assertThat(fileSourceDto.getDataHash()).isEqualTo("a34ed99cc7d27150c82f5cba2b22b665"); - - } - - @Test - public void testPersistNewFileWithScmAndCoverageAndHighlighting() throws Exception { - setupData("file_sources"); - Date now = DateUtils.parseDateTime("2014-10-29T16:44:02+0100"); - when(system2.newDate()).thenReturn(now); - - String relativePathNew = "src/new.java"; - java.io.File newFile = new java.io.File(basedir, relativePathNew); - FileUtils.write(newFile, "foo\nbar\nbiz"); - DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathNew) - .setLines(3) - .setAbsolutePath(newFile.getAbsolutePath()) - .setOriginalLineOffsets(new long[] {0, 4, 7}) - .setLineHashes(new byte[][] {md5("foo"), md5("bar"), md5("biz")}); - when(inputPathCache.all()).thenReturn(Arrays.asList(inputFileNew)); - - mockResourceCache(relativePathNew, PROJECT_KEY, "uuidnew"); - - String fileKey = PROJECT_KEY + ":" + relativePathNew; - when(measureCache.byMetric(fileKey, CoreMetrics.SCM_AUTHORS_BY_LINE_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_AUTHORS_BY_LINE, "1=julien;2=simon;3=julien"))); - when(measureCache.byMetric(fileKey, CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE, "1=2014-10-11T16:44:02+0100;2=2014-10-12T16:44:02+0100;3=2014-10-13T16:44:02+0100"))); - when(measureCache.byMetric(fileKey, CoreMetrics.SCM_REVISIONS_BY_LINE_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_REVISIONS_BY_LINE, "1=123;2=234;3=345"))); - when(measureCache.byMetric(fileKey, CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.COVERAGE_LINE_HITS_DATA, "1=1;3=0"))); - when(measureCache.byMetric(fileKey, 
CoreMetrics.CONDITIONS_BY_LINE_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.CONDITIONS_BY_LINE, "1=4"))); - when(measureCache.byMetric(fileKey, CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.COVERED_CONDITIONS_BY_LINE, "1=2"))); - when(measureCache.byMetric(fileKey, CoreMetrics.IT_COVERAGE_LINE_HITS_DATA_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_COVERAGE_LINE_HITS_DATA, "1=2;3=0"))); - when(measureCache.byMetric(fileKey, CoreMetrics.IT_CONDITIONS_BY_LINE_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_CONDITIONS_BY_LINE, "1=5"))); - when(measureCache.byMetric(fileKey, CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE, "1=3"))); - when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA, "1=3;3=0"))); - when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_CONDITIONS_BY_LINE_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_CONDITIONS_BY_LINE, "1=6"))); - when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE_KEY)) - .thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE, "1=4"))); - - SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder() - .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION) - .registerHighlightingRule(4, 5, TypeOfText.COMMENT) - .registerHighlightingRule(7, 16, TypeOfText.CONSTANT) - .build(); - when(componentDataCache.getData(fileKey, SnapshotDataTypes.SYNTAX_HIGHLIGHTING)) - .thenReturn(highlighting); - - DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(fileKey, null); - org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2); - symbolBuilder.newReference(s1, 4); - symbolBuilder.newReference(s1, 11); - org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6); - symbolBuilder.newReference(s2, 0); - symbolBuilder.newReference(s2, 7); - when(componentDataCache.getData(fileKey, SnapshotDataTypes.SYMBOL_HIGHLIGHTING)) - .thenReturn(symbolBuilder.build()); - - DuplicationGroup group1 = new DuplicationGroup(new DuplicationGroup.Block(fileKey, 1, 1)) - .addDuplicate(new DuplicationGroup.Block(fileKey, 3, 1)) - .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 1)) - .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 1)); - - DuplicationGroup group2 = new DuplicationGroup(new DuplicationGroup.Block(fileKey, 1, 2)) - .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 2)) - .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 2)); - when(duplicationCache.byComponent(fileKey)).thenReturn(Arrays.asList(group1, group2)); - - sourcePersister.persist(); - - FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidnew"); - assertThat(fileSourceDto.getCreatedAt()).isEqualTo(now.getTime()); - assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime()); - assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("foo") + "\n" + md5Hex("bar") + "\n" + md5Hex("biz")); - assertThat(fileSourceDto.getData()).isEqualTo( - "123,julien,2014-10-11T16:44:02+0100,1,4,2,2,5,3,3,6,4,\"0,3,a\",\"1,2,1;0,2,2\",\"1,3\",foo\r\n" - + "234,simon,2014-10-12T16:44:02+0100,,,,,,,,,,\"0,1,cd\",\"0,1,1;0,2,2\",3,bar\r\n" - + "345,julien,2014-10-13T16:44:02+0100,0,,,0,,,0,,,\"0,9,c\",\"4,5,1;0,2,2\",2,biz\r\n"); - 
assertThat(fileSourceDto.getDataHash()).isEqualTo("26930cf0250d525b04083185ff24a046"); - } - - @Test - public void testSimpleConversionOfHighlightingOffset() { - DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java") - .setLines(3) - .setOriginalLineOffsets(new long[] {0, 4, 7}); - - SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder() - .registerHighlightingRule(0, 4, TypeOfText.ANNOTATION) - .registerHighlightingRule(4, 5, TypeOfText.COMMENT) - .registerHighlightingRule(7, 16, TypeOfText.CONSTANT) - .build(); - - String[] highlightingPerLine = sourcePersister.computeHighlightingPerLine(file, highlighting); - - assertThat(highlightingPerLine).containsOnly("0,4,a", "0,1,cd", "0,9,c"); - } - - @Test - public void testConversionOfHighlightingOffsetMultiLine() { - DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java") - .setLines(3) - .setOriginalLineOffsets(new long[] {0, 4, 7}); - - SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder() - .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION) - .registerHighlightingRule(4, 9, TypeOfText.COMMENT) - .registerHighlightingRule(10, 16, TypeOfText.CONSTANT) - .build(); - - String[] highlightingPerLine = sourcePersister.computeHighlightingPerLine(file, highlighting); - - assertThat(highlightingPerLine).containsOnly("0,3,a", "0,3,cd", "0,2,cd;3,9,c"); - } - - @Test - public void testConversionOfHighlightingNestedRules() { - DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java") - .setLines(3) - .setOriginalLineOffsets(new long[] {0, 4, 7}); - - SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder() - .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION) - .registerHighlightingRule(4, 6, TypeOfText.COMMENT) - .registerHighlightingRule(7, 16, TypeOfText.CONSTANT) - .registerHighlightingRule(8, 15, TypeOfText.KEYWORD) - .build(); - - String[] highlightingPerLine = sourcePersister.computeHighlightingPerLine(file, highlighting); - - assertThat(highlightingPerLine).containsOnly("0,3,a", "0,2,cd", "0,9,c;1,8,k"); - } - - @Test - public void testConversionOfHighlightingNestedRulesMultiLine() { - DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java") - .setLines(3) - .setOriginalLineOffsets(new long[] {0, 4, 7}); - - SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder() - .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION) - .registerHighlightingRule(4, 6, TypeOfText.COMMENT) - .registerHighlightingRule(4, 16, TypeOfText.CONSTANT) - .registerHighlightingRule(8, 15, TypeOfText.KEYWORD) - .build(); - - String[] highlightingPerLine = sourcePersister.computeHighlightingPerLine(file, highlighting); - - assertThat(highlightingPerLine).containsOnly("0,3,a", "0,3,c;0,2,cd", "0,9,c;1,8,k"); - } - - @Test - public void testSimpleConversionOfSymbolOffset() { - DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java") - .setLines(3) - .setOriginalLineOffsets(new long[] {0, 4, 7}); - - DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(PROJECT_KEY + ":" + "src/foo.java", null); - org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2); - symbolBuilder.newReference(s1, 4); - symbolBuilder.newReference(s1, 11); - org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6); - symbolBuilder.newReference(s2, 0); - symbolBuilder.newReference(s2, 7); - - String[] symbolsPerLine = sourcePersister.computeSymbolReferencesPerLine(file, 
symbolBuilder.build()); - - assertThat(symbolsPerLine).containsOnly("1,2,1;0,2,2", "0,1,1;0,2,2", "4,5,1;0,2,2"); - } - - @Test - public void verifyDeclarationOrderOfSymbolHasNoImpact() { - DefaultInputFile file = new DefaultInputFile(PROJECT_KEY, "src/foo.java") - .setLines(3) - .setOriginalLineOffsets(new long[] {0, 4, 7}); - - DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(PROJECT_KEY + ":" + "src/foo.java", null); - org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6); - symbolBuilder.newReference(s2, 7); - symbolBuilder.newReference(s2, 0); - org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2); - symbolBuilder.newReference(s1, 11); - symbolBuilder.newReference(s1, 4); - - String[] symbolsPerLine = sourcePersister.computeSymbolReferencesPerLine(file, symbolBuilder.build()); - - assertThat(symbolsPerLine).containsOnly("1,2,1;0,2,2", "0,1,1;0,2,2", "4,5,1;0,2,2"); - } - - private void mockResourceCache(String relativePathEmpty, String projectKey, String uuid) { - File sonarFile = File.create(relativePathEmpty); - sonarFile.setUuid(uuid); - when(resourceCache.get(projectKey + ":" + relativePathEmpty)).thenReturn(new BatchResource(1, sonarFile, null)); - } - - private byte[] md5(String string) { - return DigestUtils.md5(string); - } - - private String md5Hex(String string) { - return DigestUtils.md5Hex(string); - } - -} diff --git a/sonar-batch/src/test/resources/org/sonar/batch/index/MeasurePersisterTest/shouldUpdateMeasure-result.xml b/sonar-batch/src/test/resources/org/sonar/batch/index/MeasurePersisterTest/shouldUpdateMeasure-result.xml deleted file mode 100644 index 07f08fb086c..00000000000 --- a/sonar-batch/src/test/resources/org/sonar/batch/index/MeasurePersisterTest/shouldUpdateMeasure-result.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - diff --git a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml deleted file mode 100644 index 61fa8fc265a..00000000000 --- a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - diff --git a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources_missing_src_hash.xml b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources_missing_src_hash.xml index 4a26ff98e6a..2fc32f5845b 100644 --- a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources_missing_src_hash.xml +++ b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources_missing_src_hash.xml @@ -1,6 +1,6 @@ + + + diff --git a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistDontTouchUnchanged-result.xml b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistDontTouchUnchanged-result.xml index 940080dc041..0942d062ede 100644 --- a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistDontTouchUnchanged-result.xml +++ b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistDontTouchUnchanged-result.xml @@ -1,7 +1,7 @@ - - - diff --git a/sonar-core/pom.xml b/sonar-core/pom.xml index b1d6b8696e9..3682bc1fb64 100644 --- a/sonar-core/pom.xml +++ b/sonar-core/pom.xml @@ -51,6 +51,12 @@ org.codehaus.sonar sonar-update-center-common
+ + org.codehaus.sonar + sonar-batch-protocol + ${project.version} + + org.codehaus.sonar sonar-home @@ -183,6 +189,24 @@ + + org.codehaus.mojo + build-helper-maven-plugin + + + add-source + generate-sources + + add-source + + + + src/main/gen-java + + + + + org.apache.maven.plugins maven-jar-plugin diff --git a/sonar-core/src/main/java/org/sonar/core/persistence/DatabaseVersion.java b/sonar-core/src/main/java/org/sonar/core/persistence/DatabaseVersion.java index b2d44f173e4..80de635be56 100644 --- a/sonar-core/src/main/java/org/sonar/core/persistence/DatabaseVersion.java +++ b/sonar-core/src/main/java/org/sonar/core/persistence/DatabaseVersion.java @@ -33,7 +33,7 @@ import java.util.List; */ public class DatabaseVersion implements BatchComponent, ServerComponent { - public static final int LAST_VERSION = 780; + public static final int LAST_VERSION = 783; /** * List of all the tables.n diff --git a/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDao.java b/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDao.java index c0a75f7a412..eced8499a4d 100644 --- a/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDao.java +++ b/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDao.java @@ -31,6 +31,7 @@ import org.sonar.core.persistence.MyBatis; import javax.annotation.CheckForNull; +import java.io.InputStream; import java.io.Reader; import java.sql.Connection; import java.sql.PreparedStatement; @@ -56,17 +57,48 @@ public class FileSourceDao implements BatchComponent, ServerComponent, DaoCompon } } - public void readDataStream(String fileUuid, Function function) { + public void readDataStream(String fileUuid, Function function) { DbSession dbSession = mybatis.openSession(false); + Connection connection = dbSession.getConnection(); + PreparedStatement pstmt = null; + ResultSet rs = null; + InputStream input = null; try { - readColumnStream(dbSession, fileUuid, function, "data"); + pstmt = connection.prepareStatement("SELECT binary_data FROM file_sources WHERE file_uuid=?"); + pstmt.setString(1, fileUuid); + rs = pstmt.executeQuery(); + if (rs.next()) { + input = rs.getBinaryStream(1); + function.apply(input); + } + } catch (SQLException e) { + throw new IllegalStateException("Fail to read FILE_SOURCES.BINARY_DATA of file " + fileUuid, e); } finally { + IOUtils.closeQuietly(input); + DbUtils.closeQuietly(connection, pstmt, rs); MyBatis.closeQuietly(dbSession); } } public void readLineHashesStream(DbSession dbSession, String fileUuid, Function function) { - readColumnStream(dbSession, fileUuid, function, "line_hashes"); + Connection connection = dbSession.getConnection(); + PreparedStatement pstmt = null; + ResultSet rs = null; + Reader reader = null; + try { + pstmt = connection.prepareStatement("SELECT line_hashes FROM file_sources WHERE file_uuid=?"); + pstmt.setString(1, fileUuid); + rs = pstmt.executeQuery(); + if (rs.next()) { + reader = rs.getCharacterStream(1); + function.apply(reader); + } + } catch (SQLException e) { + throw new IllegalStateException("Fail to read FILE_SOURCES.LINE_HASHES of file " + fileUuid, e); + } finally { + IOUtils.closeQuietly(reader); + DbUtils.closeQuietly(connection, pstmt, rs); + } } public void insert(FileSourceDto dto) { @@ -89,24 +121,4 @@ public class FileSourceDao implements BatchComponent, ServerComponent, DaoCompon } } - private void readColumnStream(DbSession dbSession, String fileUuid, Function function, String column) { - Connection connection = dbSession.getConnection(); - PreparedStatement pstmt = null; - 
ResultSet rs = null;
-    Reader reader = null;
-    try {
-      pstmt = connection.prepareStatement("SELECT " + column + " FROM file_sources WHERE file_uuid = ?");
-      pstmt.setString(1, fileUuid);
-      rs = pstmt.executeQuery();
-      if (rs.next()) {
-        reader = rs.getCharacterStream(1);
-        function.apply(reader);
-      }
-    } catch (SQLException e) {
-      throw new IllegalStateException("Fail to read FILE_SOURCES." + column.toUpperCase() + " of file " + fileUuid, e);
-    } finally {
-      IOUtils.closeQuietly(reader);
-      DbUtils.closeQuietly(connection, pstmt, rs);
-    }
-  }
 }
diff --git a/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDto.java b/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDto.java
index 999ee5f2dd5..7840d213df6 100644
--- a/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDto.java
+++ b/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDto.java
@@ -19,36 +19,30 @@
  */
 package org.sonar.core.source.db;
 
+import net.jpountz.lz4.LZ4BlockInputStream;
+import net.jpountz.lz4.LZ4BlockOutputStream;
+import org.apache.commons.io.IOUtils;
+import org.sonar.server.source.db.FileSourceDb;
+
 import javax.annotation.CheckForNull;
 import javax.annotation.Nullable;
 
-public class FileSourceDto {
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
 
-  public static final int CSV_INDEX_SCM_REVISION = 0;
-  public static final int CSV_INDEX_SCM_AUTHOR = 1;
-  public static final int CSV_INDEX_SCM_DATE = 2;
-  public static final int CSV_INDEX_UT_LINE_HITS = 3;
-  public static final int CSV_INDEX_UT_CONDITIONS = 4;
-  public static final int CSV_INDEX_UT_COVERED_CONDITIONS = 5;
-  public static final int CSV_INDEX_IT_LINE_HITS = 6;
-  public static final int CSV_INDEX_IT_CONDITIONS = 7;
-  public static final int CSV_INDEX_IT_COVERED_CONDITIONS = 8;
-  public static final int CSV_INDEX_OVERALL_LINE_HITS = 9;
-  public static final int CSV_INDEX_OVERALL_CONDITIONS = 10;
-  public static final int CSV_INDEX_OVERALL_COVERED_CONDITIONS = 11;
-  public static final int CSV_INDEX_HIGHLIGHTING = 12;
-  public static final int CSV_INDEX_SYMBOLS = 13;
-  public static final int CSV_INDEX_DUPLICATIONS = 14;
+public class FileSourceDto {
 
   private Long id;
   private String projectUuid;
   private String fileUuid;
   private long createdAt;
   private long updatedAt;
-  private String data;
   private String lineHashes;
-  private String dataHash;
   private String srcHash;
+  private byte[] binaryData;
+  private String dataHash;
 
   public Long getId() {
     return id;
@@ -77,32 +71,91 @@ public class FileSourceDto {
     return this;
   }
 
-  @CheckForNull
-  public String getData() {
-    return data;
+  public String getDataHash() {
+    return dataHash;
   }
 
-  public FileSourceDto setData(@Nullable String data) {
-    this.data = data;
+  /**
+   * MD5 of column BINARY_DATA. Used to detect data changes and the need for an update.
+ */ + public FileSourceDto setDataHash(String s) { + this.dataHash = s; return this; } - @CheckForNull - public String getLineHashes() { - return lineHashes; + /** + * Compressed value of serialized protobuf message {@link org.sonar.server.source.db.FileSourceDb.Data} + */ + public byte[] getBinaryData() { + return binaryData; } - public FileSourceDto setLineHashes(@Nullable String lineHashes) { - this.lineHashes = lineHashes; + /** + * Compressed value of serialized protobuf message {@link org.sonar.server.source.db.FileSourceDb.Data} + */ + public FileSourceDb.Data getData() { + return decodeData(binaryData); + } + + public static FileSourceDb.Data decodeData(byte[] binaryData) { + // stream is always closed + return decodeData(new ByteArrayInputStream(binaryData)); + } + + /** + * Decompress and deserialize content of column FILE_SOURCES.BINARY_DATA. + * The parameter "input" is always closed by this method. + */ + public static FileSourceDb.Data decodeData(InputStream binaryInput) { + LZ4BlockInputStream lz4Input = null; + try { + lz4Input = new LZ4BlockInputStream(binaryInput); + return FileSourceDb.Data.parseFrom(lz4Input); + } catch (IOException e) { + throw new IllegalStateException("Fail to decompress and deserialize source data", e); + } finally { + IOUtils.closeQuietly(lz4Input); + } + } + + /** + * Set compressed value of the protobuf message {@link org.sonar.server.source.db.FileSourceDb.Data} + */ + public FileSourceDto setBinaryData(byte[] data) { + this.binaryData = data; return this; } - public String getDataHash() { - return dataHash; + public FileSourceDto setData(FileSourceDb.Data data) { + this.binaryData = encodeData(data); + return this; + } + + /** + * Serialize and compress protobuf message {@link org.sonar.server.source.db.FileSourceDb.Data} + * in the column BINARY_DATA. + */ + public static byte[] encodeData(FileSourceDb.Data data) { + ByteArrayOutputStream byteOutput = new ByteArrayOutputStream(); + LZ4BlockOutputStream compressedOutput = new LZ4BlockOutputStream(byteOutput); + try { + data.writeTo(compressedOutput); + compressedOutput.close(); + return byteOutput.toByteArray(); + } catch (IOException e) { + throw new IllegalStateException("Fail to serialize and compress source data", e); + } finally { + IOUtils.closeQuietly(compressedOutput); + } } - public FileSourceDto setDataHash(String dataHash) { - this.dataHash = dataHash; + @CheckForNull + public String getLineHashes() { + return lineHashes; + } + + public FileSourceDto setLineHashes(@Nullable String lineHashes) { + this.lineHashes = lineHashes; return this; } @@ -110,6 +163,9 @@ public class FileSourceDto { return srcHash; } + /** + * Hash of file content. Value is computed by batch. 
+ */ public FileSourceDto setSrcHash(String srcHash) { this.srcHash = srcHash; return this; diff --git a/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceMapper.java b/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceMapper.java index 2fd054897eb..e8876892357 100644 --- a/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceMapper.java +++ b/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceMapper.java @@ -26,7 +26,7 @@ import java.util.List; public interface FileSourceMapper { - List selectAllFileDataHashByProject(String projectUuid); + List selectHashesForProject(String projectUuid); @CheckForNull FileSourceDto select(String fileUuid); diff --git a/sonar-core/src/main/resources/org/sonar/core/persistence/rows-h2.sql b/sonar-core/src/main/resources/org/sonar/core/persistence/rows-h2.sql index 92e9f0aff0c..fd2663d75e5 100644 --- a/sonar-core/src/main/resources/org/sonar/core/persistence/rows-h2.sql +++ b/sonar-core/src/main/resources/org/sonar/core/persistence/rows-h2.sql @@ -308,6 +308,9 @@ INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('777'); INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('778'); INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('779'); INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('780'); +INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('781'); +INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('782'); +INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('783'); INSERT INTO USERS(ID, LOGIN, NAME, EMAIL, CRYPTED_PASSWORD, SALT, CREATED_AT, UPDATED_AT, REMEMBER_TOKEN, REMEMBER_TOKEN_EXPIRES_AT) VALUES (1, 'admin', 'Administrator', '', 'a373a0e667abb2604c1fd571eb4ad47fe8cc0878', '48bc4b0d93179b5103fd3885ea9119498e9d161b', '1418215735482', '1418215735482', null, null); ALTER TABLE USERS ALTER COLUMN ID RESTART WITH 2; diff --git a/sonar-core/src/main/resources/org/sonar/core/persistence/schema-h2.ddl b/sonar-core/src/main/resources/org/sonar/core/persistence/schema-h2.ddl index 202f896611e..fadf4581aa9 100644 --- a/sonar-core/src/main/resources/org/sonar/core/persistence/schema-h2.ddl +++ b/sonar-core/src/main/resources/org/sonar/core/persistence/schema-h2.ddl @@ -554,8 +554,8 @@ CREATE TABLE "FILE_SOURCES" ( "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1), "PROJECT_UUID" VARCHAR(50) NOT NULL, "FILE_UUID" VARCHAR(50) NOT NULL, - "DATA" CLOB(2147483647), "LINE_HASHES" CLOB(2147483647), + "BINARY_DATA" BINARY(167772150), "DATA_HASH" VARCHAR(50) NOT NULL, "SRC_HASH" VARCHAR(50) NULL, "CREATED_AT" BIGINT NOT NULL, diff --git a/sonar-core/src/main/resources/org/sonar/core/source/db/FileSourceMapper.xml b/sonar-core/src/main/resources/org/sonar/core/source/db/FileSourceMapper.xml index 009f36d1451..2edae7f055c 100644 --- a/sonar-core/src/main/resources/org/sonar/core/source/db/FileSourceMapper.xml +++ b/sonar-core/src/main/resources/org/sonar/core/source/db/FileSourceMapper.xml @@ -5,29 +5,32 @@ - SELECT id, file_uuid as fileUuid, data_hash as dataHash, src_hash as srcHash FROM file_sources WHERE project_uuid = #{projectUuid} - insert into file_sources (project_uuid, file_uuid, created_at, updated_at, data, line_hashes, data_hash, src_hash) - values (#{projectUuid,jdbcType=VARCHAR}, #{fileUuid,jdbcType=VARCHAR}, #{createdAt,jdbcType=BIGINT}, #{updatedAt,jdbcType=BIGINT}, #{data,jdbcType=CLOB}, #{lineHashes,jdbcType=CLOB}, #{dataHash,jdbcType=VARCHAR}, #{srcHash,jdbcType=VARCHAR}) + insert into file_sources (project_uuid, file_uuid, created_at, updated_at, binary_data, line_hashes, data_hash, src_hash) + values 
(#{projectUuid,jdbcType=VARCHAR}, #{fileUuid,jdbcType=VARCHAR}, #{createdAt,jdbcType=BIGINT}, + #{updatedAt,jdbcType=BIGINT}, #{binaryData,jdbcType=BLOB}, #{lineHashes,jdbcType=CLOB}, + #{dataHash,jdbcType=VARCHAR}, #{srcHash,jdbcType=VARCHAR}) update file_sources set - updated_at = #{updatedAt}, - data = #{data}, - line_hashes = #{lineHashes}, - data_hash = #{dataHash}, - src_hash = #{srcHash} + updated_at = #{updatedAt,jdbcType=BIGINT}, + binary_data = #{binaryData,jdbcType=BLOB}, + line_hashes = #{lineHashes,jdbcType=CLOB}, + data_hash = #{dataHash,jdbcType=VARCHAR}, + src_hash = #{srcHash,jdbcType=VARCHAR} where id = #{id} diff --git a/sonar-core/src/test/java/org/sonar/core/source/db/FileSourceDaoTest.java b/sonar-core/src/test/java/org/sonar/core/source/db/FileSourceDaoTest.java index dc8dc262db2..aed0f04fd71 100644 --- a/sonar-core/src/test/java/org/sonar/core/source/db/FileSourceDaoTest.java +++ b/sonar-core/src/test/java/org/sonar/core/source/db/FileSourceDaoTest.java @@ -25,13 +25,12 @@ import org.apache.commons.io.IOUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.sonar.api.utils.DateUtils; import org.sonar.core.persistence.AbstractDaoTestCase; import org.sonar.core.persistence.DbSession; import java.io.IOException; +import java.io.InputStream; import java.io.Reader; -import java.util.Date; import static org.assertj.core.api.Assertions.assertThat; @@ -56,77 +55,82 @@ public class FileSourceDaoTest extends AbstractDaoTestCase { public void select() throws Exception { setupData("shared"); - FileSourceDto fileSourceDto = dao.select("ab12"); + FileSourceDto fileSourceDto = dao.select("FILE1_UUID"); - assertThat(fileSourceDto.getData()).isEqualTo("aef12a,alice,2014-04-25T12:34:56+0100,,class Foo"); + assertThat(fileSourceDto.getBinaryData()).isNotEmpty(); assertThat(fileSourceDto.getDataHash()).isEqualTo("hash"); - assertThat(fileSourceDto.getProjectUuid()).isEqualTo("abcd"); - assertThat(fileSourceDto.getFileUuid()).isEqualTo("ab12"); - assertThat(new Date(fileSourceDto.getCreatedAt())).isEqualTo(DateUtils.parseDateTime("2014-10-29T16:44:02+0100")); - assertThat(new Date(fileSourceDto.getUpdatedAt())).isEqualTo(DateUtils.parseDateTime("2014-10-30T16:44:02+0100")); + assertThat(fileSourceDto.getProjectUuid()).isEqualTo("PRJ_UUID"); + assertThat(fileSourceDto.getFileUuid()).isEqualTo("FILE1_UUID"); + assertThat(fileSourceDto.getCreatedAt()).isEqualTo(1500000000000L); + assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(1500000000000L); } @Test public void select_data() throws Exception { setupData("shared"); - StringParser stringParser = new StringParser(); - dao.readDataStream("ab12", stringParser); + InputStreamToStringFunction fn = new InputStreamToStringFunction(); + dao.readDataStream("FILE1_UUID", fn); - assertThat(stringParser.getResult()).isEqualTo("aef12a,alice,2014-04-25T12:34:56+0100,,class Foo"); + assertThat(fn.result).isNotEmpty(); } @Test public void select_line_hashes() throws Exception { setupData("shared"); - StringParser stringParser = new StringParser(); - dao.readLineHashesStream(session, "ab12", stringParser); + ReaderToStringFunction fn = new ReaderToStringFunction(); + dao.readLineHashesStream(session, "FILE1_UUID", fn); - assertThat(stringParser.getResult()).isEqualTo("truc"); + assertThat(fn.result).isEqualTo("ABC\\nDEF\\nGHI"); } @Test public void no_line_hashes_on_unknown_file() throws Exception { setupData("shared"); - StringParser stringParser = new StringParser(); - dao.readLineHashesStream(session, "unknown", 
stringParser);
+    ReaderToStringFunction fn = new ReaderToStringFunction();
+    dao.readLineHashesStream(session, "unknown", fn);
 
-    assertThat(stringParser.getResult()).isEmpty();
+    assertThat(fn.result).isNull();
   }
 
   @Test
   public void insert() throws Exception {
     setupData("shared");
 
-    dao.insert(new FileSourceDto().setProjectUuid("prj").setFileUuid("file").setData("bla bla")
-      .setDataHash("hash2")
-      .setLineHashes("foo\nbar")
-      .setSrcHash("hache")
-      .setCreatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime())
-      .setUpdatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime()));
-
-    checkTable("insert", "file_sources");
+    dao.insert(new FileSourceDto()
+      .setProjectUuid("PRJ_UUID")
+      .setFileUuid("FILE2_UUID")
+      .setBinaryData("FILE2_BINARY_DATA".getBytes())
+      .setDataHash("FILE2_DATA_HASH")
+      .setLineHashes("LINE1_HASH\\nLINE2_HASH")
+      .setSrcHash("FILE2_HASH")
+      .setCreatedAt(1500000000000L)
+      .setUpdatedAt(1500000000001L));
+
+    checkTable("insert", "file_sources", "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at");
   }
 
   @Test
   public void update() throws Exception {
     setupData("shared");
 
-    dao.update(new FileSourceDto().setId(101L).setProjectUuid("prj").setFileUuid("file")
-      .setData("updated data")
-      .setDataHash("hash2")
-      .setSrcHash("123456")
-      .setLineHashes("foo2\nbar2")
-      .setUpdatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime()));
+    dao.update(new FileSourceDto().setId(101L)
+      .setProjectUuid("PRJ_UUID")
+      .setFileUuid("FILE1_UUID")
+      .setBinaryData("updated data".getBytes())
+      .setDataHash("NEW_DATA_HASH")
+      .setSrcHash("NEW_FILE_HASH")
+      .setLineHashes("NEW_LINE_HASHES")
+      .setUpdatedAt(1500000000002L));
 
-    checkTable("update", "file_sources");
+    checkTable("update", "file_sources", "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at");
   }
 
-  class StringParser implements Function {
+  private static class ReaderToStringFunction implements Function {
 
-    String result = "";
+    String result = null;
 
     @Override
     public String apply(Reader input) {
@@ -137,9 +141,20 @@ public class FileSourceDaoTest extends AbstractDaoTestCase {
         throw new RuntimeException(e);
       }
     }
+  }
+
+  private static class InputStreamToStringFunction implements Function {
 
-    public String getResult() {
-      return result;
+    String result = null;
+
+    @Override
+    public String apply(InputStream input) {
+      try {
+        result = IOUtils.toString(input);
+        return result;
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
     }
   }
 }
diff --git a/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/delete_file_sources_of_disabled_resources-result.xml b/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/delete_file_sources_of_disabled_resources-result.xml
index 13b1c270dc4..7caaa526513 100644
--- a/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/delete_file_sources_of_disabled_resources-result.xml
+++ b/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/delete_file_sources_of_disabled_resources-result.xml
@@ -1,5 +1,5 @@
-
diff --git a/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/delete_file_sources_of_disabled_resources.xml b/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/delete_file_sources_of_disabled_resources.xml
index fa5e4a7828d..aaae9915d98 100644
--- a/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/delete_file_sources_of_disabled_resources.xml
+++ 
b/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/delete_file_sources_of_disabled_resources.xml @@ -72,8 +72,8 @@ depth="[null]" scope="PRJ" qualifier="TRK" created_at="1228222680000" build_date="1228222680000" version="[null]" path="[null]"/> - - diff --git a/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/select_purgeable_file_uuids.xml b/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/select_purgeable_file_uuids.xml index cfc39014c9d..5bbb5aa5891 100644 --- a/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/select_purgeable_file_uuids.xml +++ b/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/select_purgeable_file_uuids.xml @@ -79,8 +79,8 @@ depth="[null]" scope="PRJ" qualifier="TRK" created_at="1228222680000" build_date="1228222680000" version="[null]" path="[null]"/> - - diff --git a/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/shouldDeleteProject.xml b/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/shouldDeleteProject.xml index 1f164a2692e..d50cc3926f5 100644 --- a/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/shouldDeleteProject.xml +++ b/sonar-core/src/test/resources/org/sonar/core/purge/PurgeDaoTest/shouldDeleteProject.xml @@ -103,6 +103,6 @@ depth="[null]" scope="FIL" qualifier="FIL" created_at="1228222680000" build_date="1228222680000" version="[null]" path="[null]"/> - diff --git a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/insert-result.xml b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/insert-result.xml index f43f9f06564..74bca5ec788 100644 --- a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/insert-result.xml +++ b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/insert-result.xml @@ -1,16 +1,17 @@ - + - + diff --git a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/shared.xml b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/shared.xml index fbfa94a6ddd..79a340f841d 100644 --- a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/shared.xml +++ b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/shared.xml @@ -1,9 +1,9 @@ - + diff --git a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/update-result.xml b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/update-result.xml index 64ff997152f..40cbfa91a43 100644 --- a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/update-result.xml +++ b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/update-result.xml @@ -1,10 +1,11 @@ - +