import org.sonar.server.db.migrations.Select.RowReader;
import org.sonar.server.db.migrations.SqlStatement;
+import javax.annotation.Nullable;
+
import java.sql.SQLException;
import java.util.Date;
+import static com.google.common.base.Charsets.UTF_8;
+
/**
* Used in the Active Record Migration 714
*
byte[] longAuthors = row.getBytes(8);
byte[] shortDates = row.getBytes(9);
byte[] longDates = row.getBytes(10);
-
- String[] sourceData = new FileSourceDto(source, shortRevisions, longRevisions, shortAuthors, longAuthors, shortDates, longDates).getSourceData();
+ byte[] shortHits = row.getBytes(11);
+ byte[] longHits = row.getBytes(12);
+ byte[] shortCond = row.getBytes(13);
+ byte[] longCond = row.getBytes(14);
+ byte[] shortCovCond = row.getBytes(15);
+ byte[] longCovCond = row.getBytes(16);
+
+ String[] sourceData = new FileSourceDto(source,
+ ofNullableBytes(shortRevisions, longRevisions),
+ ofNullableBytes(shortAuthors, longAuthors),
+ ofNullableBytes(shortDates, longDates),
+ ofNullableBytes(shortHits, longHits),
+ ofNullableBytes(shortCond, longCond),
+ ofNullableBytes(shortCovCond, longCovCond)
+ ).getSourceData();
update.setString(1, projectUuid)
.setString(2, fileUuid)
}
}
+ /**
+  * Returns the measure payload as a UTF-8 string, preferring the short
+  * (text_value) column over the long (measure_data) column.
+  * Returns an empty string when both columns are null (measure absent).
+  */
+ private static String ofNullableBytes(@Nullable byte[] shortBytes, @Nullable byte[] longBytes) {
+ byte[] result;
+ if (shortBytes == null) {
+ if (longBytes == null) {
+ // no per-line data stored for this file in either column
+ return "";
+ } else {
+ result = longBytes;
+ }
+ } else {
+ result = shortBytes;
+ }
+ return new String(result, UTF_8);
+ }
+
private final System2 system;
public FeedFileSources(Database db, System2 system) {
Long revisionMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'revisions_by_line'").get(simpleLongReader);
Long authorMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'authors_by_line'").get(simpleLongReader);
Long datesMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'last_commit_datetimes_by_line'").get(simpleLongReader);
+ Long coverageHitsByLineMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'coverage_line_hits_data'").get(simpleLongReader);
+ Long conditionsByLineMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'conditions_by_line'").get(simpleLongReader);
+ Long coveredConditionsByLineMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'covered_conditions_by_line'").get(simpleLongReader);
MassUpdate massUpdate = context.prepareMassUpdate();
massUpdate.select("SELECT " +
"m2.text_value as short_authors_by_line, " +
"m2.measure_data as long_authors_by_line, " +
"m3.text_value as short_dates_by_line, " +
- "m3.measure_data as short_dates_by_line " +
+ "m3.measure_data as long_dates_by_line, " +
+ "m4.text_value as short_hits_by_line, " +
+ "m4.measure_data as long_hits_by_line, " +
+ "m5.text_value as short_cond_by_line, " +
+ "m5.measure_data as long_cond_by_line, " +
+ "m6.text_value as short_cover_cond_by_line, " +
+ "m6.measure_data as long_cover_cond_by_line " +
"FROM snapshots s " +
"JOIN snapshot_sources ss " +
"ON s.id = ss.snapshot_id AND s.islast = ? " +
"ON m2.snapshot_id = s.id AND m2.metric_id = ? " +
"LEFT JOIN project_measures m3 " +
"ON m3.snapshot_id = s.id AND m3.metric_id = ? " +
+ "LEFT JOIN project_measures m4 " +
+ "ON m4.snapshot_id = s.id AND m4.metric_id = ? " +
+ "LEFT JOIN project_measures m5 " +
+ "ON m5.snapshot_id = s.id AND m5.metric_id = ? " +
+ "LEFT JOIN project_measures m6 " +
+ "ON m6.snapshot_id = s.id AND m6.metric_id = ? " +
"WHERE " +
"f.enabled = ? " +
"AND f.scope = 'FIL' " +
.setLong(2, revisionMetricId != null ? revisionMetricId : 0L)
.setLong(3, authorMetricId != null ? authorMetricId : 0L)
.setLong(4, datesMetricId != null ? datesMetricId : 0L)
- .setBoolean(5, true);
+ .setLong(5, coverageHitsByLineMetricId != null ? coverageHitsByLineMetricId : 0L)
+ .setLong(6, conditionsByLineMetricId != null ? conditionsByLineMetricId : 0L)
+ .setLong(7, coveredConditionsByLineMetricId != null ? coveredConditionsByLineMetricId : 0L)
+ .setBoolean(8, true);
massUpdate.update("INSERT INTO file_sources" +
"(project_uuid, file_uuid, created_at, updated_at, data, line_hashes, data_hash)" +
import org.sonar.api.utils.KeyValueFormat;
import org.sonar.api.utils.text.CsvWriter;
-import javax.annotation.Nullable;
-
import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.util.Iterator;
private Iterator<String> sourceSplitter;
private Map<Integer, String> revisions;
-
private Map<Integer, String> authors;
-
private Map<Integer, String> dates;
+ private Map<Integer, String> hits;
+ private Map<Integer, String> conditions;
+ private Map<Integer, String> coveredConditions;
- FileSourceDto(String source, @Nullable byte[] shortRevisions, @Nullable byte[] longRevisions, @Nullable byte[] shortAuthors, @Nullable byte[] longAuthors,
- @Nullable byte[] shortDates, @Nullable byte[] longDates) {
+ /**
+  * Builds the per-line data holder for one file.
+  * source is the raw file content (split on \r?\n|\r below); every other
+  * parameter is a "lineNumber=value" string already decoded from the
+  * short/long measure columns by the caller (see ofNullableBytes).
+  */
+ FileSourceDto(String source, String revisions, String authors, String dates, String hits, String conditions, String coveredConditions) {
 sourceSplitter = Splitter.onPattern("\r?\n|\r").split(source).iterator();
- revisions = KeyValueFormat.parseIntString(ofNullableBytes(shortRevisions, longRevisions));
- authors = KeyValueFormat.parseIntString(ofNullableBytes(shortAuthors, longAuthors));
- dates = KeyValueFormat.parseIntString(ofNullableBytes(shortDates, longDates));
+ // parse each measure into a line-number -> value map
+ this.revisions = KeyValueFormat.parseIntString(revisions);
+ this.authors = KeyValueFormat.parseIntString(authors);
+ this.dates = KeyValueFormat.parseIntString(dates);
+ this.hits = KeyValueFormat.parseIntString(hits);
+ this.conditions = KeyValueFormat.parseIntString(conditions);
+ this.coveredConditions = KeyValueFormat.parseIntString(coveredConditions);
 }
String[] getSourceData() {
line++;
sourceLine = sourceSplitter.next();
lineHashes.append(lineChecksum(sourceLine)).append("\n");
- csv.values(revisions.get(line), authors.get(line), dates.get(line), highlighting, sourceLine);
+ csv.values(revisions.get(line), authors.get(line), dates.get(line),
+ hits.get(line), conditions.get(line), coveredConditions.get(line),
+ highlighting, sourceLine);
}
csv.close();
return new String[] {new String(output.toByteArray(), UTF_8), lineHashes.toString()};
return DigestUtils.md5Hex(reducedLine);
}
- private static String ofNullableBytes(@Nullable byte[] shortBytes, @Nullable byte[] longBytes) {
- byte[] result;
- if (shortBytes == null) {
- if (longBytes == null) {
- return "";
- } else {
- result = longBytes;
- }
- } else {
- result = shortBytes;
- }
- return new String(result, UTF_8);
- }
}
import org.sonar.api.utils.DateUtils;
import org.sonar.api.utils.System2;
import org.sonar.core.persistence.TestDatabase;
-import org.sonar.server.db.migrations.DatabaseMigration;
import java.sql.Connection;
import java.sql.PreparedStatement;
@ClassRule
public static TestDatabase db = new TestDatabase().schema(FeedFileSourcesTest.class, "schema.sql");
- DatabaseMigration migration;
+ FeedFileSources migration;
System2 system;
}
@Test
- public void migrate_sources_with_no_scm() throws Exception {
+ public void migrate_sources_with_no_scm_no_coverage() throws Exception {
db.prepareDbUnit(getClass(), "before.xml");
db.executeUpdateSql("insert into snapshot_sources " +
}
@Test
- public void migrate_sources_with_scm() throws Exception {
+ public void migrate_sources_with_scm_and_coverage_in_text_value() throws Exception {
+ migrate_sources_with_scm_and_coverage_in("text_value");
+ }
+
+ @Test
+ public void migrate_sources_with_scm_and_coverage_in_measure_data() throws Exception {
+ migrate_sources_with_scm_and_coverage_in("measure_data");
+ }
+
+ private void migrate_sources_with_scm_and_coverage_in(String columnName) throws Exception {
db.prepareDbUnit(getClass(), "before.xml");
Connection connection = null;
.executeUpdate();
PreparedStatement revisionStmt = connection.prepareStatement("insert into project_measures " +
- "(metric_id, snapshot_id, text_value) " +
+ "(metric_id, snapshot_id, " + columnName + ") " +
"values " +
"(1, 6, ?)");
revisionStmt.setBytes(1, "1=aef12a;2=abe465;3=afb789;4=afb789".getBytes(Charsets.UTF_8));
revisionStmt.executeUpdate();
PreparedStatement authorStmt = connection.prepareStatement("insert into project_measures " +
- "(metric_id, snapshot_id, text_value) " +
+ "(metric_id, snapshot_id, " + columnName + ") " +
"values " +
"(2, 6, ?)");
authorStmt.setBytes(1, "1=alice;2=bob;3=carol;4=carol".getBytes(Charsets.UTF_8));
authorStmt.executeUpdate();
PreparedStatement dateStmt = connection.prepareStatement("insert into project_measures " +
- "(metric_id, snapshot_id, text_value) " +
+ "(metric_id, snapshot_id, " + columnName + ") " +
"values " +
"(3, 6, ?)");
dateStmt.setBytes(1, "1=2014-04-25T12:34:56+0100;2=2014-07-25T12:34:56+0100;3=2014-03-23T12:34:56+0100;4=2014-03-23T12:34:56+0100".getBytes(Charsets.UTF_8));
dateStmt.executeUpdate();
+
+ PreparedStatement hitsStmt = connection.prepareStatement("insert into project_measures " +
+ "(metric_id, snapshot_id, " + columnName + ") " +
+ "values " +
+ "(4, 6, ?)");
+ hitsStmt.setBytes(1, "1=1;3=0".getBytes(Charsets.UTF_8));
+ hitsStmt.executeUpdate();
+
+ PreparedStatement condStmt = connection.prepareStatement("insert into project_measures " +
+ "(metric_id, snapshot_id, " + columnName + ") " +
+ "values " +
+ "(5, 6, ?)");
+ condStmt.setBytes(1, "1=4".getBytes(Charsets.UTF_8));
+ condStmt.executeUpdate();
+
+ PreparedStatement coveredCondStmt = connection.prepareStatement("insert into project_measures " +
+ "(metric_id, snapshot_id, " + columnName + ") " +
+ "values " +
+ "(6, 6, ?)");
+ coveredCondStmt.setBytes(1, "1=2".getBytes(Charsets.UTF_8));
+ coveredCondStmt.executeUpdate();
} finally {
DbUtils.commitAndCloseQuietly(connection);
}
-
migration.execute();
db.assertDbUnit(getClass(), "after-with-scm.xml", "file_sources");
<dataset>
<file_sources id="1" project_uuid="uuid-MyProject" file_uuid="uuid-MyFile.xoo" created_at="1416238020000" updated_at="1414770242000"
- data="aef12a,alice,2014-04-25T12:34:56+0100,,class Foo { abe465,bob,2014-07-25T12:34:56+0100,, // Empty afb789,carol,2014-03-23T12:34:56+0100,,} afb789,carol,2014-03-23T12:34:56+0100,, "
+ data="aef12a,alice,2014-04-25T12:34:56+0100,1,4,2,,class Foo { abe465,bob,2014-07-25T12:34:56+0100,,,,, // Empty afb789,carol,2014-03-23T12:34:56+0100,0,,,,} afb789,carol,2014-03-23T12:34:56+0100,,,,, "
line_hashes="6a19ce786467960a3a9b0d26383a464a aab2dbc5fdeaa80b050b1d049ede357c cbb184dd8e05c9709e5dcaedaa0495cf "
data_hash="" />
<dataset>
<file_sources id="1" project_uuid="uuid-MyProject" file_uuid="uuid-MyFile.xoo" created_at="1416238020000" updated_at="1414770242000"
- data=",,,,class Foo { ,,,, // Empty ,,,,} ,,,, "
+ data=",,,,,,,class Foo { ,,,,,,, // Empty ,,,,,,,} ,,,,,,, "
line_hashes="6a19ce786467960a3a9b0d26383a464a aab2dbc5fdeaa80b050b1d049ede357c cbb184dd8e05c9709e5dcaedaa0495cf "
data_hash="" />
+++ /dev/null
-<dataset>
-
- <metrics id="1" name="revisions_by_line" description="[null]" direction="0" domain="SCM" short_name="Revisions by line" qualitative="false" val_type="DATA"
- user_managed="false" enabled="true" origin="JAV" worst_value="[null]" best_value="[null]" optimized_best_value="[null]" hidden="[false]" delete_historical_data="false" />
- <metrics id="2" name="authors_by_line" description="[null]" direction="0" domain="SCM" short_name="Authors by line" qualitative="false" val_type="DATA"
- user_managed="false" enabled="true" origin="JAV" worst_value="[null]" best_value="[null]" optimized_best_value="[null]" hidden="[false]" delete_historical_data="false" />
- <metrics id="3" name="last_commit_datetimes_by_line" description="[null]" direction="0" domain="SCM" short_name="Last commit dates by line" qualitative="false" val_type="DATA"
- user_managed="false" enabled="true" origin="JAV" worst_value="[null]" best_value="[null]" optimized_best_value="[null]" hidden="[false]" delete_historical_data="false" />
-
- <projects id="1" uuid="uuid-MyProject" kee="MyProject" scope="PRJ" qualifier="TRK" />
- <projects id="2" uuid="uuid-prj" kee="MyProject:src/main/xoo/prj" scope="DIR" qualifier="DIR" />
- <projects id="3" uuid="uuid-MyFile.xoo" kee="MyProject:src/main/xoo/prj/MyFile.xoo" scope="FIL" qualifier="FIL" />
-
- <snapshots id="1" project_id="1" parent_snapshot_id="1" root_project_id="1" root_snapshot_id="1"
- status="P" islast="[false]" purge_status="1"
- period1_mode="days1" period1_param="30" period1_date="2011-09-24"
- period2_mode="days2" period2_param="31" period2_date="2011-09-25"
- period3_mode="days3" period3_param="32" period3_date="2011-09-26"
- period4_mode="days4" period4_param="33" period4_date="2011-09-27"
- period5_mode="days5" period5_param="34" period5_date="2011-09-28"
- depth="1" scope="PRJ" qualifier="TRK" created_at="2008-12-02" build_date="2011-09-29"
- version="2.1-SNAPSHOT" path="1.2."/>
- <snapshots id="2" project_id="1" parent_snapshot_id="1" root_project_id="1" root_snapshot_id="1"
- status="P" islast="[true]" purge_status="1"
- period1_mode="days1" period1_param="30" period1_date="2011-09-24"
- period2_mode="days2" period2_param="31" period2_date="2011-09-25"
- period3_mode="days3" period3_param="32" period3_date="2011-09-26"
- period4_mode="days4" period4_param="33" period4_date="2011-09-27"
- period5_mode="days5" period5_param="34" period5_date="2011-09-28"
- depth="1" scope="PRJ" qualifier="TRK" created_at="2008-12-02" build_date="2011-09-29"
- version="2.1-SNAPSHOT" path="1.2."/>
-
- <snapshots id="3" project_id="2" parent_snapshot_id="1" root_project_id="1" root_snapshot_id="1"
- status="P" islast="[false]" purge_status="1"
- period1_mode="days1" period1_param="30" period1_date="2011-09-24"
- period2_mode="days2" period2_param="31" period2_date="2011-09-25"
- period3_mode="days3" period3_param="32" period3_date="2011-09-26"
- period4_mode="days4" period4_param="33" period4_date="2011-09-27"
- period5_mode="days5" period5_param="34" period5_date="2011-09-28"
- depth="1" scope="DIR" qualifier="DIR" created_at="2008-12-02" build_date="2011-09-29"
- version="2.1-SNAPSHOT" path="1.2."/>
- <snapshots id="4" project_id="2" parent_snapshot_id="2" root_project_id="1" root_snapshot_id="2"
- status="P" islast="[true]" purge_status="1"
- period1_mode="days1" period1_param="30" period1_date="2011-09-24"
- period2_mode="days2" period2_param="31" period2_date="2011-09-25"
- period3_mode="days3" period3_param="32" period3_date="2011-09-26"
- period4_mode="days4" period4_param="33" period4_date="2011-09-27"
- period5_mode="days5" period5_param="34" period5_date="2011-09-28"
- depth="1" scope="DIR" qualifier="DIR" created_at="2008-12-02" build_date="2011-09-29"
- version="2.1-SNAPSHOT" path="1.2."/>
-
- <snapshots id="5" project_id="3" parent_snapshot_id="3" root_project_id="1" root_snapshot_id="1"
- status="P" islast="[false]" purge_status="1"
- period1_mode="days1" period1_param="30" period1_date="2011-09-24"
- period2_mode="days2" period2_param="31" period2_date="2011-09-25"
- period3_mode="days3" period3_param="32" period3_date="2011-09-26"
- period4_mode="days4" period4_param="33" period4_date="2011-09-27"
- period5_mode="days5" period5_param="34" period5_date="2011-09-28"
- depth="1" scope="DIR" qualifier="DIR" created_at="2008-12-02" build_date="2011-09-29"
- version="2.1-SNAPSHOT" path="1.2."/>
- <snapshots id="6" project_id="3" parent_snapshot_id="4" root_project_id="1" root_snapshot_id="2"
- status="P" islast="[true]" purge_status="1"
- period1_mode="days1" period1_param="30" period1_date="2011-09-24"
- period2_mode="days2" period2_param="31" period2_date="2011-09-25"
- period3_mode="days3" period3_param="32" period3_date="2011-09-26"
- period4_mode="days4" period4_param="33" period4_date="2011-09-27"
- period5_mode="days5" period5_param="34" period5_date="2011-09-28"
- depth="1" scope="FIL" qualifier="FIL" created_at="2008-12-02" build_date="2011-09-29"
- version="2.1-SNAPSHOT" path="1.2."/>
-
-</dataset>
user_managed="false" enabled="true" origin="JAV" worst_value="[null]" best_value="[null]" optimized_best_value="[null]" hidden="[false]" delete_historical_data="false" />
<metrics id="3" name="last_commit_datetimes_by_line" description="[null]" direction="0" domain="SCM" short_name="Last commit dates by line" qualitative="false" val_type="DATA"
user_managed="false" enabled="true" origin="JAV" worst_value="[null]" best_value="[null]" optimized_best_value="[null]" hidden="[false]" delete_historical_data="false" />
+ <metrics id="4" name="coverage_line_hits_data" description="[null]" direction="0" domain="Tests" short_name="Coverage hits by line" qualitative="false" val_type="DATA"
+ user_managed="false" enabled="true" origin="JAV" worst_value="[null]" best_value="[null]" optimized_best_value="[null]" hidden="[false]" delete_historical_data="false" />
+ <metrics id="5" name="conditions_by_line" description="[null]" direction="0" domain="Tests" short_name="Conditions by line" qualitative="false" val_type="DATA"
+ user_managed="false" enabled="true" origin="JAV" worst_value="[null]" best_value="[null]" optimized_best_value="[null]" hidden="[false]" delete_historical_data="false" />
+ <metrics id="6" name="covered_conditions_by_line" description="[null]" direction="0" domain="Tests" short_name="Covered conditions by line" qualitative="false" val_type="DATA"
+ user_managed="false" enabled="true" origin="JAV" worst_value="[null]" best_value="[null]" optimized_best_value="[null]" hidden="[false]" delete_historical_data="false" />
<projects id="1" uuid="uuid-MyProject" kee="MyProject" scope="PRJ" qualifier="TRK" />
<projects id="2" uuid="uuid-prj" kee="MyProject:src/main/xoo/prj" scope="DIR" qualifier="DIR" />
Map<Integer, String> authorsByLine = getLineMetric(file, CoreMetrics.SCM_AUTHORS_BY_LINE_KEY);
Map<Integer, String> revisionsByLine = getLineMetric(file, CoreMetrics.SCM_REVISIONS_BY_LINE_KEY);
Map<Integer, String> datesByLine = getLineMetric(file, CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY);
+ Map<Integer, String> hitsByLine = getLineMetric(file, CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY);
+ Map<Integer, String> condByLine = getLineMetric(file, CoreMetrics.CONDITIONS_BY_LINE_KEY);
+ Map<Integer, String> coveredCondByLine = getLineMetric(file, CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY);
SyntaxHighlightingData highlighting = loadHighlighting(file);
String[] highlightingPerLine = computeHighlightingPerLine(file, highlighting);
ByteArrayOutputStream output = new ByteArrayOutputStream();
CsvWriter csv = CsvWriter.of(new OutputStreamWriter(output, UTF_8));
for (int lineIdx = 1; lineIdx <= file.lines(); lineIdx++) {
- csv.values(revisionsByLine.get(lineIdx), authorsByLine.get(lineIdx), datesByLine.get(lineIdx), highlightingPerLine[lineIdx - 1],
+ csv.values(revisionsByLine.get(lineIdx), authorsByLine.get(lineIdx), datesByLine.get(lineIdx),
+ hitsByLine.get(lineIdx), condByLine.get(lineIdx), coveredCondByLine.get(lineIdx),
+ highlightingPerLine[lineIdx - 1],
CharMatcher.anyOf(BOM).removeFrom(lines.get(lineIdx - 1)));
}
csv.close();
String relativePathSame = "src/same.java";
java.io.File sameFile = new java.io.File(basedir, relativePathSame);
FileUtils.write(sameFile, "unchanged\ncontent");
- DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame).setLines(2).setAbsolutePath(sameFile.getAbsolutePath());
+ DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame).setLines(2).setAbsolutePath(sameFile.getAbsolutePath())
+ .setLineHashes(new String[] {"foo", "bar"});
when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew));
mockResourceCache(relativePathSame, PROJECT_KEY, "uuidsame");
assertThat(fileSourceDto.getCreatedAt()).isEqualTo(DateUtils.parseDateTime("2014-10-10T16:44:02+0200").getTime());
assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getData()).isEqualTo(
- ",,,,changed\r\n,,,,content\r\n");
+ ",,,,,,,changed\r\n,,,,,,,content\r\n");
assertThat(fileSourceDto.getLineHashes()).isEqualTo("foo\nbar");
- assertThat(fileSourceDto.getDataHash()).isEqualTo("e41cca9c51ff853c748f708f39dfc035");
+ assertThat(fileSourceDto.getDataHash()).isEqualTo("54f7fa51128a7ee577a476974c56568c");
}
@Test
assertThat(fileSourceDto.getCreatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getData()).isEqualTo(
- ",,,,foo\r\n,,,,bar\r\n,,,,biz\r\n");
+ ",,,,,,,foo\r\n,,,,,,,bar\r\n,,,,,,,biz\r\n");
assertThat(fileSourceDto.getLineHashes()).isEqualTo("foo\nbar\nbee");
- assertThat(fileSourceDto.getDataHash()).isEqualTo("0c43ed6418d690ee0ffc3e43e6660967");
+ assertThat(fileSourceDto.getDataHash()).isEqualTo("419c2b162018f6bbeb04fc0500d7852d");
}
@Test
- public void testPersistNewFileWithScmAndHighlighting() throws Exception {
+ public void testPersistNewFileWithScmAndCoverageAndHighlighting() throws Exception {
setupData("file_sources");
Date now = DateUtils.parseDateTime("2014-10-29T16:44:02+0100");
when(system2.newDate()).thenReturn(now);
.thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE, "1=2014-10-11T16:44:02+0100;2=2014-10-12T16:44:02+0100;3=2014-10-13T16:44:02+0100")));
when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.SCM_REVISIONS_BY_LINE_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_REVISIONS_BY_LINE, "1=123;2=234;3=345")));
+ when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY))
+ .thenReturn(Arrays.asList(new Measure(CoreMetrics.COVERAGE_LINE_HITS_DATA, "1=1;3=0")));
+ when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.CONDITIONS_BY_LINE_KEY))
+ .thenReturn(Arrays.asList(new Measure(CoreMetrics.CONDITIONS_BY_LINE, "1=4")));
+ when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY))
+ .thenReturn(Arrays.asList(new Measure(CoreMetrics.COVERED_CONDITIONS_BY_LINE, "1=2")));
SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
.registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getLineHashes()).isEqualTo("foo\nbar\nbee");
assertThat(fileSourceDto.getData()).isEqualTo(
- "123,julien,2014-10-11T16:44:02+0100,\"0,3,a\",foo\r\n"
- + "234,simon,2014-10-12T16:44:02+0100,\"0,1,cd\",bar\r\n"
- + "345,julien,2014-10-13T16:44:02+0100,\"0,9,c\",biz\r\n");
- assertThat(fileSourceDto.getDataHash()).isEqualTo("a2aaee165e33957a67331fb9f869e0f1");
+ "123,julien,2014-10-11T16:44:02+0100,1,4,2,\"0,3,a\",foo\r\n"
+ + "234,simon,2014-10-12T16:44:02+0100,,,,\"0,1,cd\",bar\r\n"
+ + "345,julien,2014-10-13T16:44:02+0100,0,,,\"0,9,c\",biz\r\n");
+ assertThat(fileSourceDto.getDataHash()).isEqualTo("66cf8a9176f59672044663f48a19989c");
}
@Test
<dataset>
<file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
- data=",,,,unchanged ,,,,content "
+ data=",,,,,,,unchanged ,,,,,,,content "
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
- data_hash="ee716d4ed9faae16eb9167714442a3bc"
+ data_hash="5de921a62f62af2ba05afe1cfe067f31"
created_at="1412952242000" updated_at="1412952242000" />
</dataset>
<dataset>
<file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
- data=",,,,unchanged ,,,,content "
+ data=",,,,,,,unchanged ,,,,,,,content "
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
- data_hash="ee716d4ed9faae16eb9167714442a3bc"
+ data_hash="5de921a62f62af2ba05afe1cfe067f31"
created_at="1412952242000" updated_at="1412952242000" />
</dataset>
<dataset>
<file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
- data=",,,,unchanged ,,,,content "
+ data=",,,,,,,unchanged ,,,,,,,content "
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
- data_hash="ee716d4ed9faae16eb9167714442a3bc"
+ data_hash="5de921a62f62af2ba05afe1cfe067f31"
created_at="1412952242000" updated_at="1412952242000" />
<file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" data="[null]"