author    | Julien HENRY <julien.henry@sonarsource.com> | 2014-11-24 18:00:31 +0100
committer | Julien HENRY <julien.henry@sonarsource.com> | 2014-11-24 21:47:19 +0100
commit    | ee2626afdde71c4f3380f3d22b3cc132be8bf465 (patch)
tree      | 1291cd06e39a8325c1c9b70668d8550135b7d852
parent    | f7598fbe9b8df747b525e9642fba3e7a3ae9da77 (diff)
SONAR-5868 Allow issue tracking mechanism to work without full previous sources
21 files changed, 199 insertions, 80 deletions
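The patch replaces the old integer line checksums (a DJB2-style rolling hash seeded with 5381) with one MD5 hash per line, computed after stripping tabs, carriage returns, line feeds and spaces, and stores those hashes alongside the source in a new `line_hashes` column. A minimal sketch of that hashing rule follows (Java, assuming commons-codec and commons-lang on the classpath; the class and method names are illustrative only, the patch itself implements this logic in `FileMetadata.md5IgnoreWhitespace()` and in the v50 migration's `FileSourceDto.lineChecksum()`):

```java
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang.StringUtils;

// Illustrative sketch of the per-line hashing rule used throughout this commit.
public class LineHashSketch {

  // Same whitespace set as the SPACE_CHARS constant introduced by the patch.
  private static final String SPACE_CHARS = "\t\n\r ";

  static String hashLine(String line) {
    // Drop all whitespace first, so re-indenting a line does not change its hash.
    String reduced = StringUtils.replaceChars(line, SPACE_CHARS, "");
    // Blank (whitespace-only) lines map to an empty hash rather than an MD5 value.
    return reduced.isEmpty() ? "" : DigestUtils.md5Hex(reduced);
  }

  public static void main(String[] args) {
    System.out.println(hashLine("  foo"));  // same digest as hashLine("foo")
    System.out.println(hashLine(" \t "));   // prints an empty string
  }
}
```

The empty-hash convention for blank lines is what the new `FileMetadataTest` cases assert, and whitespace-insensitivity is what lets the issue tracker match moved or re-indented lines without keeping the full previous source.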
diff --git a/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v50/FeedFileSources.java b/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v50/FeedFileSources.java
index 3c09e05947f..d7f698f1800 100644
--- a/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v50/FeedFileSources.java
+++ b/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v50/FeedFileSources.java
@@ -21,14 +21,15 @@ package org.sonar.server.db.migrations.v50;
 
 import org.sonar.api.utils.System2;
 import org.sonar.core.persistence.Database;
-import org.sonar.server.db.migrations.*;
+import org.sonar.server.db.migrations.BaseDataChange;
+import org.sonar.server.db.migrations.MassUpdate;
 import org.sonar.server.db.migrations.Select.Row;
 import org.sonar.server.db.migrations.Select.RowReader;
+import org.sonar.server.db.migrations.SqlStatement;
 
 import java.sql.SQLException;
 import java.util.Date;
 
-
 /**
  * Used in the Active Record Migration 714
  *
@@ -56,14 +57,15 @@ public class FeedFileSources extends BaseDataChange {
       byte[] shortDates = row.getBytes(9);
       byte[] longDates = row.getBytes(10);
 
-      String sourceData = new FileSourceDto(source, shortRevisions, longRevisions, shortAuthors, longAuthors, shortDates, longDates).getSourceData();
+      String[] sourceData = new FileSourceDto(source, shortRevisions, longRevisions, shortAuthors, longAuthors, shortDates, longDates).getSourceData();
 
       update.setString(1, projectUuid)
         .setString(2, fileUuid)
         .setLong(3, now.getTime())
         .setLong(4, (updatedAt == null ? now : updatedAt).getTime())
-        .setString(5, sourceData)
-        .setString(6, "");
+        .setString(5, sourceData[0])
+        .setString(6, sourceData[1])
+        .setString(7, "");
 
       return true;
     }
@@ -91,43 +93,43 @@ public class FeedFileSources extends BaseDataChange {
     MassUpdate massUpdate = context.prepareMassUpdate();
     massUpdate.select("SELECT " +
-      "p.uuid as project_uuid, " +
-      "f.uuid as file_uuid, " +
-      "ss.data as source, " +
-      "ss.updated_at, " +
-      "m1.text_value as short_revisions_by_line, " +
-      "m1.measure_data as long_revisions_by_line, " +
-      "m2.text_value as short_authors_by_line, " +
-      "m2.measure_data as long_authors_by_line, " +
-      "m3.text_value as short_dates_by_line, " +
-      "m3.measure_data as short_dates_by_line " +
+        "p.uuid as project_uuid, " +
+        "f.uuid as file_uuid, " +
+        "ss.data as source, " +
+        "ss.updated_at, " +
+        "m1.text_value as short_revisions_by_line, " +
+        "m1.measure_data as long_revisions_by_line, " +
+        "m2.text_value as short_authors_by_line, " +
+        "m2.measure_data as long_authors_by_line, " +
+        "m3.text_value as short_dates_by_line, " +
+        "m3.measure_data as short_dates_by_line " +
       "FROM snapshots s " +
       "JOIN snapshot_sources ss " +
-      "ON s.id = ss.snapshot_id AND s.islast = ? " +
+        "ON s.id = ss.snapshot_id AND s.islast = ? " +
       "JOIN projects p " +
-      "ON s.root_project_id = p.id " +
+        "ON s.root_project_id = p.id " +
      "JOIN projects f " +
-      "ON s.project_id = f.id " +
+        "ON s.project_id = f.id " +
      "LEFT JOIN project_measures m1 " +
-      "ON m1.snapshot_id = s.id AND m1.metric_id = ? " +
+        "ON m1.snapshot_id = s.id AND m1.metric_id = ? " +
      "LEFT JOIN project_measures m2 " +
-      "ON m2.snapshot_id = s.id AND m2.metric_id = ? " +
+        "ON m2.snapshot_id = s.id AND m2.metric_id = ? " +
      "LEFT JOIN project_measures m3 " +
-      "ON m3.snapshot_id = s.id AND m3.metric_id = ? " +
+        "ON m3.snapshot_id = s.id AND m3.metric_id = ? " +
      "WHERE " +
" + - "AND f.scope = 'FIL' " + - "AND p.scope = 'PRJ' AND p.qualifier = 'TRK' ") - .setBoolean(1, true) - .setLong(2, revisionMetricId != null ? revisionMetricId : 0L) - .setLong(3, authorMetricId != null ? authorMetricId : 0L) - .setLong(4, datesMetricId != null ? datesMetricId : 0L) - .setBoolean(5, true); + "f.enabled = ? " + + "AND f.scope = 'FIL' " + + "AND p.scope = 'PRJ' AND p.qualifier = 'TRK' ") + .setBoolean(1, true) + .setLong(2, revisionMetricId != null ? revisionMetricId : 0L) + .setLong(3, authorMetricId != null ? authorMetricId : 0L) + .setLong(4, datesMetricId != null ? datesMetricId : 0L) + .setBoolean(5, true); massUpdate.update("INSERT INTO file_sources" + - "(project_uuid, file_uuid, created_at, updated_at, data, data_hash)" + + "(project_uuid, file_uuid, created_at, updated_at, data, line_hashes, data_hash)" + "VALUES " + - "(?, ?, ?, ?, ?, ?)"); + "(?, ?, ?, ?, ?, ?, ?)"); massUpdate.execute(new FileSourceBuilder(system)); } diff --git a/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v50/FileSourceDto.java b/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v50/FileSourceDto.java index 893e1fc8c6b..31a5ec2849f 100644 --- a/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v50/FileSourceDto.java +++ b/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v50/FileSourceDto.java @@ -20,6 +20,8 @@ package org.sonar.server.db.migrations.v50; import com.google.common.base.Splitter; +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.commons.lang.StringUtils; import org.sonar.api.utils.KeyValueFormat; import org.sonar.api.utils.text.CsvWriter; @@ -34,6 +36,8 @@ import static com.google.common.base.Charsets.UTF_8; class FileSourceDto { + private static final String SPACE_CHARS = "\t\n\r "; + private Iterator<String> sourceSplitter; private Map<Integer, String> revisions; @@ -50,19 +54,29 @@ class FileSourceDto { dates = KeyValueFormat.parseIntString(ofNullableBytes(shortDates, longDates)); } - String getSourceData() { + String[] getSourceData() { String highlighting = ""; ByteArrayOutputStream output = new ByteArrayOutputStream(); int line = 0; String sourceLine = null; CsvWriter csv = CsvWriter.of(new OutputStreamWriter(output, UTF_8)); + StringBuilder lineHashes = new StringBuilder(); while (sourceSplitter.hasNext()) { - line ++; + line++; sourceLine = sourceSplitter.next(); + lineHashes.append(lineChecksum(sourceLine)).append("\n"); csv.values(revisions.get(line), authors.get(line), dates.get(line), highlighting, sourceLine); } csv.close(); - return new String(output.toByteArray(), UTF_8); + return new String[] {new String(output.toByteArray(), UTF_8), lineHashes.toString()}; + } + + public static String lineChecksum(String line) { + String reducedLine = StringUtils.replaceChars(line, SPACE_CHARS, ""); + if (line.isEmpty()) { + return ""; + } + return DigestUtils.md5Hex(reducedLine); } private static String ofNullableBytes(@Nullable byte[] shortBytes, @Nullable byte[] longBytes) { diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/713_create_file_sources.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/713_create_file_sources.rb index 422f355f95b..65775fc9adf 100644 --- a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/713_create_file_sources.rb +++ b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/713_create_file_sources.rb @@ -28,6 +28,7 @@ class CreateFileSources < ActiveRecord::Migration t.column :project_uuid, :string, :limit => 50, :null => false t.column 
       t.column :file_uuid, :string, :limit => 50, :null => false
       t.column :data, :text, :null => true
+      t.column :line_hashes, :text, :null => true
       t.column :data_hash, :string, :limit => 50, :null => true
       t.column :created_at, :integer, :limit => 8, :null => false
       t.column :updated_at, :integer, :limit => 8, :null => false
diff --git a/sonar-batch/src/main/java/org/sonar/batch/index/SourcePersister.java b/sonar-batch/src/main/java/org/sonar/batch/index/SourcePersister.java
index 6e42a07c18e..d2d27beb498 100644
--- a/sonar-batch/src/main/java/org/sonar/batch/index/SourcePersister.java
+++ b/sonar-batch/src/main/java/org/sonar/batch/index/SourcePersister.java
@@ -20,6 +20,7 @@ package org.sonar.batch.index;
 import com.google.common.base.CharMatcher;
+import com.google.common.base.Joiner;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
@@ -56,7 +57,12 @@ import javax.annotation.Nullable;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
-import java.util.*;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 
 import static com.google.common.base.Charsets.UTF_8;
 
@@ -147,15 +153,23 @@ public class SourcePersister implements ScanPersister {
     String newDataHash = newData != null ? DigestUtils.md5Hex(newData) : "0";
     Date now = system2.newDate();
     if (previous == null) {
-      FileSourceDto newFileSource = new FileSourceDto().setProjectUuid(projectTree.getRootProject().getUuid()).setFileUuid(fileUuid).setData(newData)
+      FileSourceDto newFileSource = new FileSourceDto()
+        .setProjectUuid(projectTree.getRootProject().getUuid())
+        .setFileUuid(fileUuid)
+        .setData(newData)
         .setDataHash(newDataHash)
+        .setLineHashes(StringUtils.defaultIfEmpty(Joiner.on('\n').join(inputFile.lineHashes()), null))
         .setCreatedAt(now.getTime())
         .setUpdatedAt(now.getTime());
       mapper.insert(newFileSource);
       session.commit();
     } else {
       if (!newDataHash.equals(previous.getDataHash())) {
-        previous.setData(newData).setDataHash(newDataHash).setUpdatedAt(now.getTime());
+        previous
+          .setData(newData)
+          .setLineHashes(StringUtils.defaultIfEmpty(Joiner.on('\n').join(inputFile.lineHashes()), null))
+          .setDataHash(newDataHash)
+          .setUpdatedAt(now.getTime());
         mapper.update(previous);
         session.commit();
       }
     }
diff --git a/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/DefaultInputFileValueCoder.java b/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/DefaultInputFileValueCoder.java
index 49b19f6fec1..e216534167f 100644
--- a/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/DefaultInputFileValueCoder.java
+++ b/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/DefaultInputFileValueCoder.java
@@ -48,6 +48,7 @@ class DefaultInputFileValueCoder implements ValueCoder {
     value.put(f.lines());
     putUTFOrNull(value, f.encoding());
     value.putLongArray(f.originalLineOffsets());
+    value.putStringArray(f.lineHashes());
   }
 
   private void putUTFOrNull(Value value, @Nullable String utfOrNull) {
@@ -74,6 +75,7 @@ class DefaultInputFileValueCoder implements ValueCoder {
     file.setLines(value.getInt());
     file.setEncoding(value.getString());
     file.setOriginalLineOffsets(value.getLongArray());
+    file.setLineHashes(value.getStringArray());
     return file;
   }
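`SourcePersister` stores the hashes as a single string: one hash per line, joined with `\n`, with an empty result persisted as `NULL`. A small sketch of that serialization step (class and method names here are invented for illustration; only the `Joiner`/`StringUtils` calls mirror the patch):

```java
import com.google.common.base.Joiner;
import org.apache.commons.lang.StringUtils;

// Sketch of how the per-line hashes are flattened into the line_hashes column value.
public class LineHashSerializationSketch {

  static String toColumnValue(String[] lineHashes) {
    // defaultIfEmpty(..., null) turns the empty string into null, so a file with
    // no lines stores NULL instead of "".
    return StringUtils.defaultIfEmpty(Joiner.on('\n').join(lineHashes), null);
  }

  public static void main(String[] args) {
    System.out.println(toColumnValue(new String[] {"acbd18db4cc2f85cedef654fccc4a4d8", ""}));
    System.out.println(toColumnValue(new String[0])); // prints null
  }
}
```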
diff --git a/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/FileMetadata.java b/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/FileMetadata.java
index ca6fd9aed4b..95089291ef6 100644
--- a/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/FileMetadata.java
+++ b/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/FileMetadata.java
@@ -19,11 +19,11 @@ */
 package org.sonar.batch.scan.filesystem;
 
-import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -45,6 +45,7 @@ class FileMetadata {
   private static final char LINE_FEED = '\n';
   private static final char CARRIAGE_RETURN = '\r';
   private static final char BOM = '\uFEFF';
+  private static final String SPACE_CHARS = "\t\n\r ";
 
   // This singleton aims only to increase the coverage by allowing
   // to test the private method !
@@ -61,14 +62,13 @@ class FileMetadata {
     Reader reader = null;
     long currentOriginalOffset = 0;
     List<Long> originalLineOffsets = new ArrayList<Long>();
-    List<Integer> lineCheckSum = new ArrayList<Integer>();
-    int hash = 5381;
+    List<String> lineHashes = new ArrayList<String>();
     StringBuilder currentLineStr = new StringBuilder();
     int lines = 0;
     char c = (char) -1;
     try {
-      MessageDigest md5Digest = DigestUtils.getMd5Digest();
-      md5Digest.reset();
+      MessageDigest globalMd5Digest = DigestUtils.getMd5Digest();
+      globalMd5Digest.reset();
       reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), encoding));
       int i = reader.read();
       boolean afterCR = false;
@@ -95,24 +95,24 @@ class FileMetadata {
           afterCR = true;
           c = LINE_FEED;
         }
-        currentLineStr.append(c);
-        hash = ((hash << 5) + hash) + (c & 0xff);
         if (c == LINE_FEED) {
           lines++;
           originalLineOffsets.add(currentOriginalOffset);
-          lineCheckSum.add(hash);
-          hash = 5381;
+          lineHashes.add(md5IgnoreWhitespace(currentLineStr));
           currentLineStr.setLength(0);
+        } else {
+          currentLineStr.append(c);
         }
-        md5Digest.update(charToBytesUTF(c));
+        globalMd5Digest.update(charToBytesUTF(c));
         i = reader.read();
       }
       if (c != (char) -1) {
+        // Last empty line
        lines++;
-        lineCheckSum.add(hash);
+        lineHashes.add(md5IgnoreWhitespace(currentLineStr));
       }
-      String filehash = Hex.encodeHexString(md5Digest.digest());
-      return new Metadata(lines, filehash, originalLineOffsets, lineCheckSum);
+      String filehash = Hex.encodeHexString(globalMd5Digest.digest());
+      return new Metadata(lines, filehash, originalLineOffsets, lineHashes.toArray(new String[0]));
 
     } catch (IOException e) {
       throw new IllegalStateException(String.format("Fail to read file '%s' with encoding '%s'", file.getAbsolutePath(), encoding), e);
@@ -121,6 +121,14 @@
     }
   }
 
+  private String md5IgnoreWhitespace(StringBuilder currentLineStr) {
+    String reducedLine = StringUtils.replaceChars(currentLineStr.toString(), SPACE_CHARS, "");
+    if (reducedLine.isEmpty()) {
+      return "";
+    }
+    return DigestUtils.md5Hex(reducedLine);
+  }
+
   private byte[] charToBytesUTF(char c) {
     char[] buffer = new char[] {c};
     byte[] b = new byte[buffer.length << 1];
@@ -136,13 +144,13 @@
     final int lines;
     final String hash;
     final long[] originalLineOffsets;
-    final int[] lineChecksum;
+    final String[] lineHashes;
 
-    private Metadata(int lines, String hash, List<Long> originalLineOffsets, List<Integer> lineCheckSum) {
+    private Metadata(int lines, String hash, List<Long> originalLineOffsets, String[] lineHashes) {
       this.lines = lines;
       this.hash = hash;
       this.originalLineOffsets = Longs.toArray(originalLineOffsets);
-      this.lineChecksum = Ints.toArray(lineCheckSum);
+      this.lineHashes = lineHashes;
     }
   }
 }
diff --git a/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/InputFileBuilder.java b/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/InputFileBuilder.java
index a94f059e85a..b961e072890 100644
--- a/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/InputFileBuilder.java
+++ b/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/InputFileBuilder.java
@@ -100,6 +100,7 @@ class InputFileBuilder {
     inputFile.setLines(metadata.lines);
     inputFile.setHash(metadata.hash);
     inputFile.setOriginalLineOffsets(metadata.originalLineOffsets);
+    inputFile.setLineHashes(metadata.lineHashes);
     inputFile.setStatus(statusDetection.status(inputFile.moduleKey(), inputFile.relativePath(), metadata.hash));
     if (analysisMode.isIncremental() && inputFile.status() == InputFile.Status.SAME) {
       return null;
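On the scanner side, `FileMetadata` now returns the hashes as a `String[]` and `InputFileBuilder` copies them onto the `DefaultInputFile`, where `SourcePersister` later reads them back. A hedged usage sketch of the new accessor pair added to `DefaultInputFile` (the module key, path and hash values below are placeholders, not taken from the patch):

```java
import org.sonar.api.batch.fs.internal.DefaultInputFile;

// Sketch of the new lineHashes()/setLineHashes() pair on DefaultInputFile.
public class InputFileLineHashesUsage {
  public static void main(String[] args) {
    DefaultInputFile file = new DefaultInputFile("myModule", "src/Foo.java")
      .setLines(2)
      .setLineHashes(new String[] {"acbd18db4cc2f85cedef654fccc4a4d8", ""});
    // One entry per line; SourcePersister joins them with '\n' before writing the row.
    System.out.println(file.lineHashes().length); // 2
  }
}
```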
String[] {"foo", "bar", "bee"}); when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew)); mockResourceCache(relativePathNew, PROJECT_KEY, "uuidnew"); @@ -180,6 +188,7 @@ public class SourcePersisterTest extends AbstractDaoTestCase { assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime()); assertThat(fileSourceDto.getData()).isEqualTo( ",,,,foo\r\n,,,,bar\r\n,,,,biz\r\n"); + assertThat(fileSourceDto.getLineHashes()).isEqualTo("foo\nbar\nbee"); assertThat(fileSourceDto.getDataHash()).isEqualTo("0c43ed6418d690ee0ffc3e43e6660967"); } @@ -196,7 +205,8 @@ public class SourcePersisterTest extends AbstractDaoTestCase { DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathNew) .setLines(3) .setAbsolutePath(newFile.getAbsolutePath()) - .setOriginalLineOffsets(new long[] {0, 4, 7}); + .setOriginalLineOffsets(new long[] {0, 4, 7}) + .setLineHashes(new String[] {"foo", "bar", "bee"}); when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew)); mockResourceCache(relativePathNew, PROJECT_KEY, "uuidnew"); @@ -221,6 +231,7 @@ public class SourcePersisterTest extends AbstractDaoTestCase { FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidnew"); assertThat(fileSourceDto.getCreatedAt()).isEqualTo(now.getTime()); assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime()); + assertThat(fileSourceDto.getLineHashes()).isEqualTo("foo\nbar\nbee"); assertThat(fileSourceDto.getData()).isEqualTo( "123,julien,2014-10-11T16:44:02+0100,\"0,3,a\",foo\r\n" + "234,simon,2014-10-12T16:44:02+0100,\"0,1,cd\",bar\r\n" diff --git a/sonar-batch/src/test/java/org/sonar/batch/scan/filesystem/FileMetadataTest.java b/sonar-batch/src/test/java/org/sonar/batch/scan/filesystem/FileMetadataTest.java index f3a1d51c990..6814f6888ed 100644 --- a/sonar-batch/src/test/java/org/sonar/batch/scan/filesystem/FileMetadataTest.java +++ b/sonar-batch/src/test/java/org/sonar/batch/scan/filesystem/FileMetadataTest.java @@ -20,6 +20,7 @@ package org.sonar.batch.scan.filesystem; import com.google.common.base.Charsets; +import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.io.FileUtils; import org.junit.Rule; import org.junit.Test; @@ -52,7 +53,7 @@ public class FileMetadataTest { assertThat(metadata.lines).isEqualTo(0); assertThat(metadata.hash).isNotEmpty(); assertThat(metadata.originalLineOffsets).containsOnly(0); - assertThat(metadata.lineChecksum).isEmpty(); + assertThat(metadata.lineHashes).isEmpty(); } @Test @@ -64,7 +65,7 @@ public class FileMetadataTest { assertThat(metadata.lines).isEqualTo(3); assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITHOUT_LATEST_EOL); assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10); - assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 193487042); + assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz")); } @Test @@ -76,7 +77,7 @@ public class FileMetadataTest { assertThat(metadata.lines).isEqualTo(4); assertThat(metadata.hash).isEqualTo(NON_ASCII); assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10, 18); - assertThat(metadata.lineChecksum).containsOnly(2090410746, 2090243139, -931663839, 5381); + assertThat(metadata.lineHashes).containsOnly(md5("föo"), md5("bàr"), md5("\u1D11Ebaßz"), ""); } @Test @@ -88,7 +89,7 @@ public class FileMetadataTest { assertThat(metadata.lines).isEqualTo(4); assertThat(metadata.hash).isEqualTo(NON_ASCII); assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10, 18); - 
-    assertThat(metadata.lineChecksum).containsOnly(2090410746, 2090243139, -931663839, 5381);
+    assertThat(metadata.lineHashes).containsOnly(md5("föo"), md5("bàr"), md5("\u1D11Ebaßz"), "");
   }
 
   @Test
@@ -100,7 +101,7 @@ public class FileMetadataTest {
     assertThat(metadata.lines).isEqualTo(3);
     assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITHOUT_LATEST_EOL);
     assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 8);
-    assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 193487042);
+    assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"));
   }
 
   @Test
@@ -112,7 +113,7 @@ public class FileMetadataTest {
     assertThat(metadata.lines).isEqualTo(4);
     assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITH_LATEST_EOL);
     assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 8, 12);
-    assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 2090105100, 5381);
+    assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"), "");
   }
 
   @Test
@@ -124,7 +125,7 @@ public class FileMetadataTest {
     assertThat(metadata.lines).isEqualTo(4);
     assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITH_LATEST_EOL);
     assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9, 13);
-    assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 2090105100, 5381);
+    assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"), "");
   }
 
   @Test
@@ -136,7 +137,7 @@ public class FileMetadataTest {
     assertThat(metadata.lines).isEqualTo(3);
     assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITHOUT_LATEST_EOL);
     assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9);
-    assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 193487042);
+    assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"));
   }
 
   @Test
@@ -148,7 +149,7 @@ public class FileMetadataTest {
     assertThat(metadata.lines).isEqualTo(4);
     assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_NEW_LINE_FIRST);
     assertThat(metadata.originalLineOffsets).containsOnly(0, 1, 5, 10);
-    assertThat(metadata.lineChecksum).containsOnly(177583, 2090263731, 2090104836, 193487042);
+    assertThat(metadata.lineHashes).containsOnly("", md5("foo"), md5("bar"), md5("baz"));
   }
 
   @Test
@@ -160,7 +161,17 @@ public class FileMetadataTest {
     assertThat(metadata.lines).isEqualTo(3);
     assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITHOUT_LATEST_EOL);
     assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9);
-    assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 193487042);
+    assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"));
+  }
+
+  @Test
+  public void ignore_whitespace_when_computing_line_hashes() throws Exception {
+    File tempFile = temp.newFile();
+    FileUtils.write(tempFile, " foo\nb ar\r\nbaz \t", Charsets.UTF_8, true);
+
+    FileMetadata.Metadata metadata = FileMetadata.INSTANCE.read(tempFile, Charsets.UTF_8);
+    assertThat(metadata.lines).isEqualTo(3);
+    assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"));
   }
 
   @Test
@@ -192,4 +203,8 @@ public class FileMetadataTest {
     assertThat(hash1).isEqualTo(hash1a);
     assertThat(hash1).isNotEqualTo(hash2);
   }
+
+  private static String md5(String input) {
+    return DigestUtils.md5Hex(input);
+  }
 }
diff --git a/sonar-batch/src/test/java/org/sonar/batch/scan/filesystem/InputPathCacheTest.java b/sonar-batch/src/test/java/org/sonar/batch/scan/filesystem/InputPathCacheTest.java
index 7008b0450c0..bbb63290841 100644
--- a/sonar-batch/src/test/java/org/sonar/batch/scan/filesystem/InputPathCacheTest.java
+++ b/sonar-batch/src/test/java/org/sonar/batch/scan/filesystem/InputPathCacheTest.java
@@ -67,10 +67,16 @@ public class InputPathCacheTest {
       .setStatus(Status.ADDED)
       .setHash("xyz")
       .setLines(1)
+      .setEncoding("UTF-8")
+      .setOriginalLineOffsets(new long[] {0, 4})
+      .setLineHashes(new String[] {"foo", "bar"})
       .setFile(temp.newFile("Bar.java")));
 
-    assertThat(cache.getFile("struts", "src/main/java/Foo.java").relativePath())
-      .isEqualTo("src/main/java/Foo.java");
+    DefaultInputFile loadedFile = (DefaultInputFile) cache.getFile("struts-core", "src/main/java/Bar.java");
+    assertThat(loadedFile.relativePath()).isEqualTo("src/main/java/Bar.java");
+    assertThat(loadedFile.encoding()).isEqualTo("UTF-8");
+    assertThat(loadedFile.originalLineOffsets()).containsOnly(0, 4);
+    assertThat(loadedFile.lineHashes()).containsOnly("foo", "bar");
 
     assertThat(cache.filesByModule("struts")).hasSize(1);
     assertThat(cache.filesByModule("struts-core")).hasSize(1);
diff --git a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml
index fae5b462879..a4e8ed467b3 100644
--- a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml
+++ b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/file_sources.xml
@@ -1,5 +1,8 @@
 <dataset>
-
-  <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" data=",,,,unchanged ,,,,content " data_hash="ee716d4ed9faae16eb9167714442a3bc" created_at="1412952242000" updated_at="1412952242000" />
+  <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
+                data=",,,,unchanged ,,,,content "
+                line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
+                data_hash="ee716d4ed9faae16eb9167714442a3bc"
+                created_at="1412952242000" updated_at="1412952242000" />
 
 </dataset>
diff --git a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistDontTouchUnchanged-result.xml b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistDontTouchUnchanged-result.xml
index fae5b462879..98dd81f73df 100644
--- a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistDontTouchUnchanged-result.xml
+++ b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistDontTouchUnchanged-result.xml
@@ -1,5 +1,9 @@
 <dataset>
 
-  <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" data=",,,,unchanged ,,,,content " data_hash="ee716d4ed9faae16eb9167714442a3bc" created_at="1412952242000" updated_at="1412952242000" />
+  <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
+                data=",,,,unchanged ,,,,content "
+                line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
+                data_hash="ee716d4ed9faae16eb9167714442a3bc"
+                created_at="1412952242000" updated_at="1412952242000" />
 
 </dataset>
diff --git a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistEmptyFile-result.xml b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistEmptyFile-result.xml
index ac3c16563af..cd844a149d8 100644
--- a/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistEmptyFile-result.xml
+++ b/sonar-batch/src/test/resources/org/sonar/batch/index/SourcePersisterTest/testPersistEmptyFile-result.xml
@@ -1,6 +1,13 @@
 <dataset>
 
-  <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" data=",,,,unchanged ,,,,content " data_hash="ee716d4ed9faae16eb9167714442a3bc" created_at="1412952242000" updated_at="1412952242000" />
-  <file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" data="[null]" data_hash="0" created_at="1414597442000" updated_at="1414597442000" />
+  <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
+                data=",,,,unchanged ,,,,content "
+                line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
+                data_hash="ee716d4ed9faae16eb9167714442a3bc"
+                created_at="1412952242000" updated_at="1412952242000" />
+
+  <file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" data="[null]"
+                line_hashes="[null]"
+                data_hash="0" created_at="1414597442000" updated_at="1414597442000" />
 
 </dataset>
diff --git a/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDto.java b/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDto.java
index e992ee4a648..722f10551fb 100644
--- a/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDto.java
+++ b/sonar-core/src/main/java/org/sonar/core/source/db/FileSourceDto.java
@@ -29,6 +29,7 @@ public class FileSourceDto {
   private long createdAt;
   private long updatedAt;
   private String data;
+  private String lineHashes;
   private String dataHash;
 
   public Long getId() {
@@ -68,6 +69,16 @@ public class FileSourceDto {
     return this;
   }
 
+  @CheckForNull
+  public String getLineHashes() {
+    return lineHashes;
+  }
+
+  public FileSourceDto setLineHashes(@Nullable String lineHashes) {
+    this.lineHashes = lineHashes;
+    return this;
+  }
+
   public String getDataHash() {
     return dataHash;
   }
diff --git a/sonar-core/src/main/resources/org/sonar/core/persistence/schema-h2.ddl b/sonar-core/src/main/resources/org/sonar/core/persistence/schema-h2.ddl
index 7de7f8d46c2..fec355edc63 100644
--- a/sonar-core/src/main/resources/org/sonar/core/persistence/schema-h2.ddl
+++ b/sonar-core/src/main/resources/org/sonar/core/persistence/schema-h2.ddl
@@ -571,6 +571,7 @@ CREATE TABLE "FILE_SOURCES" (
   "PROJECT_UUID" VARCHAR(50) NOT NULL,
   "FILE_UUID" VARCHAR(50) NOT NULL,
   "DATA" CLOB(2147483647),
+  "LINE_HASHES" CLOB(2147483647),
   "DATA_HASH" VARCHAR(50) NOT NULL,
   "CREATED_AT" BIGINT NOT NULL,
   "UPDATED_AT" BIGINT NOT NULL
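With the new `LINE_HASHES` column declared in the H2 schema, the core `FileSourceDto` and its MyBatis mapper (next diff) carry the value end to end. A short sketch of building a row on the DAO side, mirroring the updated `FileSourceDaoTest` (all UUIDs, data and hash strings below are placeholders):

```java
import org.sonar.core.source.db.FileSourceDto;

// Sketch of a file_sources row carrying line hashes; values are placeholders.
public class FileSourceDtoSketch {
  public static void main(String[] args) {
    FileSourceDto dto = new FileSourceDto()
      .setProjectUuid("prj")
      .setFileUuid("file")
      .setData(",,,,foo\r\n,,,,bar\r\n")
      .setLineHashes("hash-of-foo\nhash-of-bar")
      .setDataHash("md5-of-data");
    // getLineHashes() is what the select/insert/update statements now map.
    System.out.println(dto.getLineHashes());
  }
}
```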
diff --git a/sonar-core/src/main/resources/org/sonar/core/source/db/FileSourceMapper.xml b/sonar-core/src/main/resources/org/sonar/core/source/db/FileSourceMapper.xml
index 4b3b88465c3..d7b65c501ea 100644
--- a/sonar-core/src/main/resources/org/sonar/core/source/db/FileSourceMapper.xml
+++ b/sonar-core/src/main/resources/org/sonar/core/source/db/FileSourceMapper.xml
@@ -5,7 +5,7 @@
 <mapper namespace="org.sonar.core.source.db.FileSourceMapper">
 
   <select id="select" parameterType="string" resultType="org.sonar.core.source.db.FileSourceDto">
-    SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt, data, data_hash as dataHash
+    SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt, data, line_hashes as lineHashes, data_hash as dataHash
     FROM file_sources
     WHERE file_uuid = #{fileUuid}
   </select>
@@ -17,14 +17,15 @@
   </select>
 
   <insert id="insert" parameterType="org.sonar.core.source.db.FileSourceDto" useGeneratedKeys="false">
-    insert into file_sources (project_uuid, file_uuid, created_at, updated_at, data, data_hash)
-    values (#{projectUuid}, #{fileUuid}, #{createdAt}, #{updatedAt}, #{data}, #{dataHash})
+    insert into file_sources (project_uuid, file_uuid, created_at, updated_at, data, line_hashes, data_hash)
+    values (#{projectUuid}, #{fileUuid}, #{createdAt}, #{updatedAt}, #{data}, #{lineHashes}, #{dataHash})
   </insert>
 
   <update id="update" parameterType="org.sonar.core.source.db.FileSourceDto" useGeneratedKeys="false">
     update file_sources set
       updated_at = #{updatedAt},
       data = #{data},
+      line_hashes = #{lineHashes},
      data_hash = #{dataHash}
     where id = #{id}
   </update>
diff --git a/sonar-core/src/test/java/org/sonar/core/source/db/FileSourceDaoTest.java b/sonar-core/src/test/java/org/sonar/core/source/db/FileSourceDaoTest.java
index ddd45cedfa5..0487d80b379 100644
--- a/sonar-core/src/test/java/org/sonar/core/source/db/FileSourceDaoTest.java
+++ b/sonar-core/src/test/java/org/sonar/core/source/db/FileSourceDaoTest.java
@@ -53,7 +53,9 @@ public class FileSourceDaoTest extends AbstractDaoTestCase {
 
   @Test
   public void insert() throws Exception {
-    dao.insert(new FileSourceDto().setProjectUuid("prj").setFileUuid("file").setData("bla bla").setDataHash("hash2")
+    dao.insert(new FileSourceDto().setProjectUuid("prj").setFileUuid("file").setData("bla bla")
+      .setDataHash("hash2")
+      .setLineHashes("foo\nbar")
       .setCreatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime())
       .setUpdatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime()));
 
@@ -62,10 +64,12 @@ public class FileSourceDaoTest extends AbstractDaoTestCase {
 
   @Test
   public void update() throws Exception {
-    dao.update(new FileSourceDto().setId(101L).setProjectUuid("prj").setFileUuid("file").setData("updated data").setDataHash("hash2")
+    dao.update(new FileSourceDto().setId(101L).setProjectUuid("prj").setFileUuid("file")
+      .setData("updated data")
+      .setDataHash("hash2")
+      .setLineHashes("foo2\nbar2")
       .setUpdatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime()));
 
     checkTable("update", "file_sources");
   }
-
 }
diff --git a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/insert-result.xml b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/insert-result.xml
index c5d483b7e55..49ea06706e5 100644
--- a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/insert-result.xml
+++ b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/insert-result.xml
@@ -2,11 +2,13 @@
   <file_sources id="101" project_uuid="abcd" file_uuid="ab12"
                 data="aef12a,alice,2014-04-25T12:34:56+0100,,class Foo"
                 data_hash="hash"
+                line_hashes="truc"
                 created_at="1414597442000" updated_at="1414683842000" />
 
   <file_sources id="102" project_uuid="prj" file_uuid="file"
                 data="bla bla"
                 data_hash="hash2"
+                line_hashes="foo bar"
                 created_at="1414770242000" updated_at="1414770242000" />
 
 </dataset>
diff --git a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/shared.xml b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/shared.xml
index fb3c258ddc0..538240d2fac 100644
--- a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/shared.xml
+++ b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/shared.xml
@@ -2,6 +2,7 @@
   <file_sources id="101" project_uuid="abcd" file_uuid="ab12"
                 data="aef12a,alice,2014-04-25T12:34:56+0100,,class Foo"
                 data_hash="hash"
+                line_hashes="truc"
                 created_at="1414597442000" updated_at="1414683842000" />
 
 </dataset>
diff --git a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/update-result.xml b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/update-result.xml
index 465b2e52cb8..ddad2e2b410 100644
--- a/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/update-result.xml
+++ b/sonar-core/src/test/resources/org/sonar/core/source/db/FileSourceDaoTest/update-result.xml
@@ -2,6 +2,7 @@
   <file_sources id="101" project_uuid="abcd" file_uuid="ab12"
                 data="updated data"
                 data_hash="hash2"
+                line_hashes="foo2 bar2"
                 created_at="1414597442000" updated_at="1414770242000" />
 
diff --git a/sonar-plugin-api/src/main/java/org/sonar/api/batch/fs/internal/DefaultInputFile.java b/sonar-plugin-api/src/main/java/org/sonar/api/batch/fs/internal/DefaultInputFile.java
index a328071b25a..24f965709bc 100644
--- a/sonar-plugin-api/src/main/java/org/sonar/api/batch/fs/internal/DefaultInputFile.java
+++ b/sonar-plugin-api/src/main/java/org/sonar/api/batch/fs/internal/DefaultInputFile.java
@@ -40,6 +40,7 @@ public class DefaultInputFile implements InputFile, Serializable {
   private int lines;
   private String encoding;
   long[] originalLineOffsets;
+  String[] lineHashes;
 
   public DefaultInputFile(String moduleKey, String relativePath) {
     this.moduleKey = moduleKey;
@@ -113,6 +114,10 @@ public class DefaultInputFile implements InputFile, Serializable {
     return originalLineOffsets;
   }
 
+  public String[] lineHashes() {
+    return lineHashes;
+  }
+
   public DefaultInputFile setAbsolutePath(String s) {
     this.absolutePath = PathUtils.sanitize(s);
     return this;
@@ -158,6 +163,11 @@ public class DefaultInputFile implements InputFile, Serializable {
     return this;
   }
 
+  public DefaultInputFile setLineHashes(String[] lineHashes) {
+    this.lineHashes = lineHashes;
+    return this;
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) {