import org.sonar.api.utils.System2;
import org.sonar.core.persistence.Database;
-import org.sonar.server.db.migrations.*;
+import org.sonar.server.db.migrations.BaseDataChange;
+import org.sonar.server.db.migrations.MassUpdate;
import org.sonar.server.db.migrations.Select.Row;
import org.sonar.server.db.migrations.Select.RowReader;
+import org.sonar.server.db.migrations.SqlStatement;
import java.sql.SQLException;
import java.util.Date;
-
/**
* Used in the Active Record Migration 714
*
byte[] shortDates = row.getBytes(9);
byte[] longDates = row.getBytes(10);
- String sourceData = new FileSourceDto(source, shortRevisions, longRevisions, shortAuthors, longAuthors, shortDates, longDates).getSourceData();
+ String[] sourceData = new FileSourceDto(source, shortRevisions, longRevisions, shortAuthors, longAuthors, shortDates, longDates).getSourceData();
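+ // sourceData[0] is the CSV-encoded source, sourceData[1] the newline-separated line hashes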
update.setString(1, projectUuid)
.setString(2, fileUuid)
.setLong(3, now.getTime())
.setLong(4, (updatedAt == null ? now : updatedAt).getTime())
- .setString(5, sourceData)
- .setString(6, "");
+ .setString(5, sourceData[0])
+ .setString(6, sourceData[1])
+ .setString(7, "");
return true;
}
MassUpdate massUpdate = context.prepareMassUpdate();
massUpdate.select("SELECT " +
- "p.uuid as project_uuid, " +
- "f.uuid as file_uuid, " +
- "ss.data as source, " +
- "ss.updated_at, " +
- "m1.text_value as short_revisions_by_line, " +
- "m1.measure_data as long_revisions_by_line, " +
- "m2.text_value as short_authors_by_line, " +
- "m2.measure_data as long_authors_by_line, " +
- "m3.text_value as short_dates_by_line, " +
- "m3.measure_data as short_dates_by_line " +
+ "p.uuid as project_uuid, " +
+ "f.uuid as file_uuid, " +
+ "ss.data as source, " +
+ "ss.updated_at, " +
+ "m1.text_value as short_revisions_by_line, " +
+ "m1.measure_data as long_revisions_by_line, " +
+ "m2.text_value as short_authors_by_line, " +
+ "m2.measure_data as long_authors_by_line, " +
+ "m3.text_value as short_dates_by_line, " +
+ "m3.measure_data as short_dates_by_line " +
"FROM snapshots s " +
"JOIN snapshot_sources ss " +
- "ON s.id = ss.snapshot_id AND s.islast = ? " +
+ "ON s.id = ss.snapshot_id AND s.islast = ? " +
"JOIN projects p " +
- "ON s.root_project_id = p.id " +
+ "ON s.root_project_id = p.id " +
"JOIN projects f " +
- "ON s.project_id = f.id " +
+ "ON s.project_id = f.id " +
"LEFT JOIN project_measures m1 " +
- "ON m1.snapshot_id = s.id AND m1.metric_id = ? " +
+ "ON m1.snapshot_id = s.id AND m1.metric_id = ? " +
"LEFT JOIN project_measures m2 " +
- "ON m2.snapshot_id = s.id AND m2.metric_id = ? " +
+ "ON m2.snapshot_id = s.id AND m2.metric_id = ? " +
"LEFT JOIN project_measures m3 " +
- "ON m3.snapshot_id = s.id AND m3.metric_id = ? " +
+ "ON m3.snapshot_id = s.id AND m3.metric_id = ? " +
"WHERE " +
- "f.enabled = ? " +
- "AND f.scope = 'FIL' " +
- "AND p.scope = 'PRJ' AND p.qualifier = 'TRK' ")
- .setBoolean(1, true)
- .setLong(2, revisionMetricId != null ? revisionMetricId : 0L)
- .setLong(3, authorMetricId != null ? authorMetricId : 0L)
- .setLong(4, datesMetricId != null ? datesMetricId : 0L)
- .setBoolean(5, true);
+ "f.enabled = ? " +
+ "AND f.scope = 'FIL' " +
+ "AND p.scope = 'PRJ' AND p.qualifier = 'TRK' ")
+ .setBoolean(1, true)
+ .setLong(2, revisionMetricId != null ? revisionMetricId : 0L)
+ .setLong(3, authorMetricId != null ? authorMetricId : 0L)
+ .setLong(4, datesMetricId != null ? datesMetricId : 0L)
+ .setBoolean(5, true);
massUpdate.update("INSERT INTO file_sources" +
- "(project_uuid, file_uuid, created_at, updated_at, data, data_hash)" +
+ "(project_uuid, file_uuid, created_at, updated_at, data, line_hashes, data_hash)" +
"VALUES " +
- "(?, ?, ?, ?, ?, ?)");
+ "(?, ?, ?, ?, ?, ?, ?)");
massUpdate.execute(new FileSourceBuilder(system));
}
package org.sonar.server.db.migrations.v50;
import com.google.common.base.Splitter;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.lang.StringUtils;
import org.sonar.api.utils.KeyValueFormat;
import org.sonar.api.utils.text.CsvWriter;
class FileSourceDto {
+ private static final String SPACE_CHARS = "\t\n\r ";
+
private Iterator<String> sourceSplitter;
private Map<Integer, String> revisions;
dates = KeyValueFormat.parseIntString(ofNullableBytes(shortDates, longDates));
}
- String getSourceData() {
+ String[] getSourceData() {
String highlighting = "";
ByteArrayOutputStream output = new ByteArrayOutputStream();
int line = 0;
String sourceLine = null;
CsvWriter csv = CsvWriter.of(new OutputStreamWriter(output, UTF_8));
+ StringBuilder lineHashes = new StringBuilder();
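+ // accumulates one checksum per line, newline-separated, for the new line_hashes column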
while (sourceSplitter.hasNext()) {
- line ++;
+ line++;
sourceLine = sourceSplitter.next();
+ lineHashes.append(lineChecksum(sourceLine)).append("\n");
csv.values(revisions.get(line), authors.get(line), dates.get(line), highlighting, sourceLine);
}
csv.close();
- return new String(output.toByteArray(), UTF_8);
+ return new String[] {new String(output.toByteArray(), UTF_8), lineHashes.toString()};
+ }
+
+ public static String lineChecksum(String line) {
+ String reducedLine = StringUtils.replaceChars(line, SPACE_CHARS, "");
+ if (reducedLine.isEmpty()) {
+ return "";
+ }
+ return DigestUtils.md5Hex(reducedLine);
}
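+
+ // A sketch of the resulting values (plain MD5 hex over the whitespace-stripped line):
+ //   lineChecksum("  int i = 0;") == DigestUtils.md5Hex("inti=0;")
+ //   lineChecksum(" \t ") == ""   (blank lines map to the empty string)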
private static String ofNullableBytes(@Nullable byte[] shortBytes, @Nullable byte[] longBytes) {
t.column :project_uuid, :string, :limit => 50, :null => false
t.column :file_uuid, :string, :limit => 50, :null => false
t.column :data, :text, :null => true
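+ # one checksum per line, newline-separated; null when the file has no lines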
+ t.column :line_hashes, :text, :null => true
t.column :data_hash, :string, :limit => 50, :null => true
t.column :created_at, :integer, :limit => 8, :null => false
t.column :updated_at, :integer, :limit => 8, :null => false
package org.sonar.batch.index;
import com.google.common.base.CharMatcher;
+import com.google.common.base.Joiner;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
-import java.util.*;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
import static com.google.common.base.Charsets.UTF_8;
String newDataHash = newData != null ? DigestUtils.md5Hex(newData) : "0";
Date now = system2.newDate();
if (previous == null) {
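+ // first analysis of this file: persist the newline-joined line hashes alongside the data (an empty join is stored as null)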
- FileSourceDto newFileSource = new FileSourceDto().setProjectUuid(projectTree.getRootProject().getUuid()).setFileUuid(fileUuid).setData(newData)
+ FileSourceDto newFileSource = new FileSourceDto()
+ .setProjectUuid(projectTree.getRootProject().getUuid())
+ .setFileUuid(fileUuid)
+ .setData(newData)
.setDataHash(newDataHash)
+ .setLineHashes(StringUtils.defaultIfEmpty(Joiner.on('\n').join(inputFile.lineHashes()), null))
.setCreatedAt(now.getTime())
.setUpdatedAt(now.getTime());
mapper.insert(newFileSource);
session.commit();
} else {
if (!newDataHash.equals(previous.getDataHash())) {
- previous.setData(newData).setDataHash(newDataHash).setUpdatedAt(now.getTime());
+ previous
+ .setData(newData)
+ .setLineHashes(StringUtils.defaultIfEmpty(Joiner.on('\n').join(inputFile.lineHashes()), null))
+ .setDataHash(newDataHash)
+ .setUpdatedAt(now.getTime());
mapper.update(previous);
session.commit();
}
value.put(f.lines());
putUTFOrNull(value, f.encoding());
value.putLongArray(f.originalLineOffsets());
+ value.putStringArray(f.lineHashes());
}
private void putUTFOrNull(Value value, @Nullable String utfOrNull) {
file.setLines(value.getInt());
file.setEncoding(value.getString());
file.setOriginalLineOffsets(value.getLongArray());
+ file.setLineHashes(value.getStringArray());
return file;
}
*/
package org.sonar.batch.scan.filesystem;
-import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
import java.io.BufferedReader;
import java.io.File;
private static final char LINE_FEED = '\n';
private static final char CARRIAGE_RETURN = '\r';
private static final char BOM = '\uFEFF';
+ private static final String SPACE_CHARS = "\t\n\r ";
// This singleton exists only to improve test coverage by letting
// tests exercise the private method.
Reader reader = null;
long currentOriginalOffset = 0;
List<Long> originalLineOffsets = new ArrayList<Long>();
- List<Integer> lineCheckSum = new ArrayList<Integer>();
- int hash = 5381;
+ List<String> lineHashes = new ArrayList<String>();
StringBuilder currentLineStr = new StringBuilder();
int lines = 0;
char c = (char) -1;
try {
- MessageDigest md5Digest = DigestUtils.getMd5Digest();
- md5Digest.reset();
+ MessageDigest globalMd5Digest = DigestUtils.getMd5Digest();
+ globalMd5Digest.reset();
reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), encoding));
int i = reader.read();
boolean afterCR = false;
afterCR = true;
c = LINE_FEED;
}
- currentLineStr.append(c);
- hash = ((hash << 5) + hash) + (c & 0xff);
if (c == LINE_FEED) {
lines++;
originalLineOffsets.add(currentOriginalOffset);
- lineCheckSum.add(hash);
- hash = 5381;
+ lineHashes.add(md5IgnoreWhitespace(currentLineStr));
currentLineStr.setLength(0);
+ } else {
+ currentLineStr.append(c);
}
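+ // line terminators are excluded from the per-line hash but still feed the whole-file digest below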
- md5Digest.update(charToBytesUTF(c));
+ globalMd5Digest.update(charToBytesUTF(c));
i = reader.read();
}
if (c != (char) -1) {
+ // Count the last line (empty when the file ends with a line terminator)
lines++;
- lineCheckSum.add(hash);
+ lineHashes.add(md5IgnoreWhitespace(currentLineStr));
}
- String filehash = Hex.encodeHexString(md5Digest.digest());
- return new Metadata(lines, filehash, originalLineOffsets, lineCheckSum);
+ String filehash = Hex.encodeHexString(globalMd5Digest.digest());
+ return new Metadata(lines, filehash, originalLineOffsets, lineHashes.toArray(new String[0]));
} catch (IOException e) {
throw new IllegalStateException(String.format("Fail to read file '%s' with encoding '%s'", file.getAbsolutePath(), encoding), e);
}
}
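+ /** MD5 of the line with tab, CR, LF and space characters removed; empty string for blank lines. */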
+ private String md5IgnoreWhitespace(StringBuilder currentLineStr) {
+ String reducedLine = StringUtils.replaceChars(currentLineStr.toString(), SPACE_CHARS, "");
+ if (reducedLine.isEmpty()) {
+ return "";
+ }
+ return DigestUtils.md5Hex(reducedLine);
+ }
+
private byte[] charToBytesUTF(char c) {
char[] buffer = new char[] {c};
byte[] b = new byte[buffer.length << 1];
final int lines;
final String hash;
final long[] originalLineOffsets;
- final int[] lineChecksum;
+ final String[] lineHashes;
- private Metadata(int lines, String hash, List<Long> originalLineOffsets, List<Integer> lineCheckSum) {
+ private Metadata(int lines, String hash, List<Long> originalLineOffsets, String[] lineHashes) {
this.lines = lines;
this.hash = hash;
this.originalLineOffsets = Longs.toArray(originalLineOffsets);
- this.lineChecksum = Ints.toArray(lineCheckSum);
+ this.lineHashes = lineHashes;
}
}
}
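+
+ // Usage sketch, mirroring the calls in FileMetadataTest:
+ //   Metadata metadata = FileMetadata.INSTANCE.read(file, Charsets.UTF_8);
+ //   metadata.lineHashes holds one whitespace-insensitive MD5 per line, "" for blank lines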
inputFile.setLines(metadata.lines);
inputFile.setHash(metadata.hash);
inputFile.setOriginalLineOffsets(metadata.originalLineOffsets);
+ inputFile.setLineHashes(metadata.lineHashes);
inputFile.setStatus(statusDetection.status(inputFile.moduleKey(), inputFile.relativePath(), metadata.hash));
if (analysisMode.isIncremental() && inputFile.status() == InputFile.Status.SAME) {
return null;
String relativePathSame = "src/changed.java";
java.io.File sameFile = new java.io.File(basedir, relativePathSame);
FileUtils.write(sameFile, "changed\ncontent");
- DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame).setLines(2).setAbsolutePath(sameFile.getAbsolutePath());
+ DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame).setLines(2)
+ .setAbsolutePath(sameFile.getAbsolutePath())
+ .setLineHashes(new String[] {"foo", "bar"});
when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew));
mockResourceCache(relativePathSame, PROJECT_KEY, "uuidsame");
assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getData()).isEqualTo(
",,,,changed\r\n,,,,content\r\n");
+ assertThat(fileSourceDto.getLineHashes()).isEqualTo("foo\nbar");
assertThat(fileSourceDto.getDataHash()).isEqualTo("e41cca9c51ff853c748f708f39dfc035");
}
when(system2.newDate()).thenReturn(DateUtils.parseDateTime("2014-10-29T16:44:02+0100"));
String relativePathEmpty = "src/empty.java";
- DefaultInputFile inputFileEmpty = new DefaultInputFile(PROJECT_KEY, relativePathEmpty).setLines(0);
+ DefaultInputFile inputFileEmpty = new DefaultInputFile(PROJECT_KEY, relativePathEmpty)
+ .setLines(0)
+ .setLineHashes(new String[] {});
when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileEmpty));
mockResourceCache(relativePathEmpty, PROJECT_KEY, "uuidempty");
String relativePathNew = "src/new.java";
java.io.File newFile = new java.io.File(basedir, relativePathNew);
FileUtils.write(newFile, "foo\nbar\nbiz");
- DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathNew).setLines(3).setAbsolutePath(newFile.getAbsolutePath());
+ DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathNew)
+ .setLines(3)
+ .setAbsolutePath(newFile.getAbsolutePath())
+ .setLineHashes(new String[] {"foo", "bar", "bee"});
when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew));
mockResourceCache(relativePathNew, PROJECT_KEY, "uuidnew");
assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getData()).isEqualTo(
",,,,foo\r\n,,,,bar\r\n,,,,biz\r\n");
+ assertThat(fileSourceDto.getLineHashes()).isEqualTo("foo\nbar\nbee");
assertThat(fileSourceDto.getDataHash()).isEqualTo("0c43ed6418d690ee0ffc3e43e6660967");
}
DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathNew)
.setLines(3)
.setAbsolutePath(newFile.getAbsolutePath())
- .setOriginalLineOffsets(new long[] {0, 4, 7});
+ .setOriginalLineOffsets(new long[] {0, 4, 7})
+ .setLineHashes(new String[] {"foo", "bar", "bee"});
when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew));
mockResourceCache(relativePathNew, PROJECT_KEY, "uuidnew");
FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidnew");
assertThat(fileSourceDto.getCreatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
+ assertThat(fileSourceDto.getLineHashes()).isEqualTo("foo\nbar\nbee");
assertThat(fileSourceDto.getData()).isEqualTo(
"123,julien,2014-10-11T16:44:02+0100,\"0,3,a\",foo\r\n"
+ "234,simon,2014-10-12T16:44:02+0100,\"0,1,cd\",bar\r\n"
package org.sonar.batch.scan.filesystem;
import com.google.common.base.Charsets;
+import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.FileUtils;
import org.junit.Rule;
import org.junit.Test;
assertThat(metadata.lines).isEqualTo(0);
assertThat(metadata.hash).isNotEmpty();
assertThat(metadata.originalLineOffsets).containsOnly(0);
- assertThat(metadata.lineChecksum).isEmpty();
+ assertThat(metadata.lineHashes).isEmpty();
}
@Test
assertThat(metadata.lines).isEqualTo(3);
assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITHOUT_LATEST_EOL);
assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10);
- assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 193487042);
+ assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"));
}
@Test
assertThat(metadata.lines).isEqualTo(4);
assertThat(metadata.hash).isEqualTo(NON_ASCII);
assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10, 18);
- assertThat(metadata.lineChecksum).containsOnly(2090410746, 2090243139, -931663839, 5381);
+ assertThat(metadata.lineHashes).containsOnly(md5("föo"), md5("bàr"), md5("\u1D11Ebaßz"), "");
}
@Test
assertThat(metadata.lines).isEqualTo(4);
assertThat(metadata.hash).isEqualTo(NON_ASCII);
assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10, 18);
- assertThat(metadata.lineChecksum).containsOnly(2090410746, 2090243139, -931663839, 5381);
+ assertThat(metadata.lineHashes).containsOnly(md5("föo"), md5("bàr"), md5("\u1D11Ebaßz"), "");
}
@Test
assertThat(metadata.lines).isEqualTo(3);
assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITHOUT_LATEST_EOL);
assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 8);
- assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 193487042);
+ assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"));
}
@Test
assertThat(metadata.lines).isEqualTo(4);
assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITH_LATEST_EOL);
assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 8, 12);
- assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 2090105100, 5381);
+ assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"), "");
}
@Test
assertThat(metadata.lines).isEqualTo(4);
assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITH_LATEST_EOL);
assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9, 13);
- assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 2090105100, 5381);
+ assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"), "");
}
@Test
assertThat(metadata.lines).isEqualTo(3);
assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITHOUT_LATEST_EOL);
assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9);
- assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 193487042);
+ assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"));
}
@Test
assertThat(metadata.lines).isEqualTo(4);
assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_NEW_LINE_FIRST);
assertThat(metadata.originalLineOffsets).containsOnly(0, 1, 5, 10);
- assertThat(metadata.lineChecksum).containsOnly(177583, 2090263731, 2090104836, 193487042);
+ assertThat(metadata.lineHashes).containsOnly("", md5("foo"), md5("bar"), md5("baz"));
}
@Test
assertThat(metadata.lines).isEqualTo(3);
assertThat(metadata.hash).isEqualTo(EXPECTED_HASH_WITHOUT_LATEST_EOL);
assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9);
- assertThat(metadata.lineChecksum).containsOnly(2090263731, 2090104836, 193487042);
+ assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"));
+ }
+
+ @Test
+ public void ignore_whitespace_when_computing_line_hashes() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, " foo\nb ar\r\nbaz \t", Charsets.UTF_8, true);
+
+ FileMetadata.Metadata metadata = FileMetadata.INSTANCE.read(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(3);
+ assertThat(metadata.lineHashes).containsOnly(md5("foo"), md5("bar"), md5("baz"));
}
@Test
assertThat(hash1).isEqualTo(hash1a);
assertThat(hash1).isNotEqualTo(hash2);
}
+
+ private static String md5(String input) {
+ return DigestUtils.md5Hex(input);
+ }
}
.setStatus(Status.ADDED)
.setHash("xyz")
.setLines(1)
+ .setEncoding("UTF-8")
+ .setOriginalLineOffsets(new long[] {0, 4})
+ .setLineHashes(new String[] {"foo", "bar"})
.setFile(temp.newFile("Bar.java")));
- assertThat(cache.getFile("struts", "src/main/java/Foo.java").relativePath())
- .isEqualTo("src/main/java/Foo.java");
+ DefaultInputFile loadedFile = (DefaultInputFile) cache.getFile("struts-core", "src/main/java/Bar.java");
+ assertThat(loadedFile.relativePath()).isEqualTo("src/main/java/Bar.java");
+ assertThat(loadedFile.encoding()).isEqualTo("UTF-8");
+ assertThat(loadedFile.originalLineOffsets()).containsOnly(0, 4);
+ assertThat(loadedFile.lineHashes()).containsOnly("foo", "bar");
assertThat(cache.filesByModule("struts")).hasSize(1);
assertThat(cache.filesByModule("struts-core")).hasSize(1);
<dataset>
-
- <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" data=",,,,unchanged ,,,,content " data_hash="ee716d4ed9faae16eb9167714442a3bc" created_at="1412952242000" updated_at="1412952242000" />
+ <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
+ data=",,,,unchanged ,,,,content "
+ line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
+ data_hash="ee716d4ed9faae16eb9167714442a3bc"
+ created_at="1412952242000" updated_at="1412952242000" />
</dataset>
<dataset>
- <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" data=",,,,unchanged ,,,,content " data_hash="ee716d4ed9faae16eb9167714442a3bc" created_at="1412952242000" updated_at="1412952242000" />
+ <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
+ data=",,,,unchanged ,,,,content "
+ line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
+ data_hash="ee716d4ed9faae16eb9167714442a3bc"
+ created_at="1412952242000" updated_at="1412952242000" />
</dataset>
<dataset>
- <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame" data=",,,,unchanged ,,,,content " data_hash="ee716d4ed9faae16eb9167714442a3bc" created_at="1412952242000" updated_at="1412952242000" />
- <file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" data="[null]" data_hash="0" created_at="1414597442000" updated_at="1414597442000" />
+ <file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
+ data=",,,,unchanged ,,,,content "
+ line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
+ data_hash="ee716d4ed9faae16eb9167714442a3bc"
+ created_at="1412952242000" updated_at="1412952242000" />
+
+ <file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" data="[null]"
+ line_hashes="[null]"
+ data_hash="0" created_at="1414597442000" updated_at="1414597442000" />
</dataset>
private long createdAt;
private long updatedAt;
private String data;
+ private String lineHashes;
private String dataHash;
public Long getId() {
return this;
}
+ @CheckForNull
+ public String getLineHashes() {
+ return lineHashes;
+ }
+
+ public FileSourceDto setLineHashes(@Nullable String lineHashes) {
+ this.lineHashes = lineHashes;
+ return this;
+ }
+
public String getDataHash() {
return dataHash;
}
"PROJECT_UUID" VARCHAR(50) NOT NULL,
"FILE_UUID" VARCHAR(50) NOT NULL,
"DATA" CLOB(2147483647),
+ "LINE_HASHES" CLOB(2147483647),
"DATA_HASH" VARCHAR(50) NOT NULL,
"CREATED_AT" BIGINT NOT NULL,
"UPDATED_AT" BIGINT NOT NULL
<mapper namespace="org.sonar.core.source.db.FileSourceMapper">
<select id="select" parameterType="string" resultType="org.sonar.core.source.db.FileSourceDto">
- SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt, data, data_hash as dataHash
+ SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt, data, line_hashes as lineHashes, data_hash as dataHash
FROM file_sources
WHERE file_uuid = #{fileUuid}
</select>
</select>
<insert id="insert" parameterType="org.sonar.core.source.db.FileSourceDto" useGeneratedKeys="false">
- insert into file_sources (project_uuid, file_uuid, created_at, updated_at, data, data_hash)
- values (#{projectUuid}, #{fileUuid}, #{createdAt}, #{updatedAt}, #{data}, #{dataHash})
+ insert into file_sources (project_uuid, file_uuid, created_at, updated_at, data, line_hashes, data_hash)
+ values (#{projectUuid}, #{fileUuid}, #{createdAt}, #{updatedAt}, #{data}, #{lineHashes}, #{dataHash})
</insert>
<update id="update" parameterType="org.sonar.core.source.db.FileSourceDto" useGeneratedKeys="false">
update file_sources set
updated_at = #{updatedAt},
data = #{data},
+ line_hashes = #{lineHashes},
data_hash = #{dataHash}
where id = #{id}
</update>
@Test
public void insert() throws Exception {
- dao.insert(new FileSourceDto().setProjectUuid("prj").setFileUuid("file").setData("bla bla").setDataHash("hash2")
+ dao.insert(new FileSourceDto().setProjectUuid("prj").setFileUuid("file").setData("bla bla")
+ .setDataHash("hash2")
+ .setLineHashes("foo\nbar")
.setCreatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime())
.setUpdatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime()));
@Test
public void update() throws Exception {
- dao.update(new FileSourceDto().setId(101L).setProjectUuid("prj").setFileUuid("file").setData("updated data").setDataHash("hash2")
+ dao.update(new FileSourceDto().setId(101L).setProjectUuid("prj").setFileUuid("file")
+ .setData("updated data")
+ .setDataHash("hash2")
+ .setLineHashes("foo2\nbar2")
.setUpdatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime()));
checkTable("update", "file_sources");
}
-
}
<file_sources id="101" project_uuid="abcd" file_uuid="ab12"
data="aef12a,alice,2014-04-25T12:34:56+0100,,class Foo" data_hash="hash"
+ line_hashes="truc"
created_at="1414597442000" updated_at="1414683842000" />
<file_sources id="102" project_uuid="prj" file_uuid="file"
data="bla bla" data_hash="hash2"
+ line_hashes="foo bar"
created_at="1414770242000" updated_at="1414770242000" />
</dataset>
<file_sources id="101" project_uuid="abcd" file_uuid="ab12"
data="aef12a,alice,2014-04-25T12:34:56+0100,,class Foo" data_hash="hash"
+ line_hashes="truc"
created_at="1414597442000" updated_at="1414683842000" />
</dataset>
<file_sources id="101" project_uuid="abcd" file_uuid="ab12"
data="updated data" data_hash="hash2"
+ line_hashes="foo2 bar2"
created_at="1414597442000" updated_at="1414770242000" />
private int lines;
private String encoding;
long[] originalLineOffsets;
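+ // one whitespace-insensitive checksum per line, computed by FileMetadata and stored in file_sources.line_hashes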
+ String[] lineHashes;
public DefaultInputFile(String moduleKey, String relativePath) {
this.moduleKey = moduleKey;
return originalLineOffsets;
}
+ public String[] lineHashes() {
+ return lineHashes;
+ }
+
public DefaultInputFile setAbsolutePath(String s) {
this.absolutePath = PathUtils.sanitize(s);
return this;
return this;
}
+ public DefaultInputFile setLineHashes(String[] lineHashes) {
+ this.lineHashes = lineHashes;
+ return this;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) {