private final CpdMappings mappings;
private final FileSystem fs;
private final Settings settings;
- private final BlockCache duplicationCache;
+ private final BlockCache blockCache;
private final Project project;
public DefaultCpdEngine(@Nullable Project project, IndexFactory indexFactory, CpdMappings mappings, FileSystem fs, Settings settings, BlockCache duplicationCache) {
this.mappings = mappings;
this.fs = fs;
this.settings = settings;
- this.duplicationCache = duplicationCache;
+ this.blockCache = duplicationCache;
}
public DefaultCpdEngine(IndexFactory indexFactory, CpdMappings mappings, FileSystem fs, Settings settings, BlockCache duplicationCache) {
for (InputFile inputFile : sourceFiles) {
LOG.debug("Populating index from {}", inputFile);
String resourceEffectiveKey = ((DeprecatedDefaultInputFile) inputFile).key();
- FileBlocks fileBlocks = duplicationCache.byComponent(resourceEffectiveKey);
+ FileBlocks fileBlocks = blockCache.byComponent(resourceEffectiveKey);
if (fileBlocks != null) {
index.insert(inputFile, fileBlocks.blocks());
} else if (bridge != null) {
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import org.sonar.api.batch.fs.InputFile;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
+import org.sonar.api.batch.sensor.measure.internal.DefaultMeasure;
+import org.sonar.api.measures.CoreMetrics;
import org.sonar.batch.mediumtest.BatchMediumTester;
import org.sonar.batch.mediumtest.BatchMediumTester.TaskResult;
import org.sonar.plugins.cpd.CpdPlugin;
}
@Test
- public void testDuplications() throws IOException {
+ public void testCrossFileDuplications() throws IOException {
File srcDir = new File(baseDir, "src");
srcDir.mkdir();
// 4 measures per file
assertThat(result.measures()).hasSize(8);
- InputFile inputFile = result.inputFile("src/sample1.xoo");
+ InputFile inputFile1 = result.inputFile("src/sample1.xoo");
+ InputFile inputFile2 = result.inputFile("src/sample2.xoo");
+ // One clone group on each file
+ List<DuplicationGroup> duplicationGroupsFile1 = result.duplicationsFor(inputFile1);
+ assertThat(duplicationGroupsFile1).hasSize(1);
+
+ DuplicationGroup cloneGroupFile1 = duplicationGroupsFile1.get(0);
+ assertThat(cloneGroupFile1.duplicates()).hasSize(1);
+ assertThat(cloneGroupFile1.originBlock().startLine()).isEqualTo(1);
+ assertThat(cloneGroupFile1.originBlock().length()).isEqualTo(17);
+ assertThat(cloneGroupFile1.originBlock().resourceKey()).isEqualTo(((DefaultInputFile) inputFile1).key());
+ assertThat(cloneGroupFile1.duplicates()).hasSize(1);
+ assertThat(cloneGroupFile1.duplicates().get(0).resourceKey()).isEqualTo(((DefaultInputFile) inputFile2).key());
+
+ List<DuplicationGroup> duplicationGroupsFile2 = result.duplicationsFor(inputFile2);
+ assertThat(duplicationGroupsFile2).hasSize(1);
+
+ DuplicationGroup cloneGroupFile2 = duplicationGroupsFile2.get(0);
+ assertThat(cloneGroupFile2.duplicates()).hasSize(1);
+ assertThat(cloneGroupFile2.originBlock().startLine()).isEqualTo(1);
+ assertThat(cloneGroupFile2.originBlock().length()).isEqualTo(17);
+ assertThat(cloneGroupFile2.originBlock().resourceKey()).isEqualTo(((DefaultInputFile) inputFile2).key());
+ assertThat(cloneGroupFile2.duplicates()).hasSize(1);
+ assertThat(cloneGroupFile2.duplicates().get(0).resourceKey()).isEqualTo(((DefaultInputFile) inputFile1).key());
+ }
+
+ @Test
+ public void testIntraFileDuplications() throws IOException {
+ File srcDir = new File(baseDir, "src");
+ srcDir.mkdir();
+
+ String content = "Sample xoo\ncontent\nfoo\nbar\nSample xoo\ncontent\n";
+
+ File xooFile = new File(srcDir, "sample.xoo");
+ FileUtils.write(xooFile, content);
+
+ TaskResult result = tester.newTask()
+ .properties(builder
+ .put("sonar.sources", "src")
+ .put("sonar.cpd.xoo.minimumTokens", "2")
+ .put("sonar.cpd.xoo.minimumLines", "2")
+ .put("sonar.verbose", "true")
+ .build())
+ .start();
+
+ // 4 measures per file
+ assertThat(result.measures()).hasSize(4);
+
+ InputFile inputFile = result.inputFile("src/sample.xoo");
// One clone group
List<DuplicationGroup> duplicationGroups = result.duplicationsFor(inputFile);
assertThat(duplicationGroups).hasSize(1);
DuplicationGroup cloneGroup = duplicationGroups.get(0);
assertThat(cloneGroup.duplicates()).hasSize(1);
assertThat(cloneGroup.originBlock().startLine()).isEqualTo(1);
- assertThat(cloneGroup.originBlock().length()).isEqualTo(17);
+ assertThat(cloneGroup.originBlock().length()).isEqualTo(2);
+ assertThat(cloneGroup.duplicates()).hasSize(1);
+ assertThat(cloneGroup.duplicates().get(0).startLine()).isEqualTo(5);
+ assertThat(cloneGroup.duplicates().get(0).length()).isEqualTo(2);
+
+ assertThat(result.measures()).contains(new DefaultMeasure<String>()
+ .forMetric(CoreMetrics.DUPLICATION_LINES_DATA)
+ .onFile(inputFile)
+ .withValue("1=1;2=1;3=0;4=0;5=1;6=1;7=0"));
}
}
import org.apache.commons.lang.StringUtils;
import org.sonar.api.utils.System2;
import org.sonar.core.persistence.Database;
-import org.sonar.server.db.migrations.*;
+import org.sonar.server.db.migrations.BaseDataChange;
+import org.sonar.server.db.migrations.MassUpdate;
import org.sonar.server.db.migrations.Select.Row;
import org.sonar.server.db.migrations.Select.RowReader;
+import org.sonar.server.db.migrations.SqlStatement;
import javax.annotation.Nullable;
Long overallCoverageHitsByLineMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'overall_coverage_line_hits_data'").get(simpleLongReader);
Long overallConditionsByLineMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'overall_conditions_by_line'").get(simpleLongReader);
Long overallCoveredConditionsByLineMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'overall_covered_conditions_by_line'").get(simpleLongReader);
+ Long duplicationDataMetricId = context.prepareSelect("SELECT id FROM metrics WHERE name = 'duplications_data'").get(simpleLongReader);
MassUpdate massUpdate = context.prepareMassUpdate();
massUpdate.select("SELECT " +
String[] getSourceData() {
String highlighting = "";
String symbolRefs = "";
+ String duplications = "";
ByteArrayOutputStream output = new ByteArrayOutputStream();
int line = 0;
String sourceLine = null;
utHits.get(line), utConditions.get(line), utCoveredConditions.get(line),
itHits.get(line), itConditions.get(line), itCoveredConditions.get(line),
overallHits.get(line), overallConditions.get(line), overallCoveredConditions.get(line),
- highlighting, symbolRefs, sourceLine);
+ highlighting, symbolRefs, duplications, sourceLine);
}
csv.close();
return new String[] {new String(output.toByteArray(), UTF_8), lineHashes.toString()};
<dataset>
<file_sources id="1" project_uuid="uuid-MyProject" file_uuid="uuid-MyFile.xoo" created_at="1416238020000" updated_at="1414770242000"
- data="aef12a,alice,2014-04-25T12:34:56+0100,1,4,2,2,5,3,3,6,4,,,class Foo { abe465,bob,2014-07-25T12:34:56+0100,,,,,,,,,,,, // Empty afb789,carol,2014-03-23T12:34:56+0100,0,,,0,,,0,,,,,} afb789,carol,2014-03-23T12:34:56+0100,,,,,,,,,,,, "
+ data="aef12a,alice,2014-04-25T12:34:56+0100,1,4,2,2,5,3,3,6,4,,,,class Foo { abe465,bob,2014-07-25T12:34:56+0100,,,,,,,,,,,,, // Empty afb789,carol,2014-03-23T12:34:56+0100,0,,,0,,,0,,,,,,} afb789,carol,2014-03-23T12:34:56+0100,,,,,,,,,,,,, "
line_hashes="6a19ce786467960a3a9b0d26383a464a aab2dbc5fdeaa80b050b1d049ede357c cbb184dd8e05c9709e5dcaedaa0495cf "
data_hash="" />
<dataset>
<file_sources id="1" project_uuid="uuid-MyProject" file_uuid="uuid-MyFile.xoo" created_at="1416238020000" updated_at="1414770242000"
- data=",,,,,,,,,,,,,,class Foo { ,,,,,,,,,,,,,, // Empty ,,,,,,,,,,,,,,} ,,,,,,,,,,,,,, "
+ data=",,,,,,,,,,,,,,,class Foo { ,,,,,,,,,,,,,,, // Empty ,,,,,,,,,,,,,,,} ,,,,,,,,,,,,,,, "
line_hashes="6a19ce786467960a3a9b0d26383a464a aab2dbc5fdeaa80b050b1d049ede357c cbb184dd8e05c9709e5dcaedaa0495cf "
data_hash="" />
import org.sonar.api.batch.fs.InputFile;
import org.sonar.api.batch.fs.InputPath;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
+import org.sonar.api.batch.sensor.duplication.DuplicationGroup.Block;
import org.sonar.api.batch.sensor.symbol.Symbol;
import org.sonar.api.database.model.Snapshot;
import org.sonar.api.measures.CoreMetrics;
import org.sonar.api.utils.System2;
import org.sonar.api.utils.text.CsvWriter;
import org.sonar.batch.ProjectTree;
+import org.sonar.batch.duplication.DuplicationCache;
import org.sonar.batch.highlighting.SyntaxHighlightingData;
import org.sonar.batch.highlighting.SyntaxHighlightingRule;
import org.sonar.batch.scan.filesystem.InputPathCache;
private final ProjectTree projectTree;
private final ResourceCache resourceCache;
private CodeColorizers codeColorizers;
+ private DuplicationCache duplicationCache;
public SourcePersister(ResourcePersister resourcePersister, SnapshotSourceDao sourceDao, InputPathCache inputPathCache,
- MyBatis mybatis, MeasureCache measureCache, ComponentDataCache componentDataCache, ProjectTree projectTree, System2 system2, ResourceCache resourceCache,
- CodeColorizers codeColorizers) {
+ MyBatis mybatis, MeasureCache measureCache, ComponentDataCache componentDataCache, ProjectTree projectTree, System2 system2,
+ ResourceCache resourceCache, CodeColorizers codeColorizers, DuplicationCache duplicationCache) {
this.resourcePersister = resourcePersister;
this.sourceDao = sourceDao;
this.inputPathCache = inputPathCache;
this.system2 = system2;
this.resourceCache = resourceCache;
this.codeColorizers = codeColorizers;
+ this.duplicationCache = duplicationCache;
}
public void saveSource(Resource resource, String source, Date updatedAt) {
SyntaxHighlightingData highlighting = loadHighlighting(file);
String[] highlightingPerLine = computeHighlightingPerLine(file, highlighting);
String[] symbolReferencesPerLine = computeSymbolReferencesPerLine(file, loadSymbolReferences(file));
+ String[] duplicationsPerLine = computeDuplicationsPerLine(file, duplicationCache.byComponent(file.key()));
ByteArrayOutputStream output = new ByteArrayOutputStream();
CsvWriter csv = CsvWriter.of(new OutputStreamWriter(output, UTF_8));
utHitsByLine.get(lineIdx), utCondByLine.get(lineIdx), utCoveredCondByLine.get(lineIdx),
itHitsByLine.get(lineIdx), itCondByLine.get(lineIdx), itCoveredCondByLine.get(lineIdx),
overallHitsByLine.get(lineIdx), overallCondByLine.get(lineIdx), overallCoveredCondByLine.get(lineIdx),
- highlightingPerLine[lineIdx - 1], symbolReferencesPerLine[lineIdx - 1],
+ highlightingPerLine[lineIdx - 1], symbolReferencesPerLine[lineIdx - 1], duplicationsPerLine[lineIdx - 1],
CharMatcher.anyOf(BOM).removeFrom(lines.get(lineIdx - 1)));
}
csv.close();
return StringUtils.defaultIfEmpty(new String(output.toByteArray(), UTF_8), null);
}
+ // Computes, for each 1-based line of the file, the comma-separated list of
+ // duplication block ids covering that line; entries stay null for lines with
+ // no duplication. Ids are assigned sequentially across groups: the origin
+ // block first, then each duplicate block that belongs to this same file.
+ private String[] computeDuplicationsPerLine(DefaultInputFile file, List<DuplicationGroup> duplicationGroups) {
+ String[] result = new String[file.lines()];
+ if (duplicationGroups == null) {
+ // No duplication data for this file: every entry remains null.
+ return result;
+ }
+ StringBuilder[] dupPerLine = new StringBuilder[file.lines()];
+ int blockId = 1;
+ for (DuplicationGroup group : duplicationGroups) {
+ addBlock(blockId, group.originBlock(), dupPerLine);
+ blockId++;
+ for (Block dups : group.duplicates()) {
+ // Only duplicate blocks located in this same file are rendered here;
+ // blocks in other files carry a different resource key and are skipped.
+ if (dups.resourceKey().equals(file.key())) {
+ addBlock(blockId, dups, dupPerLine);
+ blockId++;
+ }
+ }
+ }
+ // Flatten the per-line builders into the final String[] (null = no duplication).
+ for (int i = 0; i < file.lines(); i++) {
+ result[i] = dupPerLine[i] != null ? dupPerLine[i].toString() : null;
+ }
+ return result;
+ }
+
+ // Appends blockId to the per-line duplication buffers for every line covered
+ // by the given block, i.e. lines startLine() .. startLine() + length() - 1 (1-based).
+ // Ids accumulating on the same line are comma-separated.
+ // NOTE(review): assumes the block never extends past the last line of the file
+ // (dupPerLine.length); an out-of-range block would throw
+ // ArrayIndexOutOfBoundsException -- confirm upstream CPD data guarantees this.
+ private void addBlock(int blockId, Block block, StringBuilder[] dupPerLine) {
+ int currentLine = block.startLine();
+ for (int i = 0; i < block.length(); i++) {
+ if (dupPerLine[currentLine - 1] == null) {
+ dupPerLine[currentLine - 1] = new StringBuilder();
+ }
+ if (dupPerLine[currentLine - 1].length() > 0) {
+ // A line already covered by another block gets a comma separator.
+ dupPerLine[currentLine - 1].append(',');
+ }
+ dupPerLine[currentLine - 1].append(blockId);
+ currentLine++;
+ }
+
+ }
+
@CheckForNull
private SyntaxHighlightingData loadHighlighting(DefaultInputFile file) {
SyntaxHighlightingData highlighting = componentDataCache.getData(file.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING);
import org.junit.rules.TemporaryFolder;
import org.sonar.api.batch.fs.InputPath;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.sensor.duplication.DuplicationGroup;
import org.sonar.api.batch.sensor.highlighting.TypeOfText;
import org.sonar.api.database.model.Snapshot;
import org.sonar.api.measures.CoreMetrics;
import org.sonar.api.utils.DateUtils;
import org.sonar.api.utils.System2;
import org.sonar.batch.ProjectTree;
+import org.sonar.batch.duplication.DuplicationCache;
import org.sonar.batch.highlighting.SyntaxHighlightingData;
import org.sonar.batch.highlighting.SyntaxHighlightingDataBuilder;
import org.sonar.batch.scan.filesystem.InputPathCache;
private System2 system2;
private MeasureCache measureCache;
private ComponentDataCache componentDataCache;
+ private DuplicationCache duplicationCache;
private static final String PROJECT_KEY = "foo";
measureCache = mock(MeasureCache.class);
when(measureCache.byMetric(anyString(), anyString())).thenReturn(Collections.<org.sonar.api.measures.Measure>emptyList());
componentDataCache = mock(ComponentDataCache.class);
+ duplicationCache = mock(DuplicationCache.class);
sourcePersister = new SourcePersister(resourcePersister, new SnapshotSourceDao(getMyBatis()), inputPathCache,
getMyBatis(), measureCache, componentDataCache, projectTree, system2,
- resourceCache, mock(CodeColorizers.class));
+ resourceCache, mock(CodeColorizers.class), duplicationCache);
Project project = new Project(PROJECT_KEY);
project.setUuid("projectUuid");
when(projectTree.getRootProject()).thenReturn(project);
assertThat(fileSourceDto.getCreatedAt()).isEqualTo(DateUtils.parseDateTime("2014-10-10T16:44:02+0200").getTime());
assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getData()).isEqualTo(
- ",,,,,,,,,,,,,,changed\r\n,,,,,,,,,,,,,,content\r\n");
+ ",,,,,,,,,,,,,,,changed\r\n,,,,,,,,,,,,,,,content\r\n");
assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("changed") + "\n" + md5Hex("content"));
- assertThat(fileSourceDto.getDataHash()).isEqualTo("bd582d7001cfca180c3dacab10043292");
+ assertThat(fileSourceDto.getDataHash()).isEqualTo("d1a4dd62422639f665a8d80b37c59f8d");
}
@Test
assertThat(fileSourceDto.getCreatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getData()).isEqualTo(
- ",,,,,,,,,,,,,,foo\r\n,,,,,,,,,,,,,,bar\r\n,,,,,,,,,,,,,,biz\r\n");
+ ",,,,,,,,,,,,,,,foo\r\n,,,,,,,,,,,,,,,bar\r\n,,,,,,,,,,,,,,,biz\r\n");
assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("foo") + "\n" + md5Hex("bar") + "\n" + md5Hex("biz"));
- assertThat(fileSourceDto.getDataHash()).isEqualTo("e1827ac156bb76144486e6570a591cfb");
+ assertThat(fileSourceDto.getDataHash()).isEqualTo("a34ed99cc7d27150c82f5cba2b22b665");
}
mockResourceCache(relativePathNew, PROJECT_KEY, "uuidnew");
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.SCM_AUTHORS_BY_LINE_KEY))
+ String fileKey = PROJECT_KEY + ":" + relativePathNew;
+ when(measureCache.byMetric(fileKey, CoreMetrics.SCM_AUTHORS_BY_LINE_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_AUTHORS_BY_LINE, "1=julien;2=simon;3=julien")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_LAST_COMMIT_DATETIMES_BY_LINE, "1=2014-10-11T16:44:02+0100;2=2014-10-12T16:44:02+0100;3=2014-10-13T16:44:02+0100")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.SCM_REVISIONS_BY_LINE_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.SCM_REVISIONS_BY_LINE_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.SCM_REVISIONS_BY_LINE, "1=123;2=234;3=345")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.COVERAGE_LINE_HITS_DATA_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.COVERAGE_LINE_HITS_DATA, "1=1;3=0")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.CONDITIONS_BY_LINE_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.CONDITIONS_BY_LINE_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.CONDITIONS_BY_LINE, "1=4")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.COVERED_CONDITIONS_BY_LINE_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.COVERED_CONDITIONS_BY_LINE, "1=2")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.IT_COVERAGE_LINE_HITS_DATA_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.IT_COVERAGE_LINE_HITS_DATA_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_COVERAGE_LINE_HITS_DATA, "1=2;3=0")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.IT_CONDITIONS_BY_LINE_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.IT_CONDITIONS_BY_LINE_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_CONDITIONS_BY_LINE, "1=5")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.IT_COVERED_CONDITIONS_BY_LINE, "1=3")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_COVERAGE_LINE_HITS_DATA, "1=3;3=0")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.OVERALL_CONDITIONS_BY_LINE_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_CONDITIONS_BY_LINE_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_CONDITIONS_BY_LINE, "1=6")));
- when(measureCache.byMetric(PROJECT_KEY + ":" + relativePathNew, CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE_KEY))
+ when(measureCache.byMetric(fileKey, CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE_KEY))
.thenReturn(Arrays.asList(new Measure(CoreMetrics.OVERALL_COVERED_CONDITIONS_BY_LINE, "1=4")));
SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
.registerHighlightingRule(4, 5, TypeOfText.COMMENT)
.registerHighlightingRule(7, 16, TypeOfText.CONSTANT)
.build();
- when(componentDataCache.getData(PROJECT_KEY + ":" + relativePathNew, SnapshotDataTypes.SYNTAX_HIGHLIGHTING))
+ when(componentDataCache.getData(fileKey, SnapshotDataTypes.SYNTAX_HIGHLIGHTING))
.thenReturn(highlighting);
- DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(PROJECT_KEY + ":" + relativePathNew, null);
+ DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(fileKey, null);
org.sonar.api.batch.sensor.symbol.Symbol s1 = symbolBuilder.newSymbol(1, 2);
symbolBuilder.newReference(s1, 4);
symbolBuilder.newReference(s1, 11);
org.sonar.api.batch.sensor.symbol.Symbol s2 = symbolBuilder.newSymbol(4, 6);
symbolBuilder.newReference(s2, 0);
symbolBuilder.newReference(s2, 7);
- when(componentDataCache.getData(PROJECT_KEY + ":" + relativePathNew, SnapshotDataTypes.SYMBOL_HIGHLIGHTING))
+ when(componentDataCache.getData(fileKey, SnapshotDataTypes.SYMBOL_HIGHLIGHTING))
.thenReturn(symbolBuilder.build());
+ DuplicationGroup group1 = new DuplicationGroup(new DuplicationGroup.Block(fileKey, 1, 1))
+ .addDuplicate(new DuplicationGroup.Block(fileKey, 3, 1))
+ .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 1))
+ .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 1));
+
+ DuplicationGroup group2 = new DuplicationGroup(new DuplicationGroup.Block(fileKey, 1, 2))
+ .addDuplicate(new DuplicationGroup.Block("anotherFile1", 12, 2))
+ .addDuplicate(new DuplicationGroup.Block("anotherFile2", 13, 2));
+ when(duplicationCache.byComponent(fileKey)).thenReturn(Arrays.asList(group1, group2));
+
sourcePersister.persist();
FileSourceDto fileSourceDto = new FileSourceDao(getMyBatis()).select("uuidnew");
assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(now.getTime());
assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("foo") + "\n" + md5Hex("bar") + "\n" + md5Hex("biz"));
assertThat(fileSourceDto.getData()).isEqualTo(
- "123,julien,2014-10-11T16:44:02+0100,1,4,2,2,5,3,3,6,4,\"0,3,a\",\"1,2,1;0,2,2\",foo\r\n"
- + "234,simon,2014-10-12T16:44:02+0100,,,,,,,,,,\"0,1,cd\",\"0,1,1;0,2,2\",bar\r\n"
- + "345,julien,2014-10-13T16:44:02+0100,0,,,0,,,0,,,\"0,9,c\",\"4,5,1;0,2,2\",biz\r\n");
- assertThat(fileSourceDto.getDataHash()).isEqualTo("594752666dd282f4a3bb985829c790fa");
+ "123,julien,2014-10-11T16:44:02+0100,1,4,2,2,5,3,3,6,4,\"0,3,a\",\"1,2,1;0,2,2\",\"1,3\",foo\r\n"
+ + "234,simon,2014-10-12T16:44:02+0100,,,,,,,,,,\"0,1,cd\",\"0,1,1;0,2,2\",3,bar\r\n"
+ + "345,julien,2014-10-13T16:44:02+0100,0,,,0,,,0,,,\"0,9,c\",\"4,5,1;0,2,2\",2,biz\r\n");
+ assertThat(fileSourceDto.getDataHash()).isEqualTo("26930cf0250d525b04083185ff24a046");
}
@Test
<dataset>
<file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
- data=",,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,content "
+ data=",,,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,,content "
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
- data_hash="21a2d025d55b25d6412b1565afb516a5"
+ data_hash="0263047cd758c68c27683625f072f010"
created_at="1412952242000" updated_at="1412952242000" />
</dataset>
<dataset>
<file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
- data=",,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,content "
+ data=",,,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,,content "
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
- data_hash="21a2d025d55b25d6412b1565afb516a5"
+ data_hash="0263047cd758c68c27683625f072f010"
created_at="1412952242000" updated_at="1412952242000" />
</dataset>
<dataset>
<file_sources id="101" project_uuid="projectUuid" file_uuid="uuidsame"
- data=",,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,content "
+ data=",,,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,,content "
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
- data_hash="21a2d025d55b25d6412b1565afb516a5"
+ data_hash="0263047cd758c68c27683625f072f010"
created_at="1412952242000" updated_at="1412952242000" />
<file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" data="[null]"