author    | Julien HENRY <julien.henry@sonarsource.com> | 2014-01-14 16:36:46 +0100
committer | Julien HENRY <julien.henry@sonarsource.com> | 2014-01-14 16:37:31 +0100
commit    | eda36f17ec11950bc73719dd84677591e92f8888 (patch)
tree      | c02fa4d476df08358ad0d4da4367225f82230067 /plugins
parent    | 75acf326ac694edcdc1631a0a2a6fffe58fd802e (diff)
download  | sonarqube-eda36f17ec11950bc73719dd84677591e92f8888.tar.gz sonarqube-eda36f17ec11950bc73719dd84677591e92f8888.zip
SONAR-3024 Fix SonarEngine to work with new resource keys
Diffstat (limited to 'plugins')
6 files changed, 108 insertions, 144 deletions
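Before the diff itself, a minimal illustrative sketch (not part of the commit) of the central change: the CPD engines used to rebuild a file's effective key by hand with `SonarEngine.getFullKey()` after resolving the file to a `Resource` (via `PathResolver`/`JavaFile` or a `CpdMapping`); with the new resource keys they read the component key already attached to each `InputFile` by the scan file system. The class name `ResourceKeyExample` and both method names are only for illustration; the API calls are the ones visible in the diff below.

```java
// Illustrative sketch only -- not part of the commit. It contrasts the two ways
// the CPD engines resolve a file's effective key, using calls visible in this diff.
import org.sonar.api.resources.Project;
import org.sonar.api.resources.Resource;
import org.sonar.api.scan.filesystem.internal.DefaultInputFile;
import org.sonar.api.scan.filesystem.internal.InputFile;

class ResourceKeyExample {

  // Old scheme: the key was concatenated from the project key and the resource key
  // (this is what the removed SonarEngine.getFullKey() did).
  static String oldEffectiveKey(Project project, Resource resource) {
    return project.getKey() + ":" + resource.getKey();
  }

  // New scheme: the component key is read from the InputFile attributes, so the
  // engines no longer need PathResolver or JavaFile to recompute it.
  static String newEffectiveKey(InputFile inputFile) {
    return inputFile.attribute(DefaultInputFile.ATTRIBUTE_COMPONENT_KEY);
  }
}
```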
diff --git a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/SonarBridgeEngine.java b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/SonarBridgeEngine.java
index 28604e52bc2..aa4bf2b4da6 100644
--- a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/SonarBridgeEngine.java
+++ b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/SonarBridgeEngine.java
@@ -30,10 +30,11 @@ import org.sonar.api.batch.SensorContext;
 import org.sonar.api.config.Settings;
 import org.sonar.api.resources.Language;
 import org.sonar.api.resources.Project;
-import org.sonar.api.resources.Resource;
 import org.sonar.api.scan.filesystem.FileQuery;
-import org.sonar.api.scan.filesystem.ModuleFileSystem;
+import org.sonar.api.scan.filesystem.internal.DefaultInputFile;
+import org.sonar.api.scan.filesystem.internal.InputFile;
 import org.sonar.api.utils.SonarException;
+import org.sonar.batch.scan.filesystem.DefaultModuleFileSystem;
 import org.sonar.duplications.DuplicationPredicates;
 import org.sonar.duplications.block.Block;
 import org.sonar.duplications.index.CloneGroup;
@@ -41,10 +42,13 @@ import org.sonar.duplications.internal.pmd.TokenizerBridge;
 import org.sonar.plugins.cpd.index.IndexFactory;
 import org.sonar.plugins.cpd.index.SonarDuplicationsIndex;

-import java.io.File;
 import java.util.Collection;
 import java.util.List;
-import java.util.concurrent.*;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;

 public class SonarBridgeEngine extends CpdEngine {

@@ -57,17 +61,17 @@
   private final IndexFactory indexFactory;
   private final CpdMapping[] mappings;
-  private final ModuleFileSystem fileSystem;
+  private final DefaultModuleFileSystem fileSystem;
   private final Settings settings;

-  public SonarBridgeEngine(IndexFactory indexFactory, CpdMapping[] mappings, ModuleFileSystem moduleFileSystem, Settings settings) {
+  public SonarBridgeEngine(IndexFactory indexFactory, CpdMapping[] mappings, DefaultModuleFileSystem moduleFileSystem, Settings settings) {
     this.indexFactory = indexFactory;
     this.mappings = mappings;
     this.fileSystem = moduleFileSystem;
     this.settings = settings;
   }

-  public SonarBridgeEngine(IndexFactory indexFactory, ModuleFileSystem moduleFileSystem, Settings settings) {
+  public SonarBridgeEngine(IndexFactory indexFactory, DefaultModuleFileSystem moduleFileSystem, Settings settings) {
     this(indexFactory, new CpdMapping[0], moduleFileSystem, settings);
   }

@@ -80,8 +84,9 @@
   public void analyse(Project project, SensorContext context) {
     String[] cpdExclusions = settings.getStringArray(CoreProperties.CPD_EXCLUSIONS);
     logExclusions(cpdExclusions, LOG);
-    List<File> sourceFiles = fileSystem.files(FileQuery.onSource().onLanguage(project.getLanguageKey()).withExclusions(cpdExclusions));
-    if (sourceFiles.isEmpty()) {
+    Iterable<InputFile> sourceFiles = fileSystem.inputFiles(FileQuery.onSource().onLanguage(project.getLanguageKey())
+      .withExclusions(cpdExclusions));
+    if (!sourceFiles.iterator().hasNext()) {
       return;
     }

@@ -91,12 +96,11 @@
     SonarDuplicationsIndex index = indexFactory.create(project);

     TokenizerBridge bridge = new TokenizerBridge(mapping.getTokenizer(), fileSystem.sourceCharset().name(), getBlockSize(project));
-    for (File file : sourceFiles) {
-      LOG.debug("Populating index from {}", file);
-      Resource resource = mapping.createResource(file, fileSystem.sourceDirs());
-      String resourceId = SonarEngine.getFullKey(project, resource);
-      List<Block> blocks = bridge.chunk(resourceId, file);
-      index.insert(resource, blocks);
+    for (InputFile inputFile : sourceFiles) {
+      LOG.debug("Populating index from {}", inputFile);
+      String resourceEffectiveKey = inputFile.attribute(DefaultInputFile.ATTRIBUTE_COMPONENT_KEY);
+      List<Block> blocks = bridge.chunk(resourceEffectiveKey, inputFile.file());
+      index.insert(inputFile, blocks);
     }

     // Detect
@@ -104,12 +108,10 @@
     ExecutorService executorService = Executors.newSingleThreadExecutor();
     try {
-      for (File file : sourceFiles) {
-        LOG.debug("Detection of duplications for {}", file);
-        Resource resource = mapping.createResource(file, fileSystem.sourceDirs());
-        String resourceKey = SonarEngine.getFullKey(project, resource);
-
-        Collection<Block> fileBlocks = index.getByResource(resource, resourceKey);
+      for (InputFile inputFile : sourceFiles) {
+        LOG.debug("Detection of duplications for {}", inputFile);
+        String resourceEffectiveKey = inputFile.attribute(DefaultInputFile.ATTRIBUTE_COMPONENT_KEY);
+        Collection<Block> fileBlocks = index.getByInputFile(inputFile, resourceEffectiveKey);

         Iterable<CloneGroup> filtered;
         try {
@@ -117,14 +119,14 @@
           filtered = Iterables.filter(duplications, minimumTokensPredicate);
         } catch (TimeoutException e) {
           filtered = null;
-          LOG.warn("Timeout during detection of duplications for " + file, e);
+          LOG.warn("Timeout during detection of duplications for " + inputFile, e);
         } catch (InterruptedException e) {
-          throw new SonarException("Fail during detection of duplication for "+ file, e);
+          throw new SonarException("Fail during detection of duplication for " + inputFile, e);
         } catch (ExecutionException e) {
-          throw new SonarException("Fail during detection of duplication for "+ file, e);
+          throw new SonarException("Fail during detection of duplication for " + inputFile, e);
         }

-        SonarEngine.save(context, resource, filtered);
+        SonarEngine.save(context, inputFile, filtered);
       }
     } finally {
       executorService.shutdown();
diff --git a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/SonarEngine.java b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/SonarEngine.java
index d92157f3e11..d4d545d34d0 100644
--- a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/SonarEngine.java
+++ b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/SonarEngine.java
@@ -28,19 +28,17 @@ import org.slf4j.LoggerFactory;
 import org.sonar.api.CoreProperties;
 import org.sonar.api.batch.SensorContext;
 import org.sonar.api.config.Settings;
-import org.sonar.api.database.model.ResourceModel;
 import org.sonar.api.measures.CoreMetrics;
 import org.sonar.api.measures.Measure;
 import org.sonar.api.measures.PersistenceMode;
 import org.sonar.api.resources.Java;
-import org.sonar.api.resources.JavaFile;
 import org.sonar.api.resources.Language;
 import org.sonar.api.resources.Project;
-import org.sonar.api.resources.Resource;
 import org.sonar.api.scan.filesystem.FileQuery;
-import org.sonar.api.scan.filesystem.ModuleFileSystem;
-import org.sonar.api.scan.filesystem.PathResolver;
+import org.sonar.api.scan.filesystem.internal.DefaultInputFile;
+import org.sonar.api.scan.filesystem.internal.InputFile;
 import org.sonar.api.utils.SonarException;
+import org.sonar.batch.scan.filesystem.DefaultModuleFileSystem;
 import org.sonar.duplications.block.Block;
 import org.sonar.duplications.block.BlockChunker;
 import org.sonar.duplications.detector.suffixtree.SuffixTreeCloneDetectionAlgorithm;
@@ -57,7 +55,6 @@ import org.sonar.plugins.cpd.index.SonarDuplicationsIndex;

 import javax.annotation.Nullable;

-import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.InputStreamReader;
@@ -85,14 +82,12 @@ public class SonarEngine extends CpdEngine {
   private static final int TIMEOUT = 5 * 60;

   private final IndexFactory indexFactory;
-  private final ModuleFileSystem fileSystem;
-  private final PathResolver pathResolver;
+  private final DefaultModuleFileSystem fileSystem;
   private final Settings settings;

-  public SonarEngine(IndexFactory indexFactory, ModuleFileSystem moduleFileSystem, PathResolver pathResolver, Settings settings) {
+  public SonarEngine(IndexFactory indexFactory, DefaultModuleFileSystem moduleFileSystem, Settings settings) {
     this.indexFactory = indexFactory;
     this.fileSystem = moduleFileSystem;
-    this.pathResolver = pathResolver;
     this.settings = settings;
   }

@@ -101,80 +96,70 @@
     return Java.KEY.equals(language.getKey());
   }

-  static String getFullKey(Project project, Resource resource) {
-    return new StringBuilder(ResourceModel.KEY_SIZE)
-      .append(project.getKey())
-      .append(':')
-      .append(resource.getKey())
-      .toString();
-  }
-
   @Override
   public void analyse(Project project, SensorContext context) {
     String[] cpdExclusions = settings.getStringArray(CoreProperties.CPD_EXCLUSIONS);
     logExclusions(cpdExclusions, LOG);
-    List<File> sourceFiles = fileSystem.files(FileQuery.onSource().onLanguage(project.getLanguageKey()).withExclusions(cpdExclusions));
-    if (sourceFiles.isEmpty()) {
+    Iterable<InputFile> sourceFiles = fileSystem.inputFiles(FileQuery.onSource().onLanguage(project.getLanguageKey()).withExclusions(cpdExclusions));
+    if (!sourceFiles.iterator().hasNext()) {
       return;
     }

     SonarDuplicationsIndex index = createIndex(project, sourceFiles);
     detect(index, context, project, sourceFiles);
   }

-  private SonarDuplicationsIndex createIndex(Project project, List<File> sourceFiles) {
+  private SonarDuplicationsIndex createIndex(Project project, Iterable<InputFile> sourceFiles) {
     final SonarDuplicationsIndex index = indexFactory.create(project);

     TokenChunker tokenChunker = JavaTokenProducer.build();
     StatementChunker statementChunker = JavaStatementBuilder.build();
     BlockChunker blockChunker = new BlockChunker(BLOCK_SIZE);

-    for (File file : sourceFiles) {
-      LOG.debug("Populating index from {}", file);
-      Resource resource = getResource(file);
-      String resourceKey = getFullKey(project, resource);
+    for (InputFile inputFile : sourceFiles) {
+      LOG.debug("Populating index from {}", inputFile);
+      String resourceEffectiveKey = inputFile.attribute(DefaultInputFile.ATTRIBUTE_COMPONENT_KEY);

       List<Statement> statements;

       Reader reader = null;
       try {
-        reader = new InputStreamReader(new FileInputStream(file), fileSystem.sourceCharset());
+        reader = new InputStreamReader(new FileInputStream(inputFile.file()), fileSystem.sourceCharset());
         statements = statementChunker.chunk(tokenChunker.chunk(reader));
       } catch (FileNotFoundException e) {
-        throw new SonarException("Cannot find file " + file, e);
+        throw new SonarException("Cannot find file " + inputFile.file(), e);
       } finally {
         IOUtils.closeQuietly(reader);
       }

-      List<Block> blocks = blockChunker.chunk(resourceKey, statements);
-      index.insert(resource, blocks);
+      List<Block> blocks = blockChunker.chunk(resourceEffectiveKey, statements);
+      index.insert(inputFile, blocks);
     }

     return index;
   }

-  private void detect(SonarDuplicationsIndex index, SensorContext context, Project project, List<File> sourceFiles) {
+  private void detect(SonarDuplicationsIndex index, SensorContext context, Project project, Iterable<InputFile> sourceFiles) {
     ExecutorService executorService = Executors.newSingleThreadExecutor();
     try {
-      for (File file : sourceFiles) {
-        LOG.debug("Detection of duplications for {}", file);
-        Resource resource = getResource(file);
-        String resourceKey = getFullKey(project, resource);
+      for (InputFile inputFile : sourceFiles) {
+        LOG.debug("Detection of duplications for {}", inputFile);
+        String resourceEffectiveKey = inputFile.attribute(DefaultInputFile.ATTRIBUTE_COMPONENT_KEY);

-        Collection<Block> fileBlocks = index.getByResource(resource, resourceKey);
+        Collection<Block> fileBlocks = index.getByInputFile(inputFile, resourceEffectiveKey);

         List<CloneGroup> clones;
         try {
           clones = executorService.submit(new Task(index, fileBlocks)).get(TIMEOUT, TimeUnit.SECONDS);
         } catch (TimeoutException e) {
           clones = null;
-          LOG.warn("Timeout during detection of duplications for " + file, e);
+          LOG.warn("Timeout during detection of duplications for " + inputFile, e);
         } catch (InterruptedException e) {
-          throw new SonarException("Fail during detection of duplication for " + file, e);
+          throw new SonarException("Fail during detection of duplication for " + inputFile, e);
         } catch (ExecutionException e) {
-          throw new SonarException("Fail during detection of duplication for " + file, e);
+          throw new SonarException("Fail during detection of duplication for " + inputFile, e);
         }

-        save(context, resource, clones);
+        save(context, inputFile, clones);
       }
     } finally {
       executorService.shutdown();
@@ -195,12 +180,7 @@ public class SonarEngine extends CpdEngine {
     }
   }

-  protected Resource getResource(File file) {
-    String relativePathFromBaseDir = pathResolver.relativePath(fileSystem.baseDir(), file);
-    return JavaFile.create(relativePathFromBaseDir, "unused", false);
-  }
-
-  static void save(SensorContext context, Resource resource, @Nullable Iterable<CloneGroup> duplications) {
+  static void save(SensorContext context, InputFile inputFile, @Nullable Iterable<CloneGroup> duplications) {
     if (duplications == null || Iterables.isEmpty(duplications)) {
       return;
     }
@@ -219,13 +199,13 @@
       }
     }
     // Save
-    context.saveMeasure(resource, CoreMetrics.DUPLICATED_FILES, 1.0);
-    context.saveMeasure(resource, CoreMetrics.DUPLICATED_LINES, (double) duplicatedLines.size());
-    context.saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, duplicatedBlocks);
+    context.saveMeasure(inputFile, CoreMetrics.DUPLICATED_FILES, 1.0);
+    context.saveMeasure(inputFile, CoreMetrics.DUPLICATED_LINES, (double) duplicatedLines.size());
+    context.saveMeasure(inputFile, CoreMetrics.DUPLICATED_BLOCKS, duplicatedBlocks);

     Measure data = new Measure(CoreMetrics.DUPLICATIONS_DATA, toXml(duplications))
       .setPersistenceMode(PersistenceMode.DATABASE);
-    context.saveMeasure(resource, data);
+    context.saveMeasure(inputFile, data);
   }

   private static String toXml(Iterable<CloneGroup> duplications) {
diff --git a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/DbDuplicationsIndex.java b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/DbDuplicationsIndex.java
index 9ea3a65ba38..2ef72e7cae6 100644
--- a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/DbDuplicationsIndex.java
+++ b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/DbDuplicationsIndex.java
@@ -23,7 +23,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.sonar.api.database.model.Snapshot;
 import org.sonar.api.resources.Project;
-import org.sonar.api.resources.Resource;
+import org.sonar.api.scan.filesystem.internal.InputFile;
 import org.sonar.batch.index.ResourcePersister;
 import org.sonar.core.duplication.DuplicationDao;
 import org.sonar.core.duplication.DuplicationUnitDto;
@@ -56,12 +56,12 @@
     this.languageKey = currentProject.getLanguageKey();
   }

-  int getSnapshotIdFor(Resource resource) {
-    return resourcePersister.getSnapshotOrFail(resource).getId();
+  int getSnapshotIdFor(InputFile inputFile) {
+    return resourcePersister.getSnapshotOrFail(inputFile).getId();
   }

-  public void prepareCache(Resource resource) {
-    int resourceSnapshotId = getSnapshotIdFor(resource);
+  public void prepareCache(InputFile inputFile) {
+    int resourceSnapshotId = getSnapshotIdFor(inputFile);
     List<DuplicationUnitDto> units = dao.selectCandidates(resourceSnapshotId, lastSnapshotId, languageKey);
     cache.clear();
     // TODO Godin: maybe remove conversion of units to blocks?
@@ -74,11 +74,11 @@
       // TODO Godin: in fact we could work directly with id instead of key - this will allow to decrease memory consumption

       Block block = Block.builder()
-          .setResourceId(resourceKey)
-          .setBlockHash(new ByteArray(hash))
-          .setIndexInFile(indexInFile)
-          .setLines(startLine, endLine)
-          .build();
+        .setResourceId(resourceKey)
+        .setBlockHash(new ByteArray(hash))
+        .setIndexInFile(indexInFile)
+        .setLines(startLine, endLine)
+        .build();

       // Group blocks by hash
       Collection<Block> sameHash = cache.get(block.getBlockHash());
@@ -99,19 +99,19 @@
     }
   }

-  public void insert(Resource resource, Collection<Block> blocks) {
-    int resourceSnapshotId = getSnapshotIdFor(resource);
+  public void insert(InputFile inputFile, Collection<Block> blocks) {
+    int resourceSnapshotId = getSnapshotIdFor(inputFile);

     // TODO Godin: maybe remove conversion of blocks to units?
     List<DuplicationUnitDto> units = Lists.newArrayList();
     for (Block block : blocks) {
       DuplicationUnitDto unit = new DuplicationUnitDto(
-          currentProjectSnapshotId,
-          resourceSnapshotId,
-          block.getBlockHash().toString(),
-          block.getIndexInFile(),
-          block.getStartLine(),
-          block.getEndLine());
+        currentProjectSnapshotId,
+        resourceSnapshotId,
+        block.getBlockHash().toString(),
+        block.getIndexInFile(),
+        block.getStartLine(),
+        block.getEndLine());
       units.add(unit);
     }

diff --git a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/SonarDuplicationsIndex.java b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/SonarDuplicationsIndex.java
index 9c74235bad6..cd554934c04 100644
--- a/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/SonarDuplicationsIndex.java
+++ b/plugins/sonar-cpd-plugin/src/main/java/org/sonar/plugins/cpd/index/SonarDuplicationsIndex.java
@@ -20,7 +20,7 @@
 package org.sonar.plugins.cpd.index;

 import com.google.common.collect.Lists;
-import org.sonar.api.resources.Resource;
+import org.sonar.api.scan.filesystem.internal.InputFile;
 import org.sonar.duplications.block.Block;
 import org.sonar.duplications.block.ByteArray;
 import org.sonar.duplications.index.AbstractCloneIndex;
@@ -43,18 +43,18 @@
     this.db = db;
   }

-  public void insert(Resource resource, Collection<Block> blocks) {
+  public void insert(InputFile inputFile, Collection<Block> blocks) {
     for (Block block : blocks) {
       mem.insert(block);
     }
     if (db != null) {
-      db.insert(resource, blocks);
+      db.insert(inputFile, blocks);
     }
   }

-  public Collection<Block> getByResource(Resource resource, String resourceKey) {
+  public Collection<Block> getByInputFile(InputFile inputFile, String resourceKey) {
     if (db != null) {
-      db.prepareCache(resource);
+      db.prepareCache(inputFile);
     }
     return mem.getByResourceId(resourceKey);
   }
diff --git a/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdSensorTest.java b/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdSensorTest.java
index 26d5637373f..0cf3ade848b 100644
--- a/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdSensorTest.java
+++ b/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/CpdSensorTest.java
@@ -42,7 +42,7 @@ public class CpdSensorTest {
   @Before
   public void setUp() {
     IndexFactory indexFactory = mock(IndexFactory.class);
-    sonarEngine = new SonarEngine(indexFactory, null, null, null);
+    sonarEngine = new SonarEngine(indexFactory, null, null);
     sonarBridgeEngine = new SonarBridgeEngine(indexFactory, null, null);
     settings = new Settings(new PropertyDefinitions(CpdPlugin.class));
     sensor = new CpdSensor(sonarEngine, sonarBridgeEngine, settings);
diff --git a/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/SonarEngineTest.java b/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/SonarEngineTest.java
index c2346fe86fb..f22eabb3a81 100644
--- a/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/SonarEngineTest.java
+++ b/plugins/sonar-cpd-plugin/src/test/java/org/sonar/plugins/cpd/SonarEngineTest.java
@@ -19,15 +19,13 @@
  */
 package org.sonar.plugins.cpd;

+import com.google.common.base.Charsets;
 import org.junit.Before;
 import org.junit.Test;
 import org.sonar.api.batch.SensorContext;
 import org.sonar.api.measures.CoreMetrics;
-import org.sonar.api.resources.File;
-import org.sonar.api.resources.JavaFile;
-import org.sonar.api.resources.Resource;
-import org.sonar.api.scan.filesystem.ModuleFileSystem;
-import org.sonar.api.scan.filesystem.PathResolver;
+import org.sonar.api.scan.filesystem.internal.InputFile;
+import org.sonar.api.scan.filesystem.internal.InputFileBuilder;
 import org.sonar.api.test.IsMeasure;
 import org.sonar.duplications.index.CloneGroup;
 import org.sonar.duplications.index.ClonePart;
@@ -36,44 +34,28 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;

-import static org.fest.assertions.Assertions.assertThat;
-import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.argThat;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyZeroInteractions;
-import static org.mockito.Mockito.when;

 public class SonarEngineTest {

   private SensorContext context;
-  private Resource resource;
+  private InputFile inputFile;

   @Before
   public void setUp() {
     context = mock(SensorContext.class);
-    resource = new JavaFile("key1");
-  }
-
-  @Test
-  public void testGetResource() {
-    PathResolver pathResolver = mock(PathResolver.class);
-    ModuleFileSystem fileSystem = mock(ModuleFileSystem.class);
-    when(pathResolver.relativePath(any(java.io.File.class), any(java.io.File.class))).thenReturn("src/main/java/com/foo/Bar.java");
-
-    SonarEngine engine = new SonarEngine(null, fileSystem, pathResolver, null);
-    Resource resource = engine.getResource(new java.io.File(""));
-
-    assertThat(resource.getKey()).isEqualTo("/src/main/java/com/foo/Bar.java");
-    assertThat(resource).isInstanceOf(JavaFile.class);
+    inputFile = new InputFileBuilder(new java.io.File(""), Charsets.UTF_8, "src/main/java/Foo.java").build();
   }

   @SuppressWarnings("unchecked")
   @Test
   public void testNothingToSave() {
-    SonarEngine.save(context, resource, null);
-    SonarEngine.save(context, resource, Collections.EMPTY_LIST);
+    SonarEngine.save(context, inputFile, null);
+    SonarEngine.save(context, inputFile, Collections.EMPTY_LIST);

     verifyZeroInteractions(context);
   }
@@ -81,13 +63,13 @@
   @Test
   public void testOneSimpleDuplicationBetweenTwoFiles() {
     List<CloneGroup> groups = Arrays.asList(newCloneGroup(new ClonePart("key1", 0, 5, 204), new ClonePart("key2", 0, 15, 214)));
-    SonarEngine.save(context, resource, groups);
+    SonarEngine.save(context, inputFile, groups);

-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, 1d);
-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_LINES, 200d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_FILES, 1d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_BLOCKS, 1d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_LINES, 200d);
     verify(context).saveMeasure(
-      eq(resource),
+      eq(inputFile),
       argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications><g>"
         + "<b s=\"5\" l=\"200\" r=\"key1\"/>"
         + "<b s=\"15\" l=\"200\" r=\"key2\"/>"
@@ -97,13 +79,13 @@
   @Test
   public void testDuplicationOnSameFile() throws Exception {
     List<CloneGroup> groups = Arrays.asList(newCloneGroup(new ClonePart("key1", 0, 5, 204), new ClonePart("key1", 0, 215, 414)));
-    SonarEngine.save(context, resource, groups);
+    SonarEngine.save(context, inputFile, groups);

-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_LINES, 400d);
-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, 2d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_FILES, 1d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_LINES, 400d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_BLOCKS, 2d);
     verify(context).saveMeasure(
-      eq(resource),
+      eq(inputFile),
       argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications><g>"
         + "<b s=\"5\" l=\"200\" r=\"key1\"/>"
         + "<b s=\"215\" l=\"200\" r=\"key1\"/>"
@@ -113,13 +95,13 @@
   @Test
   public void testOneDuplicatedGroupInvolvingMoreThanTwoFiles() throws Exception {
     List<CloneGroup> groups = Arrays.asList(newCloneGroup(new ClonePart("key1", 0, 5, 204), new ClonePart("key2", 0, 15, 214), new ClonePart("key3", 0, 25, 224)));
-    SonarEngine.save(context, resource, groups);
+    SonarEngine.save(context, inputFile, groups);

-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, 1d);
-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_LINES, 200d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_FILES, 1d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_BLOCKS, 1d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_LINES, 200d);
     verify(context).saveMeasure(
-      eq(resource),
+      eq(inputFile),
       argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications><g>"
         + "<b s=\"5\" l=\"200\" r=\"key1\"/>"
         + "<b s=\"15\" l=\"200\" r=\"key2\"/>"
@@ -132,13 +114,13 @@
     List<CloneGroup> groups = Arrays.asList(
       newCloneGroup(new ClonePart("key1", 0, 5, 204), new ClonePart("key2", 0, 15, 214)),
       newCloneGroup(new ClonePart("key1", 0, 15, 214), new ClonePart("key3", 0, 15, 214)));
-    SonarEngine.save(context, resource, groups);
+    SonarEngine.save(context, inputFile, groups);

-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_FILES, 1d);
-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, 2d);
-    verify(context).saveMeasure(resource, CoreMetrics.DUPLICATED_LINES, 210d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_FILES, 1d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_BLOCKS, 2d);
+    verify(context).saveMeasure(inputFile, CoreMetrics.DUPLICATED_LINES, 210d);
     verify(context).saveMeasure(
-      eq(resource),
+      eq(inputFile),
       argThat(new IsMeasure(CoreMetrics.DUPLICATIONS_DATA, "<duplications>"
         + "<g>"
         + "<b s=\"5\" l=\"200\" r=\"key1\"/>"
@@ -153,7 +135,7 @@
   @Test
   public void shouldEscapeXmlEntities() {
-    File csharpFile = new File("Loads/File Loads/Subs & Reds/SubsRedsDelivery.cs");
+    InputFile csharpFile = new InputFileBuilder(new java.io.File(""), Charsets.UTF_8, "Loads/File Loads/Subs & Reds/SubsRedsDelivery.cs").build();
     List<CloneGroup> groups = Arrays.asList(newCloneGroup(
       new ClonePart("Loads/File Loads/Subs & Reds/SubsRedsDelivery.cs", 0, 5, 204),
       new ClonePart("Loads/File Loads/Subs & Reds/SubsRedsDelivery2.cs", 0, 15, 214)));