import org.sonar.batch.protocol.output.BatchReport.Scm;
import org.sonar.batch.protocol.output.BatchReport.Scm.Changeset;
import org.sonar.batch.protocol.output.BatchReportReader;
-import org.sonar.batch.report.PublishReportJob;
+import org.sonar.batch.report.ReportPublisher;
import javax.annotation.Nullable;
public abstract class AbstractNewCoverageFileAnalyzer implements Decorator {
private final List<PeriodStruct> structs;
- private final PublishReportJob publishReportJob;
+ private final ReportPublisher publishReportJob;
private final ResourceCache resourceCache;
- public AbstractNewCoverageFileAnalyzer(TimeMachineConfiguration timeMachineConfiguration, PublishReportJob publishReportJob, ResourceCache resourceCache) {
+ public AbstractNewCoverageFileAnalyzer(TimeMachineConfiguration timeMachineConfiguration, ReportPublisher publishReportJob, ResourceCache resourceCache) {
this(Lists.<PeriodStruct>newArrayList(), publishReportJob, resourceCache);
for (Period period : timeMachineConfiguration.periods()) {
structs.add(new PeriodStruct(period.getIndex(), period.getDate()));
}
}
- AbstractNewCoverageFileAnalyzer(List<PeriodStruct> structs, PublishReportJob publishReportJob, ResourceCache resourceCache) {
+ AbstractNewCoverageFileAnalyzer(List<PeriodStruct> structs, ReportPublisher publishReportJob, ResourceCache resourceCache) {
this.resourceCache = resourceCache;
this.publishReportJob = publishReportJob;
this.structs = structs;
import org.sonar.api.measures.Metric;
import org.sonar.batch.components.TimeMachineConfiguration;
import org.sonar.batch.index.ResourceCache;
-import org.sonar.batch.report.PublishReportJob;
+import org.sonar.batch.report.ReportPublisher;
import java.util.List;
public class NewCoverageFileAnalyzer extends AbstractNewCoverageFileAnalyzer {
- public NewCoverageFileAnalyzer(TimeMachineConfiguration timeMachineConfiguration, PublishReportJob publishReportJob, ResourceCache resourceCache) {
+ public NewCoverageFileAnalyzer(TimeMachineConfiguration timeMachineConfiguration, ReportPublisher publishReportJob, ResourceCache resourceCache) {
super(timeMachineConfiguration, publishReportJob, resourceCache);
}
- NewCoverageFileAnalyzer(List<PeriodStruct> structs, PublishReportJob publishReportJob, ResourceCache resourceCache) {
+ NewCoverageFileAnalyzer(List<PeriodStruct> structs, ReportPublisher publishReportJob, ResourceCache resourceCache) {
super(structs, publishReportJob, resourceCache);
}
import org.sonar.api.measures.Metric;
import org.sonar.batch.components.TimeMachineConfiguration;
import org.sonar.batch.index.ResourceCache;
-import org.sonar.batch.report.PublishReportJob;
+import org.sonar.batch.report.ReportPublisher;
public class NewItCoverageFileAnalyzer extends AbstractNewCoverageFileAnalyzer {
- public NewItCoverageFileAnalyzer(TimeMachineConfiguration timeMachineConfiguration, PublishReportJob publishReportJob, ResourceCache resourceCache) {
+ public NewItCoverageFileAnalyzer(TimeMachineConfiguration timeMachineConfiguration, ReportPublisher publishReportJob, ResourceCache resourceCache) {
super(timeMachineConfiguration, publishReportJob, resourceCache);
}
import org.sonar.api.measures.Metric;
import org.sonar.batch.components.TimeMachineConfiguration;
import org.sonar.batch.index.ResourceCache;
-import org.sonar.batch.report.PublishReportJob;
+import org.sonar.batch.report.ReportPublisher;
public class NewOverallCoverageFileAnalyzer extends AbstractNewCoverageFileAnalyzer {
- public NewOverallCoverageFileAnalyzer(TimeMachineConfiguration timeMachineConfiguration, PublishReportJob publishReportJob, ResourceCache resourceCache) {
+ public NewOverallCoverageFileAnalyzer(TimeMachineConfiguration timeMachineConfiguration, ReportPublisher publishReportJob, ResourceCache resourceCache) {
super(timeMachineConfiguration, publishReportJob, resourceCache);
}
import org.sonar.batch.protocol.output.BatchReport.Scm;
import org.sonar.batch.protocol.output.BatchReport.Scm.Changeset;
import org.sonar.batch.protocol.output.BatchReportWriter;
-import org.sonar.batch.report.PublishReportJob;
+import org.sonar.batch.report.ReportPublisher;
import java.text.ParseException;
import java.text.SimpleDateFormat;
List<AbstractNewCoverageFileAnalyzer.PeriodStruct> structs = Arrays.asList(
new AbstractNewCoverageFileAnalyzer.PeriodStruct(1, newDate("2009-12-25")),
new AbstractNewCoverageFileAnalyzer.PeriodStruct(3, newDate("2011-02-18")));
- PublishReportJob publishReportJob = mock(PublishReportJob.class);
+ ReportPublisher publishReportJob = mock(ReportPublisher.class);
java.io.File reportBaseDir = temp.newFolder();
when(publishReportJob.getReportDir()).thenReturn(reportBaseDir);
writer = new BatchReportWriter(reportBaseDir);
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.fs.internal.FileMetadata;
import org.sonar.api.batch.sensor.highlighting.TypeOfText;
import org.sonar.api.batch.sensor.internal.DefaultSensorDescriptor;
import org.sonar.api.batch.sensor.internal.SensorContextTester;
import java.io.File;
import java.io.IOException;
+import java.io.StringReader;
import static org.assertj.core.api.Assertions.assertThat;
public void testExecution() throws IOException {
File symbol = new File(baseDir, "src/foo.xoo.highlighting");
FileUtils.write(symbol, "1:4:k\n12:15:cppd\n\n#comment");
- DefaultInputFile inputFile = new DefaultInputFile("foo", "src/foo.xoo").setLanguage("xoo").setLastValidOffset(100);
+ DefaultInputFile inputFile = new DefaultInputFile("foo", "src/foo.xoo").setLanguage("xoo")
+ .initMetadata(new FileMetadata().readMetadata(new StringReader(" xoo\nazertyazer\nfoo")));
context.fileSystem().add(inputFile);
sensor.execute(context);
- assertThat(context.highlightingTypeFor("foo:src/foo.xoo", 2)).containsOnly(TypeOfText.KEYWORD);
- assertThat(context.highlightingTypeFor("foo:src/foo.xoo", 13)).containsOnly(TypeOfText.CPP_DOC);
+ assertThat(context.highlightingTypeAt("foo:src/foo.xoo", 1, 2)).containsOnly(TypeOfText.KEYWORD);
+ assertThat(context.highlightingTypeAt("foo:src/foo.xoo", 2, 8)).containsOnly(TypeOfText.CPP_DOC);
}
}
import org.sonar.batch.scan.report.*;
import org.sonar.batch.scm.ScmConfiguration;
import org.sonar.batch.scm.ScmSensor;
+import org.sonar.batch.source.CodeColorizerSensor;
import org.sonar.batch.source.LinesSensor;
import org.sonar.core.computation.dbcleaner.DefaultPurgeTask;
import org.sonar.core.computation.dbcleaner.period.DefaultPeriodCleaner;
ScmSensor.class,
LinesSensor.class,
+ CodeColorizerSensor.class,
// Issues tracking
IssueTracking.class,
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.batch.deprecated;
+
+import org.sonar.api.batch.fs.InputFile.Type;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.component.Component;
+import org.sonar.api.resources.Qualifiers;
+
+/**
+ * Adapter that exposes a {@link DefaultInputFile} through the deprecated
+ * {@link Component} interface, delegating every accessor to the wrapped file.
+ */
+public class InputFileComponent implements Component {
+
+ private final DefaultInputFile inputFile;
+
+ public InputFileComponent(DefaultInputFile inputFile) {
+ this.inputFile = inputFile;
+ }
+
+ @Override
+ public String key() {
+ // Delegates to the input file key.
+ return inputFile.key();
+ }
+
+ @Override
+ public String path() {
+ // The file's relative path, as reported by the input file.
+ return inputFile.relativePath();
+ }
+
+ @Override
+ public String name() {
+ // Short name: the file name without its directory part.
+ return inputFile.file().getName();
+ }
+
+ @Override
+ public String longName() {
+ // Long name: same value as path() — the relative path.
+ return inputFile.relativePath();
+ }
+
+ @Override
+ public String qualifier() {
+ // MAIN sources are regular files; any other type is treated as a unit test file.
+ return inputFile.type() == Type.MAIN ? Qualifiers.FILE : Qualifiers.UNIT_TEST_FILE;
+ }
+
+}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.batch.deprecated.perspectives;
+
+import org.sonar.core.component.PerspectiveBuilder;
+import org.sonar.core.component.PerspectiveNotFoundException;
+import org.sonar.core.component.ResourceComponent;
+
+import com.google.common.collect.Maps;
+import org.sonar.api.batch.SonarIndex;
+import org.sonar.api.batch.fs.InputDir;
+import org.sonar.api.batch.fs.InputFile;
+import org.sonar.api.batch.fs.InputPath;
+import org.sonar.api.component.Component;
+import org.sonar.api.component.Perspective;
+import org.sonar.api.component.ResourcePerspectives;
+import org.sonar.api.resources.Directory;
+import org.sonar.api.resources.File;
+import org.sonar.api.resources.Resource;
+
+import javax.annotation.CheckForNull;
+
+import java.util.Map;
+
+/**
+ * Batch-side implementation of {@link ResourcePerspectives}: perspectives are created by
+ * the registered {@link PerspectiveBuilder}s, looked up by their perspective class.
+ */
+public class BatchPerspectives implements ResourcePerspectives {
+
+ private final Map<Class<?>, PerspectiveBuilder<?>> builders = Maps.newHashMap();
+ private final SonarIndex resourceIndex;
+
+ public BatchPerspectives(PerspectiveBuilder[] builders, SonarIndex resourceIndex) {
+ this.resourceIndex = resourceIndex;
+ for (PerspectiveBuilder builder : builders) {
+ // TODO check duplications
+ this.builders.put(builder.getPerspectiveClass(), builder);
+ }
+ }
+
+ /**
+ * @return the requested perspective of the component, or null when the component has no key
+ */
+ @Override
+ @CheckForNull
+ public <P extends Perspective> P as(Class<P> perspectiveClass, Component component) {
+ if (component.key() == null) {
+ return null;
+ }
+ PerspectiveBuilder<P> builder = builderFor(perspectiveClass);
+ return builder.loadPerspective(perspectiveClass, component);
+ }
+
+ /**
+ * @return the requested perspective of the resource, or null when the resource is not indexed
+ */
+ @Override
+ @CheckForNull
+ public <P extends Perspective> P as(Class<P> perspectiveClass, Resource resource) {
+ Resource indexedResource = resource;
+ if (resource.getEffectiveKey() == null) {
+ // Not indexed yet: resolve through the index so the effective key is available
+ indexedResource = resourceIndex.getResource(resource);
+ }
+ if (indexedResource != null) {
+ return as(perspectiveClass, new ResourceComponent(indexedResource));
+ }
+ return null;
+ }
+
+ @Override
+ public <P extends Perspective> P as(Class<P> perspectiveClass, InputPath inputPath) {
+ // Map the input path onto the matching deprecated Resource type
+ Resource r;
+ if (inputPath instanceof InputDir) {
+ r = Directory.create(((InputDir) inputPath).relativePath());
+ } else if (inputPath instanceof InputFile) {
+ r = File.create(((InputFile) inputPath).relativePath());
+ } else {
+ throw new IllegalArgumentException("Unknown input path type: " + inputPath);
+ }
+ return as(perspectiveClass, r);
+ }
+
+ private <T extends Perspective> PerspectiveBuilder<T> builderFor(Class<T> clazz) {
+ // Cast relies on builders being registered under their own perspective class
+ PerspectiveBuilder<T> builder = (PerspectiveBuilder<T>) builders.get(clazz);
+ if (builder == null) {
+ throw new PerspectiveNotFoundException("Perspective class is not registered: " + clazz);
+ }
+ return builder;
+ }
+}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.highlighting;
-
-import org.sonar.api.batch.sensor.highlighting.internal.SyntaxHighlightingRule;
-import org.sonar.batch.index.Data;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-public class SyntaxHighlightingData implements Data {
-
- public static final String FIELD_SEPARATOR = ",";
- public static final String RULE_SEPARATOR = ";";
-
- private List<SyntaxHighlightingRule> syntaxHighlightingRuleSet;
-
- public SyntaxHighlightingData(Collection<SyntaxHighlightingRule> syntaxHighlightingRuleSet) {
- this.syntaxHighlightingRuleSet = new ArrayList<SyntaxHighlightingRule>(syntaxHighlightingRuleSet);
- }
-
- public List<SyntaxHighlightingRule> syntaxHighlightingRuleSet() {
- return syntaxHighlightingRuleSet;
- }
-
- @Override
- public String writeString() {
- StringBuilder sb = new StringBuilder();
- for (SyntaxHighlightingRule highlightingRule : syntaxHighlightingRuleSet) {
- if (sb.length() > 0) {
- sb.append(RULE_SEPARATOR);
- }
- sb.append(highlightingRule.getStartPosition())
- .append(FIELD_SEPARATOR)
- .append(highlightingRule.getEndPosition())
- .append(FIELD_SEPARATOR)
- .append(highlightingRule.getTextType().cssClass());
- }
-
- return sb.toString();
- }
-
-}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.highlighting;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Ordering;
-import com.google.common.collect.Sets;
-import org.sonar.api.batch.sensor.highlighting.TypeOfText;
-import org.sonar.api.batch.sensor.highlighting.internal.SyntaxHighlightingRule;
-
-import javax.annotation.Nullable;
-
-import java.util.Iterator;
-import java.util.Set;
-
-public class SyntaxHighlightingDataBuilder {
-
- private Set<SyntaxHighlightingRule> syntaxHighlightingRuleSet;
-
- public SyntaxHighlightingDataBuilder() {
- syntaxHighlightingRuleSet = Sets.newTreeSet(new Ordering<SyntaxHighlightingRule>() {
- @Override
- public int compare(@Nullable SyntaxHighlightingRule left,
- @Nullable SyntaxHighlightingRule right) {
- int result = left.getStartPosition() - right.getStartPosition();
- if (result == 0) {
- result = right.getEndPosition() - left.getEndPosition();
- }
- return result;
- }
- });
- }
-
- @VisibleForTesting
- public Set<SyntaxHighlightingRule> getSyntaxHighlightingRuleSet() {
- return syntaxHighlightingRuleSet;
- }
-
- public SyntaxHighlightingDataBuilder registerHighlightingRule(int startOffset, int endOffset, TypeOfText typeOfText) {
- SyntaxHighlightingRule syntaxHighlightingRule = SyntaxHighlightingRule.create(startOffset, endOffset,
- typeOfText);
- this.syntaxHighlightingRuleSet.add(syntaxHighlightingRule);
- return this;
- }
-
- public SyntaxHighlightingData build() {
- checkOverlappingBoudaries();
- return new SyntaxHighlightingData(syntaxHighlightingRuleSet);
- }
-
- private void checkOverlappingBoudaries() {
- if (syntaxHighlightingRuleSet.size() > 1) {
- Iterator<SyntaxHighlightingRule> it = syntaxHighlightingRuleSet.iterator();
- SyntaxHighlightingRule previous = it.next();
- while (it.hasNext()) {
- SyntaxHighlightingRule current = it.next();
- if (previous.getEndPosition() > current.getStartPosition() && !(previous.getEndPosition() >= current.getEndPosition())) {
- String errorMsg = String.format("Cannot register highlighting rule for characters from %s to %s as it " +
- "overlaps at least one existing rule", current.getStartPosition(), current.getEndPosition());
- throw new IllegalStateException(errorMsg);
- }
- previous = current;
- }
- }
- }
-
-}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.highlighting;
-
-import com.persistit.Value;
-import com.persistit.encoding.CoderContext;
-import com.persistit.encoding.ValueCoder;
-import org.sonar.api.batch.sensor.highlighting.internal.SyntaxHighlightingRule;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class SyntaxHighlightingDataValueCoder implements ValueCoder {
-
- private SyntaxHighlightingRuleValueCoder rulesCoder = new SyntaxHighlightingRuleValueCoder();
-
- @Override
- public void put(Value value, Object object, CoderContext context) {
- SyntaxHighlightingData data = (SyntaxHighlightingData) object;
- value.put(data.syntaxHighlightingRuleSet().size());
- for (SyntaxHighlightingRule rule : data.syntaxHighlightingRuleSet()) {
- rulesCoder.put(value, rule, context);
- }
- }
-
- @Override
- public Object get(Value value, Class clazz, CoderContext context) {
- int count = value.getInt();
- List<SyntaxHighlightingRule> rules = new ArrayList<SyntaxHighlightingRule>(count);
- for (int i = 0; i < count; i++) {
- rules.add((SyntaxHighlightingRule) rulesCoder.get(value, SyntaxHighlightingRule.class, context));
- }
- return new SyntaxHighlightingData(rules);
- }
-}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.highlighting;
-
-import com.persistit.Value;
-import com.persistit.encoding.CoderContext;
-import com.persistit.encoding.ValueCoder;
-import org.sonar.api.batch.sensor.highlighting.TypeOfText;
-import org.sonar.api.batch.sensor.highlighting.internal.SyntaxHighlightingRule;
-
-class SyntaxHighlightingRuleValueCoder implements ValueCoder {
-
- @Override
- public void put(Value value, Object object, CoderContext context) {
- SyntaxHighlightingRule rule = (SyntaxHighlightingRule) object;
- value.put(rule.getStartPosition());
- value.put(rule.getEndPosition());
- value.put(rule.getTextType().ordinal());
- }
-
- @Override
- public Object get(Value value, Class clazz, CoderContext context) {
- int startPosition = value.getInt();
- int endPosition = value.getInt();
- TypeOfText type = TypeOfText.values()[value.getInt()];
- return SyntaxHighlightingRule.create(startPosition, endPosition, type);
- }
-}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-@ParametersAreNonnullByDefault
-package org.sonar.batch.highlighting;
-
-import javax.annotation.ParametersAreNonnullByDefault;
*/
package org.sonar.batch.index;
+import org.sonar.api.batch.fs.InputPath;
import org.sonar.api.database.model.Snapshot;
import org.sonar.api.resources.Resource;
import org.sonar.api.resources.ResourceUtils;
private Snapshot s;
private final BatchResource parent;
private final Collection<BatchResource> children = new ArrayList<BatchResource>();
+ private InputPath inputPath;
public BatchResource(int batchId, Resource r, @Nullable BatchResource parent) {
this.batchId = batchId;
public boolean isDir() {
return ResourceUtils.isDirectory(r);
}
+
+ public BatchResource setInputPath(InputPath inputPath) {
+ this.inputPath = inputPath;
+ return this;
+ }
+
+ @CheckForNull
+ public InputPath inputPath() {
+ return inputPath;
+ }
}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.index;
-
-import org.sonar.api.BatchComponent;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
-import org.sonar.batch.highlighting.SyntaxHighlightingDataValueCoder;
-
-import javax.annotation.CheckForNull;
-
-public class ComponentDataCache implements BatchComponent {
- private final Cache cache;
-
- public ComponentDataCache(Caches caches) {
- caches.registerValueCoder(SyntaxHighlightingData.class, new SyntaxHighlightingDataValueCoder());
- cache = caches.createCache("componentData");
- }
-
- public <D extends Data> ComponentDataCache setData(String componentKey, String dataType, D data) {
- cache.put(componentKey, dataType, data);
- return this;
- }
-
- public ComponentDataCache setStringData(String componentKey, String dataType, String data) {
- return setData(componentKey, dataType, new StringData(data));
- }
-
- @CheckForNull
- public <D extends Data> D getData(String componentKey, String dataType) {
- return (D) cache.get(componentKey, dataType);
- }
-
- @CheckForNull
- public String getStringData(String componentKey, String dataType) {
- Data data = (Data) cache.get(componentKey, dataType);
- return data == null ? null : ((StringData) data).data();
- }
-
- public <D extends Data> Iterable<Cache.Entry<D>> entries(String componentKey) {
- return cache.entries(componentKey);
- }
-}
import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.batch.sensor.duplication.Duplication;
import org.sonar.api.batch.sensor.duplication.internal.DefaultDuplication;
-import org.sonar.api.batch.sensor.highlighting.internal.SyntaxHighlightingRule;
import org.sonar.api.measures.CoreMetrics;
import org.sonar.api.measures.Measure;
-import org.sonar.api.source.Symbol;
import org.sonar.api.utils.KeyValueFormat;
import org.sonar.batch.duplication.DuplicationCache;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
+import org.sonar.batch.protocol.output.BatchReport.Range;
import org.sonar.batch.protocol.output.BatchReport.Scm;
import org.sonar.batch.protocol.output.BatchReport.Scm.Changeset;
-import org.sonar.batch.protocol.output.BatchReportReader;
-import org.sonar.batch.report.PublishReportJob;
-import org.sonar.batch.scan.filesystem.InputFileMetadata;
+import org.sonar.batch.protocol.output.BatchReport.Symbols;
+import org.sonar.batch.protocol.output.BatchReport.SyntaxHighlighting.HighlightingRule;
+import org.sonar.batch.protocol.output.*;
+import org.sonar.batch.report.BatchReportUtils;
+import org.sonar.batch.report.ReportPublisher;
import org.sonar.batch.scan.measure.MeasureCache;
-import org.sonar.batch.source.CodeColorizers;
-import org.sonar.batch.symbol.SymbolData;
-import org.sonar.core.source.SnapshotDataTypes;
import org.sonar.core.source.db.FileSourceDto;
import org.sonar.server.source.db.FileSourceDb;
import org.sonar.server.source.db.FileSourceDb.Data.Builder;
private static final String BOM = "\uFEFF";
private final MeasureCache measureCache;
- private final ComponentDataCache componentDataCache;
private final DuplicationCache duplicationCache;
- private final CodeColorizers codeColorizers;
- private final PublishReportJob publishReportJob;
+ private final ReportPublisher reportPublisher;
private final ResourceCache resourceCache;
- public SourceDataFactory(MeasureCache measureCache, ComponentDataCache componentDataCache,
- DuplicationCache duplicationCache, CodeColorizers codeColorizers, PublishReportJob publishReportJob, ResourceCache resourceCache) {
+ public SourceDataFactory(MeasureCache measureCache, DuplicationCache duplicationCache, ReportPublisher reportPublisher, ResourceCache resourceCache) {
this.measureCache = measureCache;
- this.componentDataCache = componentDataCache;
this.duplicationCache = duplicationCache;
- this.codeColorizers = codeColorizers;
- this.publishReportJob = publishReportJob;
+ this.reportPublisher = reportPublisher;
this.resourceCache = resourceCache;
}
- public byte[] consolidateData(DefaultInputFile inputFile, InputFileMetadata metadata) throws IOException {
+ public byte[] consolidateData(DefaultInputFile inputFile) throws IOException {
FileSourceDb.Data.Builder dataBuilder = createForSource(inputFile);
applyLineMeasures(inputFile, dataBuilder);
applyScm(inputFile, dataBuilder);
applyDuplications(inputFile.key(), dataBuilder);
- applyHighlighting(inputFile, metadata, dataBuilder);
- applySymbolReferences(inputFile, metadata, dataBuilder);
+ applyHighlighting(inputFile, dataBuilder);
+ applySymbolReferences(inputFile, dataBuilder);
return FileSourceDto.encodeData(dataBuilder.build());
}
}
void applyScm(DefaultInputFile inputFile, Builder dataBuilder) {
- BatchReportReader reader = new BatchReportReader(publishReportJob.getReportDir());
+ BatchReportReader reader = new BatchReportReader(reportPublisher.getReportDir());
Scm componentScm = reader.readComponentScm(resourceCache.get(inputFile).batchId());
if (componentScm != null) {
for (int i = 0; i < componentScm.getChangesetIndexByLineCount(); i++) {
void apply(String value, FileSourceDb.Line.Builder lineBuilder);
}
- void applyHighlighting(DefaultInputFile inputFile, InputFileMetadata metadata, FileSourceDb.Data.Builder to) {
- SyntaxHighlightingData highlighting = componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING);
- String language = inputFile.language();
- if (highlighting == null && language != null) {
- highlighting = codeColorizers.toSyntaxHighlighting(inputFile.file(), inputFile.charset(), language);
- }
- if (highlighting == null) {
+ void applyHighlighting(DefaultInputFile inputFile, FileSourceDb.Data.Builder to) {
+ BatchReportReader reader = new BatchReportReader(reportPublisher.getReportDir());
+ List<HighlightingRule> highlightingRules = reader.readComponentSyntaxHighlighting(resourceCache.get(inputFile).batchId());
+ if (highlightingRules.isEmpty()) {
return;
}
StringBuilder[] highlightingPerLine = new StringBuilder[inputFile.lines()];
RuleItemWriter ruleItemWriter = new RuleItemWriter();
int currentLineIdx = 1;
- for (SyntaxHighlightingRule rule : highlighting.syntaxHighlightingRuleSet()) {
- while (currentLineIdx < inputFile.lines() && rule.getStartPosition() >= metadata.originalLineOffsets()[currentLineIdx]) {
+ for (HighlightingRule rule : highlightingRules) {
+ while (currentLineIdx < inputFile.lines() && rule.getRange().getStartLine() > currentLineIdx) {
// This rule starts on another line so advance
currentLineIdx++;
}
// Now we know current rule starts on current line
- writeDataPerLine(metadata.originalLineOffsets(), rule, rule.getStartPosition(), rule.getEndPosition(), highlightingPerLine, currentLineIdx, ruleItemWriter);
+ writeDataPerLine(inputFile.originalLineOffsets(), rule, rule.getRange(), highlightingPerLine, ruleItemWriter);
}
for (int i = 0; i < highlightingPerLine.length; i++) {
StringBuilder sb = highlightingPerLine[i];
}
}
- void applySymbolReferences(DefaultInputFile file, InputFileMetadata metadata, FileSourceDb.Data.Builder to) {
- SymbolData symbolRefs = componentDataCache.getData(file.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING);
- if (symbolRefs != null) {
- StringBuilder[] refsPerLine = new StringBuilder[file.lines()];
- int symbolId = 1;
- List<Symbol> symbols = new ArrayList<Symbol>(symbolRefs.referencesBySymbol().keySet());
- // Sort symbols to avoid false variation that would lead to an unnecessary update
- Collections.sort(symbols, new Comparator<Symbol>() {
- @Override
- public int compare(Symbol o1, Symbol o2) {
- return o1.getDeclarationStartOffset() - o2.getDeclarationStartOffset();
- }
- });
- for (Symbol symbol : symbols) {
- int declarationStartOffset = symbol.getDeclarationStartOffset();
- int declarationEndOffset = symbol.getDeclarationEndOffset();
- int length = declarationEndOffset - declarationStartOffset;
- addSymbol(symbolId, declarationStartOffset, declarationEndOffset, metadata.originalLineOffsets(), refsPerLine);
- for (Integer referenceStartOffset : symbolRefs.referencesBySymbol().get(symbol)) {
- if (referenceStartOffset == declarationStartOffset) {
- // Ignore old API that used to store reference as first declaration
- continue;
- }
- addSymbol(symbolId, referenceStartOffset, referenceStartOffset + length, metadata.originalLineOffsets(), refsPerLine);
+ void applySymbolReferences(DefaultInputFile inputFile, FileSourceDb.Data.Builder to) {
+ BatchReportReader reader = new BatchReportReader(reportPublisher.getReportDir());
+ List<Symbols.Symbol> symbols = new ArrayList<Symbols.Symbol>(reader.readComponentSymbols(resourceCache.get(inputFile).batchId()));
+ if (symbols.isEmpty()) {
+ return;
+ }
+ StringBuilder[] refsPerLine = new StringBuilder[inputFile.lines()];
+ int symbolId = 1;
+ // Sort symbols to avoid false variation that would lead to an unnecessary update
+ Collections.sort(symbols, new Comparator<Symbols.Symbol>() {
+ @Override
+ public int compare(Symbols.Symbol o1, Symbols.Symbol o2) {
+ if (o1.getDeclaration().getStartLine() == o2.getDeclaration().getStartLine()) {
+ return Integer.compare(o1.getDeclaration().getStartOffset(), o2.getDeclaration().getStartOffset());
+ } else {
+ return Integer.compare(o1.getDeclaration().getStartLine(), o2.getDeclaration().getStartLine());
}
- symbolId++;
}
- for (int i = 0; i < refsPerLine.length; i++) {
- StringBuilder sb = refsPerLine[i];
- if (sb != null) {
- to.getLinesBuilder(i).setSymbols(sb.toString());
- }
+ });
+ for (Symbols.Symbol symbol : symbols) {
+ addSymbol(symbolId, symbol.getDeclaration(), inputFile.originalLineOffsets(), refsPerLine);
+ for (Range reference : symbol.getReferenceList()) {
+ addSymbol(symbolId, reference, inputFile.originalLineOffsets(), refsPerLine);
}
+ symbolId++;
}
- }
-
- private void addSymbol(int symbolId, int startOffset, int endOffset, int[] originalLineOffsets, StringBuilder[] result) {
- int startLine = binarySearchLine(startOffset, originalLineOffsets);
- writeDataPerLine(originalLineOffsets, symbolId, startOffset, endOffset, result, startLine, new SymbolItemWriter());
- }
-
- private int binarySearchLine(int declarationStartOffset, int[] originalLineOffsets) {
- int begin = 0;
- int end = originalLineOffsets.length - 1;
- while (begin < end) {
- int mid = (int) Math.round((begin + end) / 2D);
- if (declarationStartOffset < originalLineOffsets[mid]) {
- end = mid - 1;
- } else {
- begin = mid;
+ for (int i = 0; i < refsPerLine.length; i++) {
+ StringBuilder sb = refsPerLine[i];
+ if (sb != null) {
+ to.getLinesBuilder(i).setSymbols(sb.toString());
}
}
- return begin + 1;
}
- private <G> void writeDataPerLine(int[] originalLineOffsets, G item, int globalStartOffset, int globalEndOffset, StringBuilder[] dataPerLine, int startLine,
- RangeItemWriter<G> writer) {
- int currentLineIdx = startLine;
- // We know current item starts on current line
- long ruleStartOffsetCurrentLine = globalStartOffset;
- while (currentLineIdx < originalLineOffsets.length && globalEndOffset >= originalLineOffsets[currentLineIdx]) {
+ private void addSymbol(int symbolId, Range range, int[] originalLineOffsets, StringBuilder[] result) {
+ writeDataPerLine(originalLineOffsets, symbolId, range, result, new SymbolItemWriter());
+ }
+
+ private <G> void writeDataPerLine(int[] originalLineOffsets, G item, Range range, StringBuilder[] dataPerLine, RangeItemWriter<G> writer) {
+ int currentLineIdx = range.getStartLine();
+ long ruleStartOffsetCurrentLine = range.getStartOffset();
+ while (currentLineIdx < dataPerLine.length && range.getEndLine() > currentLineIdx) {
// item continue on next line so write current line and continue on next line with same item
- writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine - originalLineOffsets[currentLineIdx - 1], originalLineOffsets[currentLineIdx]
- - originalLineOffsets[currentLineIdx - 1], writer);
+ writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine, lineLength(originalLineOffsets, currentLineIdx), writer);
currentLineIdx++;
- ruleStartOffsetCurrentLine = originalLineOffsets[currentLineIdx - 1];
+ ruleStartOffsetCurrentLine = 0;
}
// item ends on current line
- writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine - originalLineOffsets[currentLineIdx - 1], globalEndOffset
- - originalLineOffsets[currentLineIdx - 1], writer);
+ writeItem(item, dataPerLine, currentLineIdx, ruleStartOffsetCurrentLine, range.getEndOffset(), writer);
+ }
+
+ private int lineLength(int[] originalLineOffsets, int currentLineIdx) {
+ return originalLineOffsets[currentLineIdx]
+ - originalLineOffsets[currentLineIdx - 1];
}
private <G> void writeItem(G item, StringBuilder[] dataPerLine, int currentLineIdx, long startLineOffset, long endLineOffset, RangeItemWriter<G> writer) {
void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, G item);
}
- private static class RuleItemWriter implements RangeItemWriter<SyntaxHighlightingRule> {
+ private static class RuleItemWriter implements RangeItemWriter<HighlightingRule> {
@Override
- public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, SyntaxHighlightingRule item) {
+ public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, HighlightingRule item) {
if (currentLineSb.length() > 0) {
- currentLineSb.append(SyntaxHighlightingData.RULE_SEPARATOR);
+ currentLineSb.append(';');
}
currentLineSb.append(startLineOffset)
- .append(SyntaxHighlightingData.FIELD_SEPARATOR)
+ .append(',')
.append(endLineOffset)
- .append(SyntaxHighlightingData.FIELD_SEPARATOR)
- .append(item.getTextType().cssClass());
+ .append(',')
+ .append(BatchReportUtils.toCssClass(item.getType()));
}
}
@Override
public void writeItem(StringBuilder currentLineSb, long startLineOffset, long endLineOffset, Integer symbolId) {
if (currentLineSb.length() > 0) {
- currentLineSb.append(SymbolData.SYMBOL_SEPARATOR);
+ currentLineSb.append(';');
}
currentLineSb.append(startLineOffset)
- .append(SymbolData.FIELD_SEPARATOR)
+ .append(',')
.append(endLineOffset)
- .append(SymbolData.FIELD_SEPARATOR)
+ .append(',')
.append(symbolId);
}
}
*/
package org.sonar.batch.index;
+import org.sonar.api.batch.fs.internal.FileMetadata;
+import org.sonar.api.batch.fs.internal.FileMetadata.LineHashConsumer;
+
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.ibatis.session.ResultContext;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.utils.System2;
import org.sonar.batch.ProjectTree;
-import org.sonar.batch.scan.filesystem.FileMetadata;
-import org.sonar.batch.scan.filesystem.FileMetadata.LineHashConsumer;
-import org.sonar.batch.scan.filesystem.InputFileMetadata;
import org.sonar.batch.scan.filesystem.InputPathCache;
import org.sonar.core.persistence.DbSession;
import org.sonar.core.persistence.MyBatis;
private void persist(DbSession session, FileSourceMapper mapper, DefaultInputFile inputFile, Map<String, FileSourceDto> previousDtosByUuid) {
String fileUuid = resourceCache.get(inputFile).resource().getUuid();
- InputFileMetadata metadata = inputPathCache.getFileMetadata(inputFile);
- byte[] data = computeData(inputFile, metadata);
+ byte[] data = computeData(inputFile);
String dataHash = DigestUtils.md5Hex(data);
FileSourceDto previousDto = previousDtosByUuid.get(fileUuid);
if (previousDto == null) {
.setFileUuid(fileUuid)
.setBinaryData(data)
.setDataHash(dataHash)
- .setSrcHash(metadata.hash())
+ .setSrcHash(inputFile.hash())
.setLineHashes(lineHashesAsMd5Hex(inputFile))
.setCreatedAt(system2.now())
.setUpdatedAt(0L);
} else {
// Update only if data_hash has changed or if src_hash is missing (progressive migration)
boolean binaryDataUpdated = !dataHash.equals(previousDto.getDataHash());
- boolean srcHashUpdated = !metadata.hash().equals(previousDto.getSrcHash());
+ boolean srcHashUpdated = !inputFile.hash().equals(previousDto.getSrcHash());
if (binaryDataUpdated || srcHashUpdated) {
previousDto
.setBinaryData(data)
.setDataHash(dataHash)
- .setSrcHash(metadata.hash())
+ .setSrcHash(inputFile.hash())
.setLineHashes(lineHashesAsMd5Hex(inputFile));
// Optimization only change updated at when updating binary data to avoid unecessary indexation by E/S
if (binaryDataUpdated) {
return result.toString();
}
- private byte[] computeData(DefaultInputFile inputFile, InputFileMetadata metadata) {
+ private byte[] computeData(DefaultInputFile inputFile) {
try {
- return dataFactory.consolidateData(inputFile, metadata);
+ return dataFactory.consolidateData(inputFile);
} catch (IOException e) {
throw new IllegalStateException("Fail to read file " + inputFile, e);
}
@CheckForNull
@Override
- protected Issuable loadPerspective(Class<Issuable> perspectiveClass, Component component) {
+ public Issuable loadPerspective(Class<Issuable> perspectiveClass, Component component) {
boolean supported = true;
if (component instanceof ResourceComponent) {
supported = Scopes.isHigherThanOrEquals(((ResourceComponent) component).scope(), Scopes.FILE);
*/
package org.sonar.batch.issue.tracking;
+import org.sonar.api.batch.fs.internal.FileMetadata;
+import org.sonar.api.batch.fs.internal.FileMetadata.LineHashConsumer;
+
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.lang.ObjectUtils;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
-import org.sonar.batch.scan.filesystem.FileMetadata;
-import org.sonar.batch.scan.filesystem.FileMetadata.LineHashConsumer;
import javax.annotation.Nullable;
import org.sonar.batch.issue.tracking.ServerLineHashesLoader;
import org.sonar.batch.protocol.input.*;
import org.sonar.batch.protocol.input.BatchInput.ServerIssue;
-import org.sonar.batch.report.PublishReportJob;
+import org.sonar.batch.report.ReportPublisher;
import org.sonar.batch.repository.GlobalRepositoriesLoader;
import org.sonar.batch.repository.ProjectRepositoriesLoader;
import org.sonar.batch.repository.ServerIssuesLoader;
public static BatchMediumTesterBuilder builder() {
BatchMediumTesterBuilder builder = new BatchMediumTesterBuilder().registerCoreMetrics();
builder.bootstrapProperties.put(MEDIUM_TEST_ENABLED, "true");
- builder.bootstrapProperties.put(PublishReportJob.KEEP_REPORT_PROP_KEY, "true");
+ builder.bootstrapProperties.put(ReportPublisher.KEEP_REPORT_PROP_KEY, "true");
builder.bootstrapProperties.put(CoreProperties.WORKING_DIRECTORY, Files.createTempDir().getAbsolutePath());
return builder;
}
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.sonar.api.batch.AnalysisMode;
import org.sonar.api.batch.fs.InputDir;
import org.sonar.api.batch.fs.InputFile;
+import org.sonar.api.batch.fs.TextPointer;
+import org.sonar.api.batch.fs.TextRange;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.batch.sensor.dependency.internal.DefaultDependency;
import org.sonar.api.batch.sensor.duplication.Duplication;
import org.sonar.api.batch.sensor.highlighting.TypeOfText;
-import org.sonar.api.batch.sensor.highlighting.internal.SyntaxHighlightingRule;
import org.sonar.api.batch.sensor.measure.internal.DefaultMeasure;
import org.sonar.api.issue.Issue;
import org.sonar.api.issue.internal.DefaultIssue;
import org.sonar.api.measures.Measure;
-import org.sonar.api.source.Symbol;
import org.sonar.batch.dependency.DependencyCache;
import org.sonar.batch.duplication.DuplicationCache;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
import org.sonar.batch.index.Cache.Entry;
-import org.sonar.batch.index.ComponentDataCache;
import org.sonar.batch.issue.IssueCache;
+import org.sonar.batch.protocol.output.BatchReport.Component;
+import org.sonar.batch.protocol.output.BatchReport.Metadata;
+import org.sonar.batch.protocol.output.BatchReport.Range;
+import org.sonar.batch.protocol.output.BatchReport.Symbols.Symbol;
+import org.sonar.batch.protocol.output.BatchReport.SyntaxHighlighting.HighlightingRule;
+import org.sonar.batch.protocol.output.*;
+import org.sonar.batch.report.BatchReportUtils;
+import org.sonar.batch.report.ReportPublisher;
import org.sonar.batch.scan.ProjectScanContainer;
import org.sonar.batch.scan.filesystem.InputPathCache;
import org.sonar.batch.scan.measure.MeasureCache;
-import org.sonar.batch.symbol.SymbolData;
-import org.sonar.core.source.SnapshotDataTypes;
import javax.annotation.CheckForNull;
+import javax.annotation.Nullable;
import java.io.Serializable;
import java.util.*;
private List<org.sonar.api.batch.sensor.measure.Measure> measures = new ArrayList<>();
private Map<String, List<Duplication>> duplications = new HashMap<>();
private Map<String, InputFile> inputFiles = new HashMap<>();
+ private Map<String, Component> reportComponents = new HashMap<>();
private Map<String, InputDir> inputDirs = new HashMap<>();
- private Map<InputFile, SyntaxHighlightingData> highlightingPerFile = new HashMap<>();
- private Map<InputFile, SymbolData> symbolTablePerFile = new HashMap<>();
private Map<String, Map<String, Integer>> dependencies = new HashMap<>();
+ private BatchReportReader reader;
@Override
public void scanTaskCompleted(ProjectScanContainer container) {
issues.add(issue);
}
+ if (!container.getComponentByType(AnalysisMode.class).isPreview()) {
+ ReportPublisher reportPublisher = container.getComponentByType(ReportPublisher.class);
+ reader = new BatchReportReader(reportPublisher.getReportDir());
+ Metadata readMetadata = getReportReader().readMetadata();
+ int rootComponentRef = readMetadata.getRootComponentRef();
+ storeReportComponents(rootComponentRef, null, readMetadata.hasBranch() ? readMetadata.getBranch() : null);
+ }
+
storeFs(container);
storeMeasures(container);
- storeComponentData(container);
storeDuplication(container);
- // storeTestCases(container);
- // storeCoveragePerTest(container);
storeDependencies(container);
+ }
+
+ private void storeReportComponents(int componentRef, String parentModuleKey, @Nullable String branch) {
+ Component component = getReportReader().readComponent(componentRef);
+ if (component.hasKey()) {
+ reportComponents.put(component.getKey() + (branch != null ? ":" + branch : ""), component);
+ } else {
+ reportComponents.put(parentModuleKey + (branch != null ? ":" + branch : "") + ":" + component.getPath(), component);
+ }
+ for (int childId : component.getChildRefList()) {
+ storeReportComponents(childId, component.hasKey() ? component.getKey() : parentModuleKey, branch);
+ }
+
+ }
+ public BatchReportReader getReportReader() {
+ return reader;
}
private void storeMeasures(ProjectScanContainer container) {
}
}
- private void storeComponentData(ProjectScanContainer container) {
- ComponentDataCache componentDataCache = container.getComponentByType(ComponentDataCache.class);
- for (InputFile file : inputFiles.values()) {
- SyntaxHighlightingData highlighting = componentDataCache.getData(((DefaultInputFile) file).key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING);
- if (highlighting != null) {
- highlightingPerFile.put(file, highlighting);
- }
- SymbolData symbolTable = componentDataCache.getData(((DefaultInputFile) file).key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING);
- if (symbolTable != null) {
- symbolTablePerFile.put(file, symbolTable);
- }
- }
- }
-
private void storeFs(ProjectScanContainer container) {
InputPathCache inputFileCache = container.getComponentByType(InputPathCache.class);
for (InputFile inputPath : inputFileCache.allFiles()) {
* Get highlighting types at a given position in an inputfile
* @param charIndex 0-based offset in file
*/
- public List<TypeOfText> highlightingTypeFor(InputFile file, int charIndex) {
- SyntaxHighlightingData syntaxHighlightingData = highlightingPerFile.get(file);
- if (syntaxHighlightingData == null) {
+ public List<TypeOfText> highlightingTypeFor(InputFile file, int line, int lineOffset) {
+ int ref = reportComponents.get(((DefaultInputFile) file).key()).getRef();
+ List<HighlightingRule> syntaxHighlightingRules = getReportReader().readComponentSyntaxHighlighting(ref);
+ if (syntaxHighlightingRules.isEmpty()) {
return Collections.emptyList();
}
+ TextPointer pointer = file.newPointer(line, lineOffset);
List<TypeOfText> result = new ArrayList<TypeOfText>();
- for (SyntaxHighlightingRule sortedRule : syntaxHighlightingData.syntaxHighlightingRuleSet()) {
- if (sortedRule.getStartPosition() <= charIndex && sortedRule.getEndPosition() > charIndex) {
- result.add(sortedRule.getTextType());
+ for (HighlightingRule sortedRule : syntaxHighlightingRules) {
+ TextRange ruleRange = toRange(file, sortedRule.getRange());
+ if (ruleRange.start().compareTo(pointer) <= 0 && ruleRange.end().compareTo(pointer) > 0) {
+ result.add(BatchReportUtils.toBatchType(sortedRule.getType()));
}
}
return result;
}
+ private static TextRange toRange(InputFile file, Range reportRange) {
+ return file.newRange(file.newPointer(reportRange.getStartLine(), reportRange.getStartOffset()), file.newPointer(reportRange.getEndLine(), reportRange.getEndOffset()));
+ }
+
/**
- * Get list of all positions of a symbol in an inputfile
+ * Get list of all start positions of a symbol in an inputfile
* @param symbolStartOffset 0-based start offset for the symbol in file
* @param symbolEndOffset 0-based end offset for the symbol in file
*/
@CheckForNull
- public Set<Integer> symbolReferencesFor(InputFile file, int symbolStartOffset, int symbolEndOffset) {
- SymbolData data = symbolTablePerFile.get(file);
- if (data == null) {
- return null;
+ public List<Range> symbolReferencesFor(InputFile file, int symbolStartLine, int symbolStartLineOffset) {
+ int ref = reportComponents.get(((DefaultInputFile) file).key()).getRef();
+ List<Symbol> symbols = getReportReader().readComponentSymbols(ref);
+ if (symbols.isEmpty()) {
+ return Collections.emptyList();
}
- for (Symbol symbol : data.referencesBySymbol().keySet()) {
- if (symbol.getDeclarationStartOffset() == symbolStartOffset && symbol.getDeclarationEndOffset() == symbolEndOffset) {
- return data.referencesBySymbol().get(symbol);
+ for (Symbol symbol : symbols) {
+ if (symbol.getDeclaration().getStartLine() == symbolStartLine && symbol.getDeclaration().getStartOffset() == symbolStartLineOffset) {
+ return symbol.getReferenceList();
}
}
- return null;
+ return Collections.emptyList();
}
/**
import org.sonar.batch.index.DefaultIndex;
import org.sonar.batch.issue.ignore.scanner.IssueExclusionsLoader;
import org.sonar.batch.issue.tracking.LocalIssueTracking;
-import org.sonar.batch.report.PublishReportJob;
+import org.sonar.batch.report.ReportPublisher;
import org.sonar.batch.rule.QProfileVerifier;
import org.sonar.batch.scan.filesystem.DefaultModuleFileSystem;
import org.sonar.batch.scan.filesystem.FileSystemLogger;
private final IssueExclusionsLoader issueExclusionsLoader;
private final IssuesReports issuesReport;
private final LocalIssueTracking localIssueTracking;
- private final PublishReportJob publishReportJob;
+ private final ReportPublisher publishReportJob;
public DatabaseLessPhaseExecutor(Phases phases, InitializersExecutor initializersExecutor, SensorsExecutor sensorsExecutor,
SensorContext sensorContext, DefaultIndex index,
EventBus eventBus, ProjectInitializer pi, FileSystemLogger fsLogger, IssuesReports jsonReport, DefaultModuleFileSystem fs, QProfileVerifier profileVerifier,
- IssueExclusionsLoader issueExclusionsLoader, LocalIssueTracking localIssueTracking, PublishReportJob publishReportJob) {
+ IssueExclusionsLoader issueExclusionsLoader, LocalIssueTracking localIssueTracking, ReportPublisher publishReportJob) {
this.phases = phases;
this.initializersExecutor = initializersExecutor;
this.sensorsExecutor = sensorsExecutor;
import org.sonar.batch.index.ResourcePersister;
import org.sonar.batch.index.ScanPersister;
import org.sonar.batch.issue.ignore.scanner.IssueExclusionsLoader;
-import org.sonar.batch.report.PublishReportJob;
+import org.sonar.batch.report.ReportPublisher;
import org.sonar.batch.rule.QProfileVerifier;
import org.sonar.batch.scan.filesystem.DefaultModuleFileSystem;
import org.sonar.batch.scan.filesystem.FileSystemLogger;
private final PostJobsExecutor postJobsExecutor;
private final InitializersExecutor initializersExecutor;
private final SensorsExecutor sensorsExecutor;
- private final PublishReportJob publishReportJob;
+ private final ReportPublisher publishReportJob;
private final SensorContext sensorContext;
private final DefaultIndex index;
private final ProjectInitializer pi;
public DatabaseModePhaseExecutor(Phases phases, DecoratorsExecutor decoratorsExecutor,
InitializersExecutor initializersExecutor, PostJobsExecutor postJobsExecutor, SensorsExecutor sensorsExecutor,
SensorContext sensorContext, DefaultIndex index,
- EventBus eventBus, PublishReportJob publishReportJob, ProjectInitializer pi,
+ EventBus eventBus, ReportPublisher publishReportJob, ProjectInitializer pi,
ScanPersister[] persisters, FileSystemLogger fsLogger, IssuesReports jsonReport, DefaultModuleFileSystem fs, QProfileVerifier profileVerifier,
IssueExclusionsLoader issueExclusionsLoader, DefaultAnalysisMode analysisMode, DatabaseSession session, ResourcePersister resourcePersister) {
this.phases = phases;
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.batch.report;
+
+import org.sonar.api.batch.sensor.highlighting.TypeOfText;
+import org.sonar.batch.protocol.Constants.HighlightingType;
+
+public class BatchReportUtils {
+
+ private BatchReportUtils() {
+ }
+
+ public static HighlightingType toProtocolType(TypeOfText textType) {
+ switch (textType) {
+ case ANNOTATION:
+ return HighlightingType.ANNOTATION;
+ case COMMENT:
+ return HighlightingType.COMMENT;
+ case CONSTANT:
+ return HighlightingType.CONSTANT;
+ case CPP_DOC:
+ return HighlightingType.CPP_DOC;
+ case KEYWORD:
+ return HighlightingType.KEYWORD;
+ case KEYWORD_LIGHT:
+ return HighlightingType.KEYWORD_LIGHT;
+ case PREPROCESS_DIRECTIVE:
+ return HighlightingType.PREPROCESS_DIRECTIVE;
+ case STRING:
+ return HighlightingType.HIGHLIGHTING_STRING;
+ case STRUCTURED_COMMENT:
+ return HighlightingType.STRUCTURED_COMMENT;
+ default:
+ throw new IllegalArgumentException("Unknown highlighting type: " + textType);
+ }
+ }
+
+ public static TypeOfText toBatchType(HighlightingType type) {
+ switch (type) {
+ case ANNOTATION:
+ return TypeOfText.ANNOTATION;
+ case COMMENT:
+ return TypeOfText.COMMENT;
+ case CONSTANT:
+ return TypeOfText.CONSTANT;
+ case CPP_DOC:
+ return TypeOfText.CPP_DOC;
+ case HIGHLIGHTING_STRING:
+ return TypeOfText.STRING;
+ case KEYWORD:
+ return TypeOfText.KEYWORD;
+ case KEYWORD_LIGHT:
+ return TypeOfText.KEYWORD_LIGHT;
+ case PREPROCESS_DIRECTIVE:
+ return TypeOfText.PREPROCESS_DIRECTIVE;
+ case STRUCTURED_COMMENT:
+ return TypeOfText.STRUCTURED_COMMENT;
+ default:
+ throw new IllegalArgumentException(type + " is not a valid type");
+ }
+ }
+
+ public static String toCssClass(HighlightingType type) {
+ return toBatchType(type).cssClass();
+ }
+}
/**
* Adds components and analysis metadata to output report
*/
-public class ComponentsPublisher implements ReportPublisher {
+public class ComponentsPublisher implements ReportPublisherStep {
private final ResourceCache resourceCache;
private final ProjectReactor reactor;
import org.sonar.batch.protocol.output.BatchReport.Duplication;
import org.sonar.batch.protocol.output.BatchReport.Range;
-public class DuplicationsPublisher implements ReportPublisher {
+public class DuplicationsPublisher implements ReportPublisherStep {
private final ResourceCache resourceCache;
private final DuplicationCache duplicationCache;
import java.util.Date;
import java.util.Iterator;
-public class IssuesPublisher implements ReportPublisher {
+public class IssuesPublisher implements ReportPublisherStep {
private final ResourceCache resourceCache;
private final IssueCache issueCache;
import java.io.Serializable;
-public class MeasuresPublisher implements ReportPublisher {
+public class MeasuresPublisher implements ReportPublisherStep {
private final ResourceCache resourceCache;
private final MeasureCache measureCache;
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.report;
-
-import com.github.kevinsawicki.http.HttpRequest;
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.io.FileUtils;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.sonar.api.BatchComponent;
-import org.sonar.api.CoreProperties;
-import org.sonar.api.batch.bootstrap.ProjectReactor;
-import org.sonar.api.config.Settings;
-import org.sonar.api.platform.Server;
-import org.sonar.api.utils.TempFolder;
-import org.sonar.api.utils.ZipUtils;
-import org.sonar.batch.bootstrap.DefaultAnalysisMode;
-import org.sonar.batch.bootstrap.ServerClient;
-import org.sonar.batch.protocol.output.BatchReportWriter;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
-
-public class PublishReportJob implements BatchComponent, Startable {
-
- private static final Logger LOG = LoggerFactory.getLogger(PublishReportJob.class);
- public static final String KEEP_REPORT_PROP_KEY = "sonar.batch.keepReport";
-
- private final ServerClient serverClient;
- private final Server server;
- private final Settings settings;
- private final ProjectReactor projectReactor;
- private final DefaultAnalysisMode analysisMode;
- private final TempFolder temp;
-
- private ReportPublisher[] publishers;
-
- private File reportDir;
- private BatchReportWriter writer;
-
- public PublishReportJob(Settings settings, ServerClient serverClient, Server server,
- ProjectReactor projectReactor, DefaultAnalysisMode analysisMode, TempFolder temp, ReportPublisher[] publishers) {
- this.serverClient = serverClient;
- this.server = server;
- this.projectReactor = projectReactor;
- this.settings = settings;
- this.analysisMode = analysisMode;
- this.temp = temp;
- this.publishers = publishers;
- }
-
- @Override
- public void start() throws StartingException {
- reportDir = new File(projectReactor.getRoot().getWorkDir(), "batch-report");
- writer = new BatchReportWriter(reportDir);
- }
-
- @Override
- public void stop() throws StoppingException {
- if (!settings.getBoolean(KEEP_REPORT_PROP_KEY)) {
- FileUtils.deleteQuietly(reportDir);
- } else {
- LOG.info("Batch report generated in " + reportDir);
- }
- }
-
- public File getReportDir() {
- return reportDir;
- }
-
- public BatchReportWriter getWriter() {
- return writer;
- }
-
- public void execute() {
- // If this is a preview analysis then we should not upload reports
- if (!analysisMode.isPreview()) {
- File report = prepareReport();
- if (!analysisMode.isMediumTest()) {
- uploadMultiPartReport(report);
- }
- }
- logSuccess(LoggerFactory.getLogger(getClass()));
- }
-
- private File prepareReport() {
- try {
- long startTime = System.currentTimeMillis();
- for (ReportPublisher publisher : publishers) {
- publisher.publish(writer);
- }
- long stopTime = System.currentTimeMillis();
- LOG.info("Analysis reports generated in " + (stopTime - startTime) + "ms, dir size=" + FileUtils.byteCountToDisplaySize(FileUtils.sizeOfDirectory(reportDir)));
-
- startTime = System.currentTimeMillis();
- File reportZip = temp.newFile("batch-report", ".zip");
- ZipUtils.zipDir(reportDir, reportZip);
- stopTime = System.currentTimeMillis();
- LOG.info("Analysis reports compressed in " + (stopTime - startTime) + "ms, zip size=" + FileUtils.byteCountToDisplaySize(FileUtils.sizeOf(reportZip)));
- return reportZip;
- } catch (IOException e) {
- throw new IllegalStateException("Unable to prepare batch report", e);
- }
- }
-
- @VisibleForTesting
- void uploadMultiPartReport(File report) {
- LOG.debug("Publish results");
- long startTime = System.currentTimeMillis();
- URL url;
- try {
- String effectiveKey = projectReactor.getRoot().getKeyWithBranch();
- url = new URL(serverClient.getURL() + "/api/computation/submit_report?projectKey=" + effectiveKey);
- } catch (MalformedURLException e) {
- throw new IllegalArgumentException("Invalid URL", e);
- }
- HttpRequest request = HttpRequest.post(url);
- request.trustAllCerts();
- request.trustAllHosts();
- request.header("User-Agent", String.format("SonarQube %s", server.getVersion()));
- request.basic(serverClient.getLogin(), serverClient.getPassword());
- request.part("report", null, "application/octet-stream", report);
- if (!request.ok()) {
- int responseCode = request.code();
- if (responseCode == 401) {
- throw new IllegalStateException(String.format(serverClient.getMessageWhenNotAuthorized(), CoreProperties.LOGIN, CoreProperties.PASSWORD));
- }
- if (responseCode == 403) {
- // SONAR-4397 Details are in response content
- throw new IllegalStateException(request.body());
- }
- throw new IllegalStateException(String.format("Fail to execute request [code=%s, url=%s]: %s", responseCode, url, request.body()));
- }
- long stopTime = System.currentTimeMillis();
- LOG.info("Analysis reports sent to server in " + (stopTime - startTime) + "ms");
- }
-
- @VisibleForTesting
- void logSuccess(Logger logger) {
- if (analysisMode.isPreview() || analysisMode.isMediumTest()) {
- logger.info("ANALYSIS SUCCESSFUL");
-
- } else {
- String baseUrl = settings.getString(CoreProperties.SERVER_BASE_URL);
- if (baseUrl.equals(settings.getDefaultValue(CoreProperties.SERVER_BASE_URL))) {
- // If server base URL was not configured in Sonar server then is is better to take URL configured on batch side
- baseUrl = serverClient.getURL();
- }
- if (!baseUrl.endsWith("/")) {
- baseUrl += "/";
- }
- String effectiveKey = projectReactor.getRoot().getKeyWithBranch();
- String url = baseUrl + "dashboard/index/" + effectiveKey;
- logger.info("ANALYSIS SUCCESSFUL, you can browse {}", url);
- logger.info("Note that you will be able to access the updated dashboard once the server has processed the submitted analysis report.");
- }
- }
-}
*/
package org.sonar.batch.report;
+import com.github.kevinsawicki.http.HttpRequest;
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.io.FileUtils;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.sonar.api.BatchComponent;
+import org.sonar.api.CoreProperties;
+import org.sonar.api.batch.bootstrap.ProjectReactor;
+import org.sonar.api.config.Settings;
+import org.sonar.api.platform.Server;
+import org.sonar.api.utils.TempFolder;
+import org.sonar.api.utils.ZipUtils;
+import org.sonar.batch.bootstrap.DefaultAnalysisMode;
+import org.sonar.batch.bootstrap.ServerClient;
import org.sonar.batch.protocol.output.BatchReportWriter;
-/**
- * Adds a sub-part of data to output report
- */
-public interface ReportPublisher {
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+public class ReportPublisher implements BatchComponent, Startable {
+
+ private static final Logger LOG = LoggerFactory.getLogger(ReportPublisher.class);
+ public static final String KEEP_REPORT_PROP_KEY = "sonar.batch.keepReport";
+
+ private final ServerClient serverClient;
+ private final Server server;
+ private final Settings settings;
+ private final ProjectReactor projectReactor;
+ private final DefaultAnalysisMode analysisMode;
+ private final TempFolder temp;
+
+ private ReportPublisherStep[] publishers;
+
+ private File reportDir;
+ private BatchReportWriter writer;
+
+ public ReportPublisher(Settings settings, ServerClient serverClient, Server server,
+ ProjectReactor projectReactor, DefaultAnalysisMode analysisMode, TempFolder temp, ReportPublisherStep[] publishers) {
+ this.serverClient = serverClient;
+ this.server = server;
+ this.projectReactor = projectReactor;
+ this.settings = settings;
+ this.analysisMode = analysisMode;
+ this.temp = temp;
+ this.publishers = publishers;
+ }
+
+ @Override
+ public void start() throws StartingException {
+ reportDir = new File(projectReactor.getRoot().getWorkDir(), "batch-report");
+ writer = new BatchReportWriter(reportDir);
+ }
+
+ @Override
+ public void stop() throws StoppingException {
+ if (!settings.getBoolean(KEEP_REPORT_PROP_KEY)) {
+ FileUtils.deleteQuietly(reportDir);
+ } else {
+ LOG.info("Batch report generated in " + reportDir);
+ }
+ }
+
+ public File getReportDir() {
+ return reportDir;
+ }
+
+ public BatchReportWriter getWriter() {
+ return writer;
+ }
+
+ public void execute() {
+ // If this is a preview analysis then we should not upload reports
+ if (!analysisMode.isPreview()) {
+ File report = prepareReport();
+ if (!analysisMode.isMediumTest()) {
+ uploadMultiPartReport(report);
+ }
+ }
+ logSuccess(LoggerFactory.getLogger(getClass()));
+ }
+
+ private File prepareReport() {
+ try {
+ long startTime = System.currentTimeMillis();
+ for (ReportPublisherStep publisher : publishers) {
+ publisher.publish(writer);
+ }
+ long stopTime = System.currentTimeMillis();
+ LOG.info("Analysis reports generated in " + (stopTime - startTime) + "ms, dir size=" + FileUtils.byteCountToDisplaySize(FileUtils.sizeOfDirectory(reportDir)));
+
+ startTime = System.currentTimeMillis();
+ File reportZip = temp.newFile("batch-report", ".zip");
+ ZipUtils.zipDir(reportDir, reportZip);
+ stopTime = System.currentTimeMillis();
+ LOG.info("Analysis reports compressed in " + (stopTime - startTime) + "ms, zip size=" + FileUtils.byteCountToDisplaySize(FileUtils.sizeOf(reportZip)));
+ return reportZip;
+ } catch (IOException e) {
+ throw new IllegalStateException("Unable to prepare batch report", e);
+ }
+ }
+
+ @VisibleForTesting
+ void uploadMultiPartReport(File report) {
+ LOG.debug("Publish results");
+ long startTime = System.currentTimeMillis();
+ URL url;
+ try {
+ String effectiveKey = projectReactor.getRoot().getKeyWithBranch();
+ url = new URL(serverClient.getURL() + "/api/computation/submit_report?projectKey=" + effectiveKey);
+ } catch (MalformedURLException e) {
+ throw new IllegalArgumentException("Invalid URL", e);
+ }
+ HttpRequest request = HttpRequest.post(url);
+ request.trustAllCerts();
+ request.trustAllHosts();
+ request.header("User-Agent", String.format("SonarQube %s", server.getVersion()));
+ request.basic(serverClient.getLogin(), serverClient.getPassword());
+ request.part("report", null, "application/octet-stream", report);
+ if (!request.ok()) {
+ int responseCode = request.code();
+ if (responseCode == 401) {
+ throw new IllegalStateException(String.format(serverClient.getMessageWhenNotAuthorized(), CoreProperties.LOGIN, CoreProperties.PASSWORD));
+ }
+ if (responseCode == 403) {
+ // SONAR-4397 Details are in response content
+ throw new IllegalStateException(request.body());
+ }
+ throw new IllegalStateException(String.format("Fail to execute request [code=%s, url=%s]: %s", responseCode, url, request.body()));
+ }
+ long stopTime = System.currentTimeMillis();
+ LOG.info("Analysis reports sent to server in " + (stopTime - startTime) + "ms");
+ }
- void publish(BatchReportWriter writer);
+ @VisibleForTesting
+ void logSuccess(Logger logger) {
+ if (analysisMode.isPreview() || analysisMode.isMediumTest()) {
+ logger.info("ANALYSIS SUCCESSFUL");
+ } else {
+ String baseUrl = settings.getString(CoreProperties.SERVER_BASE_URL);
+ if (baseUrl.equals(settings.getDefaultValue(CoreProperties.SERVER_BASE_URL))) {
+ // If server base URL was not configured in Sonar server then it is better to take URL configured on batch side
+ baseUrl = serverClient.getURL();
+ }
+ if (!baseUrl.endsWith("/")) {
+ baseUrl += "/";
+ }
+ String effectiveKey = projectReactor.getRoot().getKeyWithBranch();
+ String url = baseUrl + "dashboard/index/" + effectiveKey;
+ logger.info("ANALYSIS SUCCESSFUL, you can browse {}", url);
+ logger.info("Note that you will be able to access the updated dashboard once the server has processed the submitted analysis report.");
+ }
+ }
}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.batch.report;
+
+import org.sonar.batch.protocol.output.BatchReportWriter;
+
+/**
+ * Adds a sub-part of data to output report
+ */
+public interface ReportPublisherStep {
+
+ void publish(BatchReportWriter writer);
+
+}
import org.sonar.api.BatchComponent;
import org.sonar.api.batch.InstantiationStrategy;
import org.sonar.api.batch.bootstrap.ProjectDefinition;
+import org.sonar.api.batch.fs.internal.FileMetadata;
import org.sonar.api.batch.rule.CheckFactory;
import org.sonar.api.checks.NoSonarFilter;
import org.sonar.api.platform.ComponentContainer;
import org.sonar.batch.deprecated.ResourceFilters;
import org.sonar.batch.deprecated.components.DefaultProjectClasspath;
import org.sonar.batch.deprecated.components.DefaultTimeMachine;
+import org.sonar.batch.deprecated.perspectives.BatchPerspectives;
import org.sonar.batch.events.EventBus;
import org.sonar.batch.index.DefaultIndex;
import org.sonar.batch.issue.IssuableFactory;
import org.sonar.batch.sensor.DefaultSensorContext;
import org.sonar.batch.sensor.DefaultSensorStorage;
import org.sonar.batch.sensor.coverage.CoverageExclusions;
-import org.sonar.core.component.ScanPerspectives;
+import org.sonar.batch.source.HighlightableBuilder;
+import org.sonar.batch.source.SymbolizableBuilder;
public class ModuleScanContainer extends ComponentContainer {
private static final Logger LOG = LoggerFactory.getLogger(ModuleScanContainer.class);
SensorsExecutor.class,
InitializersExecutor.class,
ProjectInitializer.class,
- PublishReportJob.class,
+ ReportPublisher.class,
ComponentsPublisher.class,
IssuesPublisher.class,
MeasuresPublisher.class,
IgnoreIssuesFilter.class,
NoSonarFilter.class,
- ScanPerspectives.class);
+ // Perspectives
+ BatchPerspectives.class,
+ HighlightableBuilder.class,
+ SymbolizableBuilder.class);
}
private void addDataBaseComponents() {
import org.sonar.batch.scan.filesystem.InputPathCache;
import org.sonar.batch.scan.measure.MeasureCache;
import org.sonar.batch.source.CodeColorizers;
-import org.sonar.batch.source.HighlightableBuilder;
-import org.sonar.batch.source.SymbolizableBuilder;
import org.sonar.core.component.ScanGraph;
import org.sonar.core.issue.IssueUpdater;
import org.sonar.core.issue.workflow.FunctionExecutor;
DefaultFileLinesContextFactory.class,
Caches.class,
ResourceCache.class,
- ComponentDataCache.class,
SourceDataFactory.class,
// file system
// lang
Languages.class,
DefaultLanguagesRepository.class,
- HighlightableBuilder.class,
- SymbolizableBuilder.class,
// Differential periods
PeriodsDefinition.class,
ProjectSettings.class,
// Report
- PublishReportJob.class,
+ ReportPublisher.class,
ComponentsPublisher.class,
IssuesPublisher.class,
MeasuresPublisher.class,
import org.sonar.api.resources.Languages;
import org.sonar.api.resources.Project;
import org.sonar.api.resources.Resource;
+import org.sonar.batch.index.ResourceCache;
import org.sonar.batch.index.ResourceKeyMigration;
import org.sonar.batch.index.ResourcePersister;
private final ResourceKeyMigration migration;
private final Project module;
private final ResourcePersister resourcePersister;
+ private final ResourceCache resourceCache;
- public ComponentIndexer(Project module, Languages languages, SonarIndex sonarIndex, @Nullable ResourceKeyMigration migration,
+ public ComponentIndexer(Project module, Languages languages, SonarIndex sonarIndex, ResourceCache resourceCache, @Nullable ResourceKeyMigration migration,
@Nullable ResourcePersister resourcePersister) {
this.module = module;
this.languages = languages;
this.sonarIndex = sonarIndex;
+ this.resourceCache = resourceCache;
this.migration = migration;
this.resourcePersister = resourcePersister;
}
- public ComponentIndexer(Project module, Languages languages, SonarIndex sonarIndex) {
- this(module, languages, sonarIndex, null, null);
+ public ComponentIndexer(Project module, Languages languages, SonarIndex sonarIndex, ResourceCache resourceCache) {
+ this(module, languages, sonarIndex, resourceCache, null, null);
}
public void execute(DefaultModuleFileSystem fs) {
boolean unitTest = InputFile.Type.TEST == inputFile.type();
Resource sonarFile = File.create(inputFile.relativePath(), languages.get(languageKey), unitTest);
sonarIndex.index(sonarFile);
+ resourceCache.get(sonarFile).setInputPath(inputFile);
}
if (resourcePersister != null) {
resourcePersister.persist();
}
}
-
}
import java.io.File;
import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
+import java.util.*;
+import java.util.concurrent.*;
/**
* Index input files into {@link InputPathCache}.
private final boolean isAggregator;
private final ExclusionFilters exclusionFilters;
private final InputFileBuilderFactory inputFileBuilderFactory;
- private final InputPathCache inputPathCache;
private ProgressReport progressReport;
private ExecutorService executorService;
private List<Future<Void>> tasks;
public FileIndexer(List<InputFileFilter> filters, ExclusionFilters exclusionFilters, InputFileBuilderFactory inputFileBuilderFactory,
- ProjectDefinition def, InputPathCache inputPathCache) {
- this.inputPathCache = inputPathCache;
+ ProjectDefinition def) {
this.filters = filters;
this.exclusionFilters = exclusionFilters;
this.inputFileBuilderFactory = inputFileBuilderFactory;
tasks.add(executorService.submit(new Callable<Void>() {
@Override
public Void call() {
- InputFileMetadata metadata = inputFileBuilder.completeAndComputeMetadata(inputFile, type);
- if (metadata != null && accept(inputFile)) {
- fs.add(inputFile);
- status.markAsIndexed(inputFile);
- inputPathCache.put(inputFile.moduleKey(), inputFile.relativePath(), metadata);
- File parentDir = inputFile.file().getParentFile();
+ DeprecatedDefaultInputFile completedInputFile = inputFileBuilder.completeAndComputeMetadata(inputFile, type);
+ if (completedInputFile != null && accept(completedInputFile)) {
+ fs.add(completedInputFile);
+ status.markAsIndexed(completedInputFile);
+ File parentDir = completedInputFile.file().getParentFile();
String relativePath = new PathResolver().relativePath(fs.baseDir(), parentDir);
if (relativePath != null) {
DefaultInputDir inputDir = new DefaultInputDir(fs.moduleKey(), relativePath);
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.scan.filesystem;
-
-import com.google.common.base.Charsets;
-import com.google.common.primitives.Ints;
-import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.io.ByteOrderMark;
-import org.apache.commons.io.input.BOMInputStream;
-import org.sonar.api.BatchComponent;
-import org.sonar.api.CoreProperties;
-import org.sonar.api.batch.AnalysisMode;
-import org.sonar.api.batch.fs.internal.DefaultInputFile;
-import org.sonar.api.utils.log.Logger;
-import org.sonar.api.utils.log.Loggers;
-
-import javax.annotation.CheckForNull;
-import javax.annotation.Nullable;
-
-import java.io.*;
-import java.nio.charset.Charset;
-import java.security.MessageDigest;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Computes hash of files. Ends of Lines are ignored, so files with
- * same content but different EOL encoding have the same hash.
- */
-public class FileMetadata implements BatchComponent {
-
- private static final Logger LOG = Loggers.get(FileMetadata.class);
-
- private static final char LINE_FEED = '\n';
- private static final char CARRIAGE_RETURN = '\r';
- private final AnalysisMode analysisMode;
-
- public FileMetadata(AnalysisMode analysisMode) {
- this.analysisMode = analysisMode;
- }
-
- private abstract static class CharHandler {
-
- void handleAll(char c) {
- }
-
- void handleIgnoreEoL(char c) {
- }
-
- void newLine() {
- }
-
- void eof() {
- }
- }
-
- private static class LineCounter extends CharHandler {
- private boolean empty = true;
- private int lines = 1;
- private int nonBlankLines = 0;
- private boolean blankLine = true;
- boolean alreadyLoggedInvalidCharacter = false;
- private final File file;
- private final Charset encoding;
-
- LineCounter(File file, Charset encoding) {
- this.file = file;
- this.encoding = encoding;
- }
-
- @Override
- void handleAll(char c) {
- this.empty = false;
- if (!alreadyLoggedInvalidCharacter && c == '\ufffd') {
- LOG.warn("Invalid character encountered in file " + file + " at line " + lines
- + " for encoding " + encoding + ". Please fix file content or configure the encoding to be used using property '" + CoreProperties.ENCODING_PROPERTY + "'.");
- alreadyLoggedInvalidCharacter = true;
- }
- }
-
- @Override
- void newLine() {
- lines++;
- if (!blankLine) {
- nonBlankLines++;
- }
- blankLine = true;
- }
-
- @Override
- void handleIgnoreEoL(char c) {
- if (!Character.isWhitespace(c)) {
- blankLine = false;
- }
- }
-
- @Override
- void eof() {
- if (!blankLine) {
- nonBlankLines++;
- }
- }
-
- public int lines() {
- return lines;
- }
-
- public int nonBlankLines() {
- return nonBlankLines;
- }
-
- public boolean isEmpty() {
- return empty;
- }
- }
-
- private static class FileHashComputer extends CharHandler {
- private MessageDigest globalMd5Digest = DigestUtils.getMd5Digest();
- private StringBuilder sb = new StringBuilder();
-
- @Override
- void handleIgnoreEoL(char c) {
- sb.append(c);
- }
-
- @Override
- void newLine() {
- sb.append(LINE_FEED);
- globalMd5Digest.update(sb.toString().getBytes(Charsets.UTF_8));
- sb.setLength(0);
- }
-
- @Override
- void eof() {
- if (sb.length() > 0) {
- globalMd5Digest.update(sb.toString().getBytes(Charsets.UTF_8));
- }
- }
-
- @CheckForNull
- public String getHash() {
- return Hex.encodeHexString(globalMd5Digest.digest());
- }
- }
-
- private static class LineHashComputer extends CharHandler {
- private final MessageDigest lineMd5Digest = DigestUtils.getMd5Digest();
- private final StringBuilder sb = new StringBuilder();
- private final LineHashConsumer consumer;
- private int line = 1;
-
- public LineHashComputer(LineHashConsumer consumer) {
- this.consumer = consumer;
- }
-
- @Override
- void handleIgnoreEoL(char c) {
- if (!Character.isWhitespace(c)) {
- sb.append(c);
- }
- }
-
- @Override
- void newLine() {
- consumer.consume(line, sb.length() > 0 ? lineMd5Digest.digest(sb.toString().getBytes(Charsets.UTF_8)) : null);
- sb.setLength(0);
- line++;
- }
-
- @Override
- void eof() {
- consumer.consume(line, sb.length() > 0 ? lineMd5Digest.digest(sb.toString().getBytes(Charsets.UTF_8)) : null);
- }
-
- }
-
- private static class LineOffsetCounter extends CharHandler {
- private int currentOriginalOffset = 0;
- private List<Integer> originalLineOffsets = new ArrayList<Integer>();
- private int lastValidOffset = 0;
-
- public LineOffsetCounter() {
- originalLineOffsets.add(0);
- }
-
- @Override
- void handleAll(char c) {
- currentOriginalOffset++;
- }
-
- @Override
- void newLine() {
- originalLineOffsets.add(currentOriginalOffset);
- }
-
- @Override
- void eof() {
- lastValidOffset = currentOriginalOffset;
- }
-
- public List<Integer> getOriginalLineOffsets() {
- return originalLineOffsets;
- }
-
- public int getLastValidOffset() {
- return lastValidOffset;
- }
-
- }
-
- /**
- * Compute hash of a file ignoring line ends differences.
- * Maximum performance is needed.
- */
- Metadata read(File file, Charset encoding) {
- LineCounter lineCounter = new LineCounter(file, encoding);
- FileHashComputer fileHashComputer = new FileHashComputer();
- LineOffsetCounter lineOffsetCounter = new LineOffsetCounter();
- if (!analysisMode.isPreview()) {
- scanFile(file, encoding, lineCounter, fileHashComputer, lineOffsetCounter);
- } else {
- // No need to compute line offsets in preview mode since there is no syntax highlighting
- scanFile(file, encoding, lineCounter, fileHashComputer);
- }
- return new Metadata(lineCounter.lines(), lineCounter.nonBlankLines(), fileHashComputer.getHash(), lineOffsetCounter.getOriginalLineOffsets(),
- lineOffsetCounter.getLastValidOffset(),
- lineCounter.isEmpty());
- }
-
- private static void scanFile(File file, Charset encoding, CharHandler... handlers) {
- char c = (char) 0;
- try (BOMInputStream bomIn = new BOMInputStream(new FileInputStream(file),
- ByteOrderMark.UTF_8, ByteOrderMark.UTF_16LE, ByteOrderMark.UTF_16BE, ByteOrderMark.UTF_32LE, ByteOrderMark.UTF_32BE);
- Reader reader = new BufferedReader(new InputStreamReader(bomIn, encoding))) {
- int i = reader.read();
- boolean afterCR = false;
- while (i != -1) {
- c = (char) i;
- if (afterCR) {
- for (CharHandler handler : handlers) {
- if (c != CARRIAGE_RETURN && c != LINE_FEED) {
- handler.handleIgnoreEoL(c);
- }
- handler.handleAll(c);
- handler.newLine();
- }
- afterCR = c == CARRIAGE_RETURN;
- } else if (c == LINE_FEED) {
- for (CharHandler handler : handlers) {
- handler.handleAll(c);
- handler.newLine();
- }
- } else if (c == CARRIAGE_RETURN) {
- afterCR = true;
- for (CharHandler handler : handlers) {
- handler.handleAll(c);
- }
- } else {
- for (CharHandler handler : handlers) {
- handler.handleIgnoreEoL(c);
- handler.handleAll(c);
- }
- }
- i = reader.read();
- }
- for (CharHandler handler : handlers) {
- handler.eof();
- }
- } catch (IOException e) {
- throw new IllegalStateException(String.format("Fail to read file '%s' with encoding '%s'", file.getAbsolutePath(), encoding), e);
- }
- }
-
- static class Metadata {
- final int lines;
- final int nonBlankLines;
- final String hash;
- final int[] originalLineOffsets;
- final int lastValidOffset;
- final boolean empty;
-
- private Metadata(int lines, int nonBlankLines, String hash, List<Integer> originalLineOffsets, int lastValidOffset, boolean empty) {
- this.lines = lines;
- this.nonBlankLines = nonBlankLines;
- this.hash = hash;
- this.empty = empty;
- this.originalLineOffsets = Ints.toArray(originalLineOffsets);
- this.lastValidOffset = lastValidOffset;
- }
- }
-
- public static interface LineHashConsumer {
-
- void consume(int lineIdx, @Nullable byte[] hash);
-
- }
-
- /**
- * Compute a MD5 hash of each line of the file after removing of all blank chars
- */
- public static void computeLineHashesForIssueTracking(DefaultInputFile f, LineHashConsumer consumer) {
- scanFile(f.file(), f.charset(), new LineHashComputer(consumer));
- }
-}
import org.sonar.api.batch.fs.FileSystem;
import org.sonar.api.batch.fs.InputFile;
import org.sonar.api.batch.fs.internal.DeprecatedDefaultInputFile;
+import org.sonar.api.batch.fs.internal.FileMetadata;
import org.sonar.api.config.Settings;
import org.sonar.api.scan.filesystem.PathResolver;
import org.sonar.batch.bootstrap.DefaultAnalysisMode;
* Optimization to not compute InputFile metadata if the file is excluded from analysis.
*/
@CheckForNull
- InputFileMetadata completeAndComputeMetadata(DeprecatedDefaultInputFile inputFile, InputFile.Type type) {
+ DeprecatedDefaultInputFile completeAndComputeMetadata(DeprecatedDefaultInputFile inputFile, InputFile.Type type) {
inputFile.setType(type);
inputFile.setModuleBaseDir(fs.baseDir().toPath());
inputFile.setCharset(fs.encoding());
}
inputFile.setLanguage(lang);
- InputFileMetadata result = new InputFileMetadata();
+ inputFile.initMetadata(fileMetadata.readMetadata(inputFile.file(), fs.encoding()));
- FileMetadata.Metadata metadata = fileMetadata.read(inputFile.file(), fs.encoding());
- inputFile.setLines(metadata.lines);
- inputFile.setLastValidOffset(metadata.lastValidOffset);
-
- result.setNonBlankLines(metadata.nonBlankLines);
- result.setHash(metadata.hash);
- result.setOriginalLineOffsets(metadata.originalLineOffsets);
- result.setEmpty(metadata.empty);
-
- inputFile.setStatus(statusDetection.status(inputFile.moduleKey(), inputFile.relativePath(), metadata.hash));
+ inputFile.setStatus(statusDetection.status(inputFile.moduleKey(), inputFile.relativePath(), inputFile.hash()));
if (analysisMode.isIncremental() && inputFile.status() == InputFile.Status.SAME) {
return null;
}
- return result;
+ return inputFile;
}
}
*/
package org.sonar.batch.scan.filesystem;
+import org.sonar.api.batch.fs.internal.FileMetadata;
+
import org.sonar.api.BatchComponent;
import org.sonar.api.batch.bootstrap.ProjectDefinition;
import org.sonar.api.config.Settings;
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.scan.filesystem;
-
-import java.io.Serializable;
-
-/**
- * Additional input file metadata that are stored in a disk storage to save memory
- */
-public class InputFileMetadata implements Serializable {
-
- private String hash;
- private int nonBlankLines;
- private int[] originalLineOffsets;
- private boolean empty;
-
- /**
- * Digest hash of the file.
- */
- public String hash() {
- return hash;
- }
-
- public int nonBlankLines() {
- return nonBlankLines;
- }
-
- public int[] originalLineOffsets() {
- return originalLineOffsets;
- }
-
- public InputFileMetadata setHash(String hash) {
- this.hash = hash;
- return this;
- }
-
- public InputFileMetadata setNonBlankLines(int nonBlankLines) {
- this.nonBlankLines = nonBlankLines;
- return this;
- }
-
- public InputFileMetadata setOriginalLineOffsets(int[] originalLineOffsets) {
- this.originalLineOffsets = originalLineOffsets;
- return this;
- }
-
- public boolean isEmpty() {
- return this.empty;
- }
-
- public InputFileMetadata setEmpty(boolean empty) {
- this.empty = empty;
- return this;
- }
-
-}
import org.sonar.api.batch.fs.InputDir;
import org.sonar.api.batch.fs.InputFile;
import org.sonar.api.batch.fs.InputPath;
-import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.batch.index.BatchResource;
import javax.annotation.CheckForNull;
private final Map<String, SortedMap<String, InputFile>> inputFileCache = new LinkedHashMap<>();
private final Map<String, SortedMap<String, InputDir>> inputDirCache = new LinkedHashMap<>();
- private final Map<String, Map<String, InputFileMetadata>> inputFileMetadataCache = new LinkedHashMap<>();
public Iterable<InputFile> allFiles() {
return Iterables.concat(Iterables.transform(inputFileCache.values(), new Function<Map<String, InputFile>, Collection<InputFile>>() {
public InputPathCache removeModule(String moduleKey) {
inputFileCache.remove(moduleKey);
inputDirCache.remove(moduleKey);
- inputFileMetadataCache.remove(moduleKey);
return this;
}
if (inputFileCache.containsKey(moduleKey)) {
inputFileCache.get(moduleKey).remove(inputFile.relativePath());
}
- if (inputFileMetadataCache.containsKey(moduleKey)) {
- inputFileMetadataCache.get(moduleKey).remove(inputFile.relativePath());
- }
return this;
}
return this;
}
- public synchronized InputPathCache put(String moduleKey, String relativePath, InputFileMetadata metadata) {
- if (!inputFileMetadataCache.containsKey(moduleKey)) {
- inputFileMetadataCache.put(moduleKey, new HashMap<String, InputFileMetadata>());
- }
- inputFileMetadataCache.get(moduleKey).put(relativePath, metadata);
- return this;
- }
-
public InputPathCache put(String moduleKey, InputDir inputDir) {
if (!inputDirCache.containsKey(moduleKey)) {
inputDirCache.put(moduleKey, new TreeMap<String, InputDir>());
return null;
}
- @CheckForNull
- public InputFileMetadata getFileMetadata(String moduleKey, String relativePath) {
- if (inputFileMetadataCache.containsKey(moduleKey)) {
- return inputFileMetadataCache.get(moduleKey).get(relativePath);
- }
- return null;
- }
-
- public InputFileMetadata getFileMetadata(DefaultInputFile inputFile) {
- return getFileMetadata(inputFile.moduleKey(), inputFile.relativePath());
- }
-
@CheckForNull
public InputDir getDir(String moduleKey, String relativePath) {
if (inputDirCache.containsKey(moduleKey)) {
import org.sonar.batch.index.ResourceCache;
import org.sonar.batch.protocol.input.FileData;
import org.sonar.batch.protocol.input.ProjectRepositories;
-import org.sonar.batch.report.PublishReportJob;
-import org.sonar.batch.scan.filesystem.InputFileMetadata;
+import org.sonar.batch.report.ReportPublisher;
import org.sonar.batch.scan.filesystem.InputPathCache;
import java.util.LinkedList;
private final ScmConfiguration configuration;
private final FileSystem fs;
private final ProjectRepositories projectReferentials;
- private final InputPathCache inputPathCache;
private final ResourceCache resourceCache;
- private final PublishReportJob publishReportJob;
+ private final ReportPublisher publishReportJob;
public ScmSensor(ProjectDefinition projectDefinition, ScmConfiguration configuration,
ProjectRepositories projectReferentials, FileSystem fs, InputPathCache inputPathCache, ResourceCache resourceCache,
- PublishReportJob publishReportJob) {
+ ReportPublisher publishReportJob) {
this.projectDefinition = projectDefinition;
this.configuration = configuration;
this.projectReferentials = projectReferentials;
this.fs = fs;
- this.inputPathCache = inputPathCache;
this.resourceCache = resourceCache;
this.publishReportJob = publishReportJob;
}
LOG.warn("Forced reloading of SCM data for all files.");
}
List<InputFile> filesToBlame = new LinkedList<InputFile>();
- for (InputFile f : inputPathCache.allFiles()) {
+ for (InputFile f : fs.inputFiles(fs.predicates().all())) {
if (configuration.forceReloadAll()) {
addIfNotEmpty(filesToBlame, (DefaultInputFile) f);
} else {
}
private void addIfNotEmpty(List<InputFile> filesToBlame, DefaultInputFile f) {
- InputFileMetadata metadata = inputPathCache.getFileMetadata(f);
- if (!metadata.isEmpty()) {
+ if (!f.isEmpty()) {
filesToBlame.add(f);
}
}
*/
package org.sonar.batch.sensor;
+import com.google.common.base.Function;
import com.google.common.base.Preconditions;
-import org.sonar.api.batch.fs.FileSystem;
-import org.sonar.api.batch.fs.InputDir;
+import com.google.common.collect.Iterables;
+import org.sonar.api.batch.fs.*;
import org.sonar.api.batch.fs.InputFile;
-import org.sonar.api.batch.fs.InputPath;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.batch.measure.MetricFinder;
import org.sonar.api.batch.rule.ActiveRules;
import org.sonar.api.batch.sensor.duplication.Duplication;
import org.sonar.api.batch.sensor.duplication.internal.DefaultDuplication;
import org.sonar.api.batch.sensor.highlighting.internal.DefaultHighlighting;
+import org.sonar.api.batch.sensor.highlighting.internal.SyntaxHighlightingRule;
import org.sonar.api.batch.sensor.internal.SensorStorage;
import org.sonar.api.batch.sensor.issue.Issue;
import org.sonar.api.batch.sensor.issue.Issue.Severity;
import org.sonar.api.batch.sensor.measure.Measure;
import org.sonar.api.batch.sensor.measure.internal.DefaultMeasure;
-import org.sonar.api.component.ResourcePerspectives;
import org.sonar.api.config.Settings;
import org.sonar.api.design.Dependency;
-import org.sonar.api.issue.Issuable;
import org.sonar.api.issue.internal.DefaultIssue;
import org.sonar.api.measures.Formula;
import org.sonar.api.measures.Metric;
import org.sonar.api.measures.PersistenceMode;
import org.sonar.api.measures.SumChildDistributionFormula;
-import org.sonar.api.resources.Directory;
-import org.sonar.api.resources.File;
-import org.sonar.api.resources.Project;
-import org.sonar.api.resources.Qualifiers;
-import org.sonar.api.resources.Resource;
-import org.sonar.api.resources.Scopes;
+import org.sonar.api.resources.*;
import org.sonar.api.rule.RuleKey;
+import org.sonar.api.source.Symbol;
import org.sonar.batch.duplication.DuplicationCache;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
import org.sonar.batch.index.BatchResource;
-import org.sonar.batch.index.ComponentDataCache;
import org.sonar.batch.index.DefaultIndex;
import org.sonar.batch.index.ResourceCache;
+import org.sonar.batch.issue.ModuleIssues;
+import org.sonar.batch.protocol.output.BatchReport;
+import org.sonar.batch.protocol.output.BatchReport.Range;
+import org.sonar.batch.protocol.output.BatchReport.SyntaxHighlighting.HighlightingRule;
+import org.sonar.batch.protocol.output.BatchReportWriter;
+import org.sonar.batch.report.BatchReportUtils;
+import org.sonar.batch.report.ReportPublisher;
import org.sonar.batch.sensor.coverage.CoverageExclusions;
+import org.sonar.batch.source.DefaultSymbol;
import org.sonar.core.component.ComponentKeys;
-import org.sonar.core.source.SnapshotDataTypes;
+
+import java.util.Map;
+import java.util.Set;
public class DefaultSensorStorage implements SensorStorage {
private static final String USES = "USES";
private final MetricFinder metricFinder;
private final Project project;
- private final ResourcePerspectives perspectives;
+ private final ModuleIssues moduleIssues;
private final DefaultIndex sonarIndex;
private final CoverageExclusions coverageExclusions;
private final DuplicationCache duplicationCache;
private final ResourceCache resourceCache;
- private final ComponentDataCache componentDataCache;
+ private final ReportPublisher reportPublisher;
- public DefaultSensorStorage(MetricFinder metricFinder, Project project,
- ResourcePerspectives perspectives,
- Settings settings, FileSystem fs, ActiveRules activeRules, ComponentDataCache componentDataCache,
- DuplicationCache duplicationCache, DefaultIndex sonarIndex, CoverageExclusions coverageExclusions,
- ResourceCache resourceCache) {
+ public DefaultSensorStorage(MetricFinder metricFinder, Project project, ModuleIssues moduleIssues,
+ Settings settings, FileSystem fs, ActiveRules activeRules, DuplicationCache duplicationCache, DefaultIndex sonarIndex,
+ CoverageExclusions coverageExclusions, ResourceCache resourceCache, ReportPublisher reportPublisher) {
this.metricFinder = metricFinder;
this.project = project;
- this.perspectives = perspectives;
- this.componentDataCache = componentDataCache;
+ this.moduleIssues = moduleIssues;
this.sonarIndex = sonarIndex;
this.coverageExclusions = coverageExclusions;
this.duplicationCache = duplicationCache;
this.resourceCache = resourceCache;
+ this.reportPublisher = reportPublisher;
}
private Metric findMetricOrFail(String metricKey) {
} else {
r = project;
}
- Issuable issuable = perspectives.as(Issuable.class, r);
- if (issuable == null) {
- return;
- }
- issuable.addIssue(toDefaultIssue(project.getKey(), ComponentKeys.createEffectiveKey(project, r), issue));
+ moduleIssues.initAndAddIssue(toDefaultIssue(project.getKey(), ComponentKeys.createEffectiveKey(project, r), issue));
}
public static DefaultIssue toDefaultIssue(String projectKey, String componentKey, Issue issue) {
@Override
public void store(DefaultHighlighting highlighting) {
- String componentKey = ((DefaultInputFile) highlighting.inputFile()).key();
- componentDataCache.setData(componentKey, SnapshotDataTypes.SYNTAX_HIGHLIGHTING, new SyntaxHighlightingData(highlighting.getSyntaxHighlightingRuleSet()));
+ BatchReportWriter writer = reportPublisher.getWriter();
+ DefaultInputFile inputFile = (DefaultInputFile) highlighting.inputFile();
+ writer.writeComponentSyntaxHighlighting(resourceCache.get(inputFile).batchId(),
+ Iterables.transform(highlighting.getSyntaxHighlightingRuleSet(), new Function<SyntaxHighlightingRule, HighlightingRule>() {
+ private HighlightingRule.Builder builder = HighlightingRule.newBuilder();
+ private Range.Builder rangeBuilder = Range.newBuilder();
+
+ @Override
+ public HighlightingRule apply(SyntaxHighlightingRule input) {
+ builder.clear();
+ rangeBuilder.clear();
+ builder.setRange(rangeBuilder.setStartLine(input.range().start().line())
+ .setStartOffset(input.range().start().lineOffset())
+ .setEndLine(input.range().end().line())
+ .setEndOffset(input.range().end().lineOffset())
+ .build());
+ builder.setType(BatchReportUtils.toProtocolType(input.getTextType()));
+ return builder.build();
+ }
+
+ }));
+ }
+
+ /**
+ * Writes the symbol table of one file into the batch analysis report.
+ * For every symbol, its declaration range and each of its reference ranges are
+ * serialized as protobuf {@code BatchReport.Symbols.Symbol} messages keyed by the
+ * component's batch id.
+ */
+ public void store(DefaultInputFile inputFile, Map<Symbol, Set<TextRange>> referencesBySymbol) {
+ BatchReportWriter writer = reportPublisher.getWriter();
+ writer.writeComponentSymbols(resourceCache.get(inputFile).batchId(),
+ Iterables.transform(referencesBySymbol.entrySet(), new Function<Map.Entry<Symbol, Set<TextRange>>, BatchReport.Symbols.Symbol>() {
+ // Builders are reused across elements (cleared on each apply) to cut per-symbol allocations.
+ private BatchReport.Symbols.Symbol.Builder builder = BatchReport.Symbols.Symbol.newBuilder();
+ private Range.Builder rangeBuilder = Range.newBuilder();
+
+ @Override
+ public BatchReport.Symbols.Symbol apply(Map.Entry<Symbol, Set<TextRange>> input) {
+ builder.clear();
+ rangeBuilder.clear();
+ // Cast is safe within the batch: symbols are created by DefaultSymbolTable.Builder as DefaultSymbol.
+ DefaultSymbol symbol = (DefaultSymbol) input.getKey();
+ builder.setDeclaration(rangeBuilder.setStartLine(symbol.range().start().line())
+ .setStartOffset(symbol.range().start().lineOffset())
+ .setEndLine(symbol.range().end().line())
+ .setEndOffset(symbol.range().end().lineOffset())
+ .build());
+ for (TextRange reference : input.getValue()) {
+ builder.addReference(rangeBuilder.setStartLine(reference.start().line())
+ .setStartOffset(reference.start().lineOffset())
+ .setEndLine(reference.end().line())
+ .setEndOffset(reference.end().lineOffset())
+ .build());
+ }
+ return builder.build();
+ }
+
+ }));
 }
}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.batch.source;
+
+import org.sonar.api.batch.Phase;
+import org.sonar.api.batch.fs.FileSystem;
+import org.sonar.api.batch.fs.InputFile;
+import org.sonar.api.batch.sensor.Sensor;
+import org.sonar.api.batch.sensor.SensorContext;
+import org.sonar.api.batch.sensor.SensorDescriptor;
+import org.sonar.batch.index.ResourceCache;
+import org.sonar.batch.protocol.output.BatchReport.SyntaxHighlighting.HighlightingRule;
+import org.sonar.batch.protocol.output.BatchReportReader;
+import org.sonar.batch.report.ReportPublisher;
+import org.sonar.colorizer.CodeColorizer;
+
+import java.util.List;
+
+/**
+ * Execute deprecated {@link CodeColorizer} if necessary.
+ * Runs in the POST phase so that regular highlighting sensors have already written
+ * their data; only files still lacking highlighting in the report are colorized.
+ */
+@Phase(name = Phase.Name.POST)
+public final class CodeColorizerSensor implements Sensor {
+
+  private final ReportPublisher reportPublisher;
+  private final ResourceCache resourceCache;
+  private final CodeColorizers codeColorizers;
+
+  public CodeColorizerSensor(ReportPublisher reportPublisher, ResourceCache resourceCache, CodeColorizers codeColorizers) {
+    this.reportPublisher = reportPublisher;
+    this.resourceCache = resourceCache;
+    this.codeColorizers = codeColorizers;
+  }
+
+  @Override
+  public void describe(SensorDescriptor descriptor) {
+    descriptor.name("Code Colorizer Sensor")
+      .disabledInPreview();
+  }
+
+  @Override
+  public void execute(final SensorContext context) {
+    FileSystem fs = context.fileSystem();
+    // Open the report reader once: it is loop-invariant, creating one per file is wasted work.
+    BatchReportReader reader = new BatchReportReader(reportPublisher.getReportDir());
+    for (InputFile f : fs.inputFiles(fs.predicates().all())) {
+      int batchId = resourceCache.get(f).batchId();
+      List<HighlightingRule> highlightingRules = reader.readComponentSyntaxHighlighting(batchId);
+      String language = f.language();
+      // Skip files already highlighted by another sensor, and files with no detected language.
+      if (!highlightingRules.isEmpty() || language == null) {
+        continue;
+      }
+      codeColorizers.toSyntaxHighlighting(f.file(), fs.encoding(), language, context.newHighlighting().onFile(f));
+    }
+  }
+
+}
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonar.api.BatchComponent;
+import org.sonar.api.batch.sensor.highlighting.NewHighlighting;
import org.sonar.api.web.CodeColorizerFormat;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
import org.sonar.colorizer.CodeColorizer;
import org.sonar.colorizer.Tokenizer;
import javax.annotation.CheckForNull;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
+import java.io.*;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.List;
}
@CheckForNull
- public SyntaxHighlightingData toSyntaxHighlighting(File file, Charset charset, String language) {
+ public void toSyntaxHighlighting(File file, Charset charset, String language, NewHighlighting highlighting) {
CodeColorizerFormat format = byLang.get(language);
List<Tokenizer> tokenizers;
if (format == null) {
if ("java".equals(language)) {
tokenizers = CodeColorizer.Format.JAVA.getTokenizers();
} else {
- return null;
+ return;
}
} else {
tokenizers = format.getTokenizers();
}
try (Reader reader = new BufferedReader(new InputStreamReader(new BOMInputStream(new FileInputStream(file)), charset))) {
- return new HighlightingRenderer().render(reader, tokenizers);
+ new HighlightingRenderer().render(reader, tokenizers, highlighting);
} catch (Exception e) {
throw new IllegalStateException("Unable to read source file for colorization", e);
}
*/
package org.sonar.batch.source;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.batch.sensor.highlighting.TypeOfText;
+import org.sonar.api.batch.sensor.highlighting.internal.DefaultHighlighting;
+import org.sonar.api.batch.sensor.internal.SensorStorage;
import org.sonar.api.component.Component;
import org.sonar.api.source.Highlightable;
-import org.sonar.batch.highlighting.SyntaxHighlightingDataBuilder;
-import org.sonar.batch.index.ComponentDataCache;
-import org.sonar.core.source.SnapshotDataTypes;
+import org.sonar.batch.deprecated.InputFileComponent;
/**
* @since 3.6
*/
public class DefaultHighlightable implements Highlightable {
- private final Component component;
- private final ComponentDataCache cache;
- private final SyntaxHighlightingDataBuilder builder;
+ private final DefaultInputFile inputFile;
+ private final SensorStorage sensorStorage;
- public DefaultHighlightable(Component component, ComponentDataCache cache) {
- this.component = component;
- this.cache = cache;
- this.builder = new SyntaxHighlightingDataBuilder();
+ public DefaultHighlightable(DefaultInputFile inputFile, SensorStorage sensorStorage) {
+ this.inputFile = inputFile;
+ this.sensorStorage = sensorStorage;
}
@Override
public HighlightingBuilder newHighlighting() {
- return new DefaultHighlightingBuilder(component.key(), cache, builder);
+ DefaultHighlighting defaultHighlighting = new DefaultHighlighting(sensorStorage);
+ defaultHighlighting.onFile(inputFile);
+ return new DefaultHighlightingBuilder(defaultHighlighting);
}
@Override
public Component component() {
- return component;
- }
-
- public SyntaxHighlightingDataBuilder getHighlightingRules() {
- return builder;
+ return new InputFileComponent(inputFile);
}
private static class DefaultHighlightingBuilder implements HighlightingBuilder {
- private final SyntaxHighlightingDataBuilder builder;
- private String componentKey;
- private ComponentDataCache cache;
+ private final DefaultHighlighting defaultHighlighting;
- public DefaultHighlightingBuilder(String componentKey, ComponentDataCache cache, SyntaxHighlightingDataBuilder builder) {
- this.componentKey = componentKey;
- this.cache = cache;
- this.builder = builder;
+ public DefaultHighlightingBuilder(DefaultHighlighting defaultHighlighting) {
+ this.defaultHighlighting = defaultHighlighting;
}
@Override
public HighlightingBuilder highlight(int startOffset, int endOffset, String typeOfText) {
TypeOfText type = org.sonar.api.batch.sensor.highlighting.TypeOfText.forCssClass(typeOfText);
- builder.registerHighlightingRule(startOffset, endOffset, type);
+ defaultHighlighting.highlight(startOffset, endOffset, type);
return this;
}
@Override
public void done() {
- cache.setData(componentKey, SnapshotDataTypes.SYNTAX_HIGHLIGHTING, builder.build());
+ defaultHighlighting.save();
}
}
}
package org.sonar.batch.source;
import com.google.common.base.Objects;
+import org.sonar.api.batch.fs.TextRange;
import java.io.Serializable;
public class DefaultSymbol implements org.sonar.api.source.Symbol, Serializable {
- private final int declarationStartOffset;
- private final int declarationEndOffset;
+ private TextRange range;
+ private int length;
- public DefaultSymbol(int startOffset, int endOffset) {
- this.declarationStartOffset = startOffset;
- this.declarationEndOffset = endOffset;
+ public DefaultSymbol(TextRange range, int length) {
+ this.range = range;
+ this.length = length;
}
@Override
public int getDeclarationStartOffset() {
- return declarationStartOffset;
+ throw new UnsupportedOperationException("getDeclarationStartOffset");
}
@Override
public int getDeclarationEndOffset() {
- return declarationEndOffset;
+ throw new UnsupportedOperationException("getDeclarationEndOffset");
}
@Override
public String getFullyQualifiedName() {
- return null;
+ throw new UnsupportedOperationException("getFullyQualifiedName");
+ }
+
+ public TextRange range() {
+ return range;
+ }
+
+ public int getLength() {
+ return length;
}
@Override
public String toString() {
return Objects.toStringHelper("Symbol")
- .add("offset", String.format("%d-%d", declarationStartOffset, declarationEndOffset))
+ .add("range", range)
.toString();
}
}
package org.sonar.batch.source;
+import org.sonar.api.batch.fs.TextRange;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.source.Symbol;
import org.sonar.api.source.Symbolizable;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
+import java.util.*;
public class DefaultSymbolTable implements Symbolizable.SymbolTable {
- private Map<Symbol, Set<Integer>> referencesBySymbol;
+ private Map<Symbol, Set<TextRange>> referencesBySymbol;
- private DefaultSymbolTable(Map<Symbol, Set<Integer>> referencesBySymbol) {
+ private DefaultSymbolTable(Map<Symbol, Set<TextRange>> referencesBySymbol) {
this.referencesBySymbol = referencesBySymbol;
}
- public Map<Symbol, Set<Integer>> getReferencesBySymbol() {
+ public Map<Symbol, Set<TextRange>> getReferencesBySymbol() {
return referencesBySymbol;
}
@Override
public List<Integer> references(Symbol symbol) {
- return new ArrayList<Integer>(referencesBySymbol.get(symbol));
+ throw new UnsupportedOperationException("references");
}
public static class Builder implements Symbolizable.SymbolTableBuilder {
- private final Map<Symbol, Set<Integer>> referencesBySymbol = new LinkedHashMap<Symbol, Set<Integer>>();
- private final String componentKey;
+ private final Map<Symbol, Set<TextRange>> referencesBySymbol = new LinkedHashMap<Symbol, Set<TextRange>>();
+ private final DefaultInputFile inputFile;
- public Builder(String componentKey) {
- this.componentKey = componentKey;
+ public Builder(DefaultInputFile inputFile) {
+ this.inputFile = inputFile;
}
@Override
public Symbol newSymbol(int fromOffset, int toOffset) {
- Symbol symbol = new DefaultSymbol(fromOffset, toOffset);
- referencesBySymbol.put(symbol, new TreeSet<Integer>());
+ TextRange declarationRange = inputFile.newRange(fromOffset, toOffset);
+ DefaultSymbol symbol = new DefaultSymbol(declarationRange, toOffset - fromOffset);
+ referencesBySymbol.put(symbol, new TreeSet<TextRange>(new Comparator<TextRange>() {
+ @Override
+ public int compare(TextRange o1, TextRange o2) {
+ return o1.start().compareTo(o2.start());
+ }
+ }));
return symbol;
}
if (!referencesBySymbol.containsKey(symbol)) {
throw new UnsupportedOperationException("Cannot add reference to a symbol in another file");
}
- if (fromOffset >= symbol.getDeclarationStartOffset() && fromOffset < symbol.getDeclarationEndOffset()) {
- throw new UnsupportedOperationException("Cannot add reference (" + fromOffset + ") overlapping " + symbol + " in " + componentKey);
+ TextRange referenceRange = inputFile.newRange(fromOffset, fromOffset + ((DefaultSymbol) symbol).getLength());
+
+ if (referenceRange.overlap(((DefaultSymbol) symbol).range())) {
+ throw new UnsupportedOperationException("Cannot add reference (" + fromOffset + ") overlapping " + symbol + " in " + inputFile.key());
}
- referencesBySymbol.get(symbol).add(fromOffset);
+ referencesBySymbol.get(symbol).add(referenceRange);
}
@Override
package org.sonar.batch.source;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.component.Component;
import org.sonar.api.source.Symbolizable;
-import org.sonar.batch.index.ComponentDataCache;
-import org.sonar.batch.symbol.SymbolData;
-import org.sonar.core.source.SnapshotDataTypes;
+import org.sonar.batch.deprecated.InputFileComponent;
+import org.sonar.batch.sensor.DefaultSensorStorage;
public class DefaultSymbolizable implements Symbolizable {
- private final ComponentDataCache cache;
- private final Component component;
+ private final DefaultInputFile inputFile;
+ private final DefaultSensorStorage sensorStorage;
- public DefaultSymbolizable(ComponentDataCache cache, Component component) {
- this.cache = cache;
- this.component = component;
+ public DefaultSymbolizable(DefaultInputFile inputFile, DefaultSensorStorage sensorStorage) {
+ this.inputFile = inputFile;
+ this.sensorStorage = sensorStorage;
}
@Override
public Component component() {
- return component;
+ return new InputFileComponent(inputFile);
}
@Override
public SymbolTableBuilder newSymbolTableBuilder() {
- return new DefaultSymbolTable.Builder(component.key());
+ return new DefaultSymbolTable.Builder(inputFile);
}
@Override
public void setSymbolTable(SymbolTable symbolTable) {
- SymbolData symbolData = new SymbolData(((DefaultSymbolTable) symbolTable).getReferencesBySymbol());
- cache.setData(component().key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING, symbolData);
+ sensorStorage.store(inputFile, ((DefaultSymbolTable) symbolTable).getReferencesBySymbol());
}
}
package org.sonar.batch.source;
import com.google.common.collect.ImmutableSet;
+import org.sonar.api.batch.fs.InputFile;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.sensor.internal.SensorStorage;
import org.sonar.api.component.Component;
import org.sonar.api.resources.Qualifiers;
import org.sonar.api.source.Highlightable;
-import org.sonar.batch.index.ComponentDataCache;
+import org.sonar.batch.index.BatchResource;
+import org.sonar.batch.index.ResourceCache;
import org.sonar.core.component.PerspectiveBuilder;
import org.sonar.core.component.ResourceComponent;
import java.util.Set;
-/**
- * @since 3.6
- * @deprecated since 4.5 no more used in batch 2.0
- */
-@Deprecated
public class HighlightableBuilder extends PerspectiveBuilder<Highlightable> {
private static final Set<String> SUPPORTED_QUALIFIERS = ImmutableSet.of(Qualifiers.FILE, Qualifiers.UNIT_TEST_FILE);
- private final ComponentDataCache cache;
+ private final ResourceCache cache;
+ private final SensorStorage sensorStorage;
- public HighlightableBuilder(ComponentDataCache cache) {
+ public HighlightableBuilder(ResourceCache cache, SensorStorage sensorStorage) {
super(Highlightable.class);
this.cache = cache;
+ this.sensorStorage = sensorStorage;
}
@CheckForNull
@Override
- protected Highlightable loadPerspective(Class<Highlightable> perspectiveClass, Component component) {
+ public Highlightable loadPerspective(Class<Highlightable> perspectiveClass, Component component) {
boolean supported = SUPPORTED_QUALIFIERS.contains(component.qualifier());
if (supported && component instanceof ResourceComponent) {
- return new DefaultHighlightable(component, cache);
+ BatchResource batchComponent = cache.get(component.key());
+ if (batchComponent != null) {
+ InputFile path = (InputFile) batchComponent.inputPath();
+ if (path != null) {
+ return new DefaultHighlightable((DefaultInputFile) path, sensorStorage);
+ }
+ }
}
return null;
}
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.sonar.api.batch.sensor.highlighting.NewHighlighting;
import org.sonar.api.batch.sensor.highlighting.TypeOfText;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
-import org.sonar.batch.highlighting.SyntaxHighlightingDataBuilder;
import org.sonar.colorizer.HtmlCodeBuilder;
import java.util.regex.Matcher;
private static final Logger LOG = LoggerFactory.getLogger(HighlightingCodeBuilder.class);
- private SyntaxHighlightingDataBuilder highlightingBuilder = new SyntaxHighlightingDataBuilder();
private int currentOffset = 0;
private static final Pattern START_TAG_PATTERN = Pattern.compile("<span class=\"(.+)\">");
private static final Pattern END_TAG_PATTERN = Pattern.compile("</span>");
private int startOffset = -1;
private String cssClass;
+ private final NewHighlighting highlighting;
+
+ public HighlightingCodeBuilder(NewHighlighting highlighting) {
+ this.highlighting = highlighting;
+ }
@Override
public Appendable append(CharSequence csq) {
} else {
Matcher endMatcher = END_TAG_PATTERN.matcher(htmlTag);
if (endMatcher.matches()) {
- highlightingBuilder.registerHighlightingRule(startOffset, currentOffset, TypeOfText.forCssClass(cssClass));
+ highlighting.highlight(startOffset, currentOffset, TypeOfText.forCssClass(cssClass));
startOffset = -1;
} else {
LOG.warn("Expected to match highlighting end html tag but was: " + htmlTag);
throw new UnsupportedOperationException();
}
- public SyntaxHighlightingData getHighlightingData() {
- return highlightingBuilder.build();
- }
-
}
*/
package org.sonar.batch.source;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
+import org.sonar.api.batch.sensor.highlighting.NewHighlighting;
import org.sonar.channel.Channel;
import org.sonar.channel.CodeReader;
import org.sonar.colorizer.HtmlCodeBuilder;
public class HighlightingRenderer {
- public SyntaxHighlightingData render(Reader code, List<? extends Channel<HtmlCodeBuilder>> tokenizers) {
+ public void render(Reader code, List<? extends Channel<HtmlCodeBuilder>> tokenizers, NewHighlighting highlighting) {
List<Channel<HtmlCodeBuilder>> allTokenizers = new ArrayList<Channel<HtmlCodeBuilder>>();
- HighlightingCodeBuilder codeBuilder = new HighlightingCodeBuilder();
+ HighlightingCodeBuilder codeBuilder = new HighlightingCodeBuilder(highlighting);
allTokenizers.addAll(tokenizers);
new TokenizerDispatcher(allTokenizers).colorize(new CodeReader(code), codeBuilder);
-
- return codeBuilder.getHighlightingData();
+ highlighting.save();
}
}
import org.sonar.api.batch.sensor.SensorDescriptor;
import org.sonar.api.batch.sensor.measure.internal.DefaultMeasure;
import org.sonar.api.measures.CoreMetrics;
-import org.sonar.batch.scan.filesystem.InputFileMetadata;
-import org.sonar.batch.scan.filesystem.InputPathCache;
@Phase(name = Phase.Name.PRE)
public final class LinesSensor implements Sensor {
- private final InputPathCache inputPathCache;
-
- public LinesSensor(InputPathCache inputPathCache) {
- this.inputPathCache = inputPathCache;
- }
-
@Override
public void describe(SensorDescriptor descriptor) {
descriptor.name("Lines Sensor");
.save();
if (f.language() == null) {
// As an approximation for files with no language plugin we consider every non blank line as ncloc
- InputFileMetadata metadata = inputPathCache.getFileMetadata((DefaultInputFile) f);
((DefaultMeasure<Integer>) context.<Integer>newMeasure()
.onFile(f)
.forMetric(CoreMetrics.NCLOC)
- .withValue(metadata.nonBlankLines()))
+ .withValue(((DefaultInputFile) f).nonBlankLines()))
.save();
// No test and no coverage on those files
((DefaultMeasure<Integer>) context.<Integer>newMeasure()
package org.sonar.batch.source;
+import com.google.common.collect.ImmutableSet;
+import org.sonar.api.batch.fs.InputFile;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.component.Component;
+import org.sonar.api.resources.Qualifiers;
import org.sonar.api.source.Symbolizable;
-import org.sonar.batch.index.ComponentDataCache;
+import org.sonar.batch.index.BatchResource;
+import org.sonar.batch.index.ResourceCache;
+import org.sonar.batch.sensor.DefaultSensorStorage;
import org.sonar.core.component.PerspectiveBuilder;
+import org.sonar.core.component.ResourceComponent;
+
+import javax.annotation.CheckForNull;
+
+import java.util.Set;
public class SymbolizableBuilder extends PerspectiveBuilder<Symbolizable> {
- private final ComponentDataCache cache;
+ private static final Set<String> SUPPORTED_QUALIFIERS = ImmutableSet.of(Qualifiers.FILE, Qualifiers.UNIT_TEST_FILE);
+ private final ResourceCache cache;
+ private final DefaultSensorStorage sensorStorage;
- public SymbolizableBuilder(ComponentDataCache cache) {
+ public SymbolizableBuilder(ResourceCache cache, DefaultSensorStorage sensorStorage) {
super(Symbolizable.class);
this.cache = cache;
+ this.sensorStorage = sensorStorage;
}
+ @CheckForNull
@Override
- protected Symbolizable loadPerspective(Class<Symbolizable> perspectiveClass, Component component) {
- return new DefaultSymbolizable(cache, component);
+ public Symbolizable loadPerspective(Class<Symbolizable> perspectiveClass, Component component) {
+ boolean supported = SUPPORTED_QUALIFIERS.contains(component.qualifier());
+ if (supported && component instanceof ResourceComponent) {
+ BatchResource batchComponent = cache.get(component.key());
+ if (batchComponent != null) {
+ InputFile path = (InputFile) batchComponent.inputPath();
+ if (path != null) {
+ return new DefaultSymbolizable((DefaultInputFile) path, sensorStorage);
+ }
+ }
+ }
+ return null;
}
}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-
-package org.sonar.batch.symbol;
-
-import org.sonar.api.source.Symbol;
-import org.sonar.batch.index.ComponentDataCache;
-import org.sonar.batch.source.DefaultSymbol;
-import org.sonar.core.source.SnapshotDataTypes;
-
-import java.io.Serializable;
-import java.util.Comparator;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
-public class DefaultSymbolTableBuilder {
-
- private final String componentKey;
- private final ComponentDataCache cache;
- private final Map<org.sonar.api.source.Symbol, Set<Integer>> referencesBySymbol = new LinkedHashMap<org.sonar.api.source.Symbol, Set<Integer>>();
-
- public DefaultSymbolTableBuilder(String componentKey, ComponentDataCache cache) {
- this.componentKey = componentKey;
- this.cache = cache;
- }
-
- public Symbol newSymbol(int fromOffset, int toOffset) {
- org.sonar.api.source.Symbol symbol = new DefaultSymbol(fromOffset, toOffset);
- referencesBySymbol.put(symbol, new TreeSet<Integer>());
- return symbol;
- }
-
- public void newReference(Symbol symbol, int fromOffset) {
- if (!referencesBySymbol.containsKey(symbol)) {
- throw new UnsupportedOperationException("Cannot add reference to a symbol in another file");
- }
- if (fromOffset >= symbol.getDeclarationStartOffset() && fromOffset < symbol.getDeclarationEndOffset()) {
- throw new UnsupportedOperationException("Cannot add reference (" + fromOffset + ") overlapping " + symbol);
- }
- referencesBySymbol.get(symbol).add(fromOffset);
- }
-
- public SymbolData build() {
- return new SymbolData(referencesBySymbol);
- }
-
- public void done() {
- cache.setData(componentKey, SnapshotDataTypes.SYMBOL_HIGHLIGHTING, build());
- }
-
- public static class SymbolComparator implements Comparator<Symbol>, Serializable {
- @Override
- public int compare(Symbol left, Symbol right) {
- return left.getDeclarationStartOffset() - right.getDeclarationStartOffset();
- }
- }
-
- public static class ReferenceComparator implements Comparator<Integer>, Serializable {
- @Override
- public int compare(Integer left, Integer right) {
- int result;
- if (left != null && right != null) {
- result = left - right;
- } else {
- result = left == null ? -1 : 1;
- }
- return result;
- }
- }
-}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-
-package org.sonar.batch.symbol;
-
-import org.sonar.api.source.Symbol;
-import org.sonar.batch.index.Data;
-
-import java.util.Collection;
-import java.util.Map;
-import java.util.Set;
-
-public class SymbolData implements Data {
-
- public static final String FIELD_SEPARATOR = ",";
- public static final String SYMBOL_SEPARATOR = ";";
-
- private final Map<org.sonar.api.source.Symbol, Set<Integer>> referencesBySymbol;
-
- public SymbolData(Map<org.sonar.api.source.Symbol, Set<Integer>> referencesBySymbol) {
- this.referencesBySymbol = referencesBySymbol;
- }
-
- public Map<org.sonar.api.source.Symbol, Set<Integer>> referencesBySymbol() {
- return referencesBySymbol;
- }
-
- @Override
- public String writeString() {
- StringBuilder sb = new StringBuilder();
-
- for (Symbol symbol : referencesBySymbol.keySet()) {
- if (sb.length() > 0) {
- sb.append(SYMBOL_SEPARATOR);
- }
-
- sb.append(symbol.getDeclarationStartOffset())
- .append(FIELD_SEPARATOR)
- .append(symbol.getDeclarationEndOffset())
- .append(FIELD_SEPARATOR)
- .append(symbol.getDeclarationStartOffset());
- Collection<Integer> symbolReferences = referencesBySymbol.get(symbol);
- for (Integer symbolReference : symbolReferences) {
- sb.append(FIELD_SEPARATOR).append(symbolReference);
- }
- }
-
- return sb.toString();
- }
-
-}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-@javax.annotation.ParametersAreNonnullByDefault
-package org.sonar.batch.symbol;
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.highlighting;
-
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.sonar.api.batch.sensor.highlighting.internal.SyntaxHighlightingRule;
-
-import java.util.Collection;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.sonar.api.batch.sensor.highlighting.TypeOfText.COMMENT;
-import static org.sonar.api.batch.sensor.highlighting.TypeOfText.CPP_DOC;
-import static org.sonar.api.batch.sensor.highlighting.TypeOfText.KEYWORD;
-
-public class SyntaxHighlightingDataBuilderTest {
-
- private Collection<SyntaxHighlightingRule> highlightingRules;
-
- @Rule
- public ExpectedException throwable = ExpectedException.none();
-
- @Before
- public void setUpSampleRules() {
-
- SyntaxHighlightingDataBuilder highlightingDataBuilder = new SyntaxHighlightingDataBuilder();
- highlightingDataBuilder.registerHighlightingRule(0, 10, COMMENT);
- highlightingDataBuilder.registerHighlightingRule(10, 12, KEYWORD);
- highlightingDataBuilder.registerHighlightingRule(24, 38, KEYWORD);
- highlightingDataBuilder.registerHighlightingRule(42, 50, KEYWORD);
- highlightingDataBuilder.registerHighlightingRule(24, 65, CPP_DOC);
- highlightingDataBuilder.registerHighlightingRule(12, 20, COMMENT);
-
- highlightingRules = highlightingDataBuilder.getSyntaxHighlightingRuleSet();
- }
-
- @Test
- public void should_register_highlighting_rule() throws Exception {
- assertThat(highlightingRules).hasSize(6);
- }
-
- @Test
- public void should_order_by_start_then_end_offset() throws Exception {
- assertThat(highlightingRules).extracting("startPosition").containsOnly(0, 10, 12, 24, 24, 42);
- assertThat(highlightingRules).extracting("endPosition").containsOnly(10, 12, 20, 38, 65, 50);
- assertThat(highlightingRules).extracting("textType").containsOnly(COMMENT, KEYWORD, COMMENT, KEYWORD, CPP_DOC, KEYWORD);
- }
-
- @Test
- public void should_suport_overlapping() throws Exception {
- SyntaxHighlightingDataBuilder builder = new SyntaxHighlightingDataBuilder();
- builder.registerHighlightingRule(0, 15, KEYWORD);
- builder.registerHighlightingRule(8, 12, CPP_DOC);
- builder.build();
- }
-
- @Test
- public void should_prevent_boudaries_overlapping() throws Exception {
- throwable.expect(IllegalStateException.class);
- throwable.expectMessage("Cannot register highlighting rule for characters from 8 to 15 as it overlaps at least one existing rule");
-
- SyntaxHighlightingDataBuilder builder = new SyntaxHighlightingDataBuilder();
- builder.registerHighlightingRule(0, 10, KEYWORD);
- builder.registerHighlightingRule(8, 15, KEYWORD);
- builder.build();
- }
-}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.highlighting;
-
-import com.google.common.collect.Lists;
-import org.junit.Test;
-import org.sonar.api.batch.sensor.highlighting.internal.SyntaxHighlightingRule;
-
-import java.util.List;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.sonar.api.batch.sensor.highlighting.TypeOfText.COMMENT;
-import static org.sonar.api.batch.sensor.highlighting.TypeOfText.CPP_DOC;
-import static org.sonar.api.batch.sensor.highlighting.TypeOfText.KEYWORD;
-
-public class SyntaxHighlightingDataTest {
-
- @Test
- public void should_serialize_rules_to_string() throws Exception {
-
- List<SyntaxHighlightingRule> orderedHighlightingRules = Lists.newArrayList(
- SyntaxHighlightingRule.create(0, 10, COMMENT),
- SyntaxHighlightingRule.create(10, 12, KEYWORD),
- SyntaxHighlightingRule.create(12, 20, COMMENT),
- SyntaxHighlightingRule.create(24, 38, KEYWORD),
- SyntaxHighlightingRule.create(24, 65, CPP_DOC),
- SyntaxHighlightingRule.create(42, 50, KEYWORD)
- );
-
- String serializedRules = new SyntaxHighlightingData(orderedHighlightingRules).writeString();
- assertThat(serializedRules).isEqualTo("0,10,cd;10,12,k;12,20,cd;24,38,k;24,65,cppd;42,50,k");
- }
-}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.index;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-public class ComponentDataCacheTest {
-
- @ClassRule
- public static TemporaryFolder temp = new TemporaryFolder();
-
- Caches caches;
-
- @Before
- public void start() throws Exception {
- caches = CachesTest.createCacheOnTemp(temp);
- caches.start();
- }
-
- @After
- public void stop() {
- caches.stop();
- }
-
- @Test
- public void should_get_and_set_string_data() {
- ComponentDataCache cache = new ComponentDataCache(caches);
- cache.setStringData("org/struts/Action.java", "SYNTAX", "1:foo;3:bar");
- assertThat(cache.getStringData("org/struts/Action.java", "SYNTAX")).isEqualTo("1:foo;3:bar");
- assertThat(cache.getStringData("org/struts/Action.java", "OTHER")).isNull();
- assertThat(cache.getStringData("Other.java", "SYNTAX")).isNull();
- assertThat(cache.getStringData("Other.java", "OTHER")).isNull();
- }
-
- @Test
- public void should_get_and_set_data() {
- ComponentDataCache cache = new ComponentDataCache(caches);
- cache.setData("org/struts/Action.java", "COUNT", new LongData(1234L));
- LongData count = cache.getData("org/struts/Action.java", "COUNT");
- assertThat(count.data()).isEqualTo(1234L);
- }
-
- static class LongData implements Data {
-
- private long data;
-
- LongData() {
- }
-
- LongData(long data) {
- this.data = data;
- }
-
- public long data() {
- return data;
- }
-
- @Override
- public String writeString() {
- return String.valueOf(data);
- }
-
- }
-}
import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.batch.sensor.duplication.Duplication;
import org.sonar.api.batch.sensor.duplication.internal.DefaultDuplication;
-import org.sonar.api.batch.sensor.highlighting.TypeOfText;
import org.sonar.api.measures.CoreMetrics;
import org.sonar.api.measures.Measure;
import org.sonar.api.measures.Metric;
-import org.sonar.api.source.Symbol;
import org.sonar.batch.duplication.DuplicationCache;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
-import org.sonar.batch.highlighting.SyntaxHighlightingDataBuilder;
+import org.sonar.batch.protocol.Constants.HighlightingType;
+import org.sonar.batch.protocol.output.*;
+import org.sonar.batch.protocol.output.BatchReport.Range;
import org.sonar.batch.protocol.output.BatchReport.Scm;
import org.sonar.batch.protocol.output.BatchReport.Scm.Changeset;
-import org.sonar.batch.protocol.output.BatchReportWriter;
-import org.sonar.batch.report.PublishReportJob;
-import org.sonar.batch.scan.filesystem.InputFileMetadata;
+import org.sonar.batch.protocol.output.BatchReport.SyntaxHighlighting.HighlightingRule;
+import org.sonar.batch.report.ReportPublisher;
import org.sonar.batch.scan.measure.MeasureCache;
-import org.sonar.batch.source.CodeColorizers;
-import org.sonar.batch.symbol.DefaultSymbolTableBuilder;
-import org.sonar.core.source.SnapshotDataTypes;
import org.sonar.server.source.db.FileSourceDb;
import java.io.File;
public TemporaryFolder temp = new TemporaryFolder();
private MeasureCache measureCache = mock(MeasureCache.class);
- private ComponentDataCache componentDataCache = mock(ComponentDataCache.class);
private DuplicationCache duplicationCache = mock(DuplicationCache.class);
- private CodeColorizers colorizers = mock(CodeColorizers.class);
private DefaultInputFile inputFile;
- private InputFileMetadata metadata;
private SourceDataFactory sut;
private FileSourceDb.Data.Builder output;
private File reportDir;
+ private BatchReportWriter batchReportWriter;
@Before
public void setUp() throws Exception {
- PublishReportJob publishReportJob = mock(PublishReportJob.class);
+ ReportPublisher reportPublisher = mock(ReportPublisher.class);
reportDir = temp.newFolder();
- when(publishReportJob.getReportDir()).thenReturn(reportDir);
+ batchReportWriter = new BatchReportWriter(reportDir);
+ when(reportPublisher.getReportDir()).thenReturn(reportDir);
ResourceCache resourceCache = new ResourceCache();
resourceCache.add(org.sonar.api.resources.File.create("src/Foo.java").setEffectiveKey("module_key:src/Foo.java"), null);
when(measureCache.byMetric(anyString(), anyString())).thenReturn(Collections.<Measure>emptyList());
- sut = new SourceDataFactory(measureCache, componentDataCache, duplicationCache, colorizers, publishReportJob, resourceCache);
+ sut = new SourceDataFactory(measureCache, duplicationCache, reportPublisher, resourceCache);
// generate a file with 3 lines
File baseDir = temp.newFolder();
DefaultFileSystem fs = new DefaultFileSystem(baseDir.toPath());
.setLines(3)
.setCharset(Charsets.UTF_8);
fs.add(inputFile);
- metadata = new InputFileMetadata();
FileUtils.write(inputFile.file(), "one\ntwo\nthree\n");
output = sut.createForSource(inputFile);
when(duplicationCache.byComponent(anyString())).thenReturn(Collections.<DefaultDuplication>emptyList());
@Test
public void consolidateData() throws Exception {
- byte[] bytes = sut.consolidateData(inputFile, metadata);
+ byte[] bytes = sut.consolidateData(inputFile);
assertThat(bytes).isNotEmpty();
}
@Test
public void applyHighlighting_missing() throws Exception {
- when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(null);
-
- sut.applyHighlighting(inputFile, metadata, output);
+ sut.applyHighlighting(inputFile, output);
FileSourceDb.Data data = output.build();
assertThat(data.getLines(0).hasHighlighting()).isFalse();
@Test
public void applyHighlighting() throws Exception {
- SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
- .registerHighlightingRule(0, 4, TypeOfText.ANNOTATION)
- .registerHighlightingRule(4, 5, TypeOfText.COMMENT)
- .registerHighlightingRule(7, 16, TypeOfText.CONSTANT)
- .build();
- when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting);
- metadata.setOriginalLineOffsets(new int[] {0, 4, 7});
-
- sut.applyHighlighting(inputFile, metadata, output);
+ batchReportWriter.writeComponentSyntaxHighlighting(1, Arrays.asList(
+ newRule(1, 0, 1, 4, HighlightingType.ANNOTATION),
+ newRule(2, 0, 2, 1, HighlightingType.COMMENT),
+ newRule(3, 1, 3, 9, HighlightingType.CONSTANT)));
+ inputFile.setOriginalLineOffsets(new int[] {0, 4, 7});
+ sut.applyHighlighting(inputFile, output);
FileSourceDb.Data data = output.build();
assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,4,a");
assertThat(data.getLines(1).getHighlighting()).isEqualTo("0,1,cd");
- assertThat(data.getLines(2).getHighlighting()).isEqualTo("0,9,c");
- }
-
- @Test
- public void applyHighlighting_ignore_bad_line() throws Exception {
- SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
- .registerHighlightingRule(0, 4, TypeOfText.ANNOTATION)
- .registerHighlightingRule(4, 5, TypeOfText.COMMENT)
- .registerHighlightingRule(7, 25, TypeOfText.CONSTANT)
- .build();
- when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting);
- metadata.setOriginalLineOffsets(new int[] {0, 4, 7, 15});
-
- sut.applyHighlighting(inputFile, metadata, output);
-
- FileSourceDb.Data data = output.build();
- assertThat(data.getLinesCount()).isEqualTo(3);
+ assertThat(data.getLines(2).getHighlighting()).isEqualTo("1,9,c");
}
@Test
public void applyHighlighting_multiple_lines() throws Exception {
- SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
- .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
- .registerHighlightingRule(4, 9, TypeOfText.COMMENT)
- .registerHighlightingRule(10, 16, TypeOfText.CONSTANT)
- .build();
- when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting);
- metadata.setOriginalLineOffsets(new int[] {0, 4, 7});
+ batchReportWriter.writeComponentSyntaxHighlighting(1, Arrays.asList(
+ newRule(1, 0, 1, 3, HighlightingType.ANNOTATION),
+ newRule(2, 0, 3, 2, HighlightingType.COMMENT),
+ newRule(3, 3, 3, 9, HighlightingType.CONSTANT)));
+ inputFile.setOriginalLineOffsets(new int[] {0, 4, 7});
- sut.applyHighlighting(inputFile, metadata, output);
+ sut.applyHighlighting(inputFile, output);
FileSourceDb.Data data = output.build();
assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,3,a");
@Test
public void applyHighlighting_nested_rules() throws Exception {
- SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
- .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
- .registerHighlightingRule(4, 6, TypeOfText.COMMENT)
- .registerHighlightingRule(7, 16, TypeOfText.CONSTANT)
- .registerHighlightingRule(8, 15, TypeOfText.KEYWORD)
- .build();
- when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting);
- metadata.setOriginalLineOffsets(new int[] {0, 4, 7});
+ batchReportWriter.writeComponentSyntaxHighlighting(1, Arrays.asList(
+ newRule(1, 0, 1, 3, HighlightingType.ANNOTATION),
+ newRule(2, 0, 2, 2, HighlightingType.COMMENT),
+ newRule(3, 0, 3, 9, HighlightingType.CONSTANT),
+ newRule(3, 1, 3, 8, HighlightingType.KEYWORD)));
+
+ inputFile.setOriginalLineOffsets(new int[] {0, 4, 7});
- sut.applyHighlighting(inputFile, metadata, output);
+ sut.applyHighlighting(inputFile, output);
FileSourceDb.Data data = output.build();
assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,3,a");
@Test
public void applyHighlighting_nested_rules_and_multiple_lines() throws Exception {
- SyntaxHighlightingData highlighting = new SyntaxHighlightingDataBuilder()
- .registerHighlightingRule(0, 3, TypeOfText.ANNOTATION)
- .registerHighlightingRule(4, 6, TypeOfText.COMMENT)
- .registerHighlightingRule(4, 16, TypeOfText.CONSTANT)
- .registerHighlightingRule(8, 15, TypeOfText.KEYWORD)
- .build();
- when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYNTAX_HIGHLIGHTING)).thenReturn(highlighting);
- metadata.setOriginalLineOffsets(new int[] {0, 4, 7});
+ batchReportWriter.writeComponentSyntaxHighlighting(1, Arrays.asList(
+ newRule(1, 0, 1, 3, HighlightingType.ANNOTATION),
+ newRule(2, 0, 3, 9, HighlightingType.CONSTANT),
+ newRule(2, 0, 2, 2, HighlightingType.COMMENT),
+ newRule(3, 1, 3, 8, HighlightingType.KEYWORD)));
- sut.applyHighlighting(inputFile, metadata, output);
+ inputFile.setOriginalLineOffsets(new int[] {0, 4, 7});
+
+ sut.applyHighlighting(inputFile, output);
FileSourceDb.Data data = output.build();
assertThat(data.getLines(0).getHighlighting()).isEqualTo("0,3,a");
assertThat(data.getLines(2).getHighlighting()).isEqualTo("0,9,c;1,8,k");
}
+ private HighlightingRule newRule(int startLine, int startOffset, int endLine, int endOffset, HighlightingType type) {
+ return BatchReport.SyntaxHighlighting.HighlightingRule.newBuilder()
+ .setRange(Range.newBuilder().setStartLine(startLine).setStartOffset(startOffset).setEndLine(endLine).setEndOffset(endOffset).build())
+ .setType(type)
+ .build();
+ }
+
+ private BatchReport.Symbols.Symbol newSymbol(int startLine, int startOffset, int endLine, int endOffset,
+ int startLine1, int startOffset1, int endLine1, int endOffset1,
+ int startLine2, int startOffset2, int endLine2, int endOffset2) {
+ return BatchReport.Symbols.Symbol.newBuilder()
+ .setDeclaration(Range.newBuilder().setStartLine(startLine).setStartOffset(startOffset).setEndLine(endLine).setEndOffset(endOffset).build())
+ .addReference(Range.newBuilder().setStartLine(startLine1).setStartOffset(startOffset1).setEndLine(endLine1).setEndOffset(endOffset1).build())
+ .addReference(Range.newBuilder().setStartLine(startLine2).setStartOffset(startOffset2).setEndLine(endLine2).setEndOffset(endOffset2).build())
+ .build();
+ }
+
@Test
public void applySymbolReferences_missing() throws Exception {
- when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING)).thenReturn(null);
-
- sut.applySymbolReferences(inputFile, metadata, output);
+ sut.applySymbolReferences(inputFile, output);
FileSourceDb.Data data = output.build();
assertThat(data.getLines(0).hasSymbols()).isFalse();
@Test
public void applySymbolReferences() throws Exception {
- DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(inputFile.key(), null);
- Symbol s1 = symbolBuilder.newSymbol(1, 2);
- symbolBuilder.newReference(s1, 4);
- symbolBuilder.newReference(s1, 11);
- Symbol s2 = symbolBuilder.newSymbol(4, 6);
- symbolBuilder.newReference(s2, 0);
- symbolBuilder.newReference(s2, 7);
- when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING)).thenReturn(symbolBuilder.build());
- metadata.setOriginalLineOffsets(new int[] {0, 4, 7});
-
- sut.applySymbolReferences(inputFile, metadata, output);
+ batchReportWriter.writeComponentSymbols(1, Arrays.asList(
+ newSymbol(1, 1, 1, 2,
+ 2, 0, 2, 1,
+ 3, 4, 3, 5),
+ newSymbol(2, 0, 2, 2,
+ 1, 0, 1, 2,
+ 3, 0, 3, 2)
+ ));
+ inputFile.setOriginalLineOffsets(new int[] {0, 4, 7});
+
+ sut.applySymbolReferences(inputFile, output);
FileSourceDb.Data data = output.build();
assertThat(data.getLines(0).getSymbols()).isEqualTo("1,2,1;0,2,2");
@Test
public void applySymbolReferences_declaration_order_is_not_important() throws Exception {
- DefaultSymbolTableBuilder symbolBuilder = new DefaultSymbolTableBuilder(inputFile.key(), null);
- Symbol s2 = symbolBuilder.newSymbol(4, 6);
- symbolBuilder.newReference(s2, 7);
- symbolBuilder.newReference(s2, 0);
- Symbol s1 = symbolBuilder.newSymbol(1, 2);
- symbolBuilder.newReference(s1, 11);
- symbolBuilder.newReference(s1, 4);
- when(componentDataCache.getData(inputFile.key(), SnapshotDataTypes.SYMBOL_HIGHLIGHTING)).thenReturn(symbolBuilder.build());
- metadata.setOriginalLineOffsets(new int[] {0, 4, 7});
-
- sut.applySymbolReferences(inputFile, metadata, output);
+ batchReportWriter.writeComponentSymbols(1, Arrays.asList(
+ newSymbol(2, 0, 2, 2,
+ 1, 0, 1, 2,
+ 3, 0, 3, 2),
+ newSymbol(1, 1, 1, 2,
+ 2, 0, 2, 1,
+ 3, 4, 3, 5)
+
+ ));
+ inputFile.setOriginalLineOffsets(new int[] {0, 4, 7});
+
+ sut.applySymbolReferences(inputFile, output);
FileSourceDb.Data data = output.build();
assertThat(data.getLines(0).getSymbols()).isEqualTo("1,2,1;0,2,2");
import org.sonar.api.utils.Duration;
import org.sonar.api.utils.System2;
import org.sonar.batch.issue.IssueCache;
-import org.sonar.batch.scan.filesystem.InputFileMetadata;
import org.sonar.batch.scan.filesystem.InputPathCache;
import org.sonar.core.issue.IssueUpdater;
import org.sonar.core.issue.db.IssueChangeDto;
import static com.google.common.collect.Lists.newArrayList;
import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyCollection;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Matchers.argThat;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.isA;
-import static org.mockito.Mockito.RETURNS_MOCKS;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyZeroInteractions;
-import static org.mockito.Mockito.when;
+import static org.mockito.Matchers.*;
+import static org.mockito.Mockito.*;
public class IssueTrackingDecoratorTest {
List<ServerIssue> dbIssues = Collections.emptyList();
when(initialOpenIssues.selectAndRemoveIssues("struts:Action.java")).thenReturn(dbIssues);
when(inputPathCache.getFile("foo", "Action.java")).thenReturn(mock(DefaultInputFile.class));
- when(inputPathCache.getFileMetadata("foo", "Action.java")).thenReturn(new InputFileMetadata());
decorator.doDecorate(file);
// Apply filters, track, apply transitions, notify extensions then update cache
when(tracking.track(isA(SourceHashHolder.class), anyCollection(), anyCollection())).thenReturn(trackingResult);
when(inputPathCache.getFile("foo", "Action.java")).thenReturn(mock(DefaultInputFile.class));
- when(inputPathCache.getFileMetadata("foo", "Action.java")).thenReturn(new InputFileMetadata());
decorator.doDecorate(file);
File xooFile = new File(srcDir, "sample.xoo");
File xoohighlightingFile = new File(srcDir, "sample.xoo.highlighting");
- FileUtils.write(xooFile, "Sample xoo\ncontent");
+ FileUtils.write(xooFile, "Sample xoo\ncontent plop");
FileUtils.write(xoohighlightingFile, "0:10:s\n11:18:k");
TaskResult result = tester.newTask()
.start();
InputFile file = result.inputFile("src/sample.xoo");
- assertThat(result.highlightingTypeFor(file, 0)).containsExactly(TypeOfText.STRING);
- assertThat(result.highlightingTypeFor(file, 9)).containsExactly(TypeOfText.STRING);
- assertThat(result.highlightingTypeFor(file, 10)).isEmpty();
- assertThat(result.highlightingTypeFor(file, 11)).containsExactly(TypeOfText.KEYWORD);
+ assertThat(result.highlightingTypeFor(file, 1, 0)).containsExactly(TypeOfText.STRING);
+ assertThat(result.highlightingTypeFor(file, 1, 9)).containsExactly(TypeOfText.STRING);
+ assertThat(result.highlightingTypeFor(file, 2, 0)).containsExactly(TypeOfText.KEYWORD);
+ assertThat(result.highlightingTypeFor(file, 2, 8)).isEmpty();
}
System.out.println("Duration: " + (System.currentTimeMillis() - start));
InputFile file = result.inputFile("src/sample.xoo");
- assertThat(result.highlightingTypeFor(file, 0)).containsExactly(TypeOfText.STRING);
+ assertThat(result.highlightingTypeFor(file, 1, 0)).containsExactly(TypeOfText.STRING);
}
import org.sonar.api.batch.fs.InputFile;
import org.sonar.batch.mediumtest.BatchMediumTester;
import org.sonar.batch.mediumtest.TaskResult;
+import org.sonar.batch.protocol.output.BatchReport.Range;
import org.sonar.xoo.XooPlugin;
import java.io.File;
File xooFile = new File(srcDir, "sample.xoo");
File xooSymbolFile = new File(srcDir, "sample.xoo.symbol");
FileUtils.write(xooFile, "Sample xoo\ncontent\nanother xoo");
+ // Declare a symbol spanning offsets 7-10 with a reference at offset 27 in the xoo symbol file
FileUtils.write(xooSymbolFile, "7,10,27");
TaskResult result = tester.newTask()
.start();
InputFile file = result.inputFile("src/sample.xoo");
- assertThat(result.symbolReferencesFor(file, 7, 10)).containsOnly(27);
+ assertThat(result.symbolReferencesFor(file, 1, 7)).containsOnly(Range.newBuilder().setStartLine(3).setStartOffset(8).setEndLine(3).setEndOffset(11).build());
}
}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.report;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.sonar.api.CoreProperties;
-import org.sonar.api.batch.bootstrap.ProjectDefinition;
-import org.sonar.api.batch.bootstrap.ProjectReactor;
-import org.sonar.api.config.Settings;
-import org.sonar.api.platform.Server;
-import org.sonar.api.utils.TempFolder;
-import org.sonar.batch.bootstrap.DefaultAnalysisMode;
-import org.sonar.batch.bootstrap.ServerClient;
-import org.sonar.batch.index.ResourceCache;
-import org.sonar.jpa.test.AbstractDbUnitTestCase;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-public class PublishReportJobTest extends AbstractDbUnitTestCase {
-
- private DefaultAnalysisMode mode;
-
- ResourceCache resourceCache = mock(ResourceCache.class);
-
- private ProjectReactor reactor;
-
- @Before
- public void setUp() {
- mode = mock(DefaultAnalysisMode.class);
- reactor = mock(ProjectReactor.class);
- when(reactor.getRoot()).thenReturn(ProjectDefinition.create().setKey("struts"));
- }
-
- @Test
- public void should_log_successful_analysis() throws Exception {
- Settings settings = new Settings();
- settings.setProperty(CoreProperties.SERVER_BASE_URL, "http://myserver/");
- PublishReportJob job = new PublishReportJob(settings, mock(ServerClient.class), mock(Server.class), reactor, mode, mock(TempFolder.class), new ReportPublisher[0]);
-
- Logger logger = mock(Logger.class);
- job.logSuccess(logger);
-
- verify(logger).info("ANALYSIS SUCCESSFUL, you can browse {}", "http://myserver/dashboard/index/struts");
- verify(logger).info("Note that you will be able to access the updated dashboard once the server has processed the submitted analysis report.");
- }
-
- @Test
- public void should_log_successful_preview_analysis() throws Exception {
- Settings settings = new Settings();
- when(mode.isPreview()).thenReturn(true);
- PublishReportJob job = new PublishReportJob(settings, mock(ServerClient.class), mock(Server.class), reactor, mode, mock(TempFolder.class), new ReportPublisher[0]);
-
- Logger logger = mock(Logger.class);
- job.logSuccess(logger);
-
- verify(logger).info("ANALYSIS SUCCESSFUL");
- }
-
-}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.batch.report;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.sonar.api.CoreProperties;
+import org.sonar.api.batch.bootstrap.ProjectDefinition;
+import org.sonar.api.batch.bootstrap.ProjectReactor;
+import org.sonar.api.config.Settings;
+import org.sonar.api.platform.Server;
+import org.sonar.api.utils.TempFolder;
+import org.sonar.batch.bootstrap.DefaultAnalysisMode;
+import org.sonar.batch.bootstrap.ServerClient;
+import org.sonar.batch.index.ResourceCache;
+import org.sonar.jpa.test.AbstractDbUnitTestCase;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+public class ReportPublisherTest extends AbstractDbUnitTestCase {
+
+ private DefaultAnalysisMode mode;
+
+ ResourceCache resourceCache = mock(ResourceCache.class);
+
+ private ProjectReactor reactor;
+
+ @Before
+ public void setUp() {
+ mode = mock(DefaultAnalysisMode.class);
+ reactor = mock(ProjectReactor.class);
+ when(reactor.getRoot()).thenReturn(ProjectDefinition.create().setKey("struts"));
+ }
+
+ @Test
+ public void should_log_successful_analysis() throws Exception {
+ Settings settings = new Settings();
+ settings.setProperty(CoreProperties.SERVER_BASE_URL, "http://myserver/");
+ ReportPublisher job = new ReportPublisher(settings, mock(ServerClient.class), mock(Server.class), reactor, mode, mock(TempFolder.class), new ReportPublisherStep[0]);
+
+ Logger logger = mock(Logger.class);
+ job.logSuccess(logger);
+
+ verify(logger).info("ANALYSIS SUCCESSFUL, you can browse {}", "http://myserver/dashboard/index/struts");
+ verify(logger).info("Note that you will be able to access the updated dashboard once the server has processed the submitted analysis report.");
+ }
+
+ @Test
+ public void should_log_successful_preview_analysis() throws Exception {
+ Settings settings = new Settings();
+ when(mode.isPreview()).thenReturn(true);
+ ReportPublisher job = new ReportPublisher(settings, mock(ServerClient.class), mock(Server.class), reactor, mode, mock(TempFolder.class), new ReportPublisherStep[0]);
+
+ Logger logger = mock(Logger.class);
+ job.logSuccess(logger);
+
+ verify(logger).info("ANALYSIS SUCCESSFUL");
+ }
+
+}
import org.sonar.api.batch.fs.internal.DefaultFileSystem;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.batch.fs.internal.DeprecatedDefaultInputFile;
-import org.sonar.api.resources.AbstractLanguage;
-import org.sonar.api.resources.Java;
-import org.sonar.api.resources.Languages;
-import org.sonar.api.resources.Project;
-import org.sonar.api.resources.Qualifiers;
+import org.sonar.api.resources.*;
+import org.sonar.batch.index.BatchResource;
+import org.sonar.batch.index.ResourceCache;
import java.io.File;
import java.io.IOException;
+import static org.mockito.Matchers.any;
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
}
private ComponentIndexer createIndexer(Languages languages) {
- return new ComponentIndexer(project, languages, sonarIndex);
+ // ComponentIndexer now takes a ResourceCache; stub it to return a BatchResource
+ // for any queried resource so component lookup succeeds during indexing.
+ ResourceCache resourceCache = mock(ResourceCache.class);
+ when(resourceCache.get(any(Resource.class))).thenReturn(new BatchResource(1, org.sonar.api.resources.File.create("foo.php"), null));
+ return new ComponentIndexer(project, languages, sonarIndex, resourceCache);
}
@Test
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.batch.scan.filesystem;
-
-import com.google.common.base.Charsets;
-import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.io.FileUtils;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.junit.rules.TemporaryFolder;
-import org.sonar.api.batch.AnalysisMode;
-import org.sonar.api.batch.fs.internal.DefaultInputFile;
-import org.sonar.api.utils.log.LogTester;
-import org.sonar.api.utils.log.LoggerLevel;
-import org.sonar.batch.scan.filesystem.FileMetadata.LineHashConsumer;
-
-import javax.annotation.Nullable;
-
-import java.io.File;
-import java.nio.charset.Charset;
-
-import static org.apache.commons.codec.digest.DigestUtils.md5Hex;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Mockito.mock;
-
-public class FileMetadataTest {
-
- @Rule
- public ExpectedException thrown = ExpectedException.none();
-
- @Rule
- public TemporaryFolder temp = new TemporaryFolder();
-
- private AnalysisMode mode = mock(AnalysisMode.class);
-
- @Rule
- public LogTester logTester = new LogTester();
-
- @Test
- public void empty_file() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.touch(tempFile);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(1);
- assertThat(metadata.nonBlankLines).isEqualTo(0);
- assertThat(metadata.hash).isNotEmpty();
- assertThat(metadata.originalLineOffsets).containsOnly(0);
- assertThat(metadata.lastValidOffset).isEqualTo(0);
- assertThat(metadata.empty).isTrue();
- }
-
- @Test
- public void windows_without_latest_eol() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "foo\r\nbar\r\nbaz", Charsets.UTF_8, true);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(3);
- assertThat(metadata.nonBlankLines).isEqualTo(3);
- assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10);
- assertThat(metadata.lastValidOffset).isEqualTo(13);
- assertThat(metadata.empty).isFalse();
- }
-
- @Test
- public void read_with_wrong_encoding() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "marker´s\n", Charset.forName("cp1252"));
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(2);
- assertThat(metadata.hash).isEqualTo(md5Hex("marker\ufffds\n"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 9);
- }
-
- @Test
- public void non_ascii_utf_8() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "föo\r\nbàr\r\n\u1D11Ebaßz\r\n", Charsets.UTF_8, true);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(4);
- assertThat(metadata.nonBlankLines).isEqualTo(3);
- assertThat(metadata.hash).isEqualTo(md5Hex("föo\nbàr\n\u1D11Ebaßz\n"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10, 18);
- }
-
- @Test
- public void non_ascii_utf_16() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "föo\r\nbàr\r\n\u1D11Ebaßz\r\n", Charsets.UTF_16, true);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_16);
- assertThat(metadata.lines).isEqualTo(4);
- assertThat(metadata.nonBlankLines).isEqualTo(3);
- assertThat(metadata.hash).isEqualTo(md5Hex("föo\nbàr\n\u1D11Ebaßz\n"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10, 18);
- }
-
- @Test
- public void unix_without_latest_eol() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "foo\nbar\nbaz", Charsets.UTF_8, true);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(3);
- assertThat(metadata.nonBlankLines).isEqualTo(3);
- assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 8);
- assertThat(metadata.lastValidOffset).isEqualTo(11);
- }
-
- @Test
- public void unix_with_latest_eol() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "foo\nbar\nbaz\n", Charsets.UTF_8, true);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(4);
- assertThat(metadata.nonBlankLines).isEqualTo(3);
- assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz\n"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 8, 12);
- assertThat(metadata.lastValidOffset).isEqualTo(12);
- }
-
- @Test
- public void mix_of_newlines_with_latest_eol() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "foo\nbar\r\nbaz\n", Charsets.UTF_8, true);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(4);
- assertThat(metadata.nonBlankLines).isEqualTo(3);
- assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz\n"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9, 13);
- }
-
- @Test
- public void several_new_lines() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "foo\n\n\nbar", Charsets.UTF_8, true);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(4);
- assertThat(metadata.nonBlankLines).isEqualTo(2);
- assertThat(metadata.hash).isEqualTo(md5Hex("foo\n\n\nbar"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 5, 6);
- }
-
- @Test
- public void mix_of_newlines_without_latest_eol() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "foo\nbar\r\nbaz", Charsets.UTF_8, true);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(3);
- assertThat(metadata.nonBlankLines).isEqualTo(3);
- assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9);
- }
-
- @Test
- public void start_with_newline() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "\nfoo\nbar\r\nbaz", Charsets.UTF_8, true);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(4);
- assertThat(metadata.nonBlankLines).isEqualTo(3);
- assertThat(metadata.hash).isEqualTo(md5Hex("\nfoo\nbar\nbaz"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 1, 5, 10);
- }
-
- @Test
- public void start_with_bom() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, "\uFEFFfoo\nbar\r\nbaz", Charsets.UTF_8, true);
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(tempFile, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(3);
- assertThat(metadata.nonBlankLines).isEqualTo(3);
- assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz"));
- assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9);
- }
-
- @Test
- public void ignore_whitespace_when_computing_line_hashes() throws Exception {
- File tempFile = temp.newFile();
- FileUtils.write(tempFile, " foo\nb ar\r\nbaz \t", Charsets.UTF_8, true);
-
- DefaultInputFile f = new DefaultInputFile("foo", tempFile.getName());
- f.setModuleBaseDir(tempFile.getParentFile().toPath());
- f.setCharset(Charsets.UTF_8);
- FileMetadata.computeLineHashesForIssueTracking(f, new LineHashConsumer() {
-
- @Override
- public void consume(int lineIdx, @Nullable byte[] hash) {
- switch (lineIdx) {
- case 1:
- assertThat(Hex.encodeHexString(hash)).isEqualTo(md5Hex("foo"));
- break;
- case 2:
- assertThat(Hex.encodeHexString(hash)).isEqualTo(md5Hex("bar"));
- break;
- case 3:
- assertThat(Hex.encodeHexString(hash)).isEqualTo(md5Hex("baz"));
- break;
- }
- }
- });
- }
-
- @Test
- public void should_throw_if_file_does_not_exist() throws Exception {
- File tempFolder = temp.newFolder();
- File file = new File(tempFolder, "doesNotExist.txt");
-
- thrown.expect(IllegalStateException.class);
- thrown.expectMessage("Fail to read file '" + file.getAbsolutePath() + "' with encoding 'UTF-8'");
-
- new FileMetadata(mode).read(file, Charsets.UTF_8);
- }
-
- @Test
- public void line_feed_is_included_into_hash() throws Exception {
- File file1 = temp.newFile();
- FileUtils.write(file1, "foo\nbar\n", Charsets.UTF_8, true);
-
- // same as file1, except an additional return carriage
- File file1a = temp.newFile();
- FileUtils.write(file1a, "foo\r\nbar\n", Charsets.UTF_8, true);
-
- File file2 = temp.newFile();
- FileUtils.write(file2, "foo\nbar", Charsets.UTF_8, true);
-
- String hash1 = new FileMetadata(mode).read(file1, Charsets.UTF_8).hash;
- String hash1a = new FileMetadata(mode).read(file1a, Charsets.UTF_8).hash;
- String hash2 = new FileMetadata(mode).read(file2, Charsets.UTF_8).hash;
- assertThat(hash1).isEqualTo(hash1a);
- assertThat(hash1).isNotEqualTo(hash2);
- }
-
- @Test
- public void binary_file_with_unmappable_character() throws Exception {
- File woff = new File(this.getClass().getResource("glyphicons-halflings-regular.woff").toURI());
-
- FileMetadata.Metadata metadata = new FileMetadata(mode).read(woff, Charsets.UTF_8);
- assertThat(metadata.lines).isEqualTo(135);
- assertThat(metadata.nonBlankLines).isEqualTo(134);
- assertThat(metadata.hash).isNotEmpty();
- assertThat(metadata.empty).isFalse();
-
- assertThat(logTester.logs(LoggerLevel.WARN).get(0)).contains("Invalid character encountered in file");
- assertThat(logTester.logs(LoggerLevel.WARN).get(0)).contains(
- "glyphicons-halflings-regular.woff at line 1 for encoding UTF-8. Please fix file content or configure the encoding to be used using property 'sonar.sourceEncoding'.");
- }
-
-}
import org.junit.Test;
import org.mockito.Mockito;
import org.sonar.api.batch.bootstrap.ProjectDefinition;
+import org.sonar.api.batch.fs.internal.FileMetadata;
import org.sonar.api.config.Settings;
import org.sonar.api.scan.filesystem.PathResolver;
import org.sonar.batch.bootstrap.DefaultAnalysisMode;
DefaultAnalysisMode analysisMode = mock(DefaultAnalysisMode.class);
InputFileBuilderFactory factory = new InputFileBuilderFactory(ProjectDefinition.create().setKey("struts"), pathResolver, langDetectionFactory,
- statusDetectionFactory, analysisMode, new Settings(), new FileMetadata(analysisMode));
+ statusDetectionFactory, analysisMode, new Settings(), new FileMetadata());
InputFileBuilder builder = factory.create(fs);
assertThat(builder.langDetection()).isNotNull();
import org.junit.rules.TemporaryFolder;
import org.sonar.api.batch.fs.InputFile;
import org.sonar.api.batch.fs.internal.DeprecatedDefaultInputFile;
+import org.sonar.api.batch.fs.internal.FileMetadata;
import org.sonar.api.config.Settings;
import org.sonar.api.scan.filesystem.PathResolver;
import org.sonar.api.utils.PathUtils;
.thenReturn(InputFile.Status.ADDED);
InputFileBuilder builder = new InputFileBuilder("struts", new PathResolver(),
- langDetection, statusDetection, fs, analysisMode, new Settings(), new FileMetadata(analysisMode));
+ langDetection, statusDetection, fs, analysisMode, new Settings(), new FileMetadata());
DeprecatedDefaultInputFile inputFile = builder.create(srcFile);
builder.completeAndComputeMetadata(inputFile, InputFile.Type.MAIN);
when(fs.baseDir()).thenReturn(basedir);
InputFileBuilder builder = new InputFileBuilder("struts", new PathResolver(),
- langDetection, statusDetection, fs, analysisMode, new Settings(), new FileMetadata(analysisMode));
+ langDetection, statusDetection, fs, analysisMode, new Settings(), new FileMetadata());
DeprecatedDefaultInputFile inputFile = builder.create(srcFile);
assertThat(inputFile).isNull();
when(langDetection.language(any(InputFile.class))).thenReturn(null);
InputFileBuilder builder = new InputFileBuilder("struts", new PathResolver(),
- langDetection, statusDetection, fs, analysisMode, new Settings(), new FileMetadata(analysisMode));
+ langDetection, statusDetection, fs, analysisMode, new Settings(), new FileMetadata());
DeprecatedDefaultInputFile inputFile = builder.create(srcFile);
- InputFileMetadata metadata = builder.completeAndComputeMetadata(inputFile, InputFile.Type.MAIN);
+ inputFile = builder.completeAndComputeMetadata(inputFile, InputFile.Type.MAIN);
- assertThat(metadata).isNull();
+ assertThat(inputFile).isNull();
}
}
assertThat(cache.allFiles()).hasSize(1);
}
- @Test
- public void should_add_input_file_metadata() throws Exception {
- InputPathCache cache = new InputPathCache();
- cache.put("struts-core", "src/main/java/Bar.java", new InputFileMetadata()
- .setHash("xyz")
- .setNonBlankLines(2)
- .setEmpty(true)
- .setOriginalLineOffsets(new int[] {0, 4}));
-
- InputFileMetadata loadedFileMetadata = cache.getFileMetadata("struts-core", "src/main/java/Bar.java");
- assertThat(loadedFileMetadata.originalLineOffsets()).containsOnly(0, 4);
- assertThat(loadedFileMetadata.hash()).isEqualTo("xyz");
- assertThat(loadedFileMetadata.nonBlankLines()).isEqualTo(2);
- assertThat(loadedFileMetadata.isEmpty()).isTrue();
-
- }
}
import org.sonar.api.batch.sensor.issue.Issue.Severity;
import org.sonar.api.batch.sensor.issue.internal.DefaultIssue;
import org.sonar.api.batch.sensor.measure.internal.DefaultMeasure;
-import org.sonar.api.component.ResourcePerspectives;
import org.sonar.api.config.Settings;
import org.sonar.api.design.Dependency;
-import org.sonar.api.issue.Issuable;
-import org.sonar.api.issue.Issue;
import org.sonar.api.measures.CoreMetrics;
import org.sonar.api.measures.Measure;
import org.sonar.api.measures.PersistenceMode;
import org.sonar.api.resources.Resource;
import org.sonar.api.rule.RuleKey;
import org.sonar.batch.duplication.DuplicationCache;
-import org.sonar.batch.index.ComponentDataCache;
import org.sonar.batch.index.DefaultIndex;
import org.sonar.batch.index.ResourceCache;
+import org.sonar.batch.issue.ModuleIssues;
+import org.sonar.batch.report.ReportPublisher;
import org.sonar.batch.sensor.coverage.CoverageExclusions;
import static org.assertj.core.api.Assertions.assertThat;
private DefaultFileSystem fs;
private DefaultSensorStorage sensorStorage;
private Settings settings;
- private ResourcePerspectives resourcePerspectives;
+ private ModuleIssues moduleIssues;
private Project project;
private DefaultIndex sonarIndex;
when(metricFinder.findByKey(CoreMetrics.NCLOC_KEY)).thenReturn(CoreMetrics.NCLOC);
when(metricFinder.findByKey(CoreMetrics.FUNCTION_COMPLEXITY_DISTRIBUTION_KEY)).thenReturn(CoreMetrics.FUNCTION_COMPLEXITY_DISTRIBUTION);
settings = new Settings();
- resourcePerspectives = mock(ResourcePerspectives.class);
- ComponentDataCache componentDataCache = mock(ComponentDataCache.class);
+ moduleIssues = mock(ModuleIssues.class);
project = new Project("myProject");
sonarIndex = mock(DefaultIndex.class);
CoverageExclusions coverageExclusions = mock(CoverageExclusions.class);
when(coverageExclusions.accept(any(Resource.class), any(Measure.class))).thenReturn(true);
resourceCache = new ResourceCache();
sensorStorage = new DefaultSensorStorage(metricFinder, project,
- resourcePerspectives, settings, fs, activeRules, componentDataCache, mock(DuplicationCache.class), sonarIndex, coverageExclusions, resourceCache);
+ moduleIssues, settings, fs, activeRules, mock(DuplicationCache.class), sonarIndex, coverageExclusions, resourceCache, mock(ReportPublisher.class));
}
@Test
public void shouldAddIssueOnFile() {
InputFile file = new DefaultInputFile("foo", "src/Foo.php").setLines(4);
- ArgumentCaptor<Issue> argumentCaptor = ArgumentCaptor.forClass(Issue.class);
-
- Issuable issuable = mock(Issuable.class);
- when(resourcePerspectives.as(Issuable.class, File.create("src/Foo.php"))).thenReturn(issuable);
-
- when(issuable.addIssue(argumentCaptor.capture())).thenReturn(true);
+ ArgumentCaptor<org.sonar.api.issue.internal.DefaultIssue> argumentCaptor = ArgumentCaptor.forClass(org.sonar.api.issue.internal.DefaultIssue.class);
sensorStorage.store(new DefaultIssue()
.onFile(file)
.atLine(3)
.effortToFix(10.0));
- Issue issue = argumentCaptor.getValue();
+ verify(moduleIssues).initAndAddIssue(argumentCaptor.capture());
+
+ org.sonar.api.issue.internal.DefaultIssue issue = argumentCaptor.getValue();
assertThat(issue.ruleKey()).isEqualTo(RuleKey.of("foo", "bar"));
assertThat(issue.message()).isEqualTo("Foo");
assertThat(issue.line()).isEqualTo(3);
public void shouldAddIssueOnDirectory() {
InputDir dir = new DefaultInputDir("foo", "src");
- ArgumentCaptor<Issue> argumentCaptor = ArgumentCaptor.forClass(Issue.class);
-
- Issuable issuable = mock(Issuable.class);
- when(resourcePerspectives.as(Issuable.class, Directory.create("src"))).thenReturn(issuable);
-
- when(issuable.addIssue(argumentCaptor.capture())).thenReturn(true);
+ ArgumentCaptor<org.sonar.api.issue.internal.DefaultIssue> argumentCaptor = ArgumentCaptor.forClass(org.sonar.api.issue.internal.DefaultIssue.class);
sensorStorage.store(new DefaultIssue()
.onDir(dir)
.message("Foo")
.effortToFix(10.0));
- Issue issue = argumentCaptor.getValue();
+ verify(moduleIssues).initAndAddIssue(argumentCaptor.capture());
+
+ org.sonar.api.issue.internal.DefaultIssue issue = argumentCaptor.getValue();
assertThat(issue.ruleKey()).isEqualTo(RuleKey.of("foo", "bar"));
assertThat(issue.message()).isEqualTo("Foo");
assertThat(issue.line()).isNull();
@Test
public void shouldAddIssueOnProject() {
- ArgumentCaptor<Issue> argumentCaptor = ArgumentCaptor.forClass(Issue.class);
-
- Issuable issuable = mock(Issuable.class);
- when(resourcePerspectives.as(Issuable.class, (Resource) project)).thenReturn(issuable);
-
- when(issuable.addIssue(argumentCaptor.capture())).thenReturn(true);
+ ArgumentCaptor<org.sonar.api.issue.internal.DefaultIssue> argumentCaptor = ArgumentCaptor.forClass(org.sonar.api.issue.internal.DefaultIssue.class);
sensorStorage.store(new DefaultIssue()
.onProject()
.overrideSeverity(Severity.BLOCKER)
.effortToFix(10.0));
- Issue issue = argumentCaptor.getValue();
+ verify(moduleIssues).initAndAddIssue(argumentCaptor.capture());
+
+ org.sonar.api.issue.internal.DefaultIssue issue = argumentCaptor.getValue();
assertThat(issue.ruleKey()).isEqualTo(RuleKey.of("foo", "bar"));
assertThat(issue.message()).isEqualTo("Foo");
assertThat(issue.line()).isNull();
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
+import org.sonar.api.batch.sensor.highlighting.NewHighlighting;
+import org.sonar.api.batch.sensor.highlighting.TypeOfText;
import org.sonar.api.web.CodeColorizerFormat;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
-import org.sonar.colorizer.CDocTokenizer;
-import org.sonar.colorizer.CppDocTokenizer;
-import org.sonar.colorizer.JavadocTokenizer;
-import org.sonar.colorizer.KeywordsTokenizer;
-import org.sonar.colorizer.StringTokenizer;
-import org.sonar.colorizer.Tokenizer;
+import org.sonar.colorizer.*;
import java.io.File;
import java.util.Arrays;
import java.util.List;
-import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
public class CodeColorizersTest {
- private static final String HIGHLIGHTING_JS = "0,4,cppd;5,11,cppd;12,15,cppd;16,19,k;29,37,k;65,69,k;85,93,cd;98,102,k;112,114,s;120,124,k";
- private static final String HIGHLIGHTING_JAVA = "0,4,j;5,11,j;12,15,j;16,22,k;23,28,k;43,50,k;51,54,k;67,78,a;81,87,k;88,92,k;97,100,k;142,146,k;162,170,cd";
@Rule
public TemporaryFolder temp = new TemporaryFolder();
@Test
public void testConvertToHighlighting() throws Exception {
CodeColorizers codeColorizers = new CodeColorizers(Arrays.<CodeColorizerFormat>asList(new JavaScriptColorizerFormat()));
-
File jsFile = new File(this.getClass().getResource("CodeColorizersTest/Person.js").toURI());
+ NewHighlighting highlighting = mock(NewHighlighting.class);
- SyntaxHighlightingData syntaxHighlighting = codeColorizers.toSyntaxHighlighting(jsFile, Charsets.UTF_8, "js");
+ codeColorizers.toSyntaxHighlighting(jsFile, Charsets.UTF_8, "js", highlighting);
- assertThat(syntaxHighlighting.writeString()).isEqualTo(HIGHLIGHTING_JS);
+ verifyForJs(highlighting);
+ }
+ // Asserts the expected JavaScript token highlighting (offsets and token types)
+ // for CodeColorizersTest/Person.js; replaces the old serialized-string
+ // comparison against HIGHLIGHTING_JS with explicit NewHighlighting calls.
+ private void verifyForJs(NewHighlighting highlighting) {
+ verify(highlighting).highlight(0, 4, TypeOfText.CPP_DOC);
+ verify(highlighting).highlight(5, 11, TypeOfText.CPP_DOC);
+ verify(highlighting).highlight(12, 15, TypeOfText.CPP_DOC);
+ verify(highlighting).highlight(16, 19, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(29, 37, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(65, 69, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(85, 93, TypeOfText.COMMENT);
+ verify(highlighting).highlight(98, 102, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(112, 114, TypeOfText.STRING);
+ verify(highlighting).highlight(120, 124, TypeOfText.KEYWORD);
}
@Test
File jsFile = new File(this.getClass().getResource("CodeColorizersTest/Person.js").toURI());
FileUtils.write(fileWithBom, FileUtils.readFileToString(jsFile), "UTF-8", true);
- SyntaxHighlightingData syntaxHighlighting = codeColorizers.toSyntaxHighlighting(fileWithBom, Charsets.UTF_8, "js");
+ NewHighlighting highlighting = mock(NewHighlighting.class);
+ codeColorizers.toSyntaxHighlighting(fileWithBom, Charsets.UTF_8, "js", highlighting);
- assertThat(syntaxHighlighting.writeString()).isEqualTo(HIGHLIGHTING_JS);
+ verifyForJs(highlighting);
}
@Test
File javaFile = new File(this.getClass().getResource("CodeColorizersTest/Person.java").toURI());
- SyntaxHighlightingData syntaxHighlighting = codeColorizers.toSyntaxHighlighting(javaFile, Charsets.UTF_8, "java");
-
- assertThat(syntaxHighlighting.writeString()).isEqualTo(HIGHLIGHTING_JAVA);
+ NewHighlighting highlighting = mock(NewHighlighting.class);
+ codeColorizers.toSyntaxHighlighting(javaFile, Charsets.UTF_8, "java", highlighting);
+
+ verify(highlighting).highlight(0, 4, TypeOfText.STRUCTURED_COMMENT);
+ verify(highlighting).highlight(5, 11, TypeOfText.STRUCTURED_COMMENT);
+ verify(highlighting).highlight(12, 15, TypeOfText.STRUCTURED_COMMENT);
+ verify(highlighting).highlight(16, 22, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(23, 28, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(43, 50, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(51, 54, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(67, 78, TypeOfText.ANNOTATION);
+ verify(highlighting).highlight(81, 87, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(88, 92, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(97, 100, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(142, 146, TypeOfText.KEYWORD);
+ verify(highlighting).highlight(162, 170, TypeOfText.COMMENT);
}
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.ArgumentCaptor;
-import org.sonar.api.component.Component;
-import org.sonar.batch.highlighting.SyntaxHighlightingData;
-import org.sonar.batch.index.ComponentDataCache;
-import org.sonar.core.source.SnapshotDataTypes;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.fs.internal.FileMetadata;
+import org.sonar.api.batch.sensor.highlighting.internal.DefaultHighlighting;
+import org.sonar.api.batch.sensor.internal.SensorStorage;
+
+import java.io.StringReader;
import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
public class DefaultHighlightableTest {
@Test
public void should_store_highlighting_rules() throws Exception {
- DefaultHighlightable highlightablePerspective = new DefaultHighlightable(mock(Component.class), null);
- highlightablePerspective.newHighlighting().highlight(0, 10, "k").highlight(20, 30, "cppd");
-
- assertThat(highlightablePerspective.getHighlightingRules().getSyntaxHighlightingRuleSet()).hasSize(2);
+ // Highlighting is now pushed to a SensorStorage on done() instead of being
+ // read back from the perspective, so capture the stored DefaultHighlighting.
+ SensorStorage sensorStorage = mock(SensorStorage.class);
+ // The input file needs real metadata (line offsets) so highlighting offsets
+ // can be converted to text ranges.
+ DefaultInputFile inputFile = new DefaultInputFile("foo", "src/Foo.php")
+ .initMetadata(new FileMetadata().readMetadata(new StringReader("azerty\nbla bla")));
+ DefaultHighlightable highlightablePerspective = new DefaultHighlightable(inputFile, sensorStorage);
+ highlightablePerspective.newHighlighting().highlight(0, 6, "k").highlight(7, 10, "cppd").done();
+
+ ArgumentCaptor<DefaultHighlighting> argCaptor = ArgumentCaptor.forClass(DefaultHighlighting.class);
+ verify(sensorStorage).store(argCaptor.capture());
+ assertThat(argCaptor.getValue().getSyntaxHighlightingRuleSet()).hasSize(2);
}
- @Test
- public void should_apply_registered_highlighting() throws Exception {
- Component component = mock(Component.class);
- when(component.key()).thenReturn("myComponent");
-
- ComponentDataCache cache = mock(ComponentDataCache.class);
-
- DefaultHighlightable highlightable = new DefaultHighlightable(component, cache);
- highlightable.newHighlighting()
- .highlight(0, 10, "k")
- .highlight(20, 30, "cppd")
- .done();
-
- ArgumentCaptor<SyntaxHighlightingData> argCaptor = ArgumentCaptor.forClass(SyntaxHighlightingData.class);
- verify(cache).setData(eq("myComponent"), eq(SnapshotDataTypes.SYNTAX_HIGHLIGHTING), argCaptor.capture());
- assertThat(argCaptor.getValue().writeString()).isEqualTo("0,10,k;20,30,cppd");
- }
}
package org.sonar.batch.source;
+import com.google.common.base.Strings;
+import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.fs.internal.FileMetadata;
import org.sonar.api.source.Symbol;
import org.sonar.api.source.Symbolizable;
+import java.io.StringReader;
+
import static org.assertj.core.api.Assertions.assertThat;
public class DefaultSymbolTableTest {
@Rule
public ExpectedException throwable = ExpectedException.none();
+ // Shared fixture: symbol tables are now built against an InputFile with real
+ // metadata (20 lines of "azerty\n") so offsets map to line/column positions.
+ private DefaultInputFile inputFile;
+
+ @Before
+ public void prepare() {
+ inputFile = new DefaultInputFile("foo", "src/Foo.php")
+ .initMetadata(new FileMetadata().readMetadata(new StringReader(Strings.repeat("azerty\n", 20))));
+ }
@Test
public void should_order_symbol_and_references() throws Exception {
- Symbolizable.SymbolTableBuilder symbolTableBuilder = new DefaultSymbolTable.Builder("foo");
+
+ Symbolizable.SymbolTableBuilder symbolTableBuilder = new DefaultSymbolTable.Builder(inputFile);
Symbol firstSymbol = symbolTableBuilder.newSymbol(10, 20);
symbolTableBuilder.newReference(firstSymbol, 32);
Symbol secondSymbol = symbolTableBuilder.newSymbol(84, 92);
Symbolizable.SymbolTable symbolTable = symbolTableBuilder.build();
assertThat(symbolTable.symbols()).containsExactly(firstSymbol, secondSymbol, thirdSymbol);
- assertThat(symbolTable.references(firstSymbol)).containsExactly(32);
- assertThat(symbolTable.references(secondSymbol)).containsExactly(124);
- assertThat(symbolTable.references(thirdSymbol)).containsExactly(70);
}
@Test
public void should_reject_reference_conflicting_with_declaration() throws Exception {
throwable.expect(UnsupportedOperationException.class);
- Symbolizable.SymbolTableBuilder symbolTableBuilder = new DefaultSymbolTable.Builder("foo");
+ Symbolizable.SymbolTableBuilder symbolTableBuilder = new DefaultSymbolTable.Builder(inputFile);
Symbol symbol = symbolTableBuilder.newSymbol(10, 20);
symbolTableBuilder.newReference(symbol, 15);
}
@Test
public void test_toString() throws Exception {
- Symbolizable.SymbolTableBuilder symbolTableBuilder = new DefaultSymbolTable.Builder("foo");
+ Symbolizable.SymbolTableBuilder symbolTableBuilder = new DefaultSymbolTable.Builder(inputFile);
Symbol symbol = symbolTableBuilder.newSymbol(10, 20);
- assertThat(symbol.toString()).isEqualTo("Symbol{offset=10-20}");
+ assertThat(symbol.toString()).isEqualTo("Symbol{range=Range[from [line=2, lineOffset=3] to [line=3, lineOffset=6]]}");
}
}
package org.sonar.batch.source;
+import com.google.common.base.Strings;
import org.junit.Test;
-import org.sonar.api.component.Component;
+import org.mockito.ArgumentCaptor;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.fs.internal.FileMetadata;
import org.sonar.api.source.Symbol;
import org.sonar.api.source.Symbolizable;
-import org.sonar.batch.index.ComponentDataCache;
-import org.sonar.batch.symbol.SymbolData;
-import org.sonar.core.source.SnapshotDataTypes;
+import org.sonar.batch.sensor.DefaultSensorStorage;
-import static org.mockito.Matchers.any;
+import java.io.StringReader;
+import java.util.Map;
+
+import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
public class DefaultSymbolizableTest {
@Test
public void should_update_cache_when_done() throws Exception {
- Component component = mock(Component.class);
- when(component.key()).thenReturn("myComponent");
-
- ComponentDataCache cache = mock(ComponentDataCache.class);
+ // Symbols are now stored through DefaultSensorStorage keyed by the input
+ // file, rather than serialized into the old ComponentDataCache.
+ DefaultSensorStorage sensorStorage = mock(DefaultSensorStorage.class);
+ DefaultInputFile inputFile = new DefaultInputFile("foo", "src/Foo.php")
+ .initMetadata(new FileMetadata().readMetadata(new StringReader(Strings.repeat("azerty\n", 20))));
- DefaultSymbolizable symbolPerspective = new DefaultSymbolizable(cache, component);
+ DefaultSymbolizable symbolPerspective = new DefaultSymbolizable(inputFile, sensorStorage);
Symbolizable.SymbolTableBuilder symbolTableBuilder = symbolPerspective.newSymbolTableBuilder();
Symbol firstSymbol = symbolTableBuilder.newSymbol(4, 8);
symbolTableBuilder.newReference(firstSymbol, 12);
symbolPerspective.setSymbolTable(symbolTable);
- verify(cache).setData(eq("myComponent"), eq(SnapshotDataTypes.SYMBOL_HIGHLIGHTING), any(SymbolData.class));
+ // Stored value is the symbol table itself; assert on the number of declared
+ // symbols rather than on a serialized representation.
+ ArgumentCaptor<Map> argCaptor = ArgumentCaptor.forClass(Map.class);
+ verify(sensorStorage).store(eq(inputFile), argCaptor.capture());
+ // Map<Symbol, Set<TextRange>>
+ assertThat(argCaptor.getValue().keySet()).hasSize(2);
}
}
package org.sonar.batch.source;
import org.junit.Test;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.sensor.internal.SensorStorage;
import org.sonar.api.component.Component;
import org.sonar.api.resources.File;
import org.sonar.api.resources.Project;
import org.sonar.api.resources.Resource;
import org.sonar.api.source.Highlightable;
-import org.sonar.batch.index.ComponentDataCache;
+import org.sonar.batch.index.BatchResource;
+import org.sonar.batch.index.ResourceCache;
import org.sonar.core.component.ResourceComponent;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
public class HighlightableBuilderTest {
- ComponentDataCache cache = mock(ComponentDataCache.class);
-
@Test
public void should_load_default_perspective() throws Exception {
Resource file = File.create("foo.c").setEffectiveKey("myproject:path/to/foo.c");
Component component = new ResourceComponent(file);
- HighlightableBuilder builder = new HighlightableBuilder(cache);
+ ResourceCache resourceCache = mock(ResourceCache.class);
+ when(resourceCache.get(file.getEffectiveKey())).thenReturn(new BatchResource(1, file, null).setInputPath(new DefaultInputFile("myproject", "path/to/foo.c")));
+ HighlightableBuilder builder = new HighlightableBuilder(resourceCache, mock(SensorStorage.class));
Highlightable perspective = builder.loadPerspective(Highlightable.class, component);
assertThat(perspective).isNotNull().isInstanceOf(DefaultHighlightable.class);
- assertThat(perspective.component()).isSameAs(component);
+ assertThat(perspective.component().key()).isEqualTo(component.key());
}
@Test
public void project_should_not_be_highlightable() {
Component component = new ResourceComponent(new Project("struts").setEffectiveKey("org.struts"));
- HighlightableBuilder builder = new HighlightableBuilder(cache);
+ HighlightableBuilder builder = new HighlightableBuilder(mock(ResourceCache.class), mock(SensorStorage.class));
Highlightable perspective = builder.loadPerspective(Highlightable.class, component);
assertThat(perspective).isNull();
package org.sonar.batch.source;
import org.junit.Test;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
import org.sonar.api.component.Component;
import org.sonar.api.component.Perspective;
+import org.sonar.api.resources.File;
+import org.sonar.api.resources.Project;
+import org.sonar.api.resources.Resource;
import org.sonar.api.source.Symbolizable;
-import org.sonar.batch.index.ComponentDataCache;
+import org.sonar.batch.index.BatchResource;
+import org.sonar.batch.index.ResourceCache;
+import org.sonar.batch.sensor.DefaultSensorStorage;
+import org.sonar.core.component.ResourceComponent;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
public class SymbolizableBuilderTest {
- ComponentDataCache dataCache = mock(ComponentDataCache.class);
-
@Test
public void should_load_perspective() throws Exception {
- Component component = mock(Component.class);
+ Resource file = File.create("foo.c").setEffectiveKey("myproject:path/to/foo.c");
+ Component component = new ResourceComponent(file);
+
+ ResourceCache resourceCache = mock(ResourceCache.class);
+ when(resourceCache.get(file.getEffectiveKey())).thenReturn(new BatchResource(1, file, null).setInputPath(new DefaultInputFile("myproject", "path/to/foo.c")));
- SymbolizableBuilder perspectiveBuilder = new SymbolizableBuilder(dataCache);
+ SymbolizableBuilder perspectiveBuilder = new SymbolizableBuilder(resourceCache, mock(DefaultSensorStorage.class));
Perspective perspective = perspectiveBuilder.loadPerspective(Symbolizable.class, component);
assertThat(perspective).isInstanceOf(Symbolizable.class);
- assertThat(perspective.component()).isEqualTo(component);
+ assertThat(perspective.component().key()).isEqualTo(component.key());
+ }
+
+ @Test
+ public void project_should_not_be_highlightable() {
+ Component component = new ResourceComponent(new Project("struts").setEffectiveKey("org.struts"));
+
+ SymbolizableBuilder builder = new SymbolizableBuilder(mock(ResourceCache.class), mock(DefaultSensorStorage.class));
+ Perspective perspective = builder.loadPerspective(Symbolizable.class, component);
+
+ assertThat(perspective).isNull();
}
}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-
-package org.sonar.batch.symbol;
-
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.mockito.ArgumentCaptor;
-import org.sonar.api.source.Symbol;
-import org.sonar.batch.index.ComponentDataCache;
-import org.sonar.core.source.SnapshotDataTypes;
-
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.Set;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-
-public class DefaultSymbolTableBuilderTest {
-
- @Rule
- public ExpectedException throwable = ExpectedException.none();
-
- @Test
- public void should_write_symbol_and_references() throws Exception {
- ComponentDataCache componentDataCache = mock(ComponentDataCache.class);
- DefaultSymbolTableBuilder symbolTableBuilder = new DefaultSymbolTableBuilder("foo", componentDataCache);
- Symbol firstSymbol = symbolTableBuilder.newSymbol(10, 20);
- symbolTableBuilder.newReference(firstSymbol, 32);
- Symbol secondSymbol = symbolTableBuilder.newSymbol(84, 92);
- symbolTableBuilder.newReference(secondSymbol, 124);
- Symbol thirdSymbol = symbolTableBuilder.newSymbol(55, 62);
- symbolTableBuilder.newReference(thirdSymbol, 70);
- symbolTableBuilder.done();
-
- ArgumentCaptor<SymbolData> argCaptor = ArgumentCaptor.forClass(SymbolData.class);
- verify(componentDataCache).setData(eq("foo"), eq(SnapshotDataTypes.SYMBOL_HIGHLIGHTING), argCaptor.capture());
-
- Map<org.sonar.api.source.Symbol, Set<Integer>> referencesBySymbol = argCaptor.getValue().referencesBySymbol();
-
- assertThat(new ArrayList<Symbol>(referencesBySymbol.keySet())).containsExactly(firstSymbol, secondSymbol, thirdSymbol);
- assertThat(new ArrayList<Integer>(referencesBySymbol.get(firstSymbol))).containsExactly(32);
- assertThat(new ArrayList<Integer>(referencesBySymbol.get(secondSymbol))).containsExactly(124);
- assertThat(new ArrayList<Integer>(referencesBySymbol.get(thirdSymbol))).containsExactly(70);
-
- assertThat(argCaptor.getValue().writeString()).isEqualTo("10,20,10,32;84,92,84,124;55,62,55,70");
- }
-
- @Test
- public void should_serialize_unused_symbol() throws Exception {
-
- ComponentDataCache componentDataCache = mock(ComponentDataCache.class);
- DefaultSymbolTableBuilder symbolTableBuilder = new DefaultSymbolTableBuilder("foo", componentDataCache);
- symbolTableBuilder.newSymbol(10, 20);
- symbolTableBuilder.done();
-
- ArgumentCaptor<SymbolData> argCaptor = ArgumentCaptor.forClass(SymbolData.class);
- verify(componentDataCache).setData(eq("foo"), eq(SnapshotDataTypes.SYMBOL_HIGHLIGHTING), argCaptor.capture());
-
- assertThat(argCaptor.getValue().writeString()).isEqualTo("10,20,10");
- }
-
- @Test
- public void should_reject_reference_conflicting_with_declaration() throws Exception {
- throwable.expect(UnsupportedOperationException.class);
-
- ComponentDataCache componentDataCache = mock(ComponentDataCache.class);
- DefaultSymbolTableBuilder symbolTableBuilder = new DefaultSymbolTableBuilder("foo", componentDataCache);
- Symbol symbol = symbolTableBuilder.newSymbol(10, 20);
- symbolTableBuilder.newReference(symbol, 15);
- }
-
- @Test
- public void should_reject_reference_from_another_file() throws Exception {
- throwable.expect(UnsupportedOperationException.class);
-
- ComponentDataCache componentDataCache = mock(ComponentDataCache.class);
- DefaultSymbolTableBuilder symbolTableBuilder = new DefaultSymbolTableBuilder("foo", componentDataCache);
- Symbol symbol = symbolTableBuilder.newSymbol(10, 20);
-
- DefaultSymbolTableBuilder symbolTableBuilder2 = new DefaultSymbolTableBuilder("foo2", componentDataCache);
- Symbol symbol2 = symbolTableBuilder2.newSymbol(30, 40);
-
- symbolTableBuilder.newReference(symbol2, 15);
- }
-
-}
protected final GraphPerspectiveLoader<T> perspectiveLoader;
protected GraphPerspectiveBuilder(ScanGraph graph, Class<T> perspectiveClass, EdgePath path,
- GraphPerspectiveLoader<T> perspectiveLoader) {
+ GraphPerspectiveLoader<T> perspectiveLoader) {
super(perspectiveClass);
this.graph = graph;
this.path = path;
public T create(ComponentVertex component) {
return (T) component.beanGraph().createAdjacentVertex(component, perspectiveLoader.getBeanClass(),
- perspectiveLoader.getPerspectiveKey());
+ perspectiveLoader.getPerspectiveKey());
}
public EdgePath path() {
}
@Override
- protected T loadPerspective(Class<T> perspectiveClass, Component component) {
+ public T loadPerspective(Class<T> perspectiveClass, Component component) {
ComponentVertex vertex;
if (component instanceof ComponentVertex) {
vertex = (ComponentVertex) component;
this.perspectiveClass = perspectiveClass;
}
- protected Class<T> getPerspectiveClass() {
+ public Class<T> getPerspectiveClass() {
return perspectiveClass;
}
@CheckForNull
- protected abstract T loadPerspective(Class<T> perspectiveClass, Component component);
+ public abstract T loadPerspective(Class<T> perspectiveClass, Component component);
}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.core.component;
-
-import com.google.common.collect.Maps;
-import org.sonar.api.batch.SonarIndex;
-import org.sonar.api.batch.fs.InputDir;
-import org.sonar.api.batch.fs.InputFile;
-import org.sonar.api.batch.fs.InputPath;
-import org.sonar.api.component.Component;
-import org.sonar.api.component.Perspective;
-import org.sonar.api.component.ResourcePerspectives;
-import org.sonar.api.resources.Directory;
-import org.sonar.api.resources.File;
-import org.sonar.api.resources.Resource;
-
-import javax.annotation.CheckForNull;
-import java.util.Map;
-
-public class ScanPerspectives implements ResourcePerspectives {
-
- private final Map<Class<?>, PerspectiveBuilder<?>> builders = Maps.newHashMap();
- private final SonarIndex resourceIndex;
-
- public ScanPerspectives(PerspectiveBuilder[] builders, SonarIndex resourceIndex) {
- this.resourceIndex = resourceIndex;
- for (PerspectiveBuilder builder : builders) {
- // TODO check duplications
- this.builders.put(builder.getPerspectiveClass(), builder);
- }
- }
-
- @Override
- @CheckForNull
- public <P extends Perspective> P as(Class<P> perspectiveClass, Component component) {
- if (component.key() == null) {
- return null;
- }
- PerspectiveBuilder<P> builder = builderFor(perspectiveClass);
- return builder.loadPerspective(perspectiveClass, component);
- }
-
- @Override
- @CheckForNull
- public <P extends Perspective> P as(Class<P> perspectiveClass, Resource resource) {
- Resource indexedResource = resource;
- if (resource.getEffectiveKey() == null) {
- indexedResource = resourceIndex.getResource(resource);
- }
- if (indexedResource != null) {
- return as(perspectiveClass, new ResourceComponent(indexedResource));
- }
- return null;
- }
-
- @Override
- public <P extends Perspective> P as(Class<P> perspectiveClass, InputPath inputPath) {
- Resource r;
- if (inputPath instanceof InputDir) {
- r = Directory.create(((InputDir) inputPath).relativePath());
- } else if (inputPath instanceof InputFile) {
- r = File.create(((InputFile) inputPath).relativePath());
- } else {
- throw new IllegalArgumentException("Unknow input path type: " + inputPath);
- }
- return as(perspectiveClass, r);
- }
-
- private <T extends Perspective> PerspectiveBuilder<T> builderFor(Class<T> clazz) {
- PerspectiveBuilder<T> builder = (PerspectiveBuilder<T>) builders.get(clazz);
- if (builder == null) {
- throw new PerspectiveNotFoundException("Perspective class is not registered: " + clazz);
- }
- return builder;
- }
-}
public void testGetPerspectiveClass() throws Exception {
PerspectiveBuilder<FakePerspective> builder = new PerspectiveBuilder<FakePerspective>(FakePerspective.class) {
@Override
- protected FakePerspective loadPerspective(Class<FakePerspective> perspectiveClass, Component component) {
+ public FakePerspective loadPerspective(Class<FakePerspective> perspectiveClass, Component component) {
return null;
}
};
*/
package org.sonar.api.batch.fs;
+import org.sonar.api.batch.fs.internal.DefaultInputFile;
+
import javax.annotation.CheckForNull;
import java.io.File;
/**
* This layer over {@link java.io.File} adds information for code analyzers.
+ * For unit testing purposes you can create a {@link DefaultInputFile} and initialize
+ * all fields using
+ *
+ * <pre>
+ * new DefaultInputFile("moduleKey", "relative/path/from/module/baseDir.java")
+ * .setModuleBaseDir(path)
+ * .initMetadata(new FileMetadata().readMetadata(someReader));
+ * </pre>
*
* @since 4.2
*/
*/
int lines();
+  /**
+   * Returns a {@link TextPointer} in the given file.
+   * @param line Line of the pointer. Starts at 1.
+   * @param lineOffset Offset in the line. Starts at 0.
+   * @throws IllegalArgumentException if the line or the offset is not valid for the given file.
+   */
+ TextPointer newPointer(int line, int lineOffset);
+
+  /**
+   * Returns a {@link TextRange} in the given file.
+   * @param start Start position of the range.
+   * @param end End position of the range.
+   * @throws IllegalArgumentException if the start or end pointer is not valid for the given file.
+   */
+ TextRange newRange(TextPointer start, TextPointer end);
+
}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.api.batch.fs;
+
+/**
+ * Represents a position (line, line offset) in a text file {@link InputFile}.
+ * Pointers are comparable: ordering is by line first, then by offset within the line.
+ *
+ * @since 5.2
+ */
+public interface TextPointer extends Comparable<TextPointer> {
+
+  /**
+   * The logical line where this pointer is located. First line is 1.
+   */
+  int line();
+
+  /**
+   * The offset of this pointer in the current line. First position in a line is 0.
+   */
+  int lineOffset();
+
+}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.api.batch.fs;
+
+/**
+ * Represents a text range in an {@link InputFile}, delimited by a start and an end {@link TextPointer}.
+ *
+ * @since 5.2
+ */
+public interface TextRange {
+
+  /**
+   * Start position of the range.
+   */
+  TextPointer start();
+
+  /**
+   * End position of the range.
+   */
+  TextPointer end();
+
+  /**
+   * Test if the current range has some common area with another range. Comparison is strict,
+   * so two ranges that merely touch do not overlap.
+   * Example: say the two ranges are on the same line. Range with offsets [1,3] overlaps range with offsets [2,4] but not
+   * range with offsets [3,5].
+   */
+  boolean overlap(TextRange another);
+
+}
*/
package org.sonar.api.batch.fs.internal;
+import com.google.common.base.Preconditions;
import org.sonar.api.batch.fs.InputFile;
+import org.sonar.api.batch.fs.TextPointer;
+import org.sonar.api.batch.fs.TextRange;
+import org.sonar.api.batch.fs.internal.FileMetadata.Metadata;
import org.sonar.api.utils.PathUtils;
import javax.annotation.CheckForNull;
import java.io.File;
import java.nio.charset.Charset;
import java.nio.file.Path;
+import java.util.Arrays;
/**
* @since 4.2
private String language;
private Type type = Type.MAIN;
private Status status;
- private int lines;
+ private int lines = -1;
private Charset charset;
- private int lastValidOffset;
+ private int lastValidOffset = -1;
+ private String hash;
+ private int nonBlankLines;
+ private int[] originalLineOffsets;
+ private boolean empty;
public DefaultInputFile(String moduleKey, String relativePath) {
this.moduleKey = moduleKey;
}
public int lastValidOffset() {
+ Preconditions.checkState(lastValidOffset >= 0, "InputFile is not properly initialized. Please set 'lastValidOffset' property.");
return lastValidOffset;
}
return this;
}
+ /**
+ * Digest hash of the file.
+ */
+ public String hash() {
+ return hash;
+ }
+
+ public int nonBlankLines() {
+ return nonBlankLines;
+ }
+
+ public int[] originalLineOffsets() {
+ Preconditions.checkState(originalLineOffsets != null, "InputFile is not properly initialized. Please set 'originalLineOffsets' property.");
+ Preconditions.checkState(originalLineOffsets.length == lines, "InputFile is not properly initialized. 'originalLineOffsets' property length should be equal to 'lines'");
+ return originalLineOffsets;
+ }
+
+ public DefaultInputFile setHash(String hash) {
+ this.hash = hash;
+ return this;
+ }
+
+ public DefaultInputFile setNonBlankLines(int nonBlankLines) {
+ this.nonBlankLines = nonBlankLines;
+ return this;
+ }
+
+ public DefaultInputFile setOriginalLineOffsets(int[] originalLineOffsets) {
+ this.originalLineOffsets = originalLineOffsets;
+ return this;
+ }
+
+ public boolean isEmpty() {
+ return this.empty;
+ }
+
+ public DefaultInputFile setEmpty(boolean empty) {
+ this.empty = empty;
+ return this;
+ }
+
+  @Override
+  public TextPointer newPointer(int line, int lineOffset) {
+    DefaultTextPointer textPointer = new DefaultTextPointer(line, lineOffset);
+    checkValid(textPointer, "pointer");
+    return textPointer;
+  }
+
+  // Validates that the pointer designates an existing position in this file;
+  // throws IllegalArgumentException otherwise. 'owner' is only used in error messages.
+  private void checkValid(TextPointer pointer, String owner) {
+    Preconditions.checkArgument(pointer.line() >= 1, "%s is not a valid line for a file", pointer.line());
+    Preconditions.checkArgument(pointer.line() <= this.lines, "%s is not a valid line for %s. File %s has %s line(s)", pointer.line(), owner, this, lines);
+    Preconditions.checkArgument(pointer.lineOffset() >= 0, "%s is not a valid line offset for a file", pointer.lineOffset());
+    int lineLength = lineLength(pointer.line());
+    Preconditions.checkArgument(pointer.lineOffset() <= lineLength,
+      "%s is not a valid line offset for %s. File %s has %s character(s) at line %s", pointer.lineOffset(), owner, this, lineLength, pointer.line());
+  }
+
+  // Length of the given 1-based line, derived from consecutive line start offsets.
+  private int lineLength(int line) {
+    return lastValidGlobalOffsetForLine(line) - originalLineOffsets()[line - 1];
+  }
+
+  // Last valid global offset on the given line: start of the next line minus one,
+  // or the file's last valid offset when 'line' is the last line.
+  private int lastValidGlobalOffsetForLine(int line) {
+    return line < this.lines ? (originalLineOffsets()[line] - 1) : lastValidOffset();
+  }
+
+  @Override
+  public TextRange newRange(TextPointer start, TextPointer end) {
+    checkValid(start, "start pointer");
+    checkValid(end, "end pointer");
+    // Empty or reversed ranges are rejected: start must be strictly before end.
+    Preconditions.checkArgument(start.compareTo(end) < 0, "Start pointer %s should be before end pointer %s", start, end);
+    return new DefaultTextRange(start, end);
+  }
+
+  /**
+   * Create Range from global offsets. Used for backward compatibility with older API.
+   */
+  public TextRange newRange(int startOffset, int endOffset) {
+    return newRange(newPointer(startOffset), newPointer(endOffset));
+  }
+
+  /**
+   * Create a pointer from a global (file-wide) character offset.
+   * Used for backward compatibility with older API.
+   */
+  public TextPointer newPointer(int globalOffset) {
+    Preconditions.checkArgument(globalOffset >= 0, "%s is not a valid offset for a file", globalOffset);
+    Preconditions.checkArgument(globalOffset <= lastValidOffset(), "%s is not a valid offset for file %s. Max offset is %s", globalOffset, this, lastValidOffset());
+    int line = findLine(globalOffset);
+    int startLineOffset = originalLineOffsets()[line - 1];
+    return new DefaultTextPointer(line, globalOffset - startLineOffset);
+  }
+
+  // Binary search over line start offsets. Yields the 1-based line containing the
+  // offset both when the offset is exactly a line start (non-negative hit at index
+  // line-1) and when it falls inside a line (negative insertion point).
+  private int findLine(int globalOffset) {
+    return Math.abs(Arrays.binarySearch(originalLineOffsets(), globalOffset) + 1);
+  }
+
+  // Copies computed file metadata (line count, offsets, hash, ...) into this input file.
+  public DefaultInputFile initMetadata(Metadata metadata) {
+    this.setLines(metadata.lines);
+    this.setLastValidOffset(metadata.lastValidOffset);
+    this.setNonBlankLines(metadata.nonBlankLines);
+    this.setHash(metadata.hash);
+    this.setOriginalLineOffsets(metadata.originalLineOffsets);
+    this.setEmpty(metadata.empty);
+    return this;
+  }
+
@Override
public boolean equals(Object o) {
if (this == o) {
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.api.batch.fs.internal;
+
+import org.sonar.api.batch.fs.TextPointer;
+
+/**
+ * Default implementation of {@link TextPointer}. Immutable value object: two pointers
+ * are equal when both their line and line offset are equal.
+ *
+ * @since 5.2
+ */
+public class DefaultTextPointer implements TextPointer {
+
+  private final int line;
+  private final int lineOffset;
+
+  /**
+   * @param line 1-based line number
+   * @param lineOffset 0-based offset within the line
+   */
+  public DefaultTextPointer(int line, int lineOffset) {
+    this.line = line;
+    this.lineOffset = lineOffset;
+  }
+
+  @Override
+  public int line() {
+    return line;
+  }
+
+  @Override
+  public int lineOffset() {
+    return lineOffset;
+  }
+
+  @Override
+  public String toString() {
+    return "[line=" + line + ", lineOffset=" + lineOffset + "]";
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!(obj instanceof DefaultTextPointer)) {
+      return false;
+    }
+    DefaultTextPointer other = (DefaultTextPointer) obj;
+    return other.line == this.line && other.lineOffset == this.lineOffset;
+  }
+
+  @Override
+  public int hashCode() {
+    return 37 * this.line + lineOffset;
+  }
+
+  @Override
+  public int compareTo(TextPointer o) {
+    // Order by line first, then by offset within the line.
+    if (this.line == o.line()) {
+      return Integer.compare(this.lineOffset, o.lineOffset());
+    }
+    return Integer.compare(this.line, o.line());
+  }
+
+}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.api.batch.fs.internal;
+
+import org.sonar.api.batch.fs.TextPointer;
+import org.sonar.api.batch.fs.TextRange;
+
+/**
+ * Default implementation of {@link TextRange}: an immutable pair of
+ * start and end {@link TextPointer}s.
+ *
+ * @since 5.2
+ */
+public class DefaultTextRange implements TextRange {
+
+  private final TextPointer start;
+  private final TextPointer end;
+
+  public DefaultTextRange(TextPointer start, TextPointer end) {
+    this.start = start;
+    this.end = end;
+  }
+
+  @Override
+  public TextPointer start() {
+    return start;
+  }
+
+  @Override
+  public TextPointer end() {
+    return end;
+  }
+
+  @Override
+  public boolean overlap(TextRange another) {
+    // Ranges [A,B] and [C,D] overlap when B > C && D > A.
+    // Comparisons are strict, so ranges that merely touch do not overlap.
+    return this.end.compareTo(another.start()) > 0 && another.end().compareTo(this.start) > 0;
+  }
+
+  @Override
+  public String toString() {
+    return "Range[from " + start + " to " + end + "]";
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!(obj instanceof DefaultTextRange)) {
+      return false;
+    }
+    DefaultTextRange other = (DefaultTextRange) obj;
+    return start.equals(other.start) && end.equals(other.end);
+  }
+
+  @Override
+  public int hashCode() {
+    return start.hashCode() * 17 + end.hashCode();
+  }
+
+}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.api.batch.fs.internal;
+
+import com.google.common.base.Charsets;
+import com.google.common.primitives.Ints;
+import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.io.ByteOrderMark;
+import org.apache.commons.io.input.BOMInputStream;
+import org.sonar.api.BatchComponent;
+import org.sonar.api.CoreProperties;
+import org.sonar.api.utils.log.Logger;
+import org.sonar.api.utils.log.Loggers;
+
+import javax.annotation.CheckForNull;
+import javax.annotation.Nullable;
+
+import java.io.*;
+import java.nio.charset.Charset;
+import java.security.MessageDigest;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Computes hashes of files. Line endings are ignored, so files with the
+ * same content but different end-of-line encodings have the same hash.
+ */
+public class FileMetadata implements BatchComponent {
+
+ private static final Logger LOG = Loggers.get(FileMetadata.class);
+
+ private static final char LINE_FEED = '\n';
+ private static final char CARRIAGE_RETURN = '\r';
+
+ private abstract static class CharHandler {
+
+ void handleAll(char c) {
+ }
+
+ void handleIgnoreEoL(char c) {
+ }
+
+ void newLine() {
+ }
+
+ void eof() {
+ }
+ }
+
+ private static class LineCounter extends CharHandler {
+ private boolean empty = true;
+ private int lines = 1;
+ private int nonBlankLines = 0;
+ private boolean blankLine = true;
+ boolean alreadyLoggedInvalidCharacter = false;
+ private final File file;
+ private final Charset encoding;
+
+ LineCounter(File file, Charset encoding) {
+ this.file = file;
+ this.encoding = encoding;
+ }
+
+ @Override
+ void handleAll(char c) {
+ this.empty = false;
+ if (!alreadyLoggedInvalidCharacter && c == '\ufffd') {
+ LOG.warn("Invalid character encountered in file {} at line {} for encoding {}. Please fix file content or configure the encoding to be used using property '{}'.", file,
+ lines, encoding, CoreProperties.ENCODING_PROPERTY);
+ alreadyLoggedInvalidCharacter = true;
+ }
+ }
+
+ @Override
+ void newLine() {
+ lines++;
+ if (!blankLine) {
+ nonBlankLines++;
+ }
+ blankLine = true;
+ }
+
+ @Override
+ void handleIgnoreEoL(char c) {
+ if (!Character.isWhitespace(c)) {
+ blankLine = false;
+ }
+ }
+
+ @Override
+ void eof() {
+ if (!blankLine) {
+ nonBlankLines++;
+ }
+ }
+
+ public int lines() {
+ return lines;
+ }
+
+ public int nonBlankLines() {
+ return nonBlankLines;
+ }
+
+ public boolean isEmpty() {
+ return empty;
+ }
+ }
+
+ private static class FileHashComputer extends CharHandler {
+ private MessageDigest globalMd5Digest = DigestUtils.getMd5Digest();
+ private StringBuilder sb = new StringBuilder();
+
+ @Override
+ void handleIgnoreEoL(char c) {
+ sb.append(c);
+ }
+
+ @Override
+ void newLine() {
+ sb.append(LINE_FEED);
+ globalMd5Digest.update(sb.toString().getBytes(Charsets.UTF_8));
+ sb.setLength(0);
+ }
+
+ @Override
+ void eof() {
+ if (sb.length() > 0) {
+ globalMd5Digest.update(sb.toString().getBytes(Charsets.UTF_8));
+ }
+ }
+
+ @CheckForNull
+ public String getHash() {
+ return Hex.encodeHexString(globalMd5Digest.digest());
+ }
+ }
+
+ private static class LineHashComputer extends CharHandler {
+ private final MessageDigest lineMd5Digest = DigestUtils.getMd5Digest();
+ private final StringBuilder sb = new StringBuilder();
+ private final LineHashConsumer consumer;
+ private int line = 1;
+
+ public LineHashComputer(LineHashConsumer consumer) {
+ this.consumer = consumer;
+ }
+
+ @Override
+ void handleIgnoreEoL(char c) {
+ if (!Character.isWhitespace(c)) {
+ sb.append(c);
+ }
+ }
+
+ @Override
+ void newLine() {
+ consumer.consume(line, sb.length() > 0 ? lineMd5Digest.digest(sb.toString().getBytes(Charsets.UTF_8)) : null);
+ sb.setLength(0);
+ line++;
+ }
+
+ @Override
+ void eof() {
+ consumer.consume(line, sb.length() > 0 ? lineMd5Digest.digest(sb.toString().getBytes(Charsets.UTF_8)) : null);
+ }
+
+ }
+
+  // Records the global character offset at which each line starts, and the
+  // last valid offset of the file (i.e. the offset just past the final character).
+  private static class LineOffsetCounter extends CharHandler {
+    // Running count of characters seen so far (global offset).
+    private int currentOriginalOffset = 0;
+    // Entry i is the global offset of the start of line i+1.
+    private List<Integer> originalLineOffsets = new ArrayList<Integer>();
+    private int lastValidOffset = 0;
+
+    public LineOffsetCounter() {
+      // First line always starts at offset 0.
+      originalLineOffsets.add(0);
+    }
+
+    @Override
+    void handleAll(char c) {
+      currentOriginalOffset++;
+    }
+
+    @Override
+    void newLine() {
+      originalLineOffsets.add(currentOriginalOffset);
+    }
+
+    @Override
+    void eof() {
+      lastValidOffset = currentOriginalOffset;
+    }
+
+    public List<Integer> getOriginalLineOffsets() {
+      return originalLineOffsets;
+    }
+
+    public int getLastValidOffset() {
+      return lastValidOffset;
+    }
+
+  }
+
+ /**
+ * Reads the file once and computes all line-based metadata in a single
+ * pass: line count, non-blank line count, a line-ending-agnostic MD5
+ * hash, the start offset of each line, and the last valid offset.
+ * Maximum performance is needed.
+ *
+ * @throws IllegalStateException (from readFile) if the file cannot be read
+ */
+ public Metadata readMetadata(File file, Charset encoding) {
+ LineCounter lineCounter = new LineCounter(file, encoding);
+ FileHashComputer fileHashComputer = new FileHashComputer();
+ LineOffsetCounter lineOffsetCounter = new LineOffsetCounter();
+ readFile(file, encoding, lineCounter, fileHashComputer, lineOffsetCounter);
+ return new Metadata(lineCounter.lines(), lineCounter.nonBlankLines(), fileHashComputer.getHash(), lineOffsetCounter.getOriginalLineOffsets(),
+ lineOffsetCounter.getLastValidOffset(),
+ lineCounter.isEmpty());
+ }
+
+ /**
+ * For testing purposes: computes the same metadata as
+ * {@link #readMetadata(File, Charset)} but from an in-memory reader.
+ */
+ public Metadata readMetadata(Reader reader) {
+ // The file name and encoding are placeholders — no file is accessed here;
+ // presumably the LineCounter only uses them for reporting. TODO confirm.
+ LineCounter lineCounter = new LineCounter(new File("fromString"), Charsets.UTF_16);
+ FileHashComputer fileHashComputer = new FileHashComputer();
+ LineOffsetCounter lineOffsetCounter = new LineOffsetCounter();
+ try {
+ read(reader, lineCounter, fileHashComputer, lineOffsetCounter);
+ } catch (IOException e) {
+ // Reading from an in-memory Reader is not expected to fail.
+ throw new IllegalStateException("Should never occur", e);
+ }
+ return new Metadata(lineCounter.lines(), lineCounter.nonBlankLines(), fileHashComputer.getHash(), lineOffsetCounter.getOriginalLineOffsets(),
+ lineOffsetCounter.getLastValidOffset(),
+ lineCounter.isEmpty());
+ }
+
+ // Opens the file with BOM auto-detection (the BOM, when present, is
+ // excluded from the character stream — see start_with_bom expectations)
+ // and streams it through read(). Any I/O failure is wrapped with the
+ // file path and encoding for context.
+ private static void readFile(File file, Charset encoding, CharHandler... handlers) {
+ try (BOMInputStream bomIn = new BOMInputStream(new FileInputStream(file),
+ ByteOrderMark.UTF_8, ByteOrderMark.UTF_16LE, ByteOrderMark.UTF_16BE, ByteOrderMark.UTF_32LE, ByteOrderMark.UTF_32BE);
+ Reader reader = new BufferedReader(new InputStreamReader(bomIn, encoding))) {
+ read(reader, handlers);
+ } catch (IOException e) {
+ throw new IllegalStateException(String.format("Fail to read file '%s' with encoding '%s'", file.getAbsolutePath(), encoding), e);
+ }
+ }
+
+ /**
+ * Streams the reader character by character, dispatching to all handlers.
+ * "\n", "\r\n" and a lone "\r" each count as a single line break:
+ * handleAll() sees every character (EOLs included), handleIgnoreEoL()
+ * sees every character except EOLs, newLine() fires once per line break,
+ * and eof() fires once at end of input.
+ *
+ * Fix: in the original, a character following a lone '\r' was dispatched
+ * BEFORE newLine(), attributing it (and its offset) to the previous line,
+ * and a trailing lone '\r' never triggered newLine() at all. "\n" and
+ * "\r\n" handling is unchanged.
+ */
+ private static void read(Reader reader, CharHandler... handlers) throws IOException {
+ int i = reader.read();
+ boolean afterCR = false;
+ while (i != -1) {
+ char c = (char) i;
+ if (afterCR) {
+ // Previous character was '\r': decide now whether it started a
+ // Windows "\r\n" break or was a lone (classic Mac) line break.
+ for (CharHandler handler : handlers) {
+ if (c == LINE_FEED) {
+ // "\r\n": a single break, emitted after consuming the '\n'.
+ handler.handleAll(c);
+ handler.newLine();
+ } else if (c == CARRIAGE_RETURN) {
+ // "\r\r": close the previous line before counting this '\r'.
+ handler.newLine();
+ handler.handleAll(c);
+ } else {
+ // Lone "\r": close the previous line, then treat the current
+ // character as the first of the new line.
+ handler.newLine();
+ handler.handleIgnoreEoL(c);
+ handler.handleAll(c);
+ }
+ }
+ afterCR = c == CARRIAGE_RETURN;
+ } else if (c == LINE_FEED) {
+ for (CharHandler handler : handlers) {
+ handler.handleAll(c);
+ handler.newLine();
+ }
+ } else if (c == CARRIAGE_RETURN) {
+ // Defer the break until the next character disambiguates "\r\n"
+ // from a lone "\r".
+ afterCR = true;
+ for (CharHandler handler : handlers) {
+ handler.handleAll(c);
+ }
+ } else {
+ for (CharHandler handler : handlers) {
+ handler.handleIgnoreEoL(c);
+ handler.handleAll(c);
+ }
+ }
+ i = reader.read();
+ }
+ if (afterCR) {
+ // Input ended with a lone '\r': it is still a line break.
+ for (CharHandler handler : handlers) {
+ handler.newLine();
+ }
+ }
+ for (CharHandler handler : handlers) {
+ handler.eof();
+ }
+ }
+
+ /**
+ * Immutable result of a single-pass file read: line structure, content
+ * hash and offset table.
+ */
+ public static class Metadata {
+ // Total number of lines, blank lines included.
+ final int lines;
+ // Lines containing at least one non-whitespace character.
+ final int nonBlankLines;
+ // MD5 hex hash of the content with line endings normalized to '\n'.
+ final String hash;
+ // Start offset (in characters) of each line; originalLineOffsets[0] == 0.
+ final int[] originalLineOffsets;
+ // Total character count of the file.
+ final int lastValidOffset;
+ // Whether the file contained no characters (see LineCounter.isEmpty()).
+ final boolean empty;
+
+ private Metadata(int lines, int nonBlankLines, String hash, List<Integer> originalLineOffsets, int lastValidOffset, boolean empty) {
+ this.lines = lines;
+ this.nonBlankLines = nonBlankLines;
+ this.hash = hash;
+ this.empty = empty;
+ this.originalLineOffsets = Ints.toArray(originalLineOffsets);
+ this.lastValidOffset = lastValidOffset;
+ }
+ }
+
+ /**
+ * Callback receiving, for each 1-based line index, the MD5 hash of the
+ * line's whitespace-stripped content, or null for a blank line.
+ */
+ public static interface LineHashConsumer {
+
+ void consume(int lineIdx, @Nullable byte[] hash);
+
+ }
+
+ /**
+ * Computes an MD5 hash of each line of the file after removing all
+ * whitespace characters, feeding each hash to the given consumer.
+ * Used for issue tracking.
+ */
+ public static void computeLineHashesForIssueTracking(DefaultInputFile f, LineHashConsumer consumer) {
+ readFile(f.file(), f.charset(), new LineHashComputer(consumer));
+ }
+}
public class DefaultHighlighting extends DefaultStorable implements NewHighlighting {
- private InputFile inputFile;
+ private DefaultInputFile inputFile;
private Set<SyntaxHighlightingRule> syntaxHighlightingRuleSet;
public DefaultHighlighting() {
syntaxHighlightingRuleSet = Sets.newTreeSet(new Comparator<SyntaxHighlightingRule>() {
@Override
public int compare(SyntaxHighlightingRule left, SyntaxHighlightingRule right) {
- int result = left.getStartPosition() - right.getStartPosition();
+ int result = left.range().start().compareTo(right.range().start());
if (result == 0) {
- result = right.getEndPosition() - left.getEndPosition();
+ result = right.range().end().compareTo(left.range().end());
}
return result;
}
SyntaxHighlightingRule previous = it.next();
while (it.hasNext()) {
SyntaxHighlightingRule current = it.next();
- if (previous.getEndPosition() > current.getStartPosition() && !(previous.getEndPosition() >= current.getEndPosition())) {
- String errorMsg = String.format("Cannot register highlighting rule for characters from %s to %s as it " +
- "overlaps at least one existing rule", current.getStartPosition(), current.getEndPosition());
+ if (previous.range().end().compareTo(current.range().start()) > 0 && !(previous.range().end().compareTo(current.range().end()) >= 0)) {
+ String errorMsg = String.format("Cannot register highlighting rule for characters at %s as it " +
+ "overlaps at least one existing rule", current.range());
throw new IllegalStateException(errorMsg);
}
previous = current;
@Override
public DefaultHighlighting onFile(InputFile inputFile) {
Preconditions.checkNotNull(inputFile, "file can't be null");
- this.inputFile = inputFile;
+ // Downcast: the public API accepts InputFile, but highlight() relies on
+ // DefaultInputFile internals (newRange); other implementations would
+ // fail here with a ClassCastException.
+ this.inputFile = (DefaultInputFile) inputFile;
return this;
}
@Override
public DefaultHighlighting highlight(int startOffset, int endOffset, TypeOfText typeOfText) {
Preconditions.checkState(inputFile != null, "Call onFile() first");
- int maxValidOffset = ((DefaultInputFile) inputFile).lastValidOffset();
- checkOffset(startOffset, maxValidOffset, "startOffset");
- checkOffset(endOffset, maxValidOffset, "endOffset");
- Preconditions.checkArgument(startOffset < endOffset, "startOffset (" + startOffset + ") should be < endOffset (" + endOffset + ") for file " + inputFile + ".");
- SyntaxHighlightingRule syntaxHighlightingRule = SyntaxHighlightingRule.create(startOffset, endOffset,
- typeOfText);
+ // Offset validation is delegated to newRange(), which checks offsets
+ // against the file metadata — presumably replacing the removed manual
+ // checks; NOTE(review): confirm newRange rejects the same invalid inputs.
+ SyntaxHighlightingRule syntaxHighlightingRule = SyntaxHighlightingRule.create(inputFile.newRange(startOffset, endOffset), typeOfText);
this.syntaxHighlightingRuleSet.add(syntaxHighlightingRule);
return this;
}
- private void checkOffset(int offset, int maxValidOffset, String label) {
- Preconditions.checkArgument(offset >= 0 && offset <= maxValidOffset, "Invalid " + label + " " + offset + ". Should be >= 0 and <= " + maxValidOffset
- + " for file " + inputFile);
- }
-
@Override
protected void doSave() {
Preconditions.checkState(inputFile != null, "Call onFile() first");
*/
package org.sonar.api.batch.sensor.highlighting.internal;
+import org.apache.commons.lang.builder.ReflectionToStringBuilder;
+import org.apache.commons.lang.builder.ToStringStyle;
+import org.sonar.api.batch.fs.TextRange;
import org.sonar.api.batch.sensor.highlighting.TypeOfText;
-import java.io.Serializable;
+public class SyntaxHighlightingRule {
-public class SyntaxHighlightingRule implements Serializable {
-
- private final int startPosition;
- private final int endPosition;
+ private final TextRange range;
private final TypeOfText textType;
- private SyntaxHighlightingRule(int startPosition, int endPosition, TypeOfText textType) {
- this.startPosition = startPosition;
- this.endPosition = endPosition;
+ private SyntaxHighlightingRule(TextRange range, TypeOfText textType) {
+ this.range = range;
this.textType = textType;
}
- public static SyntaxHighlightingRule create(int startPosition, int endPosition, TypeOfText textType) {
- return new SyntaxHighlightingRule(startPosition, endPosition, textType);
- }
-
- public int getStartPosition() {
- return startPosition;
+ public static SyntaxHighlightingRule create(TextRange range, TypeOfText textType) {
+ return new SyntaxHighlightingRule(range, textType);
}
- public int getEndPosition() {
- return endPosition;
+ public TextRange range() {
+ return range;
}
public TypeOfText getTextType() {
@Override
public String toString() {
- return "" + startPosition + "," + endPosition + "," + textType.cssClass();
+ return ReflectionToStringBuilder.toString(this, ToStringStyle.SIMPLE_STYLE);
}
}
import org.sonar.api.batch.fs.internal.DefaultFileSystem;
import org.sonar.api.batch.fs.internal.DefaultInputDir;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.fs.internal.DefaultTextPointer;
import org.sonar.api.batch.rule.ActiveRules;
import org.sonar.api.batch.rule.internal.ActiveRulesBuilder;
import org.sonar.api.batch.sensor.Sensor;
import java.io.File;
import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
/**
* Utility class to help testing {@link Sensor}.
return new DefaultHighlighting(sensorStorage);
}
- public List<TypeOfText> highlightingTypeFor(String componentKey, int charIndex) {
+ public List<TypeOfText> highlightingTypeAt(String componentKey, int line, int lineOffset) {
DefaultHighlighting syntaxHighlightingData = sensorStorage.highlightingByComponent.get(componentKey);
if (syntaxHighlightingData == null) {
return Collections.emptyList();
}
List<TypeOfText> result = new ArrayList<TypeOfText>();
+ DefaultTextPointer location = new DefaultTextPointer(line, lineOffset);
for (SyntaxHighlightingRule sortedRule : syntaxHighlightingData.getSyntaxHighlightingRuleSet()) {
- if (sortedRule.getStartPosition() <= charIndex && sortedRule.getEndPosition() > charIndex) {
+ if (sortedRule.range().start().compareTo(location) <= 0 && sortedRule.range().end().compareTo(location) > 0) {
result.add(sortedRule.getTextType());
}
}
*/
package org.sonar.api.batch.sensor.internal;
+import org.sonar.api.BatchComponent;
import org.sonar.api.batch.sensor.dependency.Dependency;
import org.sonar.api.batch.sensor.duplication.Duplication;
import org.sonar.api.batch.sensor.highlighting.internal.DefaultHighlighting;
* Interface for storing data computed by sensors.
* @since 5.1
*/
-public interface SensorStorage {
+public interface SensorStorage extends BatchComponent {
void store(Measure measure);
import org.sonar.api.BatchComponent;
import org.sonar.api.ServerComponent;
+/**
+ * @deprecated since 5.2 unused
+ */
+@Deprecated
public interface Perspectives extends BatchComponent, ServerComponent {
<P extends Perspective> P as(Class<P> perspectiveClass, Component component);
import org.sonar.api.config.PropertyDefinitions;
import javax.annotation.Nullable;
+
import java.util.Collection;
import java.util.List;
if (component instanceof ComponentAdapter) {
pico.addAdapter((ComponentAdapter) component);
} else {
- pico.as(singleton ? Characteristics.CACHE : Characteristics.NO_CACHE).addComponent(key, component);
+ try {
+ pico.as(singleton ? Characteristics.CACHE : Characteristics.NO_CACHE).addComponent(key, component);
+ } catch (Throwable t) {
+ throw new IllegalStateException("Unable to register component " + getName(component), t);
+ }
declareExtension(null, component);
}
return this;
public interface Symbol {
+ /**
+ * @deprecated in 5.2 not used.
+ */
+ @Deprecated
int getDeclarationStartOffset();
+ /**
+ * @deprecated in 5.2 not used.
+ */
+ @Deprecated
int getDeclarationEndOffset();
/**
- * @since unused
* @deprecated in 4.3 not used.
*/
@Deprecated
List<Symbol> symbols();
+ /**
+ * @deprecated since 5.2 not used
+ */
+ @Deprecated
List<Integer> references(Symbol symbol);
}
import java.io.File;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.fail;
public class DefaultInputFileTest {
DefaultInputFile file = new DefaultInputFile("ABCDE", "src/Foo.php");
assertThat(file.toString()).isEqualTo("[moduleKey=ABCDE, relative=src/Foo.php, basedir=null]");
}
+
+ // Pointers (line, lineOffset) must be validated against the file's line
+ // table: line in [1, lines], offset in [0, line length].
+ @Test
+ public void checkValidPointer() {
+ DefaultInputFile file = new DefaultInputFile("ABCDE", "src/Foo.php");
+ file.setLines(2);
+ file.setOriginalLineOffsets(new int[] {0, 10});
+ file.setLastValidOffset(15);
+ assertThat(file.newPointer(1, 0).line()).isEqualTo(1);
+ assertThat(file.newPointer(1, 0).lineOffset()).isEqualTo(0);
+ // Don't fail
+ file.newPointer(1, 9);
+ file.newPointer(2, 0);
+ file.newPointer(2, 5);
+
+ try {
+ file.newPointer(0, 1);
+ fail();
+ } catch (Exception e) {
+ assertThat(e).hasMessage("0 is not a valid line for a file");
+ }
+ try {
+ file.newPointer(3, 1);
+ fail();
+ } catch (Exception e) {
+ assertThat(e).hasMessage("3 is not a valid line for pointer. File [moduleKey=ABCDE, relative=src/Foo.php, basedir=null] has 2 line(s)");
+ }
+ try {
+ file.newPointer(1, -1);
+ fail();
+ } catch (Exception e) {
+ assertThat(e).hasMessage("-1 is not a valid line offset for a file");
+ }
+ try {
+ file.newPointer(1, 10);
+ fail();
+ } catch (Exception e) {
+ assertThat(e).hasMessage("10 is not a valid line offset for pointer. File [moduleKey=ABCDE, relative=src/Foo.php, basedir=null] has 9 character(s) at line 1");
+ }
+ }
+
+ // A global character offset must be translated to (line, lineOffset)
+ // using the originalLineOffsets table and bounded by lastValidOffset.
+ @Test
+ public void checkValidPointerUsingGlobalOffset() {
+ DefaultInputFile file = new DefaultInputFile("ABCDE", "src/Foo.php");
+ file.setLines(2);
+ file.setOriginalLineOffsets(new int[] {0, 10});
+ file.setLastValidOffset(15);
+ assertThat(file.newPointer(0).line()).isEqualTo(1);
+ assertThat(file.newPointer(0).lineOffset()).isEqualTo(0);
+
+ assertThat(file.newPointer(9).line()).isEqualTo(1);
+ assertThat(file.newPointer(9).lineOffset()).isEqualTo(9);
+
+ assertThat(file.newPointer(10).line()).isEqualTo(2);
+ assertThat(file.newPointer(10).lineOffset()).isEqualTo(0);
+
+ assertThat(file.newPointer(15).line()).isEqualTo(2);
+ assertThat(file.newPointer(15).lineOffset()).isEqualTo(5);
+
+ try {
+ file.newPointer(-1);
+ fail();
+ } catch (Exception e) {
+ assertThat(e).hasMessage("-1 is not a valid offset for a file");
+ }
+
+ try {
+ file.newPointer(16);
+ fail();
+ } catch (Exception e) {
+ assertThat(e).hasMessage("16 is not a valid offset for file [moduleKey=ABCDE, relative=src/Foo.php, basedir=null]. Max offset is 15");
+ }
+ }
+
+ // A range requires start < end; equal pointers are rejected.
+ @Test
+ public void checkValidRange() {
+ DefaultInputFile file = new DefaultInputFile("ABCDE", "src/Foo.php");
+ file.setLines(2);
+ file.setOriginalLineOffsets(new int[] {0, 10});
+ file.setLastValidOffset(15);
+ assertThat(file.newRange(file.newPointer(1, 0), file.newPointer(2, 1)).start().line()).isEqualTo(1);
+ // Don't fail
+ file.newRange(file.newPointer(1, 0), file.newPointer(1, 1));
+ file.newRange(file.newPointer(1, 0), file.newPointer(1, 9));
+ file.newRange(file.newPointer(1, 0), file.newPointer(2, 0));
+ assertThat(file.newRange(file.newPointer(1, 0), file.newPointer(2, 5))).isEqualTo(file.newRange(0, 15));
+
+ try {
+ file.newRange(file.newPointer(1, 0), file.newPointer(1, 0));
+ fail();
+ } catch (Exception e) {
+ assertThat(e).hasMessage("Start pointer [line=1, lineOffset=0] should be before end pointer [line=1, lineOffset=0]");
+ }
+ }
+
+ // Overlap is exclusive at range boundaries: touching ranges do not overlap.
+ @Test
+ public void testRangeOverlap() {
+ DefaultInputFile file = new DefaultInputFile("ABCDE", "src/Foo.php");
+ file.setLines(2);
+ file.setOriginalLineOffsets(new int[] {0, 10});
+ file.setLastValidOffset(15);
+ // Don't fail
+ assertThat(file.newRange(file.newPointer(1, 0), file.newPointer(1, 1)).overlap(file.newRange(file.newPointer(1, 0), file.newPointer(1, 1)))).isTrue();
+ assertThat(file.newRange(file.newPointer(1, 0), file.newPointer(1, 1)).overlap(file.newRange(file.newPointer(1, 0), file.newPointer(1, 2)))).isTrue();
+ assertThat(file.newRange(file.newPointer(1, 0), file.newPointer(1, 1)).overlap(file.newRange(file.newPointer(1, 1), file.newPointer(1, 2)))).isFalse();
+ assertThat(file.newRange(file.newPointer(1, 2), file.newPointer(1, 3)).overlap(file.newRange(file.newPointer(1, 0), file.newPointer(1, 2)))).isFalse();
+ }
}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.api.batch.fs.internal;
+
+import com.google.common.base.Charsets;
+import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.io.FileUtils;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.rules.TemporaryFolder;
+import org.sonar.api.batch.fs.internal.FileMetadata.LineHashConsumer;
+import org.sonar.api.utils.log.LogTester;
+import org.sonar.api.utils.log.LoggerLevel;
+
+import javax.annotation.Nullable;
+
+import java.io.File;
+import java.nio.charset.Charset;
+
+import static org.apache.commons.codec.digest.DigestUtils.md5Hex;
+import static org.assertj.core.api.Assertions.assertThat;
+
+/**
+ * Covers FileMetadata's single-pass metadata computation: line counting for
+ * every EOL style, BOM handling, line-ending-agnostic hashing, line offsets,
+ * whitespace-insensitive per-line hashes and error reporting.
+ */
+public class FileMetadataTest {
+
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
+ @Rule
+ public TemporaryFolder temp = new TemporaryFolder();
+
+ @Rule
+ public LogTester logTester = new LogTester();
+
+ @Test
+ public void empty_file() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.touch(tempFile);
+
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(1);
+ assertThat(metadata.nonBlankLines).isEqualTo(0);
+ assertThat(metadata.hash).isNotEmpty();
+ assertThat(metadata.originalLineOffsets).containsOnly(0);
+ assertThat(metadata.lastValidOffset).isEqualTo(0);
+ assertThat(metadata.empty).isTrue();
+ }
+
+ @Test
+ public void windows_without_latest_eol() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "foo\r\nbar\r\nbaz", Charsets.UTF_8, true);
+
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(3);
+ assertThat(metadata.nonBlankLines).isEqualTo(3);
+ // Hash normalizes "\r\n" to "\n".
+ assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10);
+ assertThat(metadata.lastValidOffset).isEqualTo(13);
+ assertThat(metadata.empty).isFalse();
+ }
+
+ @Test
+ public void read_with_wrong_encoding() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "marker´s\n", Charset.forName("cp1252"));
+
+ // Unmappable bytes are replaced by U+FFFD instead of failing the read.
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(2);
+ assertThat(metadata.hash).isEqualTo(md5Hex("marker\ufffds\n"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 9);
+ }
+
+ @Test
+ public void non_ascii_utf_8() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "föo\r\nbàr\r\n\u1D11Ebaßz\r\n", Charsets.UTF_8, true);
+
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(4);
+ assertThat(metadata.nonBlankLines).isEqualTo(3);
+ assertThat(metadata.hash).isEqualTo(md5Hex("föo\nbàr\n\u1D11Ebaßz\n"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10, 18);
+ }
+
+ @Test
+ public void non_ascii_utf_16() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "föo\r\nbàr\r\n\u1D11Ebaßz\r\n", Charsets.UTF_16, true);
+
+ // Offsets are character-based, so they match the UTF-8 case exactly.
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_16);
+ assertThat(metadata.lines).isEqualTo(4);
+ assertThat(metadata.nonBlankLines).isEqualTo(3);
+ assertThat(metadata.hash).isEqualTo(md5Hex("föo\nbàr\n\u1D11Ebaßz\n"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 5, 10, 18);
+ }
+
+ @Test
+ public void unix_without_latest_eol() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "foo\nbar\nbaz", Charsets.UTF_8, true);
+
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(3);
+ assertThat(metadata.nonBlankLines).isEqualTo(3);
+ assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 8);
+ assertThat(metadata.lastValidOffset).isEqualTo(11);
+ }
+
+ @Test
+ public void unix_with_latest_eol() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "foo\nbar\nbaz\n", Charsets.UTF_8, true);
+
+ // A trailing newline produces one extra (blank) line.
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(4);
+ assertThat(metadata.nonBlankLines).isEqualTo(3);
+ assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz\n"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 8, 12);
+ assertThat(metadata.lastValidOffset).isEqualTo(12);
+ }
+
+ @Test
+ public void mix_of_newlines_with_latest_eol() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "foo\nbar\r\nbaz\n", Charsets.UTF_8, true);
+
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(4);
+ assertThat(metadata.nonBlankLines).isEqualTo(3);
+ assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz\n"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9, 13);
+ }
+
+ @Test
+ public void several_new_lines() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "foo\n\n\nbar", Charsets.UTF_8, true);
+
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(4);
+ assertThat(metadata.nonBlankLines).isEqualTo(2);
+ assertThat(metadata.hash).isEqualTo(md5Hex("foo\n\n\nbar"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 5, 6);
+ }
+
+ @Test
+ public void mix_of_newlines_without_latest_eol() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "foo\nbar\r\nbaz", Charsets.UTF_8, true);
+
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(3);
+ assertThat(metadata.nonBlankLines).isEqualTo(3);
+ assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9);
+ }
+
+ @Test
+ public void start_with_newline() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "\nfoo\nbar\r\nbaz", Charsets.UTF_8, true);
+
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(4);
+ assertThat(metadata.nonBlankLines).isEqualTo(3);
+ assertThat(metadata.hash).isEqualTo(md5Hex("\nfoo\nbar\nbaz"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 1, 5, 10);
+ }
+
+ @Test
+ public void start_with_bom() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, "\uFEFFfoo\nbar\r\nbaz", Charsets.UTF_8, true);
+
+ // The BOM is stripped: offsets and hash behave as if it were absent.
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(tempFile, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(3);
+ assertThat(metadata.nonBlankLines).isEqualTo(3);
+ assertThat(metadata.hash).isEqualTo(md5Hex("foo\nbar\nbaz"));
+ assertThat(metadata.originalLineOffsets).containsOnly(0, 4, 9);
+ }
+
+ @Test
+ public void ignore_whitespace_when_computing_line_hashes() throws Exception {
+ File tempFile = temp.newFile();
+ FileUtils.write(tempFile, " foo\nb ar\r\nbaz \t", Charsets.UTF_8, true);
+
+ DefaultInputFile f = new DefaultInputFile("foo", tempFile.getName());
+ f.setModuleBaseDir(tempFile.getParentFile().toPath());
+ f.setCharset(Charsets.UTF_8);
+ FileMetadata.computeLineHashesForIssueTracking(f, new LineHashConsumer() {
+
+ @Override
+ public void consume(int lineIdx, @Nullable byte[] hash) {
+ switch (lineIdx) {
+ case 1:
+ assertThat(Hex.encodeHexString(hash)).isEqualTo(md5Hex("foo"));
+ break;
+ case 2:
+ assertThat(Hex.encodeHexString(hash)).isEqualTo(md5Hex("bar"));
+ break;
+ case 3:
+ assertThat(Hex.encodeHexString(hash)).isEqualTo(md5Hex("baz"));
+ break;
+ }
+ }
+ });
+ }
+
+ @Test
+ public void should_throw_if_file_does_not_exist() throws Exception {
+ File tempFolder = temp.newFolder();
+ File file = new File(tempFolder, "doesNotExist.txt");
+
+ thrown.expect(IllegalStateException.class);
+ thrown.expectMessage("Fail to read file '" + file.getAbsolutePath() + "' with encoding 'UTF-8'");
+
+ new FileMetadata().readMetadata(file, Charsets.UTF_8);
+ }
+
+ @Test
+ public void line_feed_is_included_into_hash() throws Exception {
+ File file1 = temp.newFile();
+ FileUtils.write(file1, "foo\nbar\n", Charsets.UTF_8, true);
+
+ // same as file1, except an additional carriage return
+ File file1a = temp.newFile();
+ FileUtils.write(file1a, "foo\r\nbar\n", Charsets.UTF_8, true);
+
+ File file2 = temp.newFile();
+ FileUtils.write(file2, "foo\nbar", Charsets.UTF_8, true);
+
+ String hash1 = new FileMetadata().readMetadata(file1, Charsets.UTF_8).hash;
+ String hash1a = new FileMetadata().readMetadata(file1a, Charsets.UTF_8).hash;
+ String hash2 = new FileMetadata().readMetadata(file2, Charsets.UTF_8).hash;
+ assertThat(hash1).isEqualTo(hash1a);
+ assertThat(hash1).isNotEqualTo(hash2);
+ }
+
+ @Test
+ public void binary_file_with_unmappable_character() throws Exception {
+ File woff = new File(this.getClass().getResource("glyphicons-halflings-regular.woff").toURI());
+
+ FileMetadata.Metadata metadata = new FileMetadata().readMetadata(woff, Charsets.UTF_8);
+ assertThat(metadata.lines).isEqualTo(135);
+ assertThat(metadata.nonBlankLines).isEqualTo(134);
+ assertThat(metadata.hash).isNotEmpty();
+ assertThat(metadata.empty).isFalse();
+
+ assertThat(logTester.logs(LoggerLevel.WARN).get(0)).contains("Invalid character encountered in file");
+ assertThat(logTester.logs(LoggerLevel.WARN).get(0)).contains(
+ "glyphicons-halflings-regular.woff at line 1 for encoding UTF-8. Please fix file content or configure the encoding to be used using property 'sonar.sourceEncoding'.");
+ }
+
+}
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
+import org.sonar.api.batch.fs.TextRange;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.fs.internal.DefaultTextPointer;
+import org.sonar.api.batch.fs.internal.DefaultTextRange;
import org.sonar.api.batch.sensor.internal.SensorStorage;
import java.util.Collection;
public class DefaultHighlightingTest {
+ private static final DefaultInputFile INPUT_FILE = new DefaultInputFile("foo", "src/Foo.java")
+ .setLines(2)
+ .setOriginalLineOffsets(new int[] {0, 50})
+ .setLastValidOffset(100);
+
private Collection<SyntaxHighlightingRule> highlightingRules;
@Rule
public void setUpSampleRules() {
DefaultHighlighting highlightingDataBuilder = new DefaultHighlighting()
- .onFile(new DefaultInputFile("foo", "src/Foo.java").setLastValidOffset(100))
+ .onFile(INPUT_FILE)
.highlight(0, 10, COMMENT)
.highlight(10, 12, KEYWORD)
.highlight(24, 38, KEYWORD)
assertThat(highlightingRules).hasSize(6);
}
+ private static TextRange rangeOf(int startLine, int startOffset, int endLine, int endOffset) {
+ return new DefaultTextRange(new DefaultTextPointer(startLine, startOffset), new DefaultTextPointer(endLine, endOffset));
+ }
+
@Test
public void should_order_by_start_then_end_offset() throws Exception {
- assertThat(highlightingRules).extracting("startPosition").containsOnly(0, 10, 12, 24, 24, 42);
- assertThat(highlightingRules).extracting("endPosition").containsOnly(10, 12, 20, 38, 65, 50);
+ assertThat(highlightingRules).extracting("range", TextRange.class).containsExactly(rangeOf(1, 0, 1, 10),
+ rangeOf(1, 10, 1, 12),
+ rangeOf(1, 12, 1, 20),
+ rangeOf(1, 24, 2, 15),
+ rangeOf(1, 24, 1, 38),
+ rangeOf(1, 42, 2, 0));
assertThat(highlightingRules).extracting("textType").containsOnly(COMMENT, KEYWORD, COMMENT, KEYWORD, CPP_DOC, KEYWORD);
}
@Test
public void should_suport_overlapping() throws Exception {
new DefaultHighlighting(mock(SensorStorage.class))
- .onFile(new DefaultInputFile("foo", "src/Foo.java").setLastValidOffset(100))
+ .onFile(INPUT_FILE)
.highlight(0, 15, KEYWORD)
.highlight(8, 12, CPP_DOC)
.save();
@Test
public void should_prevent_boudaries_overlapping() throws Exception {
throwable.expect(IllegalStateException.class);
- throwable.expectMessage("Cannot register highlighting rule for characters from 8 to 15 as it overlaps at least one existing rule");
+ throwable
+ .expectMessage("Cannot register highlighting rule for characters at Range[from [line=1, lineOffset=8] to [line=1, lineOffset=15]] as it overlaps at least one existing rule");
new DefaultHighlighting(mock(SensorStorage.class))
- .onFile(new DefaultInputFile("foo", "src/Foo.java").setLastValidOffset(100))
+ .onFile(INPUT_FILE)
.highlight(0, 10, KEYWORD)
.highlight(8, 15, KEYWORD)
.save();
}
- @Test
- public void should_prevent_invalid_offset() throws Exception {
- throwable.expect(IllegalArgumentException.class);
- throwable.expectMessage("Invalid endOffset 15. Should be >= 0 and <= 10 for file [moduleKey=foo, relative=src/Foo.java, basedir=null]");
-
- new DefaultHighlighting(mock(SensorStorage.class))
- .onFile(new DefaultInputFile("foo", "src/Foo.java").setLastValidOffset(10))
- .highlight(0, 10, KEYWORD)
- .highlight(8, 15, KEYWORD)
- .save();
- }
-
- @Test
- public void positive_offset() throws Exception {
- throwable.expect(IllegalArgumentException.class);
- throwable.expectMessage("Invalid startOffset -8. Should be >= 0 and <= 10 for file [moduleKey=foo, relative=src/Foo.java, basedir=null]");
-
- new DefaultHighlighting(mock(SensorStorage.class))
- .onFile(new DefaultInputFile("foo", "src/Foo.java").setLastValidOffset(10))
- .highlight(0, 10, KEYWORD)
- .highlight(-8, 15, KEYWORD)
- .save();
- }
-
- @Test
- public void should_prevent_invalid_offset_order() throws Exception {
- throwable.expect(IllegalArgumentException.class);
- throwable.expectMessage("startOffset (18) should be < endOffset (15) for file [moduleKey=foo, relative=src/Foo.java, basedir=null].");
-
- new DefaultHighlighting(mock(SensorStorage.class))
- .onFile(new DefaultInputFile("foo", "src/Foo.java").setLastValidOffset(20))
- .highlight(0, 10, KEYWORD)
- .highlight(18, 15, KEYWORD)
- .save();
- }
-
}
import org.sonar.api.batch.fs.internal.DefaultFileSystem;
import org.sonar.api.batch.fs.internal.DefaultInputDir;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
+import org.sonar.api.batch.fs.internal.FileMetadata;
import org.sonar.api.batch.rule.ActiveRules;
import org.sonar.api.batch.rule.internal.ActiveRulesBuilder;
import org.sonar.api.batch.sensor.highlighting.TypeOfText;
import org.sonar.api.rule.RuleKey;
import java.io.File;
+import java.io.StringReader;
import static org.assertj.core.api.Assertions.assertThat;
@Test
public void testHighlighting() {
- assertThat(tester.highlightingTypeFor("foo:src/Foo.java", 3)).isEmpty();
+ assertThat(tester.highlightingTypeAt("foo:src/Foo.java", 1, 3)).isEmpty();
tester.newHighlighting()
- .onFile(new DefaultInputFile("foo", "src/Foo.java").setLastValidOffset(100))
+ .onFile(new DefaultInputFile("foo", "src/Foo.java").initMetadata(new FileMetadata().readMetadata(new StringReader("annot dsf fds foo bar"))))
.highlight(0, 4, TypeOfText.ANNOTATION)
.highlight(8, 10, TypeOfText.CONSTANT)
.highlight(9, 10, TypeOfText.COMMENT)
.save();
- assertThat(tester.highlightingTypeFor("foo:src/Foo.java", 3)).containsExactly(TypeOfText.ANNOTATION);
- assertThat(tester.highlightingTypeFor("foo:src/Foo.java", 9)).containsExactly(TypeOfText.CONSTANT, TypeOfText.COMMENT);
+ assertThat(tester.highlightingTypeAt("foo:src/Foo.java", 1, 3)).containsExactly(TypeOfText.ANNOTATION);
+ assertThat(tester.highlightingTypeAt("foo:src/Foo.java", 1, 9)).containsExactly(TypeOfText.CONSTANT, TypeOfText.COMMENT);
}
@Test