.setScmAuthor("a_guy")
.setSource("this is not java code " + i)
.setUtLineHits(i)
- .setUtConditions(i+1)
+ .setUtConditions(i + 1)
.setUtCoveredConditions(i)
.setItLineHits(i)
- .setItConditions(i+1)
+ .setItConditions(i + 1)
.setItCoveredConditions(i)
.setOverallLineHits(i)
- .setOverallConditions(i+1)
+ .setOverallConditions(i + 1)
.setOverallCoveredConditions(i)
- .setScmDate(150000000L)
+ .setScmDate(1_500_000_000_000L)
.setHighlighting("2,9,k;9,18,k")
- .addAllDuplications(Arrays.asList(19,33,141))
+ .addAllDuplication(Arrays.asList(19, 33, 141))
.build();
}
return FileSourceDto.encodeData(dataBuilder.build());
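For orientation, the hunk above ends by protobuf-encoding the generated lines the same way they are stored in FILE_SOURCES.BINARY_DATA. A minimal round-trip sketch, using only builder calls that appear in this patch; encodeData's return type is not visible in the diff, so byte[] is an assumption, as is the ByteArrayInputStream wrapper (read() below feeds decodeData an InputStream):

```java
import org.sonar.core.source.db.FileSourceDto;
import org.sonar.server.source.db.FileSourceDb;

import java.io.ByteArrayInputStream;
import java.util.Arrays;

public class FileSourceDataRoundTrip {
  public static void main(String[] args) {
    FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
    dataBuilder.addLinesBuilder()
      .setLine(1)
      .setScmAuthor("a_guy")
      .setSource("this is not java code 1")
      .setUtLineHits(1)
      .setUtConditions(2)
      .setUtCoveredConditions(1)
      .setScmDate(1_500_000_000_000L)                // SCM dates are epoch milliseconds, hence 13 digits
      .addAllDuplication(Arrays.asList(19, 33, 141)) // repeated field "duplication" -> addAllDuplication
      .build();
    // assumed: encodeData returns the serialized byte[]; decodeData(InputStream) matches read() below
    byte[] bytes = FileSourceDto.encodeData(dataBuilder.build());
    FileSourceDb.Data decoded = FileSourceDto.decodeData(new ByteArrayInputStream(bytes));
    System.out.println(decoded.getLines(0).getUtConditions()); // 2
  }
}
```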
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonar.server.es.EsClient;
- import org.sonar.server.source.index.SourceLineDoc;
- import org.sonar.server.source.index.SourceLineIndex;
- import org.sonar.server.source.index.SourceLineIndexDefinition;
- import org.sonar.server.source.index.SourceLineIndexer;
- import org.sonar.server.source.index.SourceLineResultSetIterator;
+ import org.sonar.server.source.db.FileSourceDb;
+ import org.sonar.server.source.index.*;
import org.sonar.server.tester.ServerTester;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Timer;
+import java.util.*;
import java.util.concurrent.atomic.AtomicLong;
import static org.assertj.core.api.Assertions.assertThat;
}
@Override
- public SourceLineResultSetIterator.SourceFile next() {
+ public SourceFileResultSetIterator.Row next() {
+ String projectUuid = "P" + currentProject;
String fileUuid = "FILE" + count.get();
- SourceLineResultSetIterator.SourceFile file = new SourceLineResultSetIterator.SourceFile(fileUuid, System.currentTimeMillis());
+ dataBuilder.clear();
for (int indexLine = 1; indexLine <= nbLinesPerFile; indexLine++) {
- SourceLineDoc line = new SourceLineDoc(Maps.<String, Object>newHashMap());
- line.setFileUuid(fileUuid);
- line.setLine(indexLine);
- line.setHighlighting(StringUtils.repeat("HIGHLIGHTING", 5));
- line.setItConditions(4);
- line.setItCoveredConditions(2);
- line.setItLineHits(2);
- line.setOverallConditions(8);
- line.setOverallCoveredConditions(2);
- line.setOverallLineHits(2);
- line.setUtConditions(8);
- line.setUtCoveredConditions(2);
- line.setUtLineHits(2);
- line.setProjectUuid("PROJECT" + currentProject);
- line.setScmAuthor("a_guy");
- line.setScmRevision("ABCDEFGHIJKL");
- line.setSource(StringUtils.repeat("SOURCE", 10));
- file.addLine(line);
+ dataBuilder.addLinesBuilder()
+ .setLine(indexLine)
+ .setScmRevision("REVISION_" + indexLine)
+ .setScmAuthor("a_guy")
+ .setSource("this is not java code " + indexLine)
+ .setUtLineHits(2)
+ .setUtConditions(8)
+ .setUtCoveredConditions(2)
+ .setItLineHits(2)
+ .setItConditions(8)
+ .setItCoveredConditions(2)
+ .setOverallLineHits(2)
+ .setOverallConditions(8)
+ .setOverallCoveredConditions(2)
+ .setScmDate(1_500_000_000_000L)
+ .setHighlighting("2,9,k;9,18,k")
+ .addAllDuplication(Arrays.asList(19, 33, 141))
+ .build();
}
count.incrementAndGet();
if (count.get() % 500 == 0) {
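The generator above only fills dataBuilder; what the truncated tail of next() must ultimately produce is a SourceFileResultSetIterator.Row, and the toRow() factory introduced later in this patch is the natural way to build one. A hedged sketch of that conversion, reusing the projectUuid and fileUuid variables from the hunk (this is an illustration, not the elided code):

```java
// Sketch: turn the generated per-file protobuf data into an indexable Row.
// toRow(...) is defined in the new SourceFileResultSetIterator below; the test hunk
// near the end of this diff calls it the same way.
SourceFileResultSetIterator.Row row = SourceFileResultSetIterator.toRow(
  projectUuid,          // e.g. "P1"
  fileUuid,             // e.g. "FILE42"
  new Date(),           // updatedAt
  dataBuilder.build()); // the FileSourceDb.Data assembled line by line above
// row.getLineUpdateRequests() now holds one Elasticsearch UpdateRequest per generated line
```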
package org.sonar.server.activity.index;
import org.apache.commons.lang.StringUtils;
+ import org.elasticsearch.action.update.UpdateRequest;
import org.sonar.api.utils.KeyValueFormat;
+ import org.sonar.api.utils.text.JsonWriter;
import org.sonar.server.db.DbClient;
import org.sonar.server.db.ResultSetIterator;
+ import org.sonar.server.es.EsUtils;
+ import org.sonar.server.util.DateCollector;
- import java.sql.Connection;
- import java.sql.PreparedStatement;
- import java.sql.ResultSet;
- import java.sql.SQLException;
- import java.sql.Timestamp;
- import java.util.HashMap;
+ import java.io.ByteArrayOutputStream;
+ import java.io.OutputStreamWriter;
+ import java.sql.*;
+ import java.util.Date;
/**
* Scrolls over table ACTIVITIES and reads documents to populate
FeedEventsLongDates.class,
AddNewCharacteristics.class,
RemovePermissionsOnModulesMigration.class,
- DropIssuesColumns.class
+ AddIssuesColumns.class,
+ DropIssuesColumns.class,
+
+ // 5.2
+ FeedProjectLinksComponentUuid.class,
+ FeedEventsComponentUuid.class
);
}
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.sonar.server.search.BaseDoc;
- import java.util.ArrayList;
- import java.util.Date;
- import java.util.LinkedHashMap;
- import java.util.List;
- import java.util.Map;
+ import javax.annotation.CheckForNull;
+ import javax.annotation.Nullable;
+
+ import java.util.*;
public class EsUtils {
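EsUtils gains the javax.annotation imports because formatDateTime, as called from the new iterator below (e.g. for FIELD_SCM_DATE), must tolerate a null Date and return null in that case. A sketch of the contract those call sites assume; the delegation to org.sonar.api.utils.DateUtils is a guess, not shown in this diff:

```java
// Assumed shape of the helper, inferred from its call sites in SourceFileResultSetIterator:
@CheckForNull
public static String formatDateTime(@Nullable Date date) {
  // serialize to the ISO-8601 form the Elasticsearch date fields expect; null stays null
  return date == null ? null : org.sonar.api.utils.DateUtils.formatDateTime(date);
}
```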
package org.sonar.server.es.request;
-import com.google.common.collect.HashMultiset;
+import com.google.common.collect.LinkedHashMultiset;
import com.google.common.collect.Multiset;
-import com.google.common.collect.Multiset.Entry;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ListenableActionFuture;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.sonar.api.config.Settings;
import org.sonar.process.LoopbackAddress;
import org.sonar.process.ProcessConstants;
-import org.sonar.server.es.request.ProxyBulkRequestBuilder;
-import org.sonar.server.es.request.ProxyCountRequestBuilder;
-import org.sonar.server.es.request.ProxyCreateIndexRequestBuilder;
-import org.sonar.server.es.request.ProxyDeleteByQueryRequestBuilder;
-import org.sonar.server.es.request.ProxyDeleteRequestBuilder;
-import org.sonar.server.es.request.ProxyGetRequestBuilder;
-import org.sonar.server.es.request.ProxyIndicesExistsRequestBuilder;
-import org.sonar.server.es.request.ProxyMultiGetRequestBuilder;
-import org.sonar.server.es.request.ProxyPutMappingRequestBuilder;
-import org.sonar.server.es.request.ProxyRefreshRequestBuilder;
-import org.sonar.server.es.request.ProxySearchRequestBuilder;
-import org.sonar.server.es.request.ProxySearchScrollRequestBuilder;
+import org.sonar.server.es.request.*;
/**
* ElasticSearch Node used to connect to index.
--- /dev/null
+ /*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+ package org.sonar.server.source.index;
+
+ import org.apache.commons.lang.StringUtils;
+ import org.elasticsearch.action.update.UpdateRequest;
+ import org.sonar.api.utils.text.JsonWriter;
+ import org.sonar.core.source.db.FileSourceDto;
+ import org.sonar.server.db.DbClient;
+ import org.sonar.server.db.ResultSetIterator;
+ import org.sonar.server.es.EsUtils;
+ import org.sonar.server.source.db.FileSourceDb;
+
+ import java.io.ByteArrayOutputStream;
+ import java.io.OutputStreamWriter;
+ import java.sql.Connection;
+ import java.sql.PreparedStatement;
+ import java.sql.ResultSet;
+ import java.sql.SQLException;
+ import java.util.ArrayList;
+ import java.util.Date;
+ import java.util.List;
+
+ /**
+ * Scroll over table FILE_SOURCES and directly parse data required to
+ * populate the index sourcelines
+ */
+ public class SourceFileResultSetIterator extends ResultSetIterator<SourceFileResultSetIterator.Row> {
+
+ public static class Row {
+ private final String projectUuid;
+ private final String fileUuid;
+ private final long updatedAt;
+ private final List<UpdateRequest> lineUpdateRequests = new ArrayList<>();
+
+ public Row(String projectUuid, String fileUuid, long updatedAt) {
+ this.projectUuid = projectUuid;
+ this.fileUuid = fileUuid;
+ this.updatedAt = updatedAt;
+ }
+
+ public String getProjectUuid() {
+ return projectUuid;
+ }
+
+ public String getFileUuid() {
+ return fileUuid;
+ }
+
+ public long getUpdatedAt() {
+ return updatedAt;
+ }
+
+ public List<UpdateRequest> getLineUpdateRequests() {
+ return lineUpdateRequests;
+ }
+ }
+
+ private static final String[] FIELDS = {
+ "project_uuid",
+ "file_uuid",
+ "updated_at",
+ "binary_data"
+ };
+ private static final String SQL_ALL = "select " + StringUtils.join(FIELDS, ",") + " from file_sources";
+ private static final String SQL_AFTER_DATE = SQL_ALL + " where updated_at>?";
+
+ public static SourceFileResultSetIterator create(DbClient dbClient, Connection connection, long afterDate) {
+ try {
+ String sql = afterDate > 0L ? SQL_AFTER_DATE : SQL_ALL;
+ // rows are big, so they are scrolled one at a time (only one row is kept in memory)
+ PreparedStatement stmt = dbClient.newScrollingSingleRowSelectStatement(connection, sql);
+ if (afterDate > 0L) {
+ stmt.setLong(1, afterDate);
+ }
+ return new SourceFileResultSetIterator(stmt);
+ } catch (SQLException e) {
+ throw new IllegalStateException("Fail to prepare SQL request to select all file sources", e);
+ }
+ }
+
+ private SourceFileResultSetIterator(PreparedStatement stmt) throws SQLException {
+ super(stmt);
+ }
+
+ @Override
+ protected Row read(ResultSet rs) throws SQLException {
+ String projectUuid = rs.getString(1);
+ String fileUuid = rs.getString(2);
+ Date updatedAt = new Date(rs.getLong(3));
+ FileSourceDb.Data data = FileSourceDto.decodeData(rs.getBinaryStream(4));
+ return toRow(projectUuid, fileUuid, updatedAt, data);
+ }
+
+ /**
+ * Convert protobuf message to data required for Elasticsearch indexing
+ */
+ public static Row toRow(String projectUuid, String fileUuid, Date updatedAt, FileSourceDb.Data data) {
+ Row result = new Row(projectUuid, fileUuid, updatedAt.getTime());
+ for (FileSourceDb.Line line : data.getLinesList()) {
+ ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+
+ // all the fields must be present, even if the value is null
+ JsonWriter writer = JsonWriter.of(new OutputStreamWriter(bytes)).setSerializeNulls(true);
+ writer.beginObject();
+ writer.prop(SourceLineIndexDefinition.FIELD_PROJECT_UUID, projectUuid);
+ writer.prop(SourceLineIndexDefinition.FIELD_FILE_UUID, fileUuid);
+ writer.prop(SourceLineIndexDefinition.FIELD_LINE, line.getLine());
+ writer.prop(SourceLineIndexDefinition.FIELD_UPDATED_AT, EsUtils.formatDateTime(updatedAt));
+ writer.prop(SourceLineIndexDefinition.FIELD_SCM_REVISION, line.getScmRevision());
+ writer.prop(SourceLineIndexDefinition.FIELD_SCM_AUTHOR, line.getScmAuthor());
+ writer.prop(SourceLineIndexDefinition.FIELD_SCM_DATE, EsUtils.formatDateTime(line.hasScmDate() ? new Date(line.getScmDate()) : null));
+
+ // unit tests
+ if (line.hasUtLineHits()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_UT_LINE_HITS, line.getUtLineHits());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_UT_LINE_HITS).valueObject(null);
+ }
+ if (line.hasUtConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_UT_CONDITIONS, line.getUtConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_UT_CONDITIONS).valueObject(null);
+ }
+ if (line.hasUtCoveredConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_UT_COVERED_CONDITIONS, line.getUtCoveredConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_UT_COVERED_CONDITIONS).valueObject(null);
+ }
+
+ // IT
+ if (line.hasItLineHits()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_IT_LINE_HITS, line.getItLineHits());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_IT_LINE_HITS).valueObject(null);
+ }
+ if (line.hasItConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_IT_CONDITIONS, line.getItConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_IT_CONDITIONS).valueObject(null);
+ }
+ if (line.hasItCoveredConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_IT_COVERED_CONDITIONS, line.getItCoveredConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_IT_COVERED_CONDITIONS).valueObject(null);
+ }
+
+ // Overall coverage
+ if (line.hasOverallLineHits()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_OVERALL_LINE_HITS, line.getOverallLineHits());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_OVERALL_LINE_HITS).valueObject(null);
+ }
+ if (line.hasOverallConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_OVERALL_CONDITIONS, line.getOverallConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_OVERALL_CONDITIONS).valueObject(null);
+ }
+ if (line.hasOverallCoveredConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_OVERALL_COVERED_CONDITIONS, line.getOverallCoveredConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_OVERALL_COVERED_CONDITIONS).valueObject(null);
+ }
+
+ if (line.hasHighlighting()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_HIGHLIGHTING, line.getHighlighting());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_HIGHLIGHTING).valueObject(null);
+ }
+ if (line.hasSymbols()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_SYMBOLS, line.getSymbols());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_SYMBOLS).valueObject(null);
+ }
+ writer.name(SourceLineIndexDefinition.FIELD_DUPLICATIONS).valueObject(line.getDuplicationList());
+ writer.prop(SourceLineIndexDefinition.FIELD_SOURCE, line.hasSource() ? line.getSource() : null);
+ writer.endObject().close();
+
+ // This is an optimization to reduce memory consumption and multiple conversions from Map to JSON.
+ // UpdateRequest#doc() and #upsert() take the same parameter values, so:
+ // - passing the same Map would execute two JSON serializations
+ // - Map is a useless temporary structure: read JDBC result set -> convert to map -> convert to JSON. Generating
+ // JSON directly from the result set is more efficient.
+ byte[] jsonDoc = bytes.toByteArray();
+ UpdateRequest updateRequest = new UpdateRequest(SourceLineIndexDefinition.INDEX, SourceLineIndexDefinition.TYPE, SourceLineIndexDefinition.docKey(fileUuid, line.getLine()))
+ .routing(projectUuid)
+ .doc(jsonDoc)
+ .upsert(jsonDoc);
+ result.lineUpdateRequests.add(updateRequest);
+ }
+ return result;
+ }
+ }
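To make the intended use of the new class concrete, here is a hedged consumer sketch. Only create(), the Row getters and the bulk request types are grounded in this diff; the wiring of dbClient, connection and esClient, and close() on the iterator, are assumptions:

```java
// Illustrative driver: scroll FILE_SOURCES and bulk-send per-line update requests.
SourceFileResultSetIterator rowIt = SourceFileResultSetIterator.create(dbClient, connection, 0L /* 0 = full scan */);
try {
  BulkRequestBuilder bulk = esClient.prepareBulk();
  while (rowIt.hasNext()) {
    SourceFileResultSetIterator.Row row = rowIt.next();
    for (UpdateRequest request : row.getLineUpdateRequests()) {
      bulk.add(request); // doc+upsert: updates the line if already indexed, inserts it otherwise
    }
  }
  bulk.get(); // execute the batch; real code would flush in chunks
} finally {
  rowIt.close(); // assumed: ResultSetIterator releases the statement and result set
}
```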
import javax.annotation.CheckForNull;
import javax.annotation.Nullable;
+
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
assertThat(countDocuments()).isEqualTo(3);
}
+ /**
+ * File F1 in project P1 has one line -> to be updated
+ * File F2 in project P1 has one line -> untouched
+ */
@Test
public void update_already_indexed_lines() throws Exception {
- prepareIndex()
- .setSource(IOUtils.toString(new FileInputStream(TestUtils.getResource(this.getClass(), "line2.json"))))
- .get();
- prepareIndex()
- .setSource(IOUtils.toString(new FileInputStream(TestUtils.getResource(this.getClass(), "line2_other_file.json"))))
- .setRefresh(true)
- .get();
+ indexLine("P1", "F1", 1);
+ indexLine("P1", "F2", 1);
List<Integer> duplications = ImmutableList.of(1, 2, 3);
- SourceLineDoc line1 = new SourceLineDoc(ImmutableMap.<String, Object>builder()
- .put(FIELD_PROJECT_UUID, "abcd")
- .put(FIELD_FILE_UUID, "efgh")
- .put(FIELD_LINE, 1)
- .put(FIELD_SCM_REVISION, "cafebabe")
- .put(FIELD_SCM_DATE, DateUtils.parseDateTime("2014-01-01T12:34:56+0100"))
- .put(FIELD_SCM_AUTHOR, "polop")
- .put(FIELD_SOURCE, "package org.sonar.server.source;")
- .put(FIELD_DUPLICATIONS, duplications)
- .put(FIELD_UPDATED_AT, new Date())
- .build());
- SourceLineResultSetIterator.SourceFile file = new SourceLineResultSetIterator.SourceFile("efgh", System.currentTimeMillis());
- file.addLine(line1);
- indexer.index(Iterators.singletonIterator(file));
+ FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+ dataBuilder.addLinesBuilder()
+ .setLine(1)
+ .setScmRevision("new_revision")
+ .setScmAuthor("new_author")
+ .setSource("new source")
+ .addAllDuplication(duplications)
+ .build();
+ SourceFileResultSetIterator.Row dbRow = SourceFileResultSetIterator.toRow("P1", "F1", new Date(), dataBuilder.build());
+ indexer.index(Iterators.singletonIterator(dbRow));
assertThat(countDocuments()).isEqualTo(2L);
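The count stays at 2 because each line document is written with the same JSON passed to both doc() and upsert() (see toRow above): re-indexing an existing (file, line) key updates it in place instead of appending. A toy version of that idempotent write, using the index/type/key helpers exactly as toRow does; the payload here is fabricated:

```java
// Same bytes serve as the partial update (doc) and the insert body (upsert).
byte[] jsonDoc = "{\"line\":1,\"source\":\"new source\"}".getBytes(StandardCharsets.UTF_8);
UpdateRequest request = new UpdateRequest(
  SourceLineIndexDefinition.INDEX,
  SourceLineIndexDefinition.TYPE,
  SourceLineIndexDefinition.docKey("F1", 1)) // one ES document per (file uuid, line number)
  .routing("P1") // route on project uuid so all of a project's lines land on one shard
  .doc(jsonDoc)
  .upsert(jsonDoc);
```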
import org.sonar.api.measures.Metric;
import org.sonar.api.rules.RulePriority;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import javax.persistence.*;
+
import java.io.UnsupportedEncodingException;
-import java.util.Date;
-
-import static org.sonar.api.utils.DateUtils.dateToLong;
-import static org.sonar.api.utils.DateUtils.longToDate;
/**
* This class is the Hibernate model to store a measure in the DB
import org.sonar.api.utils.DateUtils;
import javax.annotation.Nullable;
+
import java.io.Writer;
import java.util.Date;
import java.util.Map;