public void start() {
LoggerFactory.getLogger(SearchServer.class).info("Starting Elasticsearch[{}] on port {}", settings.clusterName(), settings.tcpPort());
- node = new InternalNode(settings.build(), true);
+ node = new InternalNode(settings.build(), false);
node.start();
}
private void configureStorage(ImmutableSettings.Builder builder) {
builder
.put("index.number_of_shards", "1")
- .put("index.refresh_interval", "30s")
- .put("indices.store.throttle.type", "none");
+ .put("index.refresh_interval", "30s");
}
private void configureCluster(ImmutableSettings.Builder builder) {
import org.sonar.core.source.db.FileSourceDto;
import org.sonar.server.db.DbClient;
import org.sonar.server.source.db.FileSourceDb;
-import org.sonar.server.source.index.SourceLineResultSetIterator;
+import org.sonar.server.source.index.SourceFileResultSetIterator;
import java.io.IOException;
import java.sql.Connection;
try {
long start = System.currentTimeMillis();
- SourceLineResultSetIterator it = SourceLineResultSetIterator.create(dbClient, connection, 0L);
+ SourceFileResultSetIterator it = SourceFileResultSetIterator.create(dbClient, connection, 0L);
while (it.hasNext()) {
- SourceLineResultSetIterator.SourceFile row = it.next();
- assertThat(row.getLines().size()).isEqualTo(NUMBER_OF_LINES);
+ SourceFileResultSetIterator.Row row = it.next();
+ assertThat(row.getLineUpdateRequests().size()).isEqualTo(NUMBER_OF_LINES);
assertThat(row.getFileUuid()).isNotEmpty();
counter.incrementAndGet();
}
.setOverallLineHits(i)
.setOverallConditions(i+1)
.setOverallCoveredConditions(i)
- .setScmDate(150000000L)
+ .setScmDate(1_500_000_000_000L)
.setHighlighting("2,9,k;9,18,k")
.addAllDuplications(Arrays.asList(19,33,141))
.build();
package org.sonar.server.benchmark;
-import com.google.common.collect.Maps;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
import org.junit.Rule;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonar.server.es.EsClient;
+import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.server.source.index.SourceFileResultSetIterator;
import org.sonar.server.source.index.SourceLineDoc;
import org.sonar.server.source.index.SourceLineIndex;
import org.sonar.server.source.index.SourceLineIndexDefinition;
import org.sonar.server.source.index.SourceLineIndexer;
-import org.sonar.server.source.index.SourceLineResultSetIterator;
import org.sonar.server.tester.ServerTester;
+import java.util.Arrays;
+import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Timer;
// TODO assertions
}
- private static class SourceIterator implements Iterator<SourceLineResultSetIterator.SourceFile> {
+ private static class SourceIterator implements Iterator<SourceFileResultSetIterator.Row> {
private final long nbFiles;
private final int nbLinesPerFile;
private int currentProject = 0;
private AtomicLong count = new AtomicLong(0L);
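+ // protobuf builder reused for every generated file, cleared at the beginning of next()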
+ private final FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
SourceIterator(long nbFiles, int nbLinesPerFile) {
this.nbFiles = nbFiles;
this.nbLinesPerFile = nbLinesPerFile;
}
@Override
- public SourceLineResultSetIterator.SourceFile next() {
+ public SourceFileResultSetIterator.Row next() {
+ String projectUuid = "P" + currentProject;
String fileUuid = "FILE" + count.get();
- SourceLineResultSetIterator.SourceFile file = new SourceLineResultSetIterator.SourceFile(fileUuid, System.currentTimeMillis());
+ dataBuilder.clear();
for (int indexLine = 1; indexLine <= nbLinesPerFile; indexLine++) {
- SourceLineDoc line = new SourceLineDoc(Maps.<String, Object>newHashMap());
- line.setFileUuid(fileUuid);
- line.setLine(indexLine);
- line.setHighlighting(StringUtils.repeat("HIGHLIGHTING", 5));
- line.setItConditions(4);
- line.setItCoveredConditions(2);
- line.setItLineHits(2);
- line.setOverallConditions(8);
- line.setOverallCoveredConditions(2);
- line.setOverallLineHits(2);
- line.setUtConditions(8);
- line.setUtCoveredConditions(2);
- line.setUtLineHits(2);
- line.setProjectUuid("PROJECT" + currentProject);
- line.setScmAuthor("a_guy");
- line.setScmRevision("ABCDEFGHIJKL");
- line.setSource(StringUtils.repeat("SOURCE", 10));
- file.addLine(line);
+ dataBuilder.addLinesBuilder()
+ .setLine(indexLine)
+ .setScmRevision("REVISION_" + indexLine)
+ .setScmAuthor("a_guy")
+ .setSource("this is not java code " + indexLine)
+ .setUtLineHits(2)
+ .setUtConditions(8)
+ .setUtCoveredConditions(2)
+ .setItLineHits(2)
+ .setItConditions(8)
+ .setItCoveredConditions(2)
+ .setOverallLineHits(2)
+ .setOverallConditions(8)
+ .setOverallCoveredConditions(2)
+ .setScmDate(1_500_000_000_000L)
+ .setHighlighting("2,9,k;9,18,k")
+ .addAllDuplications(Arrays.asList(19, 33, 141))
+ .build();
}
count.incrementAndGet();
if (count.get() % 500 == 0) {
currentProject++;
}
- return file;
+ return SourceFileResultSetIterator.toRow(projectUuid, fileUuid, new Date(), dataBuilder.build());
}
@Override
*/
package org.sonar.server.activity.index;
-import org.elasticsearch.action.update.UpdateRequest;
import org.sonar.core.persistence.DbSession;
import org.sonar.server.db.DbClient;
import org.sonar.server.es.BaseIndexer;
import org.sonar.server.es.EsClient;
import java.sql.Connection;
-import java.util.Iterator;
/**
* Add to Elasticsearch index {@link org.sonar.server.activity.index.ActivityIndexDefinition} the rows of
DbSession dbSession = dbClient.openSession(false);
Connection dbConnection = dbSession.getConnection();
try {
- ActivityResultSetIterator rowIt = ActivityResultSetIterator.create(dbClient, dbConnection, lastUpdatedAt);
- long maxUpdatedAt = doIndex(bulk, rowIt);
- rowIt.close();
- return maxUpdatedAt;
+ ActivityResultSetIterator it = ActivityResultSetIterator.create(dbClient, dbConnection, lastUpdatedAt);
+ bulk.start();
+ while (it.hasNext()) {
+ bulk.add(it.next());
+ }
+ bulk.stop();
+ it.close();
+ return it.getMaxRowDate();
} finally {
dbSession.close();
}
}
- public long index(Iterator<ActivityDoc> activities) {
- BulkIndexer bulk = new BulkIndexer(esClient, ActivityIndexDefinition.INDEX);
- return doIndex(bulk, activities);
- }
-
- private long doIndex(BulkIndexer bulk, Iterator<ActivityDoc> activities) {
- long maxUpdatedAt = 0L;
- bulk.start();
- while (activities.hasNext()) {
- ActivityDoc activity = activities.next();
- bulk.add(newUpsertRequest(activity));
-
- // it's more efficient to sort programmatically than in SQL on some databases (MySQL for instance)
- maxUpdatedAt = Math.max(maxUpdatedAt, activity.getCreatedAt().getTime());
- }
- bulk.stop();
- return maxUpdatedAt;
- }
-
- private UpdateRequest newUpsertRequest(ActivityDoc doc) {
- return new UpdateRequest(ActivityIndexDefinition.INDEX, ActivityIndexDefinition.TYPE, doc.getKey())
- .doc(doc.getFields())
- .upsert(doc.getFields());
- }
}
package org.sonar.server.activity.index;
import org.apache.commons.lang.StringUtils;
+import org.elasticsearch.action.update.UpdateRequest;
import org.sonar.api.utils.KeyValueFormat;
+import org.sonar.api.utils.text.JsonWriter;
import org.sonar.server.db.DbClient;
import org.sonar.server.db.ResultSetIterator;
+import org.sonar.server.es.EsUtils;
+import org.sonar.server.util.DateCollector;
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStreamWriter;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
-import java.util.HashMap;
+import java.util.Date;
/**
* Scrolls over table ACTIVITIES and reads documents to populate
* the index "activities/activity"
*/
-class ActivityResultSetIterator extends ResultSetIterator<ActivityDoc> {
+class ActivityResultSetIterator extends ResultSetIterator<UpdateRequest> {
private static final String[] FIELDS = {
- // column 1
"log_key",
"log_action",
"log_message",
private static final String SQL_AFTER_DATE = SQL_ALL + " where created_at>=?";
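+ // keeps the most recent row date so that the next incremental indexing can start from it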
+ private final DateCollector dates = new DateCollector();
+
private ActivityResultSetIterator(PreparedStatement stmt) throws SQLException {
super(stmt);
}
}
@Override
- protected ActivityDoc read(ResultSet rs) throws SQLException {
- ActivityDoc doc = new ActivityDoc(new HashMap<String, Object>(10));
-
+ protected UpdateRequest read(ResultSet rs) throws SQLException {
+ ByteArrayOutputStream bytes = new ByteArrayOutputStream();
// all the fields must be present, even if value is null
- doc.setKey(rs.getString(1));
- doc.setAction(rs.getString(2));
- doc.setMessage(rs.getString(3));
- doc.setDetails(KeyValueFormat.parse(rs.getString(4)));
- doc.setLogin(rs.getString(5));
- doc.setType(rs.getString(6));
- doc.setCreatedAt(rs.getTimestamp(7));
- return doc;
+ JsonWriter writer = JsonWriter.of(new OutputStreamWriter(bytes)).setSerializeNulls(true);
+ writer.beginObject();
+ String key = rs.getString(1);
+ writer.prop(ActivityIndexDefinition.FIELD_KEY, key);
+ writer.prop(ActivityIndexDefinition.FIELD_ACTION, rs.getString(2));
+ writer.prop(ActivityIndexDefinition.FIELD_MESSAGE, rs.getString(3));
+ writer.name(ActivityIndexDefinition.FIELD_DETAILS).valueObject(KeyValueFormat.parse(rs.getString(4)));
+ writer.prop(ActivityIndexDefinition.FIELD_LOGIN, rs.getString(5));
+ writer.prop(ActivityIndexDefinition.FIELD_TYPE, rs.getString(6));
+ Date createdAt = rs.getTimestamp(7);
+ writer.prop(ActivityIndexDefinition.FIELD_CREATED_AT, EsUtils.formatDateTime(createdAt));
+ writer.endObject().close();
+ byte[] jsonDoc = bytes.toByteArray();
+
+ // it's more efficient to sort programmatically than in SQL on some databases (MySQL for instance)
+ dates.add(createdAt);
+
+ return new UpdateRequest(ActivityIndexDefinition.INDEX, ActivityIndexDefinition.TYPE, key).doc(jsonDoc).upsert(jsonDoc);
+ }
+
+ long getMaxRowDate() {
+ return dates.getMax();
}
}
FeedIssuesLongDates.class,
FeedFileSourcesBinaryData.class,
FeedSemaphoresLongDates.class,
- FeedProjectMeasuresLongDates.class,
FeedManualMeasuresLongDates.class,
FeedEventsLongDates.class,
AddNewCharacteristics.class,
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-
-package org.sonar.server.db.migrations.v51;
-
-import org.sonar.api.utils.System2;
-import org.sonar.core.persistence.Database;
-import org.sonar.server.db.migrations.BaseDataChange;
-import org.sonar.server.db.migrations.MassUpdate;
-import org.sonar.server.db.migrations.Select;
-import org.sonar.server.db.migrations.SqlStatement;
-
-import java.sql.SQLException;
-import java.util.Date;
-
-public class FeedProjectMeasuresLongDates extends BaseDataChange {
-
- private final System2 system2;
-
- public FeedProjectMeasuresLongDates(Database db, System2 system2) {
- super(db);
- this.system2 = system2;
- }
-
- @Override
- public void execute(Context context) throws SQLException {
- final long now = system2.now();
- MassUpdate massUpdate = context.prepareMassUpdate();
- massUpdate
- .select("SELECT m.measure_date, m.id FROM project_measures m WHERE measure_date_ms IS NULL");
- massUpdate
- .update("UPDATE project_measures SET measure_date_ms=? WHERE id=?");
- massUpdate.rowPluralName("project measures");
- massUpdate.execute(new MassUpdate.Handler() {
- @Override
- public boolean handle(Select.Row row, SqlStatement update) throws SQLException {
- Date date = row.getNullableDate(1);
- update.setLong(1, date == null ? null : Math.min(now, date.getTime()));
-
- Long id = row.getNullableLong(2);
- update.setLong(2, id);
-
- return true;
- }
- });
- }
-
-}
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.search.SearchHit;
import org.picocontainer.Startable;
import org.sonar.api.utils.log.Logger;
import org.sonar.api.utils.log.Loggers;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
/**
* </ul>
*/
public class BulkIndexer implements Startable {
+
private static final Logger LOGGER = Loggers.get(BulkIndexer.class);
- private static final long FLUSH_BYTE_SIZE = new ByteSizeValue(2, ByteSizeUnit.MB).bytes();
+ private static final long FLUSH_BYTE_SIZE = new ByteSizeValue(1, ByteSizeUnit.MB).bytes();
private static final String REFRESH_INTERVAL_SETTING = "index.refresh_interval";
private static final String ALREADY_STARTED_MESSAGE = "Bulk indexing is already started";
private final EsClient client;
private final String indexName;
private boolean large = false;
- private long flushByteSize = FLUSH_BYTE_SIZE;
+ private boolean disableRefresh = false;
private BulkRequestBuilder bulkRequest = null;
private Map<String, Object> largeInitialSettings = null;
-
private final AtomicLong counter = new AtomicLong(0L);
+ private final int concurrentRequests;
+ private final Semaphore semaphore;
private final ProgressLogger progress;
public BulkIndexer(EsClient client, String indexName) {
this.indexName = indexName;
this.progress = new ProgressLogger(String.format("Progress[BulkIndexer[%s]]", indexName), counter, LOGGER)
.setPluralLabel("requests");
+
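+ // allow up to (number of cores - 1) concurrent bulk requests; the semaphore enforces this limit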
+ this.concurrentRequests = Math.max(1, Runtime.getRuntime().availableProcessors() - 1);
+ this.semaphore = new Semaphore(concurrentRequests);
}
/**
* Large indexing is a heavy operation that populates an index, generally from scratch. Replicas and
* automatic refresh are disabled during bulk indexing and Lucene segments are optimized at the end.
*/
-
public BulkIndexer setLarge(boolean b) {
Preconditions.checkState(bulkRequest == null, ALREADY_STARTED_MESSAGE);
this.large = b;
return this;
}
/**
- * Default value is {@link org.sonar.server.es.BulkIndexer#FLUSH_BYTE_SIZE}
- * @see org.elasticsearch.common.unit.ByteSizeValue
+ * By default the index is refreshed at the end of {@link #stop()}. Set to true
+ * to disable that refresh.
*/
- public BulkIndexer setFlushByteSize(long l) {
- this.flushByteSize = l;
+ public BulkIndexer setDisableRefresh(boolean b) {
+ this.disableRefresh = b;
return this;
}
+
@Override
public void start() {
Preconditions.checkState(bulkRequest == null, ALREADY_STARTED_MESSAGE);
updateSettings(bulkSettings);
}
- bulkRequest = client.prepareBulk();
+ bulkRequest = client.prepareBulk().setRefresh(false);
counter.set(0L);
progress.start();
}
public void add(ActionRequest request) {
bulkRequest.request().add(request);
- counter.getAndIncrement();
- if (bulkRequest.request().estimatedSizeInBytes() >= flushByteSize) {
- executeBulk(bulkRequest);
- bulkRequest = client.prepareBulk();
+ if (bulkRequest.request().estimatedSizeInBytes() >= FLUSH_BYTE_SIZE) {
+ executeBulk();
+ }
+ }
+
+ public void addDeletion(SearchRequestBuilder searchRequest) {
+ searchRequest
+ .setScroll(TimeValue.timeValueMinutes(5))
+ .setSearchType(SearchType.SCAN)
+ // load only doc ids, not _source fields
+ .setFetchSource(false);
+
+ // this search is synchronous. An optimization would be to make it non-blocking,
+ // but that requires tracking the pending requests in close().
+ // The same semaphore can't be reused because of a potential deadlock (it would require
+ // acquiring two locks).
+ SearchResponse searchResponse = searchRequest.get();
+ searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()).get();
+ for (SearchHit hit : searchResponse.getHits()) {
+ add(client.prepareDelete(hit.index(), hit.type(), hit.getId()).request());
}
}
+ /**
+ * Delete all the documents matching the given search request. This method is blocking.
+ * The index is refreshed, so the deleted docs are no longer searchable as soon as this method returns.
+ */
+ public static void delete(EsClient client, String indexName, SearchRequestBuilder searchRequest) {
+ BulkIndexer bulk = new BulkIndexer(client, indexName);
+ bulk.start();
+ bulk.addDeletion(searchRequest);
+ bulk.stop();
+ }
+
@Override
public void stop() {
+ if (bulkRequest.numberOfActions() > 0) {
+ executeBulk();
+ }
try {
- if (bulkRequest.numberOfActions() > 0) {
- executeBulk(bulkRequest);
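+ // wait for all in-flight asynchronous bulk requests to finish by acquiring every permit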
+ if (semaphore.tryAcquire(concurrentRequests, 10, TimeUnit.MINUTES)) {
+ semaphore.release(concurrentRequests);
}
- } finally {
- progress.stop();
+ } catch (InterruptedException e) {
+ throw new IllegalStateException("Elasticsearch bulk requests still being executed after 10 minutes", e);
}
+ progress.stop();
- client.prepareRefresh(indexName).get();
+ if (!disableRefresh) {
+ client.prepareRefresh(indexName).get();
+ }
if (large) {
// optimize lucene segments and revert index settings
// Optimization must be done before re-applying replicas:
req.get();
}
- private void executeBulk(BulkRequestBuilder bulkRequest) {
- List<ActionRequest> retries = Lists.newArrayList();
- BulkResponse response = bulkRequest.get();
-
- for (BulkItemResponse item : response.getItems()) {
- if (item.isFailed()) {
- ActionRequest retry = bulkRequest.request().requests().get(item.getItemId());
- retries.add(retry);
- }
- }
-
- if (!retries.isEmpty()) {
- LOGGER.warn(String.format("%d index requests failed. Trying again.", retries.size()));
- BulkRequestBuilder retryBulk = client.prepareBulk();
- for (ActionRequest retry : retries) {
- retryBulk.request().add(retry);
- }
- BulkResponse retryBulkResponse = retryBulk.get();
- if (retryBulkResponse.hasFailures()) {
- LOGGER.error("New attempt to index documents failed");
- for (int index = 0; index < retryBulkResponse.getItems().length; index++) {
- BulkItemResponse item = retryBulkResponse.getItems()[index];
+ private void executeBulk() {
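+ // swap in a fresh bulk builder so callers can keep adding requests while the full one is sent asynchronously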
+ final BulkRequestBuilder req = this.bulkRequest;
+ this.bulkRequest = client.prepareBulk().setRefresh(false);
+ semaphore.acquireUninterruptibly();
+ req.execute(new ActionListener<BulkResponse>() {
+ @Override
+ public void onResponse(BulkResponse response) {
+ semaphore.release();
+ counter.addAndGet(response.getItems().length);
+
+ List<ActionRequest> retries = Lists.newArrayList();
+ for (BulkItemResponse item : response.getItems()) {
if (item.isFailed()) {
- StringBuilder sb = new StringBuilder();
- String msg = sb.append("\n[").append(index)
- .append("]: index [").append(item.getIndex()).append("], type [").append(item.getType()).append("], id [").append(item.getId())
- .append("], message [").append(item.getFailureMessage()).append("]").toString();
- LOGGER.error(msg);
+ ActionRequest retry = req.request().requests().get(item.getItemId());
+ retries.add(retry);
+ }
+ }
+
+ if (!retries.isEmpty()) {
+ LOGGER.warn(String.format("%d index requests failed. Trying again.", retries.size()));
+ BulkRequestBuilder retryBulk = client.prepareBulk();
+ for (ActionRequest retry : retries) {
+ retryBulk.request().add(retry);
+ }
+ BulkResponse retryBulkResponse = retryBulk.get();
+ if (retryBulkResponse.hasFailures()) {
+ LOGGER.error("New attempt to index documents failed");
+ for (int index = 0; index < retryBulkResponse.getItems().length; index++) {
+ BulkItemResponse item = retryBulkResponse.getItems()[index];
+ if (item.isFailed()) {
+ StringBuilder sb = new StringBuilder();
+ String msg = sb.append("\n[").append(index)
+ .append("]: index [").append(item.getIndex()).append("], type [").append(item.getType()).append("], id [").append(item.getId())
+ .append("], message [").append(item.getFailureMessage()).append("]").toString();
+ LOGGER.error(msg);
+ }
+ }
+ } else {
+ LOGGER.info("New index attempt succeeded");
}
}
- } else {
- LOGGER.info("New index attempt succeeded");
}
- }
+
+ @Override
+ public void onFailure(Throwable e) {
+ semaphore.release();
+ LOGGER.error("Fail to execute bulk index request: " + req, e);
+ }
+ });
}
}
import com.google.common.base.Function;
import com.google.common.collect.Lists;
+import org.elasticsearch.common.joda.time.format.ISODateTimeFormat;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.sonar.server.search.BaseDoc;
+import javax.annotation.CheckForNull;
+import javax.annotation.Nullable;
import java.util.ArrayList;
+import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
}
});
}
+
+ @CheckForNull
+ public static Date parseDateTime(@Nullable String s) {
+ if (s != null) {
+ return ISODateTimeFormat.dateTime().parseDateTime(s).toDate();
+ }
+ return null;
+ }
+
+ @CheckForNull
+ public static String formatDateTime(@Nullable Date date) {
+ if (date != null) {
+ return ISODateTimeFormat.dateTime().print(date.getTime());
+ }
+ return null;
+ }
}
package org.sonar.server.es.request;
import com.google.common.collect.HashMultiset;
-import com.google.common.collect.Multiset.Entry;
+import com.google.common.collect.Multiset;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ListenableActionFuture;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
@Override
public ListenableActionFuture<BulkResponse> execute() {
- throw new UnsupportedOperationException("execute() should not be called as it's used for asynchronous");
+ throw unsupported();
}
private UnsupportedOperationException unsupported() {
groupedRequests.add(new BulkRequestKey(requestType, index, docType));
}
- Set<Entry<BulkRequestKey>> entrySet = groupedRequests.entrySet();
+ Set<Multiset.Entry<BulkRequestKey>> entrySet = groupedRequests.entrySet();
int size = entrySet.size();
int current = 0;
- for (Entry<BulkRequestKey> requestEntry : entrySet) {
+ for (Multiset.Entry<BulkRequestKey> requestEntry : entrySet) {
message.append(requestEntry.getCount()).append(" ").append(requestEntry.getElement().toString());
current++;
if (current < size) {
package org.sonar.server.issue.index;
import org.apache.commons.dbutils.DbUtils;
+import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.index.query.FilterBuilders;
-import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.sonar.core.persistence.DbSession;
import org.sonar.server.db.DbClient;
}
public void deleteProject(String uuid, boolean refresh) {
- QueryBuilder query = QueryBuilders.filteredQuery(
- QueryBuilders.matchAllQuery(),
- FilterBuilders.boolFilter().must(FilterBuilders.termsFilter(IssueIndexDefinition.FIELD_ISSUE_PROJECT_UUID, uuid))
- );
- esClient.prepareDeleteByQuery(IssueIndexDefinition.INDEX).setQuery(query).get();
- if (refresh) {
- esClient.prepareRefresh(IssueIndexDefinition.INDEX).get();
- }
+ BulkIndexer bulk = new BulkIndexer(esClient, IssueIndexDefinition.INDEX);
+ bulk.setDisableRefresh(!refresh);
+ bulk.start();
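+ // issue documents are routed by project UUID, so the scan can be routed to a single shard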
+ SearchRequestBuilder search = esClient.prepareSearch(IssueIndexDefinition.INDEX)
+ .setRouting(uuid)
+ .setQuery(QueryBuilders.filteredQuery(
+ QueryBuilders.matchAllQuery(),
+ FilterBuilders.boolFilter().must(FilterBuilders.termsFilter(IssueIndexDefinition.FIELD_ISSUE_PROJECT_UUID, uuid))
+ ));
+ bulk.addDeletion(search);
+ bulk.stop();
}
BulkIndexer createBulkIndexer(boolean large) {
import org.sonar.server.es.request.ProxyBulkRequestBuilder;
import org.sonar.server.es.request.ProxyCountRequestBuilder;
import org.sonar.server.es.request.ProxyCreateIndexRequestBuilder;
-import org.sonar.server.es.request.ProxyDeleteByQueryRequestBuilder;
import org.sonar.server.es.request.ProxyDeleteRequestBuilder;
import org.sonar.server.es.request.ProxyGetRequestBuilder;
import org.sonar.server.es.request.ProxyIndicesExistsRequestBuilder;
@Override
public DeleteByQueryRequestBuilder prepareDeleteByQuery(String... indices) {
- return new ProxyDeleteByQueryRequestBuilder(this).setIndices(indices);
+ throw new UnsupportedOperationException("Delete by query must not be used. See https://github.com/elastic/elasticsearch/issues/10067. See alternatives in BulkIndexer.");
}
// ****************************************************************************************************************
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.server.source.index;
+
+import org.apache.commons.lang.StringUtils;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.sonar.api.utils.text.JsonWriter;
+import org.sonar.core.source.db.FileSourceDto;
+import org.sonar.server.db.DbClient;
+import org.sonar.server.db.ResultSetIterator;
+import org.sonar.server.es.EsUtils;
+import org.sonar.server.source.db.FileSourceDb;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStreamWriter;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Scrolls over the table FILE_SOURCES and directly parses the data required to
+ * populate the index "sourcelines"
+ */
+public class SourceFileResultSetIterator extends ResultSetIterator<SourceFileResultSetIterator.Row> {
+
+ public static class Row {
+ private final String fileUuid, projectUuid;
+ private final long updatedAt;
+ private final List<UpdateRequest> lineUpdateRequests = new ArrayList<>();
+
+ public Row(String projectUuid, String fileUuid, long updatedAt) {
+ this.projectUuid = projectUuid;
+ this.fileUuid = fileUuid;
+ this.updatedAt = updatedAt;
+ }
+
+ public String getProjectUuid() {
+ return projectUuid;
+ }
+
+ public String getFileUuid() {
+ return fileUuid;
+ }
+
+ public long getUpdatedAt() {
+ return updatedAt;
+ }
+
+ public List<UpdateRequest> getLineUpdateRequests() {
+ return lineUpdateRequests;
+ }
+ }
+
+ private static final String[] FIELDS = {
+ "project_uuid",
+ "file_uuid",
+ "updated_at",
+ "binary_data"
+ };
+ private static final String SQL_ALL = "select " + StringUtils.join(FIELDS, ",") + " from file_sources";
+ private static final String SQL_AFTER_DATE = SQL_ALL + " where updated_at>?";
+
+ public static SourceFileResultSetIterator create(DbClient dbClient, Connection connection, long afterDate) {
+ try {
+ String sql = afterDate > 0L ? SQL_AFTER_DATE : SQL_ALL;
+ // rows are big, so they are scrolled one at a time (a single row in memory at a time)
+ PreparedStatement stmt = dbClient.newScrollingSingleRowSelectStatement(connection, sql);
+ if (afterDate > 0L) {
+ stmt.setLong(1, afterDate);
+ }
+ return new SourceFileResultSetIterator(stmt);
+ } catch (SQLException e) {
+ throw new IllegalStateException("Fail to prepare SQL request to select all file sources", e);
+ }
+ }
+
+ private SourceFileResultSetIterator(PreparedStatement stmt) throws SQLException {
+ super(stmt);
+ }
+
+ @Override
+ protected Row read(ResultSet rs) throws SQLException {
+ String projectUuid = rs.getString(1);
+ String fileUuid = rs.getString(2);
+ Date updatedAt = new Date(rs.getLong(3));
+ FileSourceDb.Data data = FileSourceDto.decodeData(rs.getBinaryStream(4));
+ return toRow(projectUuid, fileUuid, updatedAt, data);
+ }
+
+ /**
+ * Convert protobuf message to data required for Elasticsearch indexing
+ */
+ public static Row toRow(String projectUuid, String fileUuid, Date updatedAt, FileSourceDb.Data data) {
+ Row result = new Row(projectUuid, fileUuid, updatedAt.getTime());
+ for (FileSourceDb.Line line : data.getLinesList()) {
+ ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+
+ // all the fields must be present, even if value is null
+ JsonWriter writer = JsonWriter.of(new OutputStreamWriter(bytes)).setSerializeNulls(true);
+ writer.beginObject();
+ writer.prop(SourceLineIndexDefinition.FIELD_PROJECT_UUID, projectUuid);
+ writer.prop(SourceLineIndexDefinition.FIELD_FILE_UUID, fileUuid);
+ writer.prop(SourceLineIndexDefinition.FIELD_LINE, line.getLine());
+ writer.prop(SourceLineIndexDefinition.FIELD_UPDATED_AT, EsUtils.formatDateTime(updatedAt));
+ writer.prop(SourceLineIndexDefinition.FIELD_SCM_REVISION, line.getScmRevision());
+ writer.prop(SourceLineIndexDefinition.FIELD_SCM_AUTHOR, line.getScmAuthor());
+ writer.prop(SourceLineIndexDefinition.FIELD_SCM_DATE, EsUtils.formatDateTime(line.hasScmDate() ? new Date(line.getScmDate()) : null));
+
+ // unit tests
+ if (line.hasUtLineHits()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_UT_LINE_HITS, line.getUtLineHits());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_UT_LINE_HITS).valueObject(null);
+ }
+ if (line.hasUtConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_UT_CONDITIONS, line.getUtConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_UT_CONDITIONS).valueObject(null);
+ }
+ if (line.hasUtCoveredConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_UT_COVERED_CONDITIONS, line.getUtCoveredConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_UT_COVERED_CONDITIONS).valueObject(null);
+ }
+
+ // IT
+ if (line.hasItLineHits()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_IT_LINE_HITS, line.getItLineHits());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_IT_LINE_HITS).valueObject(null);
+ }
+ if (line.hasItConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_IT_CONDITIONS, line.getItConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_IT_CONDITIONS).valueObject(null);
+ }
+ if (line.hasItCoveredConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_IT_COVERED_CONDITIONS, line.getItCoveredConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_IT_COVERED_CONDITIONS).valueObject(null);
+ }
+
+ // Overall coverage
+ if (line.hasOverallLineHits()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_OVERALL_LINE_HITS, line.getOverallLineHits());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_OVERALL_LINE_HITS).valueObject(null);
+ }
+ if (line.hasOverallConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_OVERALL_CONDITIONS, line.getOverallConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_OVERALL_CONDITIONS).valueObject(null);
+ }
+ if (line.hasOverallCoveredConditions()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_OVERALL_COVERED_CONDITIONS, line.getOverallCoveredConditions());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_OVERALL_COVERED_CONDITIONS).valueObject(null);
+ }
+
+ if (line.hasHighlighting()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_HIGHLIGHTING, line.getHighlighting());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_HIGHLIGHTING).valueObject(null);
+ }
+ if (line.hasSymbols()) {
+ writer.prop(SourceLineIndexDefinition.FIELD_SYMBOLS, line.getSymbols());
+ } else {
+ writer.name(SourceLineIndexDefinition.FIELD_SYMBOLS).valueObject(null);
+ }
+ writer.name(SourceLineIndexDefinition.FIELD_DUPLICATIONS).valueObject(line.getDuplicationsList());
+ writer.prop(SourceLineIndexDefinition.FIELD_SOURCE, line.hasSource() ? line.getSource() : null);
+ writer.endObject().close();
+
+ // This is an optimization to reduce memory consumption and to avoid multiple conversions from Map to JSON.
+ // UpdateRequest#doc() and #upsert() take the same parameter values, so:
+ // - passing the same Map would execute two JSON serializations
+ // - the Map is a useless temporary structure: read JDBC result set -> convert to map -> convert to JSON.
+ // Generating JSON directly from the result set is more efficient.
+ byte[] jsonDoc = bytes.toByteArray();
+ UpdateRequest updateRequest = new UpdateRequest(SourceLineIndexDefinition.INDEX, SourceLineIndexDefinition.TYPE, SourceLineIndexDefinition.docKey(fileUuid, line.getLine()))
+ .routing(projectUuid)
+ .doc(jsonDoc)
+ .upsert(jsonDoc);
+ result.lineUpdateRequests.add(updateRequest);
+ }
+ return result;
+ }
+}
return this;
}
- public String key() {
- return String.format("%s_%d", fileUuid(), line());
- }
-
@CheckForNull
public Integer utLineHits() {
Number lineHits = getNullableField(SourceLineIndexDefinition.FIELD_UT_LINE_HITS);
*/
package org.sonar.server.source.index;
+import com.google.common.collect.ImmutableMap;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.sonar.api.config.Settings;
import org.sonar.process.ProcessConstants;
public class SourceLineIndexDefinition implements IndexDefinition {
+ public static final String INDEX = "sourcelines";
+ public static final String TYPE = "sourceline";
public static final String FIELD_PROJECT_UUID = "projectUuid";
public static final String FIELD_FILE_UUID = "fileUuid";
public static final String FIELD_LINE = "line";
public static final String FIELD_DUPLICATIONS = "duplications";
public static final String FIELD_UPDATED_AT = "updatedAt";
- public static final String INDEX = "sourcelines";
-
- public static final String TYPE = "sourceline";
-
-
private final Settings settings;
public SourceLineIndexDefinition(Settings settings) {
}
// type "sourceline"
- NewIndex.NewIndexType sourceLineMapping = index.createType(TYPE);
- sourceLineMapping.stringFieldBuilder(FIELD_PROJECT_UUID).build();
- sourceLineMapping.stringFieldBuilder(FIELD_FILE_UUID).build();
- sourceLineMapping.createIntegerField(FIELD_LINE);
- sourceLineMapping.stringFieldBuilder(FIELD_SCM_REVISION).build();
- sourceLineMapping.stringFieldBuilder(FIELD_SCM_AUTHOR).build();
- sourceLineMapping.createDateTimeField(FIELD_SCM_DATE);
- sourceLineMapping.stringFieldBuilder(FIELD_HIGHLIGHTING).disableSearch().build();
- sourceLineMapping.stringFieldBuilder(FIELD_SOURCE).disableSearch().build();
- sourceLineMapping.createIntegerField(FIELD_UT_LINE_HITS);
- sourceLineMapping.createIntegerField(FIELD_UT_CONDITIONS);
- sourceLineMapping.createIntegerField(FIELD_UT_COVERED_CONDITIONS);
- sourceLineMapping.createIntegerField(FIELD_IT_LINE_HITS);
- sourceLineMapping.createIntegerField(FIELD_IT_CONDITIONS);
- sourceLineMapping.createIntegerField(FIELD_IT_COVERED_CONDITIONS);
- sourceLineMapping.createIntegerField(FIELD_OVERALL_LINE_HITS);
- sourceLineMapping.createIntegerField(FIELD_OVERALL_CONDITIONS);
- sourceLineMapping.createIntegerField(FIELD_OVERALL_COVERED_CONDITIONS);
- sourceLineMapping.stringFieldBuilder(FIELD_SYMBOLS).disableSearch().build();
- sourceLineMapping.createShortField(FIELD_DUPLICATIONS);
- sourceLineMapping.createDateTimeField(FIELD_UPDATED_AT);
+ NewIndex.NewIndexType mapping = index.createType(TYPE);
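+ // routing is required and based on the project UUID, so all lines of a project are stored on the same shard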
+ mapping.setAttribute("_routing", ImmutableMap.of("required", true, "path", FIELD_PROJECT_UUID));
+ mapping.stringFieldBuilder(FIELD_PROJECT_UUID).build();
+ mapping.stringFieldBuilder(FIELD_FILE_UUID).build();
+ mapping.createIntegerField(FIELD_LINE);
+ mapping.stringFieldBuilder(FIELD_SCM_REVISION).disableSearch().build();
+ mapping.stringFieldBuilder(FIELD_SCM_AUTHOR).disableSearch().build();
+ mapping.createDateTimeField(FIELD_SCM_DATE);
+ mapping.stringFieldBuilder(FIELD_HIGHLIGHTING).disableSearch().build();
+ mapping.stringFieldBuilder(FIELD_SOURCE).disableSearch().build();
+ mapping.createIntegerField(FIELD_UT_LINE_HITS);
+ mapping.createIntegerField(FIELD_UT_CONDITIONS);
+ mapping.createIntegerField(FIELD_UT_COVERED_CONDITIONS);
+ mapping.createIntegerField(FIELD_IT_LINE_HITS);
+ mapping.createIntegerField(FIELD_IT_CONDITIONS);
+ mapping.createIntegerField(FIELD_IT_COVERED_CONDITIONS);
+ mapping.createIntegerField(FIELD_OVERALL_LINE_HITS);
+ mapping.createIntegerField(FIELD_OVERALL_CONDITIONS);
+ mapping.createIntegerField(FIELD_OVERALL_COVERED_CONDITIONS);
+ mapping.stringFieldBuilder(FIELD_SYMBOLS).disableSearch().build();
+ mapping.createShortField(FIELD_DUPLICATIONS);
+ mapping.createDateTimeField(FIELD_UPDATED_AT);
+ }
+
+ public static String docKey(String fileUuid, int line) {
+ return String.format("%s_%d", fileUuid, line);
}
}
*/
package org.sonar.server.source.index;
+import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilders;
DbSession dbSession = dbClient.openSession(false);
Connection dbConnection = dbSession.getConnection();
try {
- SourceLineResultSetIterator rowIt = SourceLineResultSetIterator.create(dbClient, dbConnection, lastUpdatedAt);
+ SourceFileResultSetIterator rowIt = SourceFileResultSetIterator.create(dbClient, dbConnection, lastUpdatedAt);
long maxUpdatedAt = doIndex(bulk, rowIt);
rowIt.close();
return maxUpdatedAt;
}
}
- public long index(Iterator<SourceLineResultSetIterator.SourceFile> sourceFiles) {
- final BulkIndexer bulk = new BulkIndexer(esClient, SourceLineIndexDefinition.INDEX);
- return doIndex(bulk, sourceFiles);
+ public long index(Iterator<SourceFileResultSetIterator.Row> dbRows) {
+ BulkIndexer bulk = new BulkIndexer(esClient, SourceLineIndexDefinition.INDEX);
+ return doIndex(bulk, dbRows);
}
- private long doIndex(BulkIndexer bulk, Iterator<SourceLineResultSetIterator.SourceFile> files) {
+ private long doIndex(BulkIndexer bulk, Iterator<SourceFileResultSetIterator.Row> dbRows) {
long maxUpdatedAt = 0L;
bulk.start();
- while (files.hasNext()) {
- SourceLineResultSetIterator.SourceFile file = files.next();
- for (SourceLineDoc line : file.getLines()) {
- bulk.add(newUpsertRequest(line));
+ while (dbRows.hasNext()) {
+ SourceFileResultSetIterator.Row row = dbRows.next();
+ addDeleteRequestsForLinesGreaterThan(bulk, row);
+ for (UpdateRequest updateRequest : row.getLineUpdateRequests()) {
+ bulk.add(updateRequest);
}
- deleteLinesFromFileAbove(file.getFileUuid(), file.getLines().size());
- maxUpdatedAt = Math.max(maxUpdatedAt, file.getUpdatedAt());
+ maxUpdatedAt = Math.max(maxUpdatedAt, row.getUpdatedAt());
}
bulk.stop();
return maxUpdatedAt;
}
- private UpdateRequest newUpsertRequest(SourceLineDoc lineDoc) {
- String projectUuid = lineDoc.projectUuid();
- return new UpdateRequest(SourceLineIndexDefinition.INDEX, SourceLineIndexDefinition.TYPE, lineDoc.key())
- .routing(projectUuid)
- .doc(lineDoc.getFields())
- .upsert(lineDoc.getFields());
- }
-
/**
- * Unindex all lines in file with UUID <code>fileUuid</code> above line <code>lastLine</code>
+ * Use case:
+ * - the file had 10 lines in the previous analysis
+ * - the same file now has 5 lines
+ * Lines 6 to 10 must be removed from the index.
*/
- private void deleteLinesFromFileAbove(String fileUuid, int lastLine) {
- esClient.prepareDeleteByQuery(SourceLineIndexDefinition.INDEX)
+ private void addDeleteRequestsForLinesGreaterThan(BulkIndexer bulk, SourceFileResultSetIterator.Row fileRow) {
+ int numberOfLines = fileRow.getLineUpdateRequests().size();
+ SearchRequestBuilder searchRequest = esClient.prepareSearch(SourceLineIndexDefinition.INDEX)
.setTypes(SourceLineIndexDefinition.TYPE)
- .setQuery(QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), FilterBuilders.boolFilter()
- .must(FilterBuilders.termFilter(FIELD_FILE_UUID, fileUuid).cache(false))
- .must(FilterBuilders.rangeFilter(SourceLineIndexDefinition.FIELD_LINE).gt(lastLine).cache(false))
- )).get();
+ .setRouting(fileRow.getProjectUuid())
+ .setQuery(QueryBuilders.filteredQuery(
+ QueryBuilders.matchAllQuery(),
+ FilterBuilders.boolFilter()
+ .must(FilterBuilders.termFilter(FIELD_FILE_UUID, fileRow.getFileUuid()).cache(false))
+ .must(FilterBuilders.rangeFilter(SourceLineIndexDefinition.FIELD_LINE).gt(numberOfLines).cache(false))
+ .cache(false)
+ ));
+ bulk.addDeletion(searchRequest);
}
public void deleteByFile(String fileUuid) {
- esClient.prepareDeleteByQuery(SourceLineIndexDefinition.INDEX)
+ // TODO would be great to have the projectUuid for routing
+ SearchRequestBuilder searchRequest = esClient.prepareSearch(SourceLineIndexDefinition.INDEX)
.setTypes(SourceLineIndexDefinition.TYPE)
- .setQuery(QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(),
- FilterBuilders.termFilter(FIELD_FILE_UUID, fileUuid).cache(false)))
- .get();
+ .setQuery(QueryBuilders.filteredQuery(
+ QueryBuilders.matchAllQuery(),
+ FilterBuilders.termFilter(FIELD_FILE_UUID, fileUuid).cache(false)));
+ BulkIndexer.delete(esClient, SourceLineIndexDefinition.INDEX, searchRequest);
}
public void deleteByProject(String projectUuid) {
- esClient.prepareDeleteByQuery(SourceLineIndexDefinition.INDEX)
+ SearchRequestBuilder searchRequest = esClient.prepareSearch(SourceLineIndexDefinition.INDEX)
+ .setRouting(projectUuid)
.setTypes(SourceLineIndexDefinition.TYPE)
- .setQuery(QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(),
- FilterBuilders.termFilter(FIELD_PROJECT_UUID, projectUuid).cache(false)))
- .get();
+ .setQuery(QueryBuilders.filteredQuery(
+ QueryBuilders.matchAllQuery(),
+ FilterBuilders.termFilter(FIELD_PROJECT_UUID, projectUuid).cache(false)));
+ BulkIndexer.delete(esClient, SourceLineIndexDefinition.INDEX, searchRequest);
}
}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.server.source.index;
-
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.StringUtils;
-import org.sonar.core.source.db.FileSourceDto;
-import org.sonar.server.db.DbClient;
-import org.sonar.server.db.ResultSetIterator;
-import org.sonar.server.source.db.FileSourceDb;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.Date;
-import java.util.List;
-
-/**
- * Scroll over table FILE_SOURCES and directly parse CSV field required to
- * populate the index sourcelines
- */
-public class SourceLineResultSetIterator extends ResultSetIterator<SourceLineResultSetIterator.SourceFile> {
-
- public static class SourceFile {
- private final String fileUuid;
- private final long updatedAt;
- private final List<SourceLineDoc> lines = Lists.newArrayList();
-
- public SourceFile(String fileUuid, long updatedAt) {
- this.fileUuid = fileUuid;
- this.updatedAt = updatedAt;
- }
-
- public String getFileUuid() {
- return fileUuid;
- }
-
- public long getUpdatedAt() {
- return updatedAt;
- }
-
- public List<SourceLineDoc> getLines() {
- return lines;
- }
-
- public void addLine(SourceLineDoc line) {
- this.lines.add(line);
- }
- }
-
- private static final String[] FIELDS = {
- // column 1
- "project_uuid",
- "file_uuid",
- "updated_at",
- "binary_data"
- };
-
- private static final String SQL_ALL = "select " + StringUtils.join(FIELDS, ",") + " from file_sources";
-
- private static final String SQL_AFTER_DATE = SQL_ALL + " where updated_at>?";
-
- public static SourceLineResultSetIterator create(DbClient dbClient, Connection connection, long afterDate) {
- try {
- String sql = afterDate > 0L ? SQL_AFTER_DATE : SQL_ALL;
- // rows are big, so they are scrolled once at a time (one row in memory at a time)
- PreparedStatement stmt = dbClient.newScrollingSingleRowSelectStatement(connection, sql);
- if (afterDate > 0L) {
- stmt.setLong(1, afterDate);
- }
- return new SourceLineResultSetIterator(stmt);
- } catch (SQLException e) {
- throw new IllegalStateException("Fail to prepare SQL request to select all file sources", e);
- }
- }
-
- private SourceLineResultSetIterator(PreparedStatement stmt) throws SQLException {
- super(stmt);
- }
-
- @Override
- protected SourceFile read(ResultSet rs) throws SQLException {
- String projectUuid = rs.getString(1);
- String fileUuid = rs.getString(2);
- long updatedAt = rs.getLong(3);
- Date updatedDate = new Date(updatedAt);
-
- SourceFile result = new SourceFile(fileUuid, updatedAt);
- FileSourceDb.Data data = FileSourceDto.decodeData(rs.getBinaryStream(4));
- for (FileSourceDb.Line line : data.getLinesList()) {
- SourceLineDoc doc = new SourceLineDoc();
- doc.setProjectUuid(projectUuid);
- doc.setFileUuid(fileUuid);
- doc.setLine(line.getLine());
- doc.setUpdateDate(updatedDate);
- doc.setScmRevision(line.getScmRevision());
- doc.setScmAuthor(line.getScmAuthor());
- doc.setScmDate(line.hasScmDate() ? new Date(line.getScmDate()) : null);
- // UT
- doc.setUtLineHits(line.hasUtLineHits() ? line.getUtLineHits() : null);
- doc.setUtConditions(line.hasUtConditions() ? line.getUtConditions() : null);
- doc.setUtCoveredConditions(line.hasUtCoveredConditions() ? line.getUtCoveredConditions() : null);
- // IT
- doc.setItLineHits(line.hasItLineHits() ? line.getItLineHits() : null);
- doc.setItConditions(line.hasItConditions() ? line.getItConditions() : null);
- doc.setItCoveredConditions(line.hasItCoveredConditions() ? line.getItCoveredConditions() : null);
- // OVERALL
- doc.setOverallLineHits(line.hasOverallLineHits() ? line.getOverallLineHits() : null);
- doc.setOverallConditions(line.hasOverallConditions() ? line.getOverallConditions() : null);
- doc.setOverallCoveredConditions(line.hasOverallCoveredConditions() ? line.getOverallCoveredConditions() : null);
-
- doc.setHighlighting(line.hasHighlighting() ? line.getHighlighting() : null);
- doc.setSymbols(line.hasSymbols() ? line.getSymbols() : null);
- doc.setDuplications(line.getDuplicationsList());
-
- // source is always the latest field. All future fields will be added between duplications (14) and source.
- doc.setSource(line.hasSource() ? line.getSource() : null);
-
- result.addLine(doc);
- }
- return result;
- }
-}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.server.util;
+
+import javax.annotation.Nullable;
+
+import java.util.Date;
+
+public class DateCollector {
+
+ private long maxDate = 0L;
+
+ public void add(@Nullable Date d) {
+ if (d != null) {
+ add(d.getTime());
+ }
+ }
+
+ public void add(long date) {
+ maxDate = Math.max(maxDate, date);
+ }
+
+ /**
+ * The most recent collected date. Value is zero if no dates were collected.
+ */
+ public long getMax() {
+ return maxDate;
+ }
+}
task.log();
}
- private static class LoggerTimerTask extends TimerTask {
+ private class LoggerTimerTask extends TimerTask {
private final AtomicLong counter;
private final Logger logger;
private String pluralLabel = "rows";
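+ // counter value at the previous log, used to compute the throughput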
+ private long previousCounter = 0L;
private LoggerTimerTask(AtomicLong counter, Logger logger) {
this.counter = counter;
}
private void log() {
- logger.info(String.format("%d %s processed", counter.get(), pluralLabel));
+ long current = counter.get();
+ logger.info(String.format("%d %s processed (%d items/sec)", current, pluralLabel, 1000 * (current-previousCounter) / periodMs));
+ previousCounter = current;
}
}
}
import org.apache.commons.dbutils.DbUtils;
import org.assertj.core.data.MapEntry;
+import org.elasticsearch.action.update.UpdateRequest;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.sonar.test.DbTests;
import java.sql.Connection;
+import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
DbUtils.closeQuietly(connection);
}
+ /**
+ * Iterates over the two rows of the table.
+ */
@Test
public void traverse() throws Exception {
dbTester.prepareDbUnit(getClass(), "traverse.xml");
ActivityResultSetIterator it = ActivityResultSetIterator.create(client, connection, 0L);
+
assertThat(it.hasNext()).isTrue();
- ActivityDoc doc = it.next();
- assertThat(doc).isNotNull();
- assertThat(doc.getKey()).isEqualTo("UUID1");
- assertThat(doc.getAction()).isEqualTo("THE_ACTION");
- assertThat(doc.getMessage()).isEqualTo("THE_MSG");
- assertThat(doc.getDetails()).containsOnly(MapEntry.entry("foo", "bar"));
- assertThat(doc.getLogin()).isEqualTo("THE_AUTHOR");
+ UpdateRequest request = it.next();
+ Map<String, Object> doc = request.doc().sourceAsMap();
+ assertThat(doc.get(ActivityIndexDefinition.FIELD_KEY)).isEqualTo("UUID1");
+ assertThat(doc.get(ActivityIndexDefinition.FIELD_ACTION)).isEqualTo("THE_ACTION");
+ assertThat(doc.get(ActivityIndexDefinition.FIELD_MESSAGE)).isEqualTo("THE_MSG");
+ assertThat((Map) doc.get(ActivityIndexDefinition.FIELD_DETAILS)).containsOnly(MapEntry.entry("foo", "bar"));
+ assertThat(doc.get(ActivityIndexDefinition.FIELD_LOGIN)).isEqualTo("THE_AUTHOR");
assertThat(it.hasNext()).isTrue();
assertThat(it.next()).isNotNull();
assertThat(it.hasNext()).isFalse();
it.close();
+
+ assertThat(it.getMaxRowDate()).isEqualTo(1420066800000L);
}
@Test
ActivityResultSetIterator it = ActivityResultSetIterator.create(client, connection, DateUtils.parseDate("2014-12-01").getTime());
assertThat(it.hasNext()).isTrue();
- ActivityDoc doc = it.next();
- assertThat(doc).isNotNull();
- assertThat(doc.getKey()).isEqualTo("UUID2");
+ UpdateRequest request = it.next();
+ assertThat(request).isNotNull();
+ Map<String, Object> doc = request.doc().sourceAsMap();
+ assertThat(doc.get(ActivityIndexDefinition.FIELD_KEY)).isEqualTo("UUID2");
assertThat(it.hasNext()).isFalse();
it.close();
+
+ assertThat(it.getMaxRowDate()).isEqualTo(1420066800000L);
+ }
+
+ @Test
+ public void nothing_to_traverse() throws Exception {
+ dbTester.prepareDbUnit(getClass(), "traverse.xml");
+ ActivityResultSetIterator it = ActivityResultSetIterator.create(client, connection, DateUtils.parseDate("2030-01-01").getTime());
+
+ assertThat(it.hasNext()).isFalse();
+ it.close();
+
+ assertThat(it.getMaxRowDate()).isEqualTo(0L);
}
}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-
-package org.sonar.server.db.migrations.v51;
-
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.sonar.api.utils.System2;
-import org.sonar.core.persistence.DbTester;
-import org.sonar.server.db.migrations.DatabaseMigration;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-import static org.sonar.api.utils.DateUtils.parseDate;
-
-public class FeedProjectMeasuresLongDatesTest {
- @ClassRule
- public static DbTester db = new DbTester().schema(FeedProjectMeasuresLongDatesTest.class, "schema.sql");
-
- @Before
- public void before() throws Exception {
- db.prepareDbUnit(getClass(), "before.xml");
- }
-
- @Test
- public void execute() throws Exception {
- DatabaseMigration migration = newMigration(System2.INSTANCE);
-
- migration.execute();
-
- int count = db
- .countSql("select count(*) from project_measures where " +
- "measure_date_ms is not null");
- assertThat(count).isEqualTo(2);
- }
-
- @Test
- public void take_now_if_date_in_the_future() throws Exception {
- System2 system = mock(System2.class);
- when(system.now()).thenReturn(1234L);
-
- DatabaseMigration migration = newMigration(system);
-
- migration.execute();
-
- int count = db
- .countSql("select count(*) from project_measures where " +
- "measure_date_ms = 1234");
- assertThat(count).isEqualTo(1);
- }
-
- @Test
- public void take_snapshot_date_if_in_the_past() throws Exception {
- DatabaseMigration migration = newMigration(System2.INSTANCE);
-
- migration.execute();
-
- long snapshotTime = parseDate("2014-09-25").getTime();
- int count = db
- .countSql("select count(*) from project_measures where " +
- "measure_date_ms=" + snapshotTime);
- assertThat(count).isEqualTo(1);
- }
-
- private FeedProjectMeasuresLongDates newMigration(System2 system) {
- return new FeedProjectMeasuresLongDates(db.database(), system);
- }
-}
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.unit.ByteSizeUnit;
-import org.elasticsearch.common.unit.ByteSizeValue;
-import org.junit.ClassRule;
+import org.junit.Rule;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class BulkIndexerTest {
- @ClassRule
- public static EsTester esTester = new EsTester().addDefinitions(new FakeIndexDefinition().setReplicas(1));
+ @Rule
+ public EsTester esTester = new EsTester().addDefinitions(new FakeIndexDefinition().setReplicas(1));
@Test
public void index_nothing() throws Exception {
assertThat(replicas()).isEqualTo(1);
BulkIndexer indexer = new BulkIndexer(esTester.client(), FakeIndexDefinition.INDEX)
- .setLarge(true)
- .setFlushByteSize(new ByteSizeValue(1, ByteSizeUnit.BYTES).bytes());
+ .setLarge(true);
indexer.start();
// replicas are temporarily disabled
assertThat(replicas()).isEqualTo(1);
}
+ @Test
+ public void disable_refresh() throws Exception {
+ BulkIndexer indexer = new BulkIndexer(esTester.client(), FakeIndexDefinition.INDEX)
+ .setDisableRefresh(true);
+ indexer.start();
+ indexer.add(newIndexRequest(42));
+ indexer.add(newIndexRequest(78));
+ indexer.stop();
+
+ assertThat(count()).isEqualTo(0);
+
+ esTester.client().prepareRefresh(FakeIndexDefinition.INDEX).get();
+ assertThat(count()).isEqualTo(2);
+ }
+
private long count() {
return esTester.countDocuments("fakes", "fake");
}
import org.sonar.server.search.BaseDoc;
import org.sonar.test.TestUtils;
+import java.util.Date;
import java.util.List;
import java.util.Map;
public void util_class() throws Exception {
assertThat(TestUtils.hasOnlyPrivateConstructors(EsUtils.class)).isTrue();
}
+
+ @Test
+ public void es_date_format() throws Exception {
+ assertThat(EsUtils.formatDateTime(new Date(1_500_000_000_000L))).startsWith("2017-07-").hasSize(29);
+ assertThat(EsUtils.formatDateTime(null)).isNull();
+
+ assertThat(EsUtils.parseDateTime("2017-07-14T04:40:00.000+02:00").getTime()).isEqualTo(1_500_000_000_000L);
+ assertThat(EsUtils.parseDateTime(null)).isNull();
+ }
}
public void define(IndexDefinitionContext context) {
NewIndex index = context.create(INDEX);
index.getSettings().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, replicas);
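+ // refresh_interval "-1" disables Elasticsearch's automatic refresh, so tests control document visibility explicitly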
+ index.getSettings().put("index.refresh_interval", "-1");
NewIndex.NewIndexType type = index.createType(TYPE);
type.createIntegerField(INT_FIELD);
}
*/
package org.sonar.server.issue;
-import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.collect.Multiset;
import org.sonar.server.permission.PermissionChange;
import org.sonar.server.rule.RuleTesting;
import org.sonar.server.rule.db.RuleDao;
-import org.sonar.server.source.index.SourceLineDoc;
+import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.server.source.index.SourceFileResultSetIterator;
import org.sonar.server.source.index.SourceLineIndexer;
-import org.sonar.server.source.index.SourceLineResultSetIterator;
import org.sonar.server.tester.ServerTester;
import org.sonar.server.user.MockUserSession;
import org.sonar.server.user.NewUser;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.entry;
import static org.junit.Assert.fail;
-import static org.sonar.server.source.index.SourceLineIndexDefinition.*;
public class IssueServiceMediumTest {
}
private void newSourceLine(ComponentDto file, int line, String scmAuthor) {
- SourceLineDoc line1 = new SourceLineDoc(ImmutableMap.<String, Object>builder()
- .put(FIELD_PROJECT_UUID, file.projectUuid())
- .put(FIELD_FILE_UUID, file.uuid())
- .put(FIELD_LINE, line)
- .put(FIELD_UPDATED_AT, new Date())
- .put(FIELD_SCM_AUTHOR, scmAuthor)
- .build());
- SourceLineResultSetIterator.SourceFile sourceFile = new SourceLineResultSetIterator.SourceFile(file.uuid(), System.currentTimeMillis());
- sourceFile.addLine(line1);
- tester.get(SourceLineIndexer.class).index(Iterators.singletonIterator(sourceFile));
+ FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+ dataBuilder.addLinesBuilder()
+ .setLine(line)
+ .setScmAuthor(scmAuthor)
+ .build();
+ SourceFileResultSetIterator.Row row = SourceFileResultSetIterator.toRow(file.projectUuid(), file.uuid(), new Date(), dataBuilder.build());
+ tester.get(SourceLineIndexer.class).index(Iterators.singletonIterator(row));
}
private void newUser(String login) {
}
}
+ @Test
+ public void delete_by_query_is_not_supported() throws Exception {
+ try {
+ searchClient.prepareDeleteByQuery();
+ fail();
+ } catch (UnsupportedOperationException e) {
+ assertThat(e).hasMessage("Delete by query must not be used. See https://github.com/elastic/elasticsearch/issues/10067. See alternatives in BulkIndexer.");
+ }
+ }
+
@Test
public void prepare_percolate_is_not_yet_implemented() throws Exception {
try {
searchClient.preparePercolate();
fail();
- } catch (Exception e) {
- assertThat(e).isInstanceOf(IllegalStateException.class).hasMessage("Not yet implemented");
+ } catch (IllegalStateException e) {
+ assertThat(e).hasMessage("Not yet implemented");
}
}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.server.source.index;
+
+import org.assertj.core.data.MapEntry;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.sonar.core.persistence.DbTester;
+import org.sonar.server.db.DbClient;
+import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.server.source.db.FileSourceTesting;
+import org.sonar.test.DbTests;
+
+import java.sql.Connection;
+import java.util.Map;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.fail;
+
+@Category(DbTests.class)
+public class SourceFileResultSetIteratorTest {
+
+ @ClassRule
+ public static DbTester db = new DbTester().schema(SourceFileResultSetIteratorTest.class, "schema.sql");
+
+ DbClient dbClient;
+
+ Connection connection;
+
+ @Before
+ public void setUp() throws Exception {
+ dbClient = new DbClient(db.database(), db.myBatis());
+ connection = db.openConnection();
+ }
+
+ @After
+ public void after() throws Exception {
+ connection.close();
+ }
+
+ @Test
+ public void traverse_db() throws Exception {
+ db.prepareDbUnit(getClass(), "shared.xml");
+ FileSourceTesting.updateDataColumn(connection, "F1", FileSourceTesting.newFakeData(3).build());
+
+ SourceFileResultSetIterator it = SourceFileResultSetIterator.create(dbClient, connection, 0L);
+ assertThat(it.hasNext()).isTrue();
+ SourceFileResultSetIterator.Row row = it.next();
+ assertThat(row.getProjectUuid()).isEqualTo("P1");
+ assertThat(row.getFileUuid()).isEqualTo("F1");
+ assertThat(row.getUpdatedAt()).isEqualTo(1416239042000L);
+ assertThat(row.getLineUpdateRequests()).hasSize(3);
+
+ UpdateRequest firstRequest = row.getLineUpdateRequests().get(0);
+ Map<String, Object> doc = firstRequest.doc().sourceAsMap();
+ assertThat(doc).contains(
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_PROJECT_UUID, "P1"),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_FILE_UUID, "F1"),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_LINE, 1),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_SCM_REVISION, "REVISION_1"),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_SCM_AUTHOR, "AUTHOR_1"),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_HIGHLIGHTING, "HIGHLIGHTING_1"),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_SYMBOLS, "SYMBOLS_1"),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_UT_LINE_HITS, 1),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_UT_CONDITIONS, 2),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_UT_COVERED_CONDITIONS, 3),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_IT_LINE_HITS, 4),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_IT_CONDITIONS, 5),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_IT_COVERED_CONDITIONS, 6),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_OVERALL_LINE_HITS, 7),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_OVERALL_CONDITIONS, 8),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_OVERALL_COVERED_CONDITIONS, 9)
+ );
+ it.close();
+ }
+
+ /**
+ * File with one line. No metadata available on the line.
+ */
+ @Test
+ public void minimal_data() throws Exception {
+ db.prepareDbUnit(getClass(), "shared.xml");
+ FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+ dataBuilder.addLinesBuilder().setLine(1).build();
+ FileSourceTesting.updateDataColumn(connection, "F1", dataBuilder.build());
+
+ SourceFileResultSetIterator it = SourceFileResultSetIterator.create(dbClient, connection, 0L);
+ SourceFileResultSetIterator.Row row = it.next();
+ assertThat(row.getProjectUuid()).isEqualTo("P1");
+ assertThat(row.getFileUuid()).isEqualTo("F1");
+ assertThat(row.getUpdatedAt()).isEqualTo(1416239042000L);
+ assertThat(row.getLineUpdateRequests()).hasSize(1);
+ UpdateRequest firstRequest = row.getLineUpdateRequests().get(0);
+ Map<String, Object> doc = firstRequest.doc().sourceAsMap();
+ assertThat(doc).contains(
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_PROJECT_UUID, "P1"),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_FILE_UUID, "F1"),
+ MapEntry.entry(SourceLineIndexDefinition.FIELD_LINE, 1)
+ );
+ // null values
+ assertThat(doc).containsKeys(
+ SourceLineIndexDefinition.FIELD_SCM_REVISION,
+ SourceLineIndexDefinition.FIELD_SCM_AUTHOR,
+ SourceLineIndexDefinition.FIELD_HIGHLIGHTING,
+ SourceLineIndexDefinition.FIELD_SYMBOLS,
+ SourceLineIndexDefinition.FIELD_UT_LINE_HITS,
+ SourceLineIndexDefinition.FIELD_UT_CONDITIONS,
+ SourceLineIndexDefinition.FIELD_UT_COVERED_CONDITIONS,
+ SourceLineIndexDefinition.FIELD_IT_LINE_HITS,
+ SourceLineIndexDefinition.FIELD_IT_CONDITIONS,
+ SourceLineIndexDefinition.FIELD_IT_COVERED_CONDITIONS,
+ SourceLineIndexDefinition.FIELD_OVERALL_LINE_HITS,
+ SourceLineIndexDefinition.FIELD_OVERALL_CONDITIONS,
+ SourceLineIndexDefinition.FIELD_OVERALL_COVERED_CONDITIONS
+ );
+ it.close();
+ }
+
+ @Test
+ public void filter_by_date() throws Exception {
+ db.prepareDbUnit(getClass(), "shared.xml");
+
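+ // the fixture row was updated at 1416239042000 (November 2014), older than the requested timestamp (~year 2033), so nothing is returned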
+ SourceFileResultSetIterator iterator = SourceFileResultSetIterator.create(dbClient, connection, 2000000000000L);
+ assertThat(iterator.hasNext()).isFalse();
+ iterator.close();
+ }
+
+ @Test
+ public void fail_on_bad_data_format() throws Exception {
+ db.prepareDbUnit(getClass(), "shared.xml");
+
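+ // BINARY_DATA is expected to hold a serialized FileSourceDb.Data protobuf message, so garbage must raise an error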
+ FileSourceTesting.updateDataColumn(connection, "F1", "THIS_IS_NOT_PROTOBUF".getBytes());
+
+ SourceFileResultSetIterator iterator = SourceFileResultSetIterator.create(dbClient, connection, 0L);
+ try {
+ assertThat(iterator.hasNext()).isTrue();
+ iterator.next();
+ fail();
+ } catch (IllegalStateException e) {
+ // ok
+ }
+ iterator.close();
+ }
+}
package org.sonar.server.source.index;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterators;
-import org.apache.commons.io.IOUtils;
-import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.apache.commons.io.FileUtils;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.query.QueryBuilders;
import org.sonar.core.persistence.DbTester;
import org.sonar.server.db.DbClient;
import org.sonar.server.es.EsTester;
+import org.sonar.server.source.db.FileSourceDb;
import org.sonar.server.source.db.FileSourceTesting;
import org.sonar.test.DbTests;
import org.sonar.test.TestUtils;
-import java.io.FileInputStream;
+import java.io.IOException;
import java.sql.Connection;
import java.util.Date;
import java.util.List;
assertThat(countDocuments()).isEqualTo(3);
}
+ /**
+ * File F1 in project P1 has one line -> to be updated
+ * File F2 in project P1 has one line -> untouched
+ */
@Test
public void update_already_indexed_lines() throws Exception {
- prepareIndex()
- .setSource(IOUtils.toString(new FileInputStream(TestUtils.getResource(this.getClass(), "line2.json"))))
- .get();
- prepareIndex()
- .setSource(IOUtils.toString(new FileInputStream(TestUtils.getResource(this.getClass(), "line2_other_file.json"))))
- .setRefresh(true)
- .get();
+ indexLine("P1", "F1", 1);
+ indexLine("P1", "F2", 1);
List<Integer> duplications = ImmutableList.of(1, 2, 3);
- SourceLineDoc line1 = new SourceLineDoc(ImmutableMap.<String, Object>builder()
- .put(FIELD_PROJECT_UUID, "abcd")
- .put(FIELD_FILE_UUID, "efgh")
- .put(FIELD_LINE, 1)
- .put(FIELD_SCM_REVISION, "cafebabe")
- .put(FIELD_SCM_DATE, DateUtils.parseDateTime("2014-01-01T12:34:56+0100"))
- .put(FIELD_SCM_AUTHOR, "polop")
- .put(FIELD_SOURCE, "package org.sonar.server.source;")
- .put(FIELD_DUPLICATIONS, duplications)
- .put(FIELD_UPDATED_AT, new Date())
- .build());
- SourceLineResultSetIterator.SourceFile file = new SourceLineResultSetIterator.SourceFile("efgh", System.currentTimeMillis());
- file.addLine(line1);
- indexer.index(Iterators.singletonIterator(file));
+ FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+ dataBuilder.addLinesBuilder()
+ .setLine(1)
+ .setScmRevision("new_revision")
+ .setScmAuthor("new_author")
+ .setSource("new source")
+ .addAllDuplications(duplications)
+ .build();
+ SourceFileResultSetIterator.Row dbRow = SourceFileResultSetIterator.toRow("P1", "F1", new Date(), dataBuilder.build());
+ indexer.index(Iterators.singletonIterator(dbRow));
assertThat(countDocuments()).isEqualTo(2L);
SearchResponse fileSearch = prepareSearch()
- .setQuery(QueryBuilders.termQuery(FIELD_FILE_UUID, "efgh"))
+ .setQuery(QueryBuilders.termQuery(FIELD_FILE_UUID, "F1"))
.get();
assertThat(fileSearch.getHits().getTotalHits()).isEqualTo(1L);
Map<String, Object> fields = fileSearch.getHits().getHits()[0].sourceAsMap();
- assertThat(fields).hasSize(9);
assertThat(fields).contains(
- entry(FIELD_PROJECT_UUID, "abcd"),
- entry(FIELD_FILE_UUID, "efgh"),
+ entry(FIELD_PROJECT_UUID, "P1"),
+ entry(FIELD_FILE_UUID, "F1"),
entry(FIELD_LINE, 1),
- entry(FIELD_SCM_REVISION, "cafebabe"),
- entry(FIELD_SCM_DATE, "2014-01-01T11:34:56.000Z"),
- entry(FIELD_SCM_AUTHOR, "polop"),
- entry(FIELD_SOURCE, "package org.sonar.server.source;"),
+ entry(FIELD_SCM_REVISION, "new_revision"),
+ entry(FIELD_SCM_AUTHOR, "new_author"),
+ entry(FIELD_SOURCE, "new source"),
entry(FIELD_DUPLICATIONS, duplications)
);
}
@Test
public void delete_file_uuid() throws Exception {
- addSource("line2.json");
- addSource("line3.json");
- addSource("line2_other_file.json");
+ indexLine("P1", "F1", 1);
+ indexLine("P1", "F1", 2);
+ indexLine("P1", "F2", 1);
- indexer.deleteByFile("efgh");
+ indexer.deleteByFile("F1");
List<SearchHit> hits = getDocuments();
Map<String, Object> document = hits.get(0).getSource();
assertThat(hits).hasSize(1);
- assertThat(document.get(FIELD_LINE)).isEqualTo(2);
- assertThat(document.get(FIELD_FILE_UUID)).isEqualTo("fdsq");
+ assertThat(document.get(FIELD_LINE)).isEqualTo(1);
+ assertThat(document.get(FIELD_FILE_UUID)).isEqualTo("F2");
}
@Test
public void delete_by_project_uuid() throws Exception {
- addSource("line2.json");
- addSource("line3.json");
- addSource("line2_other_file.json");
- addSource("line3_other_project.json");
+ indexLine("P1", "F1", 1);
+ indexLine("P1", "F1", 2);
+ indexLine("P1", "F2", 1);
+ indexLine("P2", "F3", 1);
- indexer.deleteByProject("abcd");
+ indexer.deleteByProject("P1");
List<SearchHit> hits = getDocuments();
+ assertThat(hits).hasSize(1);
Map<String, Object> document = hits.get(0).getSource();
assertThat(hits).hasSize(1);
- assertThat(document.get(FIELD_PROJECT_UUID)).isEqualTo("plmn");
+ assertThat(document.get(FIELD_PROJECT_UUID)).isEqualTo("P2");
}
@Test
public void index_source_lines_with_big_test_data() throws Exception {
Integer bigValue = Short.MAX_VALUE * 2;
- SourceLineDoc line1 = new SourceLineDoc(ImmutableMap.<String, Object>builder()
- .put(FIELD_PROJECT_UUID, "abcd")
- .put(FIELD_FILE_UUID, "efgh")
- .put(FIELD_LINE, 1)
- .put(FIELD_SOURCE, "package org.sonar.server.source;")
- .put(FIELD_UT_LINE_HITS, bigValue)
- .put(FIELD_UT_CONDITIONS, bigValue)
- .put(FIELD_UT_COVERED_CONDITIONS, bigValue)
- .put(FIELD_IT_LINE_HITS, bigValue)
- .put(FIELD_IT_CONDITIONS, bigValue)
- .put(FIELD_IT_COVERED_CONDITIONS, bigValue)
- .put(FIELD_OVERALL_LINE_HITS, bigValue)
- .put(FIELD_OVERALL_CONDITIONS, bigValue)
- .put(FIELD_OVERALL_COVERED_CONDITIONS, bigValue)
- .put(FIELD_UPDATED_AT, new Date())
- .build());
- SourceLineResultSetIterator.SourceFile file = new SourceLineResultSetIterator.SourceFile("efgh", System.currentTimeMillis());
- file.addLine(line1);
- indexer.index(Iterators.singletonIterator(file));
+
+ FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+ dataBuilder.addLinesBuilder()
+ .setLine(1)
+ .setScmRevision("cafebabe")
+ .setScmAuthor("polop")
+ .setScmDate(DateUtils.parseDateTime("2014-01-01T12:34:56+0100").getTime())
+ .setSource("package org.sonar.server.source;")
+ .setUtLineHits(bigValue)
+ .setUtConditions(bigValue)
+ .setUtCoveredConditions(bigValue)
+ .setItLineHits(bigValue)
+ .setItConditions(bigValue)
+ .setItCoveredConditions(bigValue)
+ .setOverallLineHits(bigValue)
+ .setOverallConditions(bigValue)
+ .setOverallCoveredConditions(bigValue)
+ .build();
+
+ SourceFileResultSetIterator.Row row = SourceFileResultSetIterator.toRow("P1", "F1", new Date(), dataBuilder.build());
+ indexer.index(Iterators.singletonIterator(row));
List<SearchHit> hits = getDocuments();
assertThat(hits).hasSize(1);
assertThat(document.get(FIELD_OVERALL_COVERED_CONDITIONS)).isEqualTo(bigValue);
}
- private void addSource(String fileName) throws Exception {
- prepareIndex()
- .setSource(IOUtils.toString(new FileInputStream(TestUtils.getResource(this.getClass(), fileName))))
+ private void indexLine(String projectUuid, String fileUuid, int line) throws IOException {
+ es.client().prepareIndex(INDEX, TYPE)
+ .setId(SourceLineIndexDefinition.docKey(fileUuid, line))
+ .setSource(FileUtils.readFileToString(TestUtils.getResource(this.getClass(), projectUuid + "_" + fileUuid + "_line" + line + ".json")))
+ .setRefresh(true)
.get();
}
.setTypes(TYPE);
}
- private IndexRequestBuilder prepareIndex() {
- return es.client().prepareIndex(INDEX, TYPE);
- }
-
private List<SearchHit> getDocuments() {
return es.getDocuments(INDEX, TYPE);
}
+++ /dev/null
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- */
-package org.sonar.server.source.index;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.sonar.core.persistence.DbTester;
-import org.sonar.server.db.DbClient;
-import org.sonar.server.source.db.FileSourceTesting;
-import org.sonar.test.DbTests;
-
-import java.sql.Connection;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.fail;
-
-@Category(DbTests.class)
-public class SourceLineResultSetIteratorTest {
-
- @ClassRule
- public static DbTester db = new DbTester().schema(SourceLineResultSetIteratorTest.class, "schema.sql");
-
- DbClient dbClient;
-
- Connection connection;
-
- @Before
- public void setUp() throws Exception {
- dbClient = new DbClient(db.database(), db.myBatis());
- connection = db.openConnection();
- }
-
- @After
- public void after() throws Exception {
- connection.close();
- }
-
- @Test
- public void parse_db_and_generate_source_line_documents() throws Exception {
- db.prepareDbUnit(getClass(), "shared.xml");
- FileSourceTesting.updateDataColumn(connection, "FILE_UUID", FileSourceTesting.newFakeData(3).build());
-
- SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L);
- assertThat(iterator.hasNext()).isTrue();
- SourceLineResultSetIterator.SourceFile file = iterator.next();
- assertThat(file.getLines()).hasSize(3);
- SourceLineDoc firstLine = file.getLines().get(0);
- assertThat(firstLine.projectUuid()).isEqualTo("PROJECT_UUID");
- assertThat(firstLine.fileUuid()).isEqualTo("FILE_UUID");
- assertThat(firstLine.line()).isEqualTo(1);
- assertThat(firstLine.scmRevision()).isEqualTo("REVISION_1");
- assertThat(firstLine.scmAuthor()).isEqualTo("AUTHOR_1");
- assertThat(firstLine.highlighting()).isEqualTo("HIGHLIGHTING_1");
- assertThat(firstLine.symbols()).isEqualTo("SYMBOLS_1");
- assertThat(firstLine.source()).isEqualTo("SOURCE_1");
- assertThat(firstLine.utLineHits()).isEqualTo(1);
- assertThat(firstLine.utConditions()).isEqualTo(2);
- assertThat(firstLine.utCoveredConditions()).isEqualTo(3);
- assertThat(firstLine.itLineHits()).isEqualTo(4);
- assertThat(firstLine.itConditions()).isEqualTo(5);
- assertThat(firstLine.itCoveredConditions()).isEqualTo(6);
- assertThat(firstLine.overallLineHits()).isEqualTo(7);
- assertThat(firstLine.overallConditions()).isEqualTo(8);
- assertThat(firstLine.overallCoveredConditions()).isEqualTo(9);
- iterator.close();
- }
-
- @Test
- public void should_ignore_lines_already_handled() throws Exception {
- db.prepareDbUnit(getClass(), "shared.xml");
-
- SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 2000000000000L);
- assertThat(iterator.hasNext()).isFalse();
- iterator.close();
- }
-
- @Test
- public void should_fail_on_bad_data_format() throws Exception {
- db.prepareDbUnit(getClass(), "shared.xml");
-
- FileSourceTesting.updateDataColumn(connection, "FILE_UUID", "THIS_IS_NOT_PROTOBUF".getBytes());
-
- SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L);
- try {
- assertThat(iterator.hasNext()).isTrue();
- iterator.next();
- fail();
- } catch (IllegalStateException e) {
- // ok
- }
- iterator.close();
- }
-}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.server.util;
+
+import org.junit.Test;
+import org.sonar.api.utils.DateUtils;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class DateCollectorTest {
+
+ DateCollector collector = new DateCollector();
+
+ @Test
+ public void max_is_zero_if_no_dates() throws Exception {
+ assertThat(collector.getMax()).isEqualTo(0L);
+ }
+
+ @Test
+ public void max() throws Exception {
+ collector.add(DateUtils.parseDate("2013-06-01"));
+ collector.add(DateUtils.parseDate("2014-01-01"));
+ collector.add(DateUtils.parseDate("2013-08-01"));
+
+ assertThat(collector.getMax()).isEqualTo(DateUtils.parseDateQuietly("2014-01-01").getTime());
+ }
+}
progress.start();
Thread.sleep(80L);
progress.stop();
- verify(logger, atLeast(1)).info("42 rows processed");
+ verify(logger, atLeast(1)).info(startsWith("42 rows processed"));
// ability to log manually, generally for the final status
counter.incrementAndGet();
progress.log();
- verify(logger).info("43 rows processed");
+ verify(logger).info(startsWith("43 rows processed"));
}
@Test
+++ /dev/null
-<dataset>
- <!-- new migration -->
- <project_measures
- id="1"
- measure_date="2014-09-25"
- measure_date_ms="[null]"
- />
-
- <!-- re-entrant migration - ignore the ones that are already fed with new dates -->
- <project_measures
- id="2"
- measure_date="2014-09-25"
- measure_date_ms="1500000000"
- />
-
- <!-- NULL dates -->
- <project_measures
- id="3"
- measure_date="[null]"
- measure_date_ms="[null]"
- />
-</dataset>
+++ /dev/null
-CREATE TABLE "PROJECT_MEASURES" (
- "ID" BIGINT NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
- "MEASURE_DATE" TIMESTAMP,
- "MEASURE_DATE_MS" BIGINT
-);
--- /dev/null
+
+CREATE TABLE "FILE_SOURCES" (
+ "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
+ "PROJECT_UUID" VARCHAR(50) NOT NULL,
+ "FILE_UUID" VARCHAR(50) NOT NULL,
+ "BINARY_DATA" BINARY(167772150),
+ "DATA_HASH" VARCHAR(50) NOT NULL,
+ "CREATED_AT" BIGINT NOT NULL,
+ "UPDATED_AT" BIGINT NOT NULL
+);
--- /dev/null
+<dataset>
+
+ <file_sources id="1" project_uuid="P1" file_uuid="F1" created_at="1416238020000" updated_at="1416239042000"
+ binary_data="" data_hash="" />
+
+</dataset>
--- /dev/null
+{
+ "projectUuid": "P1",
+ "fileUuid": "F1",
+ "line": 1,
+ "scmAuthor": "polop",
+ "scmDate": "2014-01-01T12:34:56.7+01:00",
+ "scmRevision": "cafebabe",
+ "source": "// Empty",
+ "updatedAt": "2014-01-01T23:45:01.8+01:00",
+ "utLineHits": 0,
+ "utConditions": 0,
+ "utCoveredConditions": 0,
+ "itLineHits": 0,
+ "itConditions": 0,
+ "itCoveredConditions": 0,
+ "overallLineHits": 0,
+ "overallConditions": 0,
+ "overallCoveredConditions": 0
+}
--- /dev/null
+{
+ "projectUuid": "P1",
+ "fileUuid": "F1",
+ "line": 2,
+ "scmAuthor": "polop",
+ "scmDate": "2014-01-01T12:34:56.7+01:00",
+ "scmRevision": "cafebabe",
+ "source": "// Empty",
+ "updatedAt": "2014-01-01T23:45:01.8+01:00",
+ "utLineHits": 0,
+ "utConditions": 0,
+ "utCoveredConditions": 0,
+ "itLineHits": 0,
+ "itConditions": 0,
+ "itCoveredConditions": 0,
+ "overallLineHits": 0,
+ "overallConditions": 0,
+ "overallCoveredConditions": 0
+}
--- /dev/null
+{
+ "projectUuid": "P1",
+ "fileUuid": "F2",
+ "line": 1,
+ "scmAuthor": "polop",
+ "scmDate": "2014-01-01T12:34:56.7+01:00",
+ "scmRevision": "cafebabe",
+ "source": "// Empty",
+ "updatedAt": "2014-01-01T23:45:01.8+01:00",
+ "utLineHits": 0,
+ "utConditions": 0,
+ "utCoveredConditions": 0,
+ "itLineHits": 0,
+ "itConditions": 0,
+ "itCoveredConditions": 0,
+ "overallLineHits": 0,
+ "overallConditions": 0,
+ "overallCoveredConditions": 0
+}
--- /dev/null
+{
+ "projectUuid": "P2",
+ "fileUuid": "F3",
+ "line": 1,
+ "scmAuthor": "polop",
+ "scmDate": "2014-01-01T12:34:56.7+01:00",
+ "scmRevision": "cafebabe",
+ "source": "// Empty",
+ "updatedAt": "2014-01-01T23:45:01.8+01:00",
+ "utLineHits": 0,
+ "utConditions": 0,
+ "utCoveredConditions": 0,
+ "itLineHits": 0,
+ "itConditions": 0,
+ "itCoveredConditions": 0,
+ "overallLineHits": 0,
+ "overallConditions": 0,
+ "overallCoveredConditions": 0
+}
+++ /dev/null
-{
- "projectUuid": "abcd",
- "fileUuid": "efgh",
- "line": 2,
- "scmAuthor": "polop",
- "scmDate": "2014-01-01T12:34:56.7+01:00",
- "scmRevision": "cafebabe",
- "source": "// Empty",
- "updatedAt": "2014-01-01T23:45:01.8+01:00",
- "utLineHits": 0,
- "utConditions": 0,
- "utCoveredConditions": 0,
- "itLineHits": 0,
- "itConditions": 0,
- "itCoveredConditions": 0,
- "overallLineHits": 0,
- "overallConditions": 0,
- "overallCoveredConditions": 0
-}
\ No newline at end of file
+++ /dev/null
-{
- "projectUuid": "abcd",
- "fileUuid": "fdsq",
- "line": 2,
- "scmAuthor": "polop",
- "scmDate": "2014-01-01T12:34:56.7+01:00",
- "scmRevision": "cafebabe",
- "source": "// Empty",
- "updatedAt": "2014-01-01T23:45:01.8+01:00",
- "utLineHits": 0,
- "utConditions": 0,
- "utCoveredConditions": 0,
- "itLineHits": 0,
- "itConditions": 0,
- "itCoveredConditions": 0,
- "overallLineHits": 0,
- "overallConditions": 0,
- "overallCoveredConditions": 0
-}
\ No newline at end of file
+++ /dev/null
-{
- "projectUuid": "abcd",
- "fileUuid": "efgh",
- "line": 3,
- "scmAuthor": "polop",
- "scmDate": "2014-01-01T12:34:56.7+01:00",
- "scmRevision": "cafebabe",
- "source": "// Empty",
- "updatedAt": "2014-01-01T23:45:01.8+01:00",
- "utLineHits": 0,
- "utConditions": 0,
- "utCoveredConditions": 0,
- "itLineHits": 0,
- "itConditions": 0,
- "itCoveredConditions": 0,
- "overallLineHits": 0,
- "overallConditions": 0,
- "overallCoveredConditions": 0
-}
\ No newline at end of file
+++ /dev/null
-{
- "projectUuid": "plmn",
- "fileUuid": "efgh",
- "line": 3,
- "scmAuthor": "polop",
- "scmDate": "2014-01-01T12:34:56.7+01:00",
- "scmRevision": "cafebabe",
- "source": "// Empty",
- "updatedAt": "2014-01-01T23:45:01.8+01:00",
- "utLineHits": 0,
- "utConditions": 0,
- "utCoveredConditions": 0,
- "itLineHits": 0,
- "itConditions": 0,
- "itCoveredConditions": 0,
- "overallLineHits": 0,
- "overallConditions": 0,
- "overallCoveredConditions": 0
-}
\ No newline at end of file
+++ /dev/null
-
-CREATE TABLE "FILE_SOURCES" (
- "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
- "PROJECT_UUID" VARCHAR(50) NOT NULL,
- "FILE_UUID" VARCHAR(50) NOT NULL,
- "BINARY_DATA" BINARY(167772150),
- "DATA_HASH" VARCHAR(50) NOT NULL,
- "CREATED_AT" BIGINT NOT NULL,
- "UPDATED_AT" BIGINT NOT NULL
-);
+++ /dev/null
-<dataset>
-
- <file_sources id="1" project_uuid="PROJECT_UUID" file_uuid="FILE_UUID" created_at="1416238020000" updated_at="1416239042000"
- binary_data="" data_hash="" />
-
-</dataset>
def created_at
long_to_date(:created_at)
end
+
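+ # the *_at columns now store milliseconds since epoch, so convert from Time/DateTime on write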
+ def created_at=(date)
+ write_attribute(:created_at, date.to_i*1000)
+ end
def updated_at
long_to_date(:updated_at)
end
+
+ def updated_at=(date)
+ write_attribute(:updated_at, date.to_i*1000)
+ end
def long_to_date(attribute)
date_in_long = read_attribute(attribute)
end
end
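+ # keep the millisecond timestamps current: updated_at on every save, created_at on insert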
+ def before_save
+ self.updated_at=DateTime.now
+ end
+
+ def before_create
+ self.created_at=DateTime.now
+ end
end
--- /dev/null
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
+#
+# SonarQube 5.1
+#
+class AddManualMeasuresLongDates < ActiveRecord::Migration
+ def self.up
+ add_column 'manual_measures', :created_at_ms, :big_integer, :null => true
+ add_column 'manual_measures', :updated_at_ms, :big_integer, :null => true
+ end
+end
+++ /dev/null
-#
-# SonarQube, open source software quality management tool.
-# Copyright (C) 2008-2014 SonarSource
-# mailto:contact AT sonarsource DOT com
-#
-# SonarQube is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3 of the License, or (at your option) any later version.
-#
-# SonarQube is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-
-#
-# SonarQube 5.1
-#
-class AddMeasuresLongDates < ActiveRecord::Migration
- def self.up
- add_column 'project_measures', :measure_date_ms, :big_integer, :null => true
- add_column 'manual_measures', :created_at_ms, :big_integer, :null => true
- add_column 'manual_measures', :updated_at_ms, :big_integer, :null => true
- end
-end
--- /dev/null
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
+#
+# SonarQube 5.1
+#
+class FeedManualMeasuresLongDates < ActiveRecord::Migration
+ def self.up
+ execute_java_migration('org.sonar.server.db.migrations.v51.FeedManualMeasuresLongDates')
+ end
+end
+
+++ /dev/null
-#
-# SonarQube, open source software quality management tool.
-# Copyright (C) 2008-2014 SonarSource
-# mailto:contact AT sonarsource DOT com
-#
-# SonarQube is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3 of the License, or (at your option) any later version.
-#
-# SonarQube is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-
-#
-# SonarQube 5.1
-#
-class FeedMeasuresLongDates < ActiveRecord::Migration
- def self.up
- execute_java_migration('org.sonar.server.db.migrations.v51.FeedProjectMeasuresLongDates')
- execute_java_migration('org.sonar.server.db.migrations.v51.FeedManualMeasuresLongDates')
- end
-end
-
--- /dev/null
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
+#
+# SonarQube 5.1
+#
+class RenameManualMeasuresLongDates < ActiveRecord::Migration
+ def self.up
+ remove_column 'manual_measures', 'created_at'
+ remove_column 'manual_measures', 'updated_at'
+ rename_column 'manual_measures', 'created_at_ms', 'created_at'
+ rename_column 'manual_measures', 'updated_at_ms', 'updated_at'
+ end
+end
+
+++ /dev/null
-#
-# SonarQube, open source software quality management tool.
-# Copyright (C) 2008-2014 SonarSource
-# mailto:contact AT sonarsource DOT com
-#
-# SonarQube is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3 of the License, or (at your option) any later version.
-#
-# SonarQube is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-
-#
-# SonarQube 5.1
-#
-class RenameMeasuresLongDates < ActiveRecord::Migration
- def self.up
- remove_column 'project_measures', 'measure_date'
- remove_column 'manual_measures', 'created_at'
- remove_column 'manual_measures', 'updated_at'
- rename_column 'manual_measures', 'created_at_ms', 'created_at'
- rename_column 'manual_measures', 'updated_at_ms', 'updated_at'
- rename_column 'project_measures', 'measure_date_ms', 'measure_date'
- end
-end
-
"RULES_CATEGORY_ID" INTEGER,
"TEXT_VALUE" VARCHAR(4000),
"TENDENCY" INTEGER,
- "MEASURE_DATE" BIGINT,
+ "MEASURE_DATE" TIMESTAMP,
"PROJECT_ID" INTEGER,
"ALERT_STATUS" VARCHAR(5),
"ALERT_TEXT" VARCHAR(4000),
import org.sonar.api.measures.Metric;
import org.sonar.api.rules.RulePriority;
-import javax.persistence.*;
-
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.EnumType;
+import javax.persistence.Enumerated;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.Table;
import java.io.UnsupportedEncodingException;
-import java.util.Date;
-
-import static org.sonar.api.utils.DateUtils.dateToLong;
-import static org.sonar.api.utils.DateUtils.longToDate;
/**
* This class is the Hibernate model to store a measure in the DB
@Column(name = "description", updatable = true, nullable = true, length = 4000)
private String description;
- @Column(name = "measure_date", updatable = true, nullable = true)
- private Long measureDate;
-
@Column(name = "rule_id", updatable = true, nullable = true)
private Integer ruleId;
this.projectId = projectId;
}
- /**
- * @return the date of the measure
- */
- public Date getMeasureDate() {
- return longToDate(measureDate);
- }
-
- /**
- * Sets the date for the measure
- *
- * @return the current object
- */
- public MeasureModel setMeasureDate(Date measureDate) {
- this.measureDate = dateToLong(measureDate);
- return this;
- }
-
- /**
- * @return the date of the measure
- */
- public Long getMeasureDateMs() {
- return measureDate;
- }
-
- /**
- * Sets the date for the measure
- *
- * @return the current object
- */
- public MeasureModel setMeasureDateMs(Long measureDate) {
- this.measureDate = measureDate;
- return this;
- }
-
/**
* @return the alert status if there is one, null otherwise
*/
clone.setRulePriority(getRulePriority());
clone.setRuleId(getRuleId());
clone.setSnapshotId(getSnapshotId());
- clone.setMeasureDate(getMeasureDate());
clone.setUrl(getUrl());
clone.setCharacteristicId(getCharacteristicId());
clone.setPersonId(getPersonId());
return new JsonWriter(writer);
}
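+ /**
+  * Enable or disable the serialization of null values. They are ignored by default.
+  */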
+ public JsonWriter setSerializeNulls(boolean b) {
+ this.stream.setSerializeNulls(b);
+ return this;
+ }
+
/**
* Begins encoding a new array. Each call to this method must be paired with
* a call to {@link #endArray}. Output is <code>[</code>.
<insert id="insert" parameterType="MeasureModel" useGeneratedKeys="false">
INSERT INTO project_measures (
- value, metric_id, snapshot_id, rule_id, text_value, tendency, measure_date,
+ value, metric_id, snapshot_id, rule_id, text_value, tendency,
project_id, alert_status, alert_text, url, description, rule_priority, characteristic_id, variation_value_1,
variation_value_2, variation_value_3, variation_value_4, variation_value_5, person_id, measure_data)
VALUES (
#{value, jdbcType=DOUBLE}, #{metricId, jdbcType=INTEGER}, #{snapshotId, jdbcType=INTEGER}, #{ruleId, jdbcType=INTEGER}, #{textValue, jdbcType=VARCHAR}, #{tendency, jdbcType=INTEGER},
- #{measureDateMs, jdbcType=BIGINT}, #{projectId, jdbcType=INTEGER}, #{alertStatus, jdbcType=VARCHAR}, #{alertText, jdbcType=VARCHAR},
+ #{projectId, jdbcType=INTEGER}, #{alertStatus, jdbcType=VARCHAR}, #{alertText, jdbcType=VARCHAR},
#{url, jdbcType=VARCHAR}, #{description, jdbcType=VARCHAR}, #{rulePriority.ordinal, jdbcType=INTEGER}, #{characteristicId, jdbcType=INTEGER}, #{variationValue1, jdbcType=DOUBLE},
#{variationValue2, jdbcType=DOUBLE}, #{variationValue3, jdbcType=DOUBLE}, #{variationValue4, jdbcType=DOUBLE}, #{variationValue5, jdbcType=DOUBLE}, #{personId, jdbcType=INTEGER}, #{data}
)
}
@Test
- public void ignore_null_values() throws Exception {
+ public void ignore_null_values_by_default() throws Exception {
writer.beginObject()
.prop("nullNumber", (Number) null)
.prop("nullString", (String) null)
expect("{}");
}
+ @Test
+ public void serialize_null_values() throws Exception {
+ writer.setSerializeNulls(true);
+ writer.beginObject()
+ .prop("nullNumber", (Number) null)
+ .prop("nullString", (String) null)
+ .name("nullNumber").value((Number) null)
+ .name("nullString").value((String) null)
+ .name("nullDate").valueDate(null)
+ .name("nullDateTime").valueDate(null)
+ .endObject().close();
+ expect("{\"nullNumber\":null,\"nullString\":null,\"nullNumber\":null,\"nullString\":null,\"nullDate\":null,\"nullDateTime\":null}");
+ }
+
@Test
public void escape_values() throws Exception {
writer.beginObject()