source.dussan.org Git - sonarqube.git/commitdiff
Improve db migration from 5.0 and indexing step at server startup
author Simon Brandhof <simon.brandhof@sonarsource.com>
Fri, 13 Mar 2015 16:43:27 +0000 (17:43 +0100)
committer Simon Brandhof <simon.brandhof@sonarsource.com>
Tue, 17 Mar 2015 14:21:43 +0000 (15:21 +0100)
* Parallel bulk indexing of documents into Elasticsearch (a minimal sketch of the concurrency pattern follows below)
* Reduce memory consumption when building JSON requests
* No need to change the type of column PROJECT_MEASURES.MEASURE_DATE, as it is not used
* Fix created_at/updated_at fields of manual_measure when saved from Rails
* Remove usage of deprecated delete-by-query
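
A minimal, self-contained sketch of the concurrency pattern that the new BulkIndexer executeBulk()/stop() pair relies on: bulk requests are executed asynchronously while a Semaphore caps the number of in-flight requests, and shutdown waits for every permit to come back before refreshing the index. The class name and the use of a plain ExecutorService in place of the asynchronous Elasticsearch client are illustrative only.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

// Illustrative stand-in: an ExecutorService plays the role of the asynchronous
// Elasticsearch client, and a Semaphore caps the number of in-flight bulk requests.
public class ParallelBulkSketch {
  private final int concurrentRequests = Math.max(1, Runtime.getRuntime().availableProcessors() - 1);
  private final Semaphore semaphore = new Semaphore(concurrentRequests);
  private final ExecutorService executor = Executors.newFixedThreadPool(concurrentRequests);

  void submit(Runnable bulkRequest) {
    // block the producer when too many requests are already being executed
    semaphore.acquireUninterruptibly();
    executor.execute(() -> {
      try {
        bulkRequest.run();
      } finally {
        // free a slot for the next request, whether the bulk succeeded or failed
        semaphore.release();
      }
    });
  }

  void awaitCompletion() throws InterruptedException {
    // like BulkIndexer#stop(): wait until all permits are back, i.e. no request is in flight
    if (semaphore.tryAcquire(concurrentRequests, 10, TimeUnit.MINUTES)) {
      semaphore.release(concurrentRequests);
    }
    executor.shutdown();
  }
}
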

58 files changed:
server/sonar-search/src/main/java/org/sonar/search/SearchServer.java
server/sonar-search/src/main/java/org/sonar/search/SearchSettings.java
server/sonar-server-benchmarks/src/test/java/org/sonar/server/benchmark/SourceDbBenchmarkTest.java
server/sonar-server-benchmarks/src/test/java/org/sonar/server/benchmark/SourceIndexBenchmarkTest.java
server/sonar-server/src/main/java/org/sonar/server/activity/index/ActivityIndexer.java
server/sonar-server/src/main/java/org/sonar/server/activity/index/ActivityResultSetIterator.java
server/sonar-server/src/main/java/org/sonar/server/db/migrations/DatabaseMigrations.java
server/sonar-server/src/main/java/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDates.java [deleted file]
server/sonar-server/src/main/java/org/sonar/server/es/BulkIndexer.java
server/sonar-server/src/main/java/org/sonar/server/es/EsUtils.java
server/sonar-server/src/main/java/org/sonar/server/es/request/ProxyBulkRequestBuilder.java
server/sonar-server/src/main/java/org/sonar/server/issue/index/IssueIndexer.java
server/sonar-server/src/main/java/org/sonar/server/search/SearchClient.java
server/sonar-server/src/main/java/org/sonar/server/source/index/SourceFileResultSetIterator.java [new file with mode: 0644]
server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineDoc.java
server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineIndexDefinition.java
server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineIndexer.java
server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineResultSetIterator.java [deleted file]
server/sonar-server/src/main/java/org/sonar/server/util/DateCollector.java [new file with mode: 0644]
server/sonar-server/src/main/java/org/sonar/server/util/ProgressLogger.java
server/sonar-server/src/test/java/org/sonar/server/activity/index/ActivityResultSetIteratorTest.java
server/sonar-server/src/test/java/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDatesTest.java [deleted file]
server/sonar-server/src/test/java/org/sonar/server/es/BulkIndexerTest.java
server/sonar-server/src/test/java/org/sonar/server/es/EsUtilsTest.java
server/sonar-server/src/test/java/org/sonar/server/es/FakeIndexDefinition.java
server/sonar-server/src/test/java/org/sonar/server/issue/IssueServiceMediumTest.java
server/sonar-server/src/test/java/org/sonar/server/search/SearchClientMediumTest.java
server/sonar-server/src/test/java/org/sonar/server/source/index/SourceFileResultSetIteratorTest.java [new file with mode: 0644]
server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineIndexerTest.java
server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineResultSetIteratorTest.java [deleted file]
server/sonar-server/src/test/java/org/sonar/server/util/DateCollectorTest.java [new file with mode: 0644]
server/sonar-server/src/test/java/org/sonar/server/util/ProgressLoggerTest.java
server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDatesTest/before.xml [deleted file]
server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDatesTest/schema.sql [deleted file]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceFileResultSetIteratorTest/schema.sql [new file with mode: 0644]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceFileResultSetIteratorTest/shared.xml [new file with mode: 0644]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P1_F1_line1.json [new file with mode: 0644]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P1_F1_line2.json [new file with mode: 0644]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P1_F2_line1.json [new file with mode: 0644]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P2_F3_line1.json [new file with mode: 0644]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line2.json [deleted file]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line2_other_file.json [deleted file]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line3.json [deleted file]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line3_other_project.json [deleted file]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/schema.sql [deleted file]
server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/shared.xml [deleted file]
server/sonar-web/src/main/webapp/WEB-INF/app/models/manual_measure.rb
server/sonar-web/src/main/webapp/WEB-INF/db/migrate/787_add_manual_measures_long_dates.rb [new file with mode: 0644]
server/sonar-web/src/main/webapp/WEB-INF/db/migrate/787_add_measures_long_dates.rb [deleted file]
server/sonar-web/src/main/webapp/WEB-INF/db/migrate/788_feed_manual_measures_long_dates.rb [new file with mode: 0644]
server/sonar-web/src/main/webapp/WEB-INF/db/migrate/788_feed_measures_long_dates.rb [deleted file]
server/sonar-web/src/main/webapp/WEB-INF/db/migrate/789_rename_manual_measures_long_dates.rb [new file with mode: 0644]
server/sonar-web/src/main/webapp/WEB-INF/db/migrate/789_rename_measures_long_dates.rb [deleted file]
sonar-core/src/main/resources/org/sonar/core/persistence/schema-h2.ddl
sonar-plugin-api/src/main/java/org/sonar/api/database/model/MeasureModel.java
sonar-plugin-api/src/main/java/org/sonar/api/utils/text/JsonWriter.java
sonar-plugin-api/src/main/resources/org/sonar/api/database/model/MeasureMapper.xml
sonar-plugin-api/src/test/java/org/sonar/api/utils/text/JsonWriterTest.java

index f5b601ce21c7495e431adff674bdd13995fa667c..fdc575277d2e7a5fb59d9b416534ebf98c414e7a 100644 (file)
@@ -42,7 +42,7 @@ public class SearchServer implements Monitored {
   public void start() {
     LoggerFactory.getLogger(SearchServer.class).info("Starting Elasticsearch[{}] on port {}", settings.clusterName(), settings.tcpPort());
 
-    node = new InternalNode(settings.build(), true);
+    node = new InternalNode(settings.build(), false);
     node.start();
   }
 
index fe7813381792d781fd98323eb81456af344fe783..258c44fce0900730cdecbc6d43a254eef0bc405e 100644 (file)
@@ -151,8 +151,7 @@ class SearchSettings {
   private void configureStorage(ImmutableSettings.Builder builder) {
     builder
       .put("index.number_of_shards", "1")
-      .put("index.refresh_interval", "30s")
-      .put("indices.store.throttle.type", "none");
+      .put("index.refresh_interval", "30s");
   }
 
   private void configureCluster(ImmutableSettings.Builder builder) {
index 73179f4fefa842caab4e6187097d2ac1e30f1b80..f327dc0fb1e848c9209ddfb73e846bf1b7073174 100644 (file)
@@ -31,7 +31,7 @@ import org.sonar.core.source.db.FileSourceDao;
 import org.sonar.core.source.db.FileSourceDto;
 import org.sonar.server.db.DbClient;
 import org.sonar.server.source.db.FileSourceDb;
-import org.sonar.server.source.index.SourceLineResultSetIterator;
+import org.sonar.server.source.index.SourceFileResultSetIterator;
 
 import java.io.IOException;
 import java.sql.Connection;
@@ -73,10 +73,10 @@ public class SourceDbBenchmarkTest {
 
     try {
       long start = System.currentTimeMillis();
-      SourceLineResultSetIterator it = SourceLineResultSetIterator.create(dbClient, connection, 0L);
+      SourceFileResultSetIterator it = SourceFileResultSetIterator.create(dbClient, connection, 0L);
       while (it.hasNext()) {
-        SourceLineResultSetIterator.SourceFile row = it.next();
-        assertThat(row.getLines().size()).isEqualTo(NUMBER_OF_LINES);
+        SourceFileResultSetIterator.Row row = it.next();
+        assertThat(row.getLineUpdateRequests().size()).isEqualTo(NUMBER_OF_LINES);
         assertThat(row.getFileUuid()).isNotEmpty();
         counter.incrementAndGet();
       }
@@ -131,7 +131,7 @@ public class SourceDbBenchmarkTest {
         .setOverallLineHits(i)
         .setOverallConditions(i+1)
         .setOverallCoveredConditions(i)
-        .setScmDate(150000000L)
+        .setScmDate(1_500_000_000_000L)
         .setHighlighting("2,9,k;9,18,k")
         .addAllDuplications(Arrays.asList(19,33,141))
         .build();
index a5751ae8cb3df837589b00e5a2d45804ba7884f6..abb0d5dcd78a9ba3314cac32269a2915895a6a4e 100644 (file)
 
 package org.sonar.server.benchmark;
 
-import com.google.common.collect.Maps;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
 import org.junit.Rule;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.sonar.server.es.EsClient;
+import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.server.source.index.SourceFileResultSetIterator;
 import org.sonar.server.source.index.SourceLineDoc;
 import org.sonar.server.source.index.SourceLineIndex;
 import org.sonar.server.source.index.SourceLineIndexDefinition;
 import org.sonar.server.source.index.SourceLineIndexer;
-import org.sonar.server.source.index.SourceLineResultSetIterator;
 import org.sonar.server.tester.ServerTester;
 
+import java.util.Arrays;
+import java.util.Date;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Timer;
@@ -109,11 +110,12 @@ public class SourceIndexBenchmarkTest {
     // TODO assertions
   }
 
-  private static class SourceIterator implements Iterator<SourceLineResultSetIterator.SourceFile> {
+  private static class SourceIterator implements Iterator<SourceFileResultSetIterator.Row> {
     private final long nbFiles;
     private final int nbLinesPerFile;
     private int currentProject = 0;
     private AtomicLong count = new AtomicLong(0L);
+    private final FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
 
     SourceIterator(long nbFiles, int nbLinesPerFile) {
       this.nbFiles = nbFiles;
@@ -130,34 +132,35 @@ public class SourceIndexBenchmarkTest {
     }
 
     @Override
-    public SourceLineResultSetIterator.SourceFile next() {
+    public SourceFileResultSetIterator.Row next() {
+      String projectUuid = "P" + currentProject;
       String fileUuid = "FILE" + count.get();
-      SourceLineResultSetIterator.SourceFile file = new SourceLineResultSetIterator.SourceFile(fileUuid, System.currentTimeMillis());
+      dataBuilder.clear();
       for (int indexLine = 1; indexLine <= nbLinesPerFile; indexLine++) {
-        SourceLineDoc line = new SourceLineDoc(Maps.<String, Object>newHashMap());
-        line.setFileUuid(fileUuid);
-        line.setLine(indexLine);
-        line.setHighlighting(StringUtils.repeat("HIGHLIGHTING", 5));
-        line.setItConditions(4);
-        line.setItCoveredConditions(2);
-        line.setItLineHits(2);
-        line.setOverallConditions(8);
-        line.setOverallCoveredConditions(2);
-        line.setOverallLineHits(2);
-        line.setUtConditions(8);
-        line.setUtCoveredConditions(2);
-        line.setUtLineHits(2);
-        line.setProjectUuid("PROJECT" + currentProject);
-        line.setScmAuthor("a_guy");
-        line.setScmRevision("ABCDEFGHIJKL");
-        line.setSource(StringUtils.repeat("SOURCE", 10));
-        file.addLine(line);
+        dataBuilder.addLinesBuilder()
+          .setLine(indexLine)
+          .setScmRevision("REVISION_" + indexLine)
+          .setScmAuthor("a_guy")
+          .setSource("this is not java code " + indexLine)
+          .setUtLineHits(2)
+          .setUtConditions(8)
+          .setUtCoveredConditions(2)
+          .setItLineHits(2)
+          .setItConditions(8)
+          .setItCoveredConditions(2)
+          .setOverallLineHits(2)
+          .setOverallConditions(8)
+          .setOverallCoveredConditions(2)
+          .setScmDate(1_500_000_000_000L)
+          .setHighlighting("2,9,k;9,18,k")
+          .addAllDuplications(Arrays.asList(19, 33, 141))
+          .build();
       }
       count.incrementAndGet();
       if (count.get() % 500 == 0) {
         currentProject++;
       }
-      return file;
+      return SourceFileResultSetIterator.toRow(projectUuid, fileUuid, new Date(), dataBuilder.build());
     }
 
     @Override
index babf3f671bd5f0b9d4796305d2f1c0d239e753cb..81f9d3b85bc99b88cb7610439f0555b653101cdc 100644 (file)
@@ -19,7 +19,6 @@
  */
 package org.sonar.server.activity.index;
 
-import org.elasticsearch.action.update.UpdateRequest;
 import org.sonar.core.persistence.DbSession;
 import org.sonar.server.db.DbClient;
 import org.sonar.server.es.BaseIndexer;
@@ -27,7 +26,6 @@ import org.sonar.server.es.BulkIndexer;
 import org.sonar.server.es.EsClient;
 
 import java.sql.Connection;
-import java.util.Iterator;
 
 /**
  * Add to Elasticsearch index {@link org.sonar.server.activity.index.ActivityIndexDefinition} the rows of
@@ -51,38 +49,18 @@ public class ActivityIndexer extends BaseIndexer {
     DbSession dbSession = dbClient.openSession(false);
     Connection dbConnection = dbSession.getConnection();
     try {
-      ActivityResultSetIterator rowIt = ActivityResultSetIterator.create(dbClient, dbConnection, lastUpdatedAt);
-      long maxUpdatedAt = doIndex(bulk, rowIt);
-      rowIt.close();
-      return maxUpdatedAt;
+      ActivityResultSetIterator it = ActivityResultSetIterator.create(dbClient, dbConnection, lastUpdatedAt);
+      bulk.start();
+      while (it.hasNext()) {
+        bulk.add(it.next());
+      }
+      bulk.stop();
+      it.close();
+      return it.getMaxRowDate();
 
     } finally {
       dbSession.close();
     }
   }
 
-  public long index(Iterator<ActivityDoc> activities) {
-    BulkIndexer bulk = new BulkIndexer(esClient, ActivityIndexDefinition.INDEX);
-    return doIndex(bulk, activities);
-  }
-
-  private long doIndex(BulkIndexer bulk, Iterator<ActivityDoc> activities) {
-    long maxUpdatedAt = 0L;
-    bulk.start();
-    while (activities.hasNext()) {
-      ActivityDoc activity = activities.next();
-      bulk.add(newUpsertRequest(activity));
-
-      // it's more efficient to sort programmatically than in SQL on some databases (MySQL for instance)
-      maxUpdatedAt = Math.max(maxUpdatedAt, activity.getCreatedAt().getTime());
-    }
-    bulk.stop();
-    return maxUpdatedAt;
-  }
-
-  private UpdateRequest newUpsertRequest(ActivityDoc doc) {
-    return new UpdateRequest(ActivityIndexDefinition.INDEX, ActivityIndexDefinition.TYPE, doc.getKey())
-      .doc(doc.getFields())
-      .upsert(doc.getFields());
-  }
 }
index 701d649ff8f4f30f9482d3499394b5dece30609f..9bb8cdd7b359c2574354f98c367ecf39ab2c8afe 100644 (file)
 package org.sonar.server.activity.index;
 
 import org.apache.commons.lang.StringUtils;
+import org.elasticsearch.action.update.UpdateRequest;
 import org.sonar.api.utils.KeyValueFormat;
+import org.sonar.api.utils.text.JsonWriter;
 import org.sonar.server.db.DbClient;
 import org.sonar.server.db.ResultSetIterator;
+import org.sonar.server.es.EsUtils;
+import org.sonar.server.util.DateCollector;
 
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStreamWriter;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Timestamp;
-import java.util.HashMap;
+import java.util.Date;
 
 /**
  * Scrolls over table ACTIVITIES and reads documents to populate
  * the index "activities/activity"
  */
-class ActivityResultSetIterator extends ResultSetIterator<ActivityDoc> {
+class ActivityResultSetIterator extends ResultSetIterator<UpdateRequest> {
 
   private static final String[] FIELDS = {
-    // column 1
     "log_key",
     "log_action",
     "log_message",
@@ -52,6 +57,8 @@ class ActivityResultSetIterator extends ResultSetIterator<ActivityDoc> {
 
   private static final String SQL_AFTER_DATE = SQL_ALL + " where created_at>=?";
 
+  private final DateCollector dates = new DateCollector();
+
   private ActivityResultSetIterator(PreparedStatement stmt) throws SQLException {
     super(stmt);
   }
@@ -70,17 +77,30 @@ class ActivityResultSetIterator extends ResultSetIterator<ActivityDoc> {
   }
 
   @Override
-  protected ActivityDoc read(ResultSet rs) throws SQLException {
-    ActivityDoc doc = new ActivityDoc(new HashMap<String, Object>(10));
-
+  protected UpdateRequest read(ResultSet rs) throws SQLException {
+    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
     // all the fields must be present, even if value is null
-    doc.setKey(rs.getString(1));
-    doc.setAction(rs.getString(2));
-    doc.setMessage(rs.getString(3));
-    doc.setDetails(KeyValueFormat.parse(rs.getString(4)));
-    doc.setLogin(rs.getString(5));
-    doc.setType(rs.getString(6));
-    doc.setCreatedAt(rs.getTimestamp(7));
-    return doc;
+    JsonWriter writer = JsonWriter.of(new OutputStreamWriter(bytes)).setSerializeNulls(true);
+    writer.beginObject();
+    String key = rs.getString(1);
+    writer.prop(ActivityIndexDefinition.FIELD_KEY, key);
+    writer.prop(ActivityIndexDefinition.FIELD_ACTION, rs.getString(2));
+    writer.prop(ActivityIndexDefinition.FIELD_MESSAGE, rs.getString(3));
+    writer.name(ActivityIndexDefinition.FIELD_DETAILS).valueObject(KeyValueFormat.parse(rs.getString(4)));
+    writer.prop(ActivityIndexDefinition.FIELD_LOGIN, rs.getString(5));
+    writer.prop(ActivityIndexDefinition.FIELD_TYPE, rs.getString(6));
+    Date createdAt = rs.getTimestamp(7);
+    writer.prop(ActivityIndexDefinition.FIELD_CREATED_AT, EsUtils.formatDateTime(createdAt));
+    writer.endObject().close();
+    byte[] jsonDoc = bytes.toByteArray();
+
+    // it's more efficient to sort programmatically than in SQL on some databases (MySQL for instance)
+    dates.add(createdAt);
+
+    return new UpdateRequest(ActivityIndexDefinition.INDEX, ActivityIndexDefinition.TYPE, key).doc(jsonDoc).upsert(jsonDoc);
+  }
+
+  long getMaxRowDate() {
+    return dates.getMax();
   }
 }
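
The hunk above illustrates the "reduce memory consumption when building JSON requests" item: instead of filling a Map that would be serialized to JSON twice (once for doc(), once for upsert()), the document is streamed once into a byte array with JsonWriter. A condensed, hypothetical variant of that pattern; the class name and the single "login" field are placeholders.

import org.elasticsearch.action.update.UpdateRequest;
import org.sonar.api.utils.text.JsonWriter;

import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;

class JsonUpsertSketch {
  static UpdateRequest newUpsert(String index, String type, String id, String login) {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    // serialize nulls so that every field is present in the document, even when empty
    JsonWriter writer = JsonWriter.of(new OutputStreamWriter(bytes)).setSerializeNulls(true);
    writer.beginObject();
    writer.prop("login", login);
    writer.endObject().close();
    byte[] json = bytes.toByteArray();
    // the same byte array backs both doc() and upsert(), so JSON is generated only once
    return new UpdateRequest(index, type, id).doc(json).upsert(json);
  }
}
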
index 8b26a3f5196ccd2deda2d8ae66bf1e93064c4785..d7ebc315c6a691f09d712efe4a5a1b0869075b14 100644 (file)
@@ -89,7 +89,6 @@ public interface DatabaseMigrations {
     FeedIssuesLongDates.class,
     FeedFileSourcesBinaryData.class,
     FeedSemaphoresLongDates.class,
-    FeedProjectMeasuresLongDates.class,
     FeedManualMeasuresLongDates.class,
     FeedEventsLongDates.class,
     AddNewCharacteristics.class,
diff --git a/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDates.java b/server/sonar-server/src/main/java/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDates.java
deleted file mode 100644 (file)
index dce97a7..0000000
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
- */
-
-package org.sonar.server.db.migrations.v51;
-
-import org.sonar.api.utils.System2;
-import org.sonar.core.persistence.Database;
-import org.sonar.server.db.migrations.BaseDataChange;
-import org.sonar.server.db.migrations.MassUpdate;
-import org.sonar.server.db.migrations.Select;
-import org.sonar.server.db.migrations.SqlStatement;
-
-import java.sql.SQLException;
-import java.util.Date;
-
-public class FeedProjectMeasuresLongDates extends BaseDataChange {
-
-  private final System2 system2;
-
-  public FeedProjectMeasuresLongDates(Database db, System2 system2) {
-    super(db);
-    this.system2 = system2;
-  }
-
-  @Override
-  public void execute(Context context) throws SQLException {
-    final long now = system2.now();
-    MassUpdate massUpdate = context.prepareMassUpdate();
-    massUpdate
-      .select("SELECT m.measure_date, m.id FROM project_measures m WHERE measure_date_ms IS NULL");
-    massUpdate
-      .update("UPDATE project_measures SET measure_date_ms=? WHERE id=?");
-    massUpdate.rowPluralName("project measures");
-    massUpdate.execute(new MassUpdate.Handler() {
-      @Override
-      public boolean handle(Select.Row row, SqlStatement update) throws SQLException {
-        Date date = row.getNullableDate(1);
-        update.setLong(1, date == null ? null : Math.min(now, date.getTime()));
-
-        Long id = row.getNullableLong(2);
-        update.setLong(2, id);
-
-        return true;
-      }
-    });
-  }
-
-}
index fd6f3fec50c12bd7d427299be597c1fdd217f7f9..218da47bd8430cea462eec2f08c3819a6e7ede45 100644 (file)
@@ -22,15 +22,21 @@ package org.sonar.server.es;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
 import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.search.SearchHit;
 import org.picocontainer.Startable;
 import org.sonar.api.utils.log.Logger;
 import org.sonar.api.utils.log.Loggers;
@@ -38,6 +44,8 @@ import org.sonar.server.util.ProgressLogger;
 
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
 /**
@@ -49,19 +57,21 @@ import java.util.concurrent.atomic.AtomicLong;
  * </ul>
  */
 public class BulkIndexer implements Startable {
+
   private static final Logger LOGGER = Loggers.get(BulkIndexer.class);
-  private static final long FLUSH_BYTE_SIZE = new ByteSizeValue(2, ByteSizeUnit.MB).bytes();
+  private static final long FLUSH_BYTE_SIZE = new ByteSizeValue(1, ByteSizeUnit.MB).bytes();
   private static final String REFRESH_INTERVAL_SETTING = "index.refresh_interval";
   private static final String ALREADY_STARTED_MESSAGE = "Bulk indexing is already started";
 
   private final EsClient client;
   private final String indexName;
   private boolean large = false;
-  private long flushByteSize = FLUSH_BYTE_SIZE;
+  private boolean disableRefresh = false;
   private BulkRequestBuilder bulkRequest = null;
   private Map<String, Object> largeInitialSettings = null;
-
   private final AtomicLong counter = new AtomicLong(0L);
+  private final int concurrentRequests;
+  private final Semaphore semaphore;
   private final ProgressLogger progress;
 
   public BulkIndexer(EsClient client, String indexName) {
@@ -69,13 +79,15 @@ public class BulkIndexer implements Startable {
     this.indexName = indexName;
     this.progress = new ProgressLogger(String.format("Progress[BulkIndexer[%s]]", indexName), counter, LOGGER)
       .setPluralLabel("requests");
+
+    this.concurrentRequests = Math.max(1, Runtime.getRuntime().availableProcessors() - 1);
+    this.semaphore = new Semaphore(concurrentRequests);
   }
 
   /**
   * Large indexing is a heavy operation that generally populates an index from scratch. Replicas and
    * automatic refresh are disabled during bulk indexing and lucene segments are optimized at the end.
    */
-
   public BulkIndexer setLarge(boolean b) {
     Preconditions.checkState(bulkRequest == null, ALREADY_STARTED_MESSAGE);
     this.large = b;
@@ -83,14 +95,16 @@ public class BulkIndexer implements Startable {
   }
 
   /**
-   * Default value is {@link org.sonar.server.es.BulkIndexer#FLUSH_BYTE_SIZE}
-   * @see org.elasticsearch.common.unit.ByteSizeValue
+   * By default the index is refreshed in method {@link #stop()}. Set to true
+   * to disable that refresh.
    */
-  public BulkIndexer setFlushByteSize(long l) {
-    this.flushByteSize = l;
+  public BulkIndexer setDisableRefresh(boolean b) {
+    this.disableRefresh = b;
     return this;
   }
 
+
   @Override
   public void start() {
     Preconditions.checkState(bulkRequest == null, ALREADY_STARTED_MESSAGE);
@@ -113,31 +127,64 @@ public class BulkIndexer implements Startable {
 
       updateSettings(bulkSettings);
     }
-    bulkRequest = client.prepareBulk();
+    bulkRequest = client.prepareBulk().setRefresh(false);
     counter.set(0L);
     progress.start();
   }
 
   public void add(ActionRequest request) {
     bulkRequest.request().add(request);
-    counter.getAndIncrement();
-    if (bulkRequest.request().estimatedSizeInBytes() >= flushByteSize) {
-      executeBulk(bulkRequest);
-      bulkRequest = client.prepareBulk();
+    if (bulkRequest.request().estimatedSizeInBytes() >= FLUSH_BYTE_SIZE) {
+      executeBulk();
+    }
+  }
+
+  public void addDeletion(SearchRequestBuilder searchRequest) {
+    searchRequest
+      .setScroll(TimeValue.timeValueMinutes(5))
+      .setSearchType(SearchType.SCAN)
+      // load only doc ids, not _source fields
+      .setFetchSource(false);
+
+    // this search is synchronous. An optimization would be to make it non-blocking,
+    // but that requires tracking pending requests in close().
+    // The same semaphore can't be reused because of a potential deadlock (it would
+    // require acquiring two locks)
+    SearchResponse searchResponse = searchRequest.get();
+    searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()).get();
+    for (SearchHit hit : searchResponse.getHits()) {
+      add(client.prepareDelete(hit.index(), hit.type(), hit.getId()).request());
     }
   }
 
+  /**
+   * Delete all the documents matching the given search request. This method is blocking.
+   * The index is refreshed, so deleted documents stop being searchable as soon as the method returns.
+   */
+  public static void delete(EsClient client, String indexName, SearchRequestBuilder searchRequest) {
+    BulkIndexer bulk = new BulkIndexer(client, indexName);
+    bulk.start();
+    bulk.addDeletion(searchRequest);
+    bulk.stop();
+  }
+
   @Override
   public void stop() {
+    if (bulkRequest.numberOfActions() > 0) {
+      executeBulk();
+    }
     try {
-      if (bulkRequest.numberOfActions() > 0) {
-        executeBulk(bulkRequest);
+      if (semaphore.tryAcquire(concurrentRequests, 10, TimeUnit.MINUTES)) {
+        semaphore.release(concurrentRequests);
       }
-    } finally {
-      progress.stop();
+    } catch (InterruptedException e) {
+      throw new IllegalStateException("Elasticsearch bulk requests still being executed after 10 minutes", e);
     }
+    progress.stop();
 
-    client.prepareRefresh(indexName).get();
+    if (!disableRefresh) {
+      client.prepareRefresh(indexName).get();
+    }
     if (large) {
       // optimize lucene segments and revert index settings
       // Optimization must be done before re-applying replicas:
@@ -155,39 +202,54 @@ public class BulkIndexer implements Startable {
     req.get();
   }
 
-  private void executeBulk(BulkRequestBuilder bulkRequest) {
-    List<ActionRequest> retries = Lists.newArrayList();
-    BulkResponse response = bulkRequest.get();
-
-    for (BulkItemResponse item : response.getItems()) {
-      if (item.isFailed()) {
-        ActionRequest retry = bulkRequest.request().requests().get(item.getItemId());
-        retries.add(retry);
-      }
-    }
-
-    if (!retries.isEmpty()) {
-      LOGGER.warn(String.format("%d index requests failed. Trying again.", retries.size()));
-      BulkRequestBuilder retryBulk = client.prepareBulk();
-      for (ActionRequest retry : retries) {
-        retryBulk.request().add(retry);
-      }
-      BulkResponse retryBulkResponse = retryBulk.get();
-      if (retryBulkResponse.hasFailures()) {
-        LOGGER.error("New attempt to index documents failed");
-        for (int index = 0; index < retryBulkResponse.getItems().length; index++) {
-          BulkItemResponse item = retryBulkResponse.getItems()[index];
+  private void executeBulk() {
+    final BulkRequestBuilder req = this.bulkRequest;
+    this.bulkRequest = client.prepareBulk().setRefresh(false);
+    semaphore.acquireUninterruptibly();
+    req.execute(new ActionListener<BulkResponse>() {
+      @Override
+      public void onResponse(BulkResponse response) {
+        semaphore.release();
+        counter.addAndGet(response.getItems().length);
+
+        List<ActionRequest> retries = Lists.newArrayList();
+        for (BulkItemResponse item : response.getItems()) {
           if (item.isFailed()) {
-            StringBuilder sb = new StringBuilder();
-            String msg = sb.append("\n[").append(index)
-              .append("]: index [").append(item.getIndex()).append("], type [").append(item.getType()).append("], id [").append(item.getId())
-              .append("], message [").append(item.getFailureMessage()).append("]").toString();
-            LOGGER.error(msg);
+            ActionRequest retry = req.request().requests().get(item.getItemId());
+            retries.add(retry);
+          }
+        }
+
+        if (!retries.isEmpty()) {
+          LOGGER.warn(String.format("%d index requests failed. Trying again.", retries.size()));
+          BulkRequestBuilder retryBulk = client.prepareBulk();
+          for (ActionRequest retry : retries) {
+            retryBulk.request().add(retry);
+          }
+          BulkResponse retryBulkResponse = retryBulk.get();
+          if (retryBulkResponse.hasFailures()) {
+            LOGGER.error("New attempt to index documents failed");
+            for (int index = 0; index < retryBulkResponse.getItems().length; index++) {
+              BulkItemResponse item = retryBulkResponse.getItems()[index];
+              if (item.isFailed()) {
+                StringBuilder sb = new StringBuilder();
+                String msg = sb.append("\n[").append(index)
+                  .append("]: index [").append(item.getIndex()).append("], type [").append(item.getType()).append("], id [").append(item.getId())
+                  .append("], message [").append(item.getFailureMessage()).append("]").toString();
+                LOGGER.error(msg);
+              }
+            }
+          } else {
+            LOGGER.info("New index attempt succeeded");
           }
         }
-      } else {
-        LOGGER.info("New index attempt succeeded");
       }
-    }
+
+      @Override
+      public void onFailure(Throwable e) {
+        semaphore.release();
+        LOGGER.error("Fail to execute bulk index request: " + req, e);
+      }
+    });
   }
 }
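
With prepareDeleteByQuery() gone (see SearchClient below), deletions are expressed as a regular search whose hits are scrolled and turned into bulk delete requests. A hypothetical caller of the new static helper; index and field names are placeholders, and the simple termQuery stands in for the filtered queries used by the real indexers.

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.sonar.server.es.BulkIndexer;
import org.sonar.server.es.EsClient;

class DeleteByQuerySketch {
  static void deleteProjectDocuments(EsClient esClient, String index, String projectUuid) {
    // describe the documents to remove with a search, then let BulkIndexer
    // scroll over the hits and delete them through bulk requests
    SearchRequestBuilder search = esClient.prepareSearch(index)
      .setQuery(QueryBuilders.termQuery("projectUuid", projectUuid));
    BulkIndexer.delete(esClient, index, search);
  }
}
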
index 837754cf20a9d47bb0fc2fa2e10630e9ef94b3da..451ac8ea9233386ade178aec5c4a680ea9d2f1a0 100644 (file)
@@ -21,12 +21,16 @@ package org.sonar.server.es;
 
 import com.google.common.base.Function;
 import com.google.common.collect.Lists;
+import org.elasticsearch.common.joda.time.format.ISODateTimeFormat;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.sonar.server.search.BaseDoc;
 
+import javax.annotation.CheckForNull;
+import javax.annotation.Nullable;
 import java.util.ArrayList;
+import java.util.Date;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -62,4 +66,20 @@ public class EsUtils {
       }
     });
   }
+
+  @CheckForNull
+  public static Date parseDateTime(@Nullable String s) {
+    if (s != null) {
+      return ISODateTimeFormat.dateTime().parseDateTime(s).toDate();
+    }
+    return null;
+  }
+
+  @CheckForNull
+  public static String formatDateTime(@Nullable Date date) {
+    if (date != null) {
+      return ISODateTimeFormat.dateTime().print(date.getTime());
+    }
+    return null;
+  }
 }
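
The two helpers added above convert between java.util.Date and the ISO-8601 strings stored in Elasticsearch documents, and both accept null. A small, assumed usage example (the printed format is indicative):

import org.sonar.server.es.EsUtils;

import java.util.Date;

class EsDateSketch {
  public static void main(String[] args) {
    // format for indexing, parse back when reading a document field
    String indexed = EsUtils.formatDateTime(new Date());
    Date parsed = EsUtils.parseDateTime(indexed);
    System.out.println(indexed + " -> " + parsed);
    // null in, null out
    System.out.println(EsUtils.formatDateTime(null));
  }
}
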
index 6ace4a6579b0657821edb74640d9e2c5d7162e7a..c9654435c6ef5359c0c070bf1407f452c9151a32 100644 (file)
@@ -21,7 +21,7 @@
 package org.sonar.server.es.request;
 
 import com.google.common.collect.HashMultiset;
-import com.google.common.collect.Multiset.Entry;
+import com.google.common.collect.Multiset;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ListenableActionFuture;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
@@ -69,7 +69,7 @@ public class ProxyBulkRequestBuilder extends BulkRequestBuilder {
 
   @Override
   public ListenableActionFuture<BulkResponse> execute() {
-    throw new UnsupportedOperationException("execute() should not be called as it's used for asynchronous");
+    throw unsupported();
   }
 
   private UnsupportedOperationException unsupported() {
@@ -106,10 +106,10 @@ public class ProxyBulkRequestBuilder extends BulkRequestBuilder {
       groupedRequests.add(new BulkRequestKey(requestType, index, docType));
     }
 
-    Set<Entry<BulkRequestKey>> entrySet = groupedRequests.entrySet();
+    Set<Multiset.Entry<BulkRequestKey>> entrySet = groupedRequests.entrySet();
     int size = entrySet.size();
     int current = 0;
-    for (Entry<BulkRequestKey> requestEntry : entrySet) {
+    for (Multiset.Entry<BulkRequestKey> requestEntry : entrySet) {
       message.append(requestEntry.getCount()).append(" ").append(requestEntry.getElement().toString());
       current++;
       if (current < size) {
index 84420be5d4824dcbc4cdaa7bc72fd4f00a09e6ed..d7be429f8e70276c8384e80d0ab755c333b0add5 100644 (file)
@@ -20,9 +20,9 @@
 package org.sonar.server.issue.index;
 
 import org.apache.commons.dbutils.DbUtils;
+import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.index.query.FilterBuilders;
-import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.sonar.core.persistence.DbSession;
 import org.sonar.server.db.DbClient;
@@ -89,14 +89,17 @@ public class IssueIndexer extends BaseIndexer {
   }
 
   public void deleteProject(String uuid, boolean refresh) {
-    QueryBuilder query = QueryBuilders.filteredQuery(
-      QueryBuilders.matchAllQuery(),
-      FilterBuilders.boolFilter().must(FilterBuilders.termsFilter(IssueIndexDefinition.FIELD_ISSUE_PROJECT_UUID, uuid))
-      );
-    esClient.prepareDeleteByQuery(IssueIndexDefinition.INDEX).setQuery(query).get();
-    if (refresh) {
-      esClient.prepareRefresh(IssueIndexDefinition.INDEX).get();
-    }
+    BulkIndexer bulk = new BulkIndexer(esClient, IssueIndexDefinition.INDEX);
+    bulk.setDisableRefresh(!refresh);
+    bulk.start();
+    SearchRequestBuilder search = esClient.prepareSearch(IssueIndexDefinition.INDEX)
+      .setRouting(uuid)
+      .setQuery(QueryBuilders.filteredQuery(
+        QueryBuilders.matchAllQuery(),
+        FilterBuilders.boolFilter().must(FilterBuilders.termsFilter(IssueIndexDefinition.FIELD_ISSUE_PROJECT_UUID, uuid))
+      ));
+    bulk.addDeletion(search);
+    bulk.stop();
   }
 
   BulkIndexer createBulkIndexer(boolean large) {
index 83bc7c3d368e855372a446a5786234562307e3cd..b8003977b5edb56243cd02fbd35c96fdf28dc7e7 100644 (file)
@@ -57,7 +57,6 @@ import org.sonar.process.ProcessConstants;
 import org.sonar.server.es.request.ProxyBulkRequestBuilder;
 import org.sonar.server.es.request.ProxyCountRequestBuilder;
 import org.sonar.server.es.request.ProxyCreateIndexRequestBuilder;
-import org.sonar.server.es.request.ProxyDeleteByQueryRequestBuilder;
 import org.sonar.server.es.request.ProxyDeleteRequestBuilder;
 import org.sonar.server.es.request.ProxyGetRequestBuilder;
 import org.sonar.server.es.request.ProxyIndicesExistsRequestBuilder;
@@ -136,7 +135,7 @@ public class SearchClient extends TransportClient implements Startable {
 
   @Override
   public DeleteByQueryRequestBuilder prepareDeleteByQuery(String... indices) {
-    return new ProxyDeleteByQueryRequestBuilder(this).setIndices(indices);
+    throw new UnsupportedOperationException("Delete by query must not be used. See https://github.com/elastic/elasticsearch/issues/10067. See alternatives in BulkIndexer.");
   }
 
   // ****************************************************************************************************************
diff --git a/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceFileResultSetIterator.java b/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceFileResultSetIterator.java
new file mode 100644 (file)
index 0000000..cf346ea
--- /dev/null
@@ -0,0 +1,209 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.source.index;
+
+import org.apache.commons.lang.StringUtils;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.sonar.api.utils.text.JsonWriter;
+import org.sonar.core.source.db.FileSourceDto;
+import org.sonar.server.db.DbClient;
+import org.sonar.server.db.ResultSetIterator;
+import org.sonar.server.es.EsUtils;
+import org.sonar.server.source.db.FileSourceDb;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStreamWriter;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Scroll over table FILE_SOURCES and directly parse data required to
+ * populate the index sourcelines
+ */
+public class SourceFileResultSetIterator extends ResultSetIterator<SourceFileResultSetIterator.Row> {
+
+  public static class Row {
+    private final String fileUuid, projectUuid;
+    private final long updatedAt;
+    private final List<UpdateRequest> lineUpdateRequests = new ArrayList<>();
+
+    public Row(String projectUuid, String fileUuid, long updatedAt) {
+      this.projectUuid = projectUuid;
+      this.fileUuid = fileUuid;
+      this.updatedAt = updatedAt;
+    }
+
+    public String getProjectUuid() {
+      return projectUuid;
+    }
+
+    public String getFileUuid() {
+      return fileUuid;
+    }
+
+    public long getUpdatedAt() {
+      return updatedAt;
+    }
+
+    public List<UpdateRequest> getLineUpdateRequests() {
+      return lineUpdateRequests;
+    }
+  }
+
+  private static final String[] FIELDS = {
+    "project_uuid",
+    "file_uuid",
+    "updated_at",
+    "binary_data"
+  };
+  private static final String SQL_ALL = "select " + StringUtils.join(FIELDS, ",") + " from file_sources";
+  private static final String SQL_AFTER_DATE = SQL_ALL + " where updated_at>?";
+
+  public static SourceFileResultSetIterator create(DbClient dbClient, Connection connection, long afterDate) {
+    try {
+      String sql = afterDate > 0L ? SQL_AFTER_DATE : SQL_ALL;
+      // rows are big, so they are scrolled one at a time (only one row in memory at a time)
+      PreparedStatement stmt = dbClient.newScrollingSingleRowSelectStatement(connection, sql);
+      if (afterDate > 0L) {
+        stmt.setLong(1, afterDate);
+      }
+      return new SourceFileResultSetIterator(stmt);
+    } catch (SQLException e) {
+      throw new IllegalStateException("Fail to prepare SQL request to select all file sources", e);
+    }
+  }
+
+  private SourceFileResultSetIterator(PreparedStatement stmt) throws SQLException {
+    super(stmt);
+  }
+
+  @Override
+  protected Row read(ResultSet rs) throws SQLException {
+    String projectUuid = rs.getString(1);
+    String fileUuid = rs.getString(2);
+    Date updatedAt = new Date(rs.getLong(3));
+    FileSourceDb.Data data = FileSourceDto.decodeData(rs.getBinaryStream(4));
+    return toRow(projectUuid, fileUuid, updatedAt, data);
+  }
+
+  /**
+   * Convert protobuf message to data required for Elasticsearch indexing
+   */
+  public static Row toRow(String projectUuid, String fileUuid, Date updatedAt, FileSourceDb.Data data) {
+    Row result = new Row(projectUuid, fileUuid, updatedAt.getTime());
+    for (FileSourceDb.Line line : data.getLinesList()) {
+      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+
+      // all the fields must be present, even if value is null
+      JsonWriter writer = JsonWriter.of(new OutputStreamWriter(bytes)).setSerializeNulls(true);
+      writer.beginObject();
+      writer.prop(SourceLineIndexDefinition.FIELD_PROJECT_UUID, projectUuid);
+      writer.prop(SourceLineIndexDefinition.FIELD_FILE_UUID, fileUuid);
+      writer.prop(SourceLineIndexDefinition.FIELD_LINE, line.getLine());
+      writer.prop(SourceLineIndexDefinition.FIELD_UPDATED_AT, EsUtils.formatDateTime(updatedAt));
+      writer.prop(SourceLineIndexDefinition.FIELD_SCM_REVISION, line.getScmRevision());
+      writer.prop(SourceLineIndexDefinition.FIELD_SCM_AUTHOR, line.getScmAuthor());
+      writer.prop(SourceLineIndexDefinition.FIELD_SCM_DATE, EsUtils.formatDateTime(line.hasScmDate() ? new Date(line.getScmDate()) : null));
+
+      // unit tests
+      if (line.hasUtLineHits()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_UT_LINE_HITS,  line.getUtLineHits());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_UT_LINE_HITS).valueObject(null);
+      }
+      if (line.hasUtConditions()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_UT_CONDITIONS,  line.getUtConditions());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_UT_CONDITIONS).valueObject(null);
+      }
+      if (line.hasUtCoveredConditions()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_UT_COVERED_CONDITIONS,  line.getUtCoveredConditions());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_UT_COVERED_CONDITIONS).valueObject(null);
+      }
+
+      // IT
+      if (line.hasItLineHits()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_IT_LINE_HITS,  line.getItLineHits());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_IT_LINE_HITS).valueObject(null);
+      }
+      if (line.hasItConditions()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_IT_CONDITIONS,  line.getItConditions());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_IT_CONDITIONS).valueObject(null);
+      }
+      if (line.hasItCoveredConditions()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_IT_COVERED_CONDITIONS,  line.getItCoveredConditions());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_IT_COVERED_CONDITIONS).valueObject(null);
+      }
+
+      // Overall coverage
+      if (line.hasOverallLineHits()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_OVERALL_LINE_HITS,  line.getOverallLineHits());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_OVERALL_LINE_HITS).valueObject(null);
+      }
+      if (line.hasOverallConditions()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_OVERALL_CONDITIONS,  line.getOverallConditions());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_OVERALL_CONDITIONS).valueObject(null);
+      }
+      if (line.hasOverallCoveredConditions()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_OVERALL_COVERED_CONDITIONS,  line.getOverallCoveredConditions());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_OVERALL_COVERED_CONDITIONS).valueObject(null);
+      }
+
+      if (line.hasHighlighting()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_HIGHLIGHTING,  line.getHighlighting());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_HIGHLIGHTING).valueObject(null);
+      }
+      if (line.hasSymbols()) {
+        writer.prop(SourceLineIndexDefinition.FIELD_SYMBOLS,  line.getSymbols());
+      } else {
+        writer.name(SourceLineIndexDefinition.FIELD_SYMBOLS).valueObject(null);
+      }
+      writer.name(SourceLineIndexDefinition.FIELD_DUPLICATIONS).valueObject(line.getDuplicationsList());
+      writer.prop(SourceLineIndexDefinition.FIELD_SOURCE, line.hasSource() ? line.getSource() : null);
+      writer.endObject().close();
+
+      // This is an optimization to reduce memory consumption and multiple conversions from Map to JSON.
+      // UpdateRequest#doc() and #upsert() take the same parameter values, so:
+      // - passing the same Map would execute two JSON serializations
+      // - the Map is a useless temporary structure: read JDBC result set -> convert to map -> convert to JSON. Generating
+      // JSON directly from the result set is more efficient.
+      byte[] jsonDoc = bytes.toByteArray();
+      UpdateRequest updateRequest = new UpdateRequest(SourceLineIndexDefinition.INDEX, SourceLineIndexDefinition.TYPE, SourceLineIndexDefinition.docKey(fileUuid, line.getLine()))
+        .routing(projectUuid)
+        .doc(jsonDoc)
+        .upsert(jsonDoc);
+      result.lineUpdateRequests.add(updateRequest);
+    }
+    return result;
+  }
+}
index f00a5b8c4573f714b4d1e7c7858e3325716f16c5..5303a37d684061e487b5e9c4df5afc7d24a45877 100644 (file)
@@ -125,10 +125,6 @@ public class SourceLineDoc extends BaseDoc {
     return this;
   }
 
-  public String key() {
-    return String.format("%s_%d", fileUuid(), line());
-  }
-
   @CheckForNull
   public Integer utLineHits() {
     Number lineHits = getNullableField(SourceLineIndexDefinition.FIELD_UT_LINE_HITS);
index ad5615fdd9a5f819220232efb486b272236913c2..5385b80f1361514ae141f4b56e659dd999976e26 100644 (file)
@@ -19,6 +19,7 @@
  */
 package org.sonar.server.source.index;
 
+import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.sonar.api.config.Settings;
 import org.sonar.process.ProcessConstants;
@@ -27,6 +28,8 @@ import org.sonar.server.es.NewIndex;
 
 public class SourceLineIndexDefinition implements IndexDefinition {
 
+  public static final String INDEX = "sourcelines";
+  public static final String TYPE = "sourceline";
   public static final String FIELD_PROJECT_UUID = "projectUuid";
   public static final String FIELD_FILE_UUID = "fileUuid";
   public static final String FIELD_LINE = "line";
@@ -48,11 +51,6 @@ public class SourceLineIndexDefinition implements IndexDefinition {
   public static final String FIELD_DUPLICATIONS = "duplications";
   public static final String FIELD_UPDATED_AT = "updatedAt";
 
-  public static final String INDEX = "sourcelines";
-
-  public static final String TYPE = "sourceline";
-
-
   private final Settings settings;
 
   public SourceLineIndexDefinition(Settings settings) {
@@ -75,26 +73,31 @@ public class SourceLineIndexDefinition implements IndexDefinition {
     }
 
     // type "sourceline"
-    NewIndex.NewIndexType sourceLineMapping = index.createType(TYPE);
-    sourceLineMapping.stringFieldBuilder(FIELD_PROJECT_UUID).build();
-    sourceLineMapping.stringFieldBuilder(FIELD_FILE_UUID).build();
-    sourceLineMapping.createIntegerField(FIELD_LINE);
-    sourceLineMapping.stringFieldBuilder(FIELD_SCM_REVISION).build();
-    sourceLineMapping.stringFieldBuilder(FIELD_SCM_AUTHOR).build();
-    sourceLineMapping.createDateTimeField(FIELD_SCM_DATE);
-    sourceLineMapping.stringFieldBuilder(FIELD_HIGHLIGHTING).disableSearch().build();
-    sourceLineMapping.stringFieldBuilder(FIELD_SOURCE).disableSearch().build();
-    sourceLineMapping.createIntegerField(FIELD_UT_LINE_HITS);
-    sourceLineMapping.createIntegerField(FIELD_UT_CONDITIONS);
-    sourceLineMapping.createIntegerField(FIELD_UT_COVERED_CONDITIONS);
-    sourceLineMapping.createIntegerField(FIELD_IT_LINE_HITS);
-    sourceLineMapping.createIntegerField(FIELD_IT_CONDITIONS);
-    sourceLineMapping.createIntegerField(FIELD_IT_COVERED_CONDITIONS);
-    sourceLineMapping.createIntegerField(FIELD_OVERALL_LINE_HITS);
-    sourceLineMapping.createIntegerField(FIELD_OVERALL_CONDITIONS);
-    sourceLineMapping.createIntegerField(FIELD_OVERALL_COVERED_CONDITIONS);
-    sourceLineMapping.stringFieldBuilder(FIELD_SYMBOLS).disableSearch().build();
-    sourceLineMapping.createShortField(FIELD_DUPLICATIONS);
-    sourceLineMapping.createDateTimeField(FIELD_UPDATED_AT);
+    NewIndex.NewIndexType mapping = index.createType(TYPE);
+    mapping.setAttribute("_routing", ImmutableMap.of("required", true, "path", FIELD_PROJECT_UUID));
+    mapping.stringFieldBuilder(FIELD_PROJECT_UUID).build();
+    mapping.stringFieldBuilder(FIELD_FILE_UUID).build();
+    mapping.createIntegerField(FIELD_LINE);
+    mapping.stringFieldBuilder(FIELD_SCM_REVISION).disableSearch().build();
+    mapping.stringFieldBuilder(FIELD_SCM_AUTHOR).disableSearch().build();
+    mapping.createDateTimeField(FIELD_SCM_DATE);
+    mapping.stringFieldBuilder(FIELD_HIGHLIGHTING).disableSearch().build();
+    mapping.stringFieldBuilder(FIELD_SOURCE).disableSearch().build();
+    mapping.createIntegerField(FIELD_UT_LINE_HITS);
+    mapping.createIntegerField(FIELD_UT_CONDITIONS);
+    mapping.createIntegerField(FIELD_UT_COVERED_CONDITIONS);
+    mapping.createIntegerField(FIELD_IT_LINE_HITS);
+    mapping.createIntegerField(FIELD_IT_CONDITIONS);
+    mapping.createIntegerField(FIELD_IT_COVERED_CONDITIONS);
+    mapping.createIntegerField(FIELD_OVERALL_LINE_HITS);
+    mapping.createIntegerField(FIELD_OVERALL_CONDITIONS);
+    mapping.createIntegerField(FIELD_OVERALL_COVERED_CONDITIONS);
+    mapping.stringFieldBuilder(FIELD_SYMBOLS).disableSearch().build();
+    mapping.createShortField(FIELD_DUPLICATIONS);
+    mapping.createDateTimeField(FIELD_UPDATED_AT);
+  }
+
+  public static String docKey(String fileUuid, int line) {
+    return String.format("%s_%d", fileUuid, line);
   }
 }
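
The mapping above now declares _routing as required, with projectUuid as its path, and exposes docKey() for building document ids. A hypothetical write request honouring both rules; the jsonDoc payload is assumed to come from SourceFileResultSetIterator.

import org.elasticsearch.action.update.UpdateRequest;
import org.sonar.server.source.index.SourceLineIndexDefinition;

class SourceLineRoutingSketch {
  static UpdateRequest upsertLine(String projectUuid, String fileUuid, int line, byte[] jsonDoc) {
    // the document id combines file UUID and line number; routing keeps all lines
    // of a project on the same shard
    return new UpdateRequest(SourceLineIndexDefinition.INDEX, SourceLineIndexDefinition.TYPE,
      SourceLineIndexDefinition.docKey(fileUuid, line))
      .routing(projectUuid)
      .doc(jsonDoc)
      .upsert(jsonDoc);
  }
}
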
index 9e862e6b38963d8c21a16d90702f4c704d088f3b..011cb348ffa6b9b28fc32c8b59c4033f387865b1 100644 (file)
@@ -19,6 +19,7 @@
  */
 package org.sonar.server.source.index;
 
+import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.index.query.FilterBuilders;
 import org.elasticsearch.index.query.QueryBuilders;
@@ -55,7 +56,7 @@ public class SourceLineIndexer extends BaseIndexer {
     DbSession dbSession = dbClient.openSession(false);
     Connection dbConnection = dbSession.getConnection();
     try {
-      SourceLineResultSetIterator rowIt = SourceLineResultSetIterator.create(dbClient, dbConnection, lastUpdatedAt);
+      SourceFileResultSetIterator rowIt = SourceFileResultSetIterator.create(dbClient, dbConnection, lastUpdatedAt);
       long maxUpdatedAt = doIndex(bulk, rowIt);
       rowIt.close();
       return maxUpdatedAt;
@@ -65,59 +66,64 @@ public class SourceLineIndexer extends BaseIndexer {
     }
   }
 
-  public long index(Iterator<SourceLineResultSetIterator.SourceFile> sourceFiles) {
-    final BulkIndexer bulk = new BulkIndexer(esClient, SourceLineIndexDefinition.INDEX);
-    return doIndex(bulk, sourceFiles);
+  public long index(Iterator<SourceFileResultSetIterator.Row> dbRows) {
+    BulkIndexer bulk = new BulkIndexer(esClient, SourceLineIndexDefinition.INDEX);
+    return doIndex(bulk, dbRows);
   }
 
-  private long doIndex(BulkIndexer bulk, Iterator<SourceLineResultSetIterator.SourceFile> files) {
+  private long doIndex(BulkIndexer bulk, Iterator<SourceFileResultSetIterator.Row> dbRows) {
     long maxUpdatedAt = 0L;
     bulk.start();
-    while (files.hasNext()) {
-      SourceLineResultSetIterator.SourceFile file = files.next();
-      for (SourceLineDoc line : file.getLines()) {
-        bulk.add(newUpsertRequest(line));
+    while (dbRows.hasNext()) {
+      SourceFileResultSetIterator.Row row = dbRows.next();
+      addDeleteRequestsForLinesGreaterThan(bulk, row);
+      for (UpdateRequest updateRequest : row.getLineUpdateRequests()) {
+        bulk.add(updateRequest);
       }
-      deleteLinesFromFileAbove(file.getFileUuid(), file.getLines().size());
-      maxUpdatedAt = Math.max(maxUpdatedAt, file.getUpdatedAt());
+      maxUpdatedAt = Math.max(maxUpdatedAt, row.getUpdatedAt());
     }
     bulk.stop();
     return maxUpdatedAt;
   }
 
-  private UpdateRequest newUpsertRequest(SourceLineDoc lineDoc) {
-    String projectUuid = lineDoc.projectUuid();
-    return new UpdateRequest(SourceLineIndexDefinition.INDEX, SourceLineIndexDefinition.TYPE, lineDoc.key())
-      .routing(projectUuid)
-      .doc(lineDoc.getFields())
-      .upsert(lineDoc.getFields());
-  }
-
   /**
-   * Unindex all lines in file with UUID <code>fileUuid</code> above line <code>lastLine</code>
+   * Use-case:
+   * - the file had 10 lines in the previous analysis
+   * - the same file now has 5 lines
+   * Lines 6 to 10 must be removed from the index.
    */
-  private void deleteLinesFromFileAbove(String fileUuid, int lastLine) {
-    esClient.prepareDeleteByQuery(SourceLineIndexDefinition.INDEX)
+  private void addDeleteRequestsForLinesGreaterThan(BulkIndexer bulk, SourceFileResultSetIterator.Row fileRow) {
+    int numberOfLines = fileRow.getLineUpdateRequests().size();
+    SearchRequestBuilder searchRequest = esClient.prepareSearch(SourceLineIndexDefinition.INDEX)
       .setTypes(SourceLineIndexDefinition.TYPE)
-      .setQuery(QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), FilterBuilders.boolFilter()
-        .must(FilterBuilders.termFilter(FIELD_FILE_UUID, fileUuid).cache(false))
-        .must(FilterBuilders.rangeFilter(SourceLineIndexDefinition.FIELD_LINE).gt(lastLine).cache(false))
-        )).get();
+      .setRouting(fileRow.getProjectUuid())
+      .setQuery(QueryBuilders.filteredQuery(
+        QueryBuilders.matchAllQuery(),
+        FilterBuilders.boolFilter()
+          .must(FilterBuilders.termFilter(FIELD_FILE_UUID, fileRow.getFileUuid()).cache(false))
+          .must(FilterBuilders.rangeFilter(SourceLineIndexDefinition.FIELD_LINE).gt(numberOfLines).cache(false))
+          .cache(false)
+      ));
+    bulk.addDeletion(searchRequest);
   }
 
   public void deleteByFile(String fileUuid) {
-    esClient.prepareDeleteByQuery(SourceLineIndexDefinition.INDEX)
+    // TODO would be great to have the projectUuid for routing
+    SearchRequestBuilder searchRequest = esClient.prepareSearch(SourceLineIndexDefinition.INDEX)
       .setTypes(SourceLineIndexDefinition.TYPE)
-      .setQuery(QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(),
-        FilterBuilders.termFilter(FIELD_FILE_UUID, fileUuid).cache(false)))
-      .get();
+      .setQuery(QueryBuilders.filteredQuery(
+        QueryBuilders.matchAllQuery(),
+        FilterBuilders.termFilter(FIELD_FILE_UUID, fileUuid).cache(false)));
+    BulkIndexer.delete(esClient, SourceLineIndexDefinition.INDEX, searchRequest);
   }
 
   public void deleteByProject(String projectUuid) {
-    esClient.prepareDeleteByQuery(SourceLineIndexDefinition.INDEX)
+    SearchRequestBuilder searchRequest = esClient.prepareSearch(SourceLineIndexDefinition.INDEX)
+      .setRouting(projectUuid)
       .setTypes(SourceLineIndexDefinition.TYPE)
-      .setQuery(QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(),
-        FilterBuilders.termFilter(FIELD_PROJECT_UUID, projectUuid).cache(false)))
-      .get();
+      .setQuery(QueryBuilders.filteredQuery(
+        QueryBuilders.matchAllQuery(),
+        FilterBuilders.termFilter(FIELD_PROJECT_UUID, projectUuid).cache(false)));
+    BulkIndexer.delete(esClient, SourceLineIndexDefinition.INDEX, searchRequest);
   }
 }
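The indexer above no longer relies on the deprecated delete-by-query API: deletions are expressed as a search request and handed to BulkIndexer. Below is a minimal, hypothetical sketch of the scroll-then-bulk-delete pattern such a helper can implement; the helper class name and hard-coded field name are illustrative assumptions, not the actual BulkIndexer internals.

import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;

public class ScrollDeleteSketch {
  // Delete all documents matching a file UUID by scrolling over the hits and
  // sending plain bulk delete requests, instead of a delete-by-query call.
  public static void deleteByFileUuid(Client client, String index, String type, String fileUuid) {
    SearchResponse response = client.prepareSearch(index)
      .setTypes(type)
      .setScroll(TimeValue.timeValueMinutes(5))
      .setSize(100)
      .setQuery(QueryBuilders.filteredQuery(
        QueryBuilders.matchAllQuery(),
        FilterBuilders.termFilter("fileUuid", fileUuid)))
      .get();
    while (response.getHits().getHits().length > 0) {
      BulkRequestBuilder bulk = client.prepareBulk();
      for (SearchHit hit : response.getHits().getHits()) {
        bulk.add(client.prepareDelete(index, type, hit.getId()));
      }
      bulk.get();
      response = client.prepareSearchScroll(response.getScrollId())
        .setScroll(TimeValue.timeValueMinutes(5))
        .get();
    }
  }
}

Routing is intentionally omitted in this sketch; the production code above sets it whenever the project UUID is known, as the TODO in deleteByFile notes.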
diff --git a/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineResultSetIterator.java b/server/sonar-server/src/main/java/org/sonar/server/source/index/SourceLineResultSetIterator.java
deleted file mode 100644 (file)
index e51d312..0000000
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
- */
-package org.sonar.server.source.index;
-
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.StringUtils;
-import org.sonar.core.source.db.FileSourceDto;
-import org.sonar.server.db.DbClient;
-import org.sonar.server.db.ResultSetIterator;
-import org.sonar.server.source.db.FileSourceDb;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.Date;
-import java.util.List;
-
-/**
- * Scroll over table FILE_SOURCES and directly parse CSV field required to
- * populate the index sourcelines
- */
-public class SourceLineResultSetIterator extends ResultSetIterator<SourceLineResultSetIterator.SourceFile> {
-
-  public static class SourceFile {
-    private final String fileUuid;
-    private final long updatedAt;
-    private final List<SourceLineDoc> lines = Lists.newArrayList();
-
-    public SourceFile(String fileUuid, long updatedAt) {
-      this.fileUuid = fileUuid;
-      this.updatedAt = updatedAt;
-    }
-
-    public String getFileUuid() {
-      return fileUuid;
-    }
-
-    public long getUpdatedAt() {
-      return updatedAt;
-    }
-
-    public List<SourceLineDoc> getLines() {
-      return lines;
-    }
-
-    public void addLine(SourceLineDoc line) {
-      this.lines.add(line);
-    }
-  }
-
-  private static final String[] FIELDS = {
-    // column 1
-    "project_uuid",
-    "file_uuid",
-    "updated_at",
-    "binary_data"
-  };
-
-  private static final String SQL_ALL = "select " + StringUtils.join(FIELDS, ",") + " from file_sources";
-
-  private static final String SQL_AFTER_DATE = SQL_ALL + " where updated_at>?";
-
-  public static SourceLineResultSetIterator create(DbClient dbClient, Connection connection, long afterDate) {
-    try {
-      String sql = afterDate > 0L ? SQL_AFTER_DATE : SQL_ALL;
-      // rows are big, so they are scrolled once at a time (one row in memory at a time)
-      PreparedStatement stmt = dbClient.newScrollingSingleRowSelectStatement(connection, sql);
-      if (afterDate > 0L) {
-        stmt.setLong(1, afterDate);
-      }
-      return new SourceLineResultSetIterator(stmt);
-    } catch (SQLException e) {
-      throw new IllegalStateException("Fail to prepare SQL request to select all file sources", e);
-    }
-  }
-
-  private SourceLineResultSetIterator(PreparedStatement stmt) throws SQLException {
-    super(stmt);
-  }
-
-  @Override
-  protected SourceFile read(ResultSet rs) throws SQLException {
-    String projectUuid = rs.getString(1);
-    String fileUuid = rs.getString(2);
-    long updatedAt = rs.getLong(3);
-    Date updatedDate = new Date(updatedAt);
-
-    SourceFile result = new SourceFile(fileUuid, updatedAt);
-    FileSourceDb.Data data = FileSourceDto.decodeData(rs.getBinaryStream(4));
-    for (FileSourceDb.Line line : data.getLinesList()) {
-      SourceLineDoc doc = new SourceLineDoc();
-      doc.setProjectUuid(projectUuid);
-      doc.setFileUuid(fileUuid);
-      doc.setLine(line.getLine());
-      doc.setUpdateDate(updatedDate);
-      doc.setScmRevision(line.getScmRevision());
-      doc.setScmAuthor(line.getScmAuthor());
-      doc.setScmDate(line.hasScmDate() ? new Date(line.getScmDate()) : null);
-      // UT
-      doc.setUtLineHits(line.hasUtLineHits() ? line.getUtLineHits() : null);
-      doc.setUtConditions(line.hasUtConditions() ? line.getUtConditions() : null);
-      doc.setUtCoveredConditions(line.hasUtCoveredConditions() ? line.getUtCoveredConditions() : null);
-      // IT
-      doc.setItLineHits(line.hasItLineHits() ? line.getItLineHits() : null);
-      doc.setItConditions(line.hasItConditions() ? line.getItConditions() : null);
-      doc.setItCoveredConditions(line.hasItCoveredConditions() ? line.getItCoveredConditions() : null);
-      // OVERALL
-      doc.setOverallLineHits(line.hasOverallLineHits() ? line.getOverallLineHits() : null);
-      doc.setOverallConditions(line.hasOverallConditions() ? line.getOverallConditions() : null);
-      doc.setOverallCoveredConditions(line.hasOverallCoveredConditions() ? line.getOverallCoveredConditions() : null);
-
-      doc.setHighlighting(line.hasHighlighting() ? line.getHighlighting() : null);
-      doc.setSymbols(line.hasSymbols() ? line.getSymbols() : null);
-      doc.setDuplications(line.getDuplicationsList());
-
-      // source is always the latest field. All future fields will be added between duplications (14) and source.
-      doc.setSource(line.hasSource() ? line.getSource() : null);
-
-      result.addLine(doc);
-    }
-    return result;
-  }
-}
diff --git a/server/sonar-server/src/main/java/org/sonar/server/util/DateCollector.java b/server/sonar-server/src/main/java/org/sonar/server/util/DateCollector.java
new file mode 100644 (file)
index 0000000..53ab6e9
--- /dev/null
@@ -0,0 +1,46 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.util;
+
+import javax.annotation.Nullable;
+
+import java.util.Date;
+
+public class DateCollector {
+
+  private long maxDate = 0L;
+
+  public void add(@Nullable Date d) {
+    if (d != null) {
+      add(d.getTime());
+    }
+  }
+
+  public void add(long date) {
+    maxDate = Math.max(maxDate, date);
+  }
+
+  /**
+   * The most recent collected date. Value is zero if no dates were collected.
+   */
+  public long getMax() {
+    return maxDate;
+  }
+}
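DateCollector simply keeps the highest timestamp it has seen and ignores null dates. A minimal usage sketch (hypothetical caller code, based only on the class above):

import java.util.Date;

import org.sonar.server.util.DateCollector;

public class DateCollectorUsage {
  public static void main(String[] args) {
    DateCollector collector = new DateCollector();
    collector.add((Date) null);                  // null dates are ignored
    collector.add(new Date(1_420_066_800_000L));
    collector.add(1_400_000_000_000L);
    // most recent collected date, or 0L if nothing was added
    System.out.println(collector.getMax());      // prints 1420066800000
  }
}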
index 660dfcd4ded935d50eb170db6b0c7ba0e677fe13..a159389a22bc529f28718dfd65c4ad7ef34554f2 100644 (file)
@@ -90,10 +90,11 @@ public class ProgressLogger {
     task.log();
   }
 
-  private static class LoggerTimerTask extends TimerTask {
+  private class LoggerTimerTask extends TimerTask {
     private final AtomicLong counter;
     private final Logger logger;
     private String pluralLabel = "rows";
+    private long previousCounter = 0L;
 
     private LoggerTimerTask(AtomicLong counter, Logger logger) {
       this.counter = counter;
@@ -106,7 +107,9 @@ public class ProgressLogger {
     }
 
     private void log() {
-      logger.info(String.format("%d %s processed", counter.get(), pluralLabel));
+      long current = counter.get();
+      logger.info(String.format("%d %s processed (%d items/sec)", current, pluralLabel, 1000 * (current-previousCounter) / periodMs));
+      previousCounter = current;
     }
   }
 }
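The periodic log now reports throughput in addition to the absolute counter. A small illustration of the rate formula used by LoggerTimerTask, with made-up numbers:

public class RateExample {
  public static void main(String[] args) {
    long periodMs = 60_000L;        // logging period of the ProgressLogger
    long previousCounter = 30_000L; // counter value at the previous tick
    long current = 42_000L;         // counter value at this tick
    long ratePerSecond = 1000 * (current - previousCounter) / periodMs;
    System.out.println(ratePerSecond + " items/sec"); // prints "200 items/sec"
  }
}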
index 6a986068ae56356305219bdb3623ddd7f453779c..b2d8b6b064be19ec73b00ef7fed687f6e6aeffbb 100644 (file)
@@ -21,6 +21,7 @@ package org.sonar.server.activity.index;
 
 import org.apache.commons.dbutils.DbUtils;
 import org.assertj.core.data.MapEntry;
+import org.elasticsearch.action.update.UpdateRequest;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -32,6 +33,7 @@ import org.sonar.server.db.DbClient;
 import org.sonar.test.DbTests;
 
 import java.sql.Connection;
+import java.util.Map;
 
 import static org.assertj.core.api.Assertions.assertThat;
 
@@ -56,23 +58,29 @@ public class ActivityResultSetIteratorTest {
     DbUtils.closeQuietly(connection);
   }
 
+  /**
+   * Iterates over the two rows in the table.
+   */
   @Test
   public void traverse() throws Exception {
     dbTester.prepareDbUnit(getClass(), "traverse.xml");
     ActivityResultSetIterator it = ActivityResultSetIterator.create(client, connection, 0L);
+
     assertThat(it.hasNext()).isTrue();
-    ActivityDoc doc = it.next();
-    assertThat(doc).isNotNull();
-    assertThat(doc.getKey()).isEqualTo("UUID1");
-    assertThat(doc.getAction()).isEqualTo("THE_ACTION");
-    assertThat(doc.getMessage()).isEqualTo("THE_MSG");
-    assertThat(doc.getDetails()).containsOnly(MapEntry.entry("foo", "bar"));
-    assertThat(doc.getLogin()).isEqualTo("THE_AUTHOR");
+    UpdateRequest request = it.next();
+    Map<String, Object> doc = request.doc().sourceAsMap();
+    assertThat(doc.get(ActivityIndexDefinition.FIELD_KEY)).isEqualTo("UUID1");
+    assertThat(doc.get(ActivityIndexDefinition.FIELD_ACTION)).isEqualTo("THE_ACTION");
+    assertThat(doc.get(ActivityIndexDefinition.FIELD_MESSAGE)).isEqualTo("THE_MSG");
+    assertThat((Map) doc.get(ActivityIndexDefinition.FIELD_DETAILS)).containsOnly(MapEntry.entry("foo", "bar"));
+    assertThat(doc.get(ActivityIndexDefinition.FIELD_LOGIN)).isEqualTo("THE_AUTHOR");
 
     assertThat(it.hasNext()).isTrue();
     assertThat(it.next()).isNotNull();
     assertThat(it.hasNext()).isFalse();
     it.close();
+
+    assertThat(it.getMaxRowDate()).isEqualTo(1420066800000L);
   }
 
   @Test
@@ -81,11 +89,25 @@ public class ActivityResultSetIteratorTest {
     ActivityResultSetIterator it = ActivityResultSetIterator.create(client, connection, DateUtils.parseDate("2014-12-01").getTime());
 
     assertThat(it.hasNext()).isTrue();
-    ActivityDoc doc = it.next();
-    assertThat(doc).isNotNull();
-    assertThat(doc.getKey()).isEqualTo("UUID2");
+    UpdateRequest request = it.next();
+    assertThat(request).isNotNull();
+    Map<String, Object> doc = request.doc().sourceAsMap();
+    assertThat(doc.get(ActivityIndexDefinition.FIELD_KEY)).isEqualTo("UUID2");
 
     assertThat(it.hasNext()).isFalse();
     it.close();
+
+    assertThat(it.getMaxRowDate()).isEqualTo(1420066800000L);
+  }
+
+  @Test
+  public void nothing_to_traverse() throws Exception {
+    dbTester.prepareDbUnit(getClass(), "traverse.xml");
+    ActivityResultSetIterator it = ActivityResultSetIterator.create(client, connection, DateUtils.parseDate("2030-01-01").getTime());
+
+    assertThat(it.hasNext()).isFalse();
+    it.close();
+
+    assertThat(it.getMaxRowDate()).isEqualTo(0L);
   }
 }
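The iterator now emits ready-to-send UpdateRequest objects and records the most recent row date. A hypothetical consumer sketch, mirroring the bulk loop of SourceLineIndexer earlier in this commit (the BulkIndexer package is assumed and this is not the actual ActivityIndexer code):

import org.elasticsearch.action.update.UpdateRequest;

import org.sonar.server.activity.index.ActivityResultSetIterator;
import org.sonar.server.es.BulkIndexer; // assumed package location

public class ActivityBulkSketch {
  static long indexActivities(BulkIndexer bulk, ActivityResultSetIterator rows) {
    bulk.start();
    while (rows.hasNext()) {
      UpdateRequest request = rows.next();
      bulk.add(request);
    }
    bulk.stop();
    // highest row date seen; stored as the starting point of the next incremental indexing
    return rows.getMaxRowDate();
  }
}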
diff --git a/server/sonar-server/src/test/java/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDatesTest.java b/server/sonar-server/src/test/java/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDatesTest.java
deleted file mode 100644 (file)
index 01b5a15..0000000
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
- */
-
-package org.sonar.server.db.migrations.v51;
-
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.sonar.api.utils.System2;
-import org.sonar.core.persistence.DbTester;
-import org.sonar.server.db.migrations.DatabaseMigration;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-import static org.sonar.api.utils.DateUtils.parseDate;
-
-public class FeedProjectMeasuresLongDatesTest {
-  @ClassRule
-  public static DbTester db = new DbTester().schema(FeedProjectMeasuresLongDatesTest.class, "schema.sql");
-
-  @Before
-  public void before() throws Exception {
-    db.prepareDbUnit(getClass(), "before.xml");
-  }
-
-  @Test
-  public void execute() throws Exception {
-    DatabaseMigration migration = newMigration(System2.INSTANCE);
-
-    migration.execute();
-
-    int count = db
-      .countSql("select count(*) from project_measures where " +
-        "measure_date_ms is not null");
-    assertThat(count).isEqualTo(2);
-  }
-
-  @Test
-  public void take_now_if_date_in_the_future() throws Exception {
-    System2 system = mock(System2.class);
-    when(system.now()).thenReturn(1234L);
-
-    DatabaseMigration migration = newMigration(system);
-
-    migration.execute();
-
-    int count = db
-      .countSql("select count(*) from project_measures where " +
-        "measure_date_ms = 1234");
-    assertThat(count).isEqualTo(1);
-  }
-
-  @Test
-  public void take_snapshot_date_if_in_the_past() throws Exception {
-    DatabaseMigration migration = newMigration(System2.INSTANCE);
-
-    migration.execute();
-
-    long snapshotTime = parseDate("2014-09-25").getTime();
-    int count = db
-      .countSql("select count(*) from project_measures where " +
-        "measure_date_ms=" + snapshotTime);
-    assertThat(count).isEqualTo(1);
-  }
-
-  private FeedProjectMeasuresLongDates newMigration(System2 system) {
-    return new FeedProjectMeasuresLongDates(db.database(), system);
-  }
-}
index 7f4e4e6a16f1f54615ad3d4fdc26fd992eb7e46a..6cb28e3db7ed38073b84f67526915bb269eb577c 100644 (file)
@@ -23,17 +23,15 @@ import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.unit.ByteSizeUnit;
-import org.elasticsearch.common.unit.ByteSizeValue;
-import org.junit.ClassRule;
+import org.junit.Rule;
 import org.junit.Test;
 
 import static org.assertj.core.api.Assertions.assertThat;
 
 public class BulkIndexerTest {
 
-  @ClassRule
-  public static EsTester esTester = new EsTester().addDefinitions(new FakeIndexDefinition().setReplicas(1));
+  @Rule
+  public EsTester esTester = new EsTester().addDefinitions(new FakeIndexDefinition().setReplicas(1));
 
   @Test
   public void index_nothing() throws Exception {
@@ -67,8 +65,7 @@ public class BulkIndexerTest {
     assertThat(replicas()).isEqualTo(1);
 
     BulkIndexer indexer = new BulkIndexer(esTester.client(), FakeIndexDefinition.INDEX)
-      .setLarge(true)
-      .setFlushByteSize(new ByteSizeValue(1, ByteSizeUnit.BYTES).bytes());
+      .setLarge(true);
     indexer.start();
 
     // replicas are temporarily disabled
@@ -85,6 +82,22 @@ public class BulkIndexerTest {
     assertThat(replicas()).isEqualTo(1);
   }
 
+  @Test
+  public void disable_refresh() throws Exception {
+    BulkIndexer indexer = new BulkIndexer(esTester.client(), FakeIndexDefinition.INDEX)
+      .setDisableRefresh(true);
+    indexer.start();
+    indexer.add(newIndexRequest(42));
+    indexer.add(newIndexRequest(78));
+    indexer.stop();
+
+    assertThat(count()).isEqualTo(0);
+
+    esTester.client().prepareRefresh(FakeIndexDefinition.INDEX).get();
+    assertThat(count()).isEqualTo(2);
+  }
+
+
   private long count() {
     return esTester.countDocuments("fakes", "fake");
   }
index e5139c098b457518e05474c3f62603fa323dfb37..c9f2890d349baf9c2aa5b00b9301396511074e47 100644 (file)
@@ -28,6 +28,7 @@ import org.sonar.server.issue.index.IssueDoc;
 import org.sonar.server.search.BaseDoc;
 import org.sonar.test.TestUtils;
 
+import java.util.Date;
 import java.util.List;
 import java.util.Map;
 
@@ -66,4 +67,13 @@ public class EsUtilsTest {
   public void util_class() throws Exception {
     assertThat(TestUtils.hasOnlyPrivateConstructors(EsUtils.class));
   }
+
+  @Test
+  public void es_date_format() throws Exception {
+    assertThat(EsUtils.formatDateTime(new Date(1_500_000_000_000L))).startsWith("2017-07-").hasSize(29);
+    assertThat(EsUtils.formatDateTime(null)).isNull();
+
+    assertThat(EsUtils.parseDateTime("2017-07-14T04:40:00.000+02:00").getTime()).isEqualTo(1_500_000_000_000L);
+    assertThat(EsUtils.parseDateTime(null)).isNull();
+  }
 }
index 3b033f6646b0d018190a47ff92853b658b7c865e..5ed5fd6380bd9d4801e3d9541061703516b0a3c7 100644 (file)
@@ -41,6 +41,7 @@ public class FakeIndexDefinition implements IndexDefinition {
   public void define(IndexDefinitionContext context) {
     NewIndex index = context.create(INDEX);
     index.getSettings().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, replicas);
+    index.getSettings().put("index.refresh_interval", "-1");
     NewIndex.NewIndexType type = index.createType(TYPE);
     type.createIntegerField(INT_FIELD);
   }
index 2caaa0bd6b51218790ddb2e9526afd1e61f2705c..def3b0afc7400514fa09c64e8e5bc3cc3848028a 100644 (file)
@@ -19,7 +19,6 @@
  */
 package org.sonar.server.issue;
 
-import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Multiset;
@@ -60,9 +59,9 @@ import org.sonar.server.permission.InternalPermissionService;
 import org.sonar.server.permission.PermissionChange;
 import org.sonar.server.rule.RuleTesting;
 import org.sonar.server.rule.db.RuleDao;
-import org.sonar.server.source.index.SourceLineDoc;
+import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.server.source.index.SourceFileResultSetIterator;
 import org.sonar.server.source.index.SourceLineIndexer;
-import org.sonar.server.source.index.SourceLineResultSetIterator;
 import org.sonar.server.tester.ServerTester;
 import org.sonar.server.user.MockUserSession;
 import org.sonar.server.user.NewUser;
@@ -77,7 +76,6 @@ import java.util.Map;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.entry;
 import static org.junit.Assert.fail;
-import static org.sonar.server.source.index.SourceLineIndexDefinition.*;
 
 public class IssueServiceMediumTest {
 
@@ -648,16 +646,13 @@ public class IssueServiceMediumTest {
   }
 
   private void newSourceLine(ComponentDto file, int line, String scmAuthor) {
-    SourceLineDoc line1 = new SourceLineDoc(ImmutableMap.<String, Object>builder()
-      .put(FIELD_PROJECT_UUID, file.projectUuid())
-      .put(FIELD_FILE_UUID, file.uuid())
-      .put(FIELD_LINE, line)
-      .put(FIELD_UPDATED_AT, new Date())
-      .put(FIELD_SCM_AUTHOR, scmAuthor)
-      .build());
-    SourceLineResultSetIterator.SourceFile sourceFile = new SourceLineResultSetIterator.SourceFile(file.uuid(), System.currentTimeMillis());
-    sourceFile.addLine(line1);
-    tester.get(SourceLineIndexer.class).index(Iterators.singletonIterator(sourceFile));
+    FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+    dataBuilder.addLinesBuilder()
+      .setLine(line)
+      .setScmAuthor(scmAuthor)
+      .build();
+    SourceFileResultSetIterator.Row row = SourceFileResultSetIterator.toRow(file.projectUuid(), file.uuid(), new Date(), dataBuilder.build());
+    tester.get(SourceLineIndexer.class).index(Iterators.singletonIterator(row));
   }
 
   private void newUser(String login) {
index 038d841cf8f11af64c108d15e820db9497694afa..5f0aa3305a4cfad325ebc5eda6a911a4c2f7e2c1 100644 (file)
@@ -78,13 +78,23 @@ public class SearchClientMediumTest {
     }
   }
 
+  @Test
+  public void delete_by_query_is_not_supported() throws Exception {
+    try {
+      searchClient.prepareDeleteByQuery();
+      fail();
+    } catch (UnsupportedOperationException e) {
+      assertThat(e).hasMessage("Delete by query must not be used. See https://github.com/elastic/elasticsearch/issues/10067. See alternatives in BulkIndexer.");
+    }
+  }
+
   @Test
   public void prepare_percolate_is_not_yet_implemented() throws Exception {
     try {
       searchClient.preparePercolate();
       fail();
-    } catch (Exception e) {
-      assertThat(e).isInstanceOf(IllegalStateException.class).hasMessage("Not yet implemented");
+    } catch (IllegalStateException e) {
+      assertThat(e).hasMessage("Not yet implemented");
     }
   }
 
diff --git a/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceFileResultSetIteratorTest.java b/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceFileResultSetIteratorTest.java
new file mode 100644 (file)
index 0000000..6775d28
--- /dev/null
@@ -0,0 +1,165 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.source.index;
+
+import org.assertj.core.data.MapEntry;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.sonar.core.persistence.DbTester;
+import org.sonar.server.db.DbClient;
+import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.server.source.db.FileSourceTesting;
+import org.sonar.test.DbTests;
+
+import java.sql.Connection;
+import java.util.Map;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.fail;
+
+@Category(DbTests.class)
+public class SourceFileResultSetIteratorTest {
+
+  @ClassRule
+  public static DbTester db = new DbTester().schema(SourceFileResultSetIteratorTest.class, "schema.sql");
+
+  DbClient dbClient;
+
+  Connection connection;
+
+  @Before
+  public void setUp() throws Exception {
+    dbClient = new DbClient(db.database(), db.myBatis());
+    connection = db.openConnection();
+  }
+
+  @After
+  public void after() throws Exception {
+    connection.close();
+  }
+
+  @Test
+  public void traverse_db() throws Exception {
+    db.prepareDbUnit(getClass(), "shared.xml");
+    FileSourceTesting.updateDataColumn(connection, "F1", FileSourceTesting.newFakeData(3).build());
+
+    SourceFileResultSetIterator it = SourceFileResultSetIterator.create(dbClient, connection, 0L);
+    assertThat(it.hasNext()).isTrue();
+    SourceFileResultSetIterator.Row row = it.next();
+    assertThat(row.getProjectUuid()).isEqualTo("P1");
+    assertThat(row.getFileUuid()).isEqualTo("F1");
+    assertThat(row.getUpdatedAt()).isEqualTo(1416239042000L);
+    assertThat(row.getLineUpdateRequests()).hasSize(3);
+
+    UpdateRequest firstRequest = row.getLineUpdateRequests().get(0);
+    Map<String, Object> doc = firstRequest.doc().sourceAsMap();
+    assertThat(doc).contains(
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_PROJECT_UUID, "P1"),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_FILE_UUID, "F1"),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_LINE, 1),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_SCM_REVISION, "REVISION_1"),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_SCM_AUTHOR, "AUTHOR_1"),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_HIGHLIGHTING, "HIGHLIGHTING_1"),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_SYMBOLS, "SYMBOLS_1"),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_UT_LINE_HITS, 1),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_UT_CONDITIONS, 2),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_UT_COVERED_CONDITIONS, 3),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_IT_LINE_HITS, 4),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_IT_CONDITIONS, 5),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_IT_COVERED_CONDITIONS, 6),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_OVERALL_LINE_HITS, 7),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_OVERALL_CONDITIONS, 8),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_OVERALL_COVERED_CONDITIONS, 9)
+      );
+    it.close();
+  }
+
+  /**
+   * File with one line. No metadata available on the line.
+   */
+  @Test
+  public void minimal_data() throws Exception {
+    db.prepareDbUnit(getClass(), "shared.xml");
+    FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+    dataBuilder.addLinesBuilder().setLine(1).build();
+    FileSourceTesting.updateDataColumn(connection, "F1", dataBuilder.build());
+
+    SourceFileResultSetIterator it = SourceFileResultSetIterator.create(dbClient, connection, 0L);
+    SourceFileResultSetIterator.Row row = it.next();
+    assertThat(row.getProjectUuid()).isEqualTo("P1");
+    assertThat(row.getFileUuid()).isEqualTo("F1");
+    assertThat(row.getUpdatedAt()).isEqualTo(1416239042000L);
+    assertThat(row.getLineUpdateRequests()).hasSize(1);
+    UpdateRequest firstRequest = row.getLineUpdateRequests().get(0);
+    Map<String, Object> doc = firstRequest.doc().sourceAsMap();
+    assertThat(doc).contains(
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_PROJECT_UUID, "P1"),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_FILE_UUID, "F1"),
+      MapEntry.entry(SourceLineIndexDefinition.FIELD_LINE, 1)
+      );
+    // null values
+    assertThat(doc).containsKeys(
+      SourceLineIndexDefinition.FIELD_SCM_REVISION,
+      SourceLineIndexDefinition.FIELD_SCM_AUTHOR,
+      SourceLineIndexDefinition.FIELD_HIGHLIGHTING,
+      SourceLineIndexDefinition.FIELD_SYMBOLS,
+      SourceLineIndexDefinition.FIELD_UT_LINE_HITS,
+      SourceLineIndexDefinition.FIELD_UT_CONDITIONS,
+      SourceLineIndexDefinition.FIELD_UT_COVERED_CONDITIONS,
+      SourceLineIndexDefinition.FIELD_IT_LINE_HITS,
+      SourceLineIndexDefinition.FIELD_IT_CONDITIONS,
+      SourceLineIndexDefinition.FIELD_IT_COVERED_CONDITIONS,
+      SourceLineIndexDefinition.FIELD_OVERALL_LINE_HITS,
+      SourceLineIndexDefinition.FIELD_OVERALL_CONDITIONS,
+      SourceLineIndexDefinition.FIELD_OVERALL_COVERED_CONDITIONS
+    );
+    it.close();
+  }
+
+  @Test
+  public void filter_by_date() throws Exception {
+    db.prepareDbUnit(getClass(), "shared.xml");
+
+    SourceFileResultSetIterator iterator = SourceFileResultSetIterator.create(dbClient, connection, 2000000000000L);
+    assertThat(iterator.hasNext()).isFalse();
+    iterator.close();
+  }
+
+  @Test
+  public void fail_on_bad_data_format() throws Exception {
+    db.prepareDbUnit(getClass(), "shared.xml");
+
+    FileSourceTesting.updateDataColumn(connection, "F1", "THIS_IS_NOT_PROTOBUF".getBytes());
+
+    SourceFileResultSetIterator iterator = SourceFileResultSetIterator.create(dbClient, connection, 0L);
+    try {
+      assertThat(iterator.hasNext()).isTrue();
+      iterator.next();
+      fail();
+    } catch (IllegalStateException e) {
+      // ok
+    }
+    iterator.close();
+  }
+}
index 8560c032717267262648b640cc0d8b8424b794c8..58ba4796f547fbcd46acce4f59edf16fdda21bce 100644 (file)
 package org.sonar.server.source.index;
 
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterators;
-import org.apache.commons.io.IOUtils;
-import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.apache.commons.io.FileUtils;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.index.query.QueryBuilders;
@@ -37,11 +35,12 @@ import org.sonar.api.utils.DateUtils;
 import org.sonar.core.persistence.DbTester;
 import org.sonar.server.db.DbClient;
 import org.sonar.server.es.EsTester;
+import org.sonar.server.source.db.FileSourceDb;
 import org.sonar.server.source.db.FileSourceTesting;
 import org.sonar.test.DbTests;
 import org.sonar.test.TestUtils;
 
-import java.io.FileInputStream;
+import java.io.IOException;
 import java.sql.Connection;
 import java.util.Date;
 import java.util.List;
@@ -82,104 +81,100 @@ public class SourceLineIndexerTest {
     assertThat(countDocuments()).isEqualTo(3);
   }
 
+  /**
+   * File F1 in project P1 has one line -> to be updated
+   * File F2 in project P1 has one line -> untouched
+   */
   @Test
   public void update_already_indexed_lines() throws Exception {
-    prepareIndex()
-      .setSource(IOUtils.toString(new FileInputStream(TestUtils.getResource(this.getClass(), "line2.json"))))
-      .get();
-    prepareIndex()
-      .setSource(IOUtils.toString(new FileInputStream(TestUtils.getResource(this.getClass(), "line2_other_file.json"))))
-      .setRefresh(true)
-      .get();
+    indexLine("P1", "F1", 1);
+    indexLine("P1", "F2", 1);
 
     List<Integer> duplications = ImmutableList.of(1, 2, 3);
-    SourceLineDoc line1 = new SourceLineDoc(ImmutableMap.<String, Object>builder()
-      .put(FIELD_PROJECT_UUID, "abcd")
-      .put(FIELD_FILE_UUID, "efgh")
-      .put(FIELD_LINE, 1)
-      .put(FIELD_SCM_REVISION, "cafebabe")
-      .put(FIELD_SCM_DATE, DateUtils.parseDateTime("2014-01-01T12:34:56+0100"))
-      .put(FIELD_SCM_AUTHOR, "polop")
-      .put(FIELD_SOURCE, "package org.sonar.server.source;")
-      .put(FIELD_DUPLICATIONS, duplications)
-      .put(FIELD_UPDATED_AT, new Date())
-      .build());
-    SourceLineResultSetIterator.SourceFile file = new SourceLineResultSetIterator.SourceFile("efgh", System.currentTimeMillis());
-    file.addLine(line1);
-    indexer.index(Iterators.singletonIterator(file));
+    FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+    dataBuilder.addLinesBuilder()
+      .setLine(1)
+      .setScmRevision("new_revision")
+      .setScmAuthor("new_author")
+      .setSource("new source")
+      .addAllDuplications(duplications)
+      .build();
+    SourceFileResultSetIterator.Row dbRow = SourceFileResultSetIterator.toRow("P1", "F1", new Date(), dataBuilder.build());
+    indexer.index(Iterators.singletonIterator(dbRow));
 
     assertThat(countDocuments()).isEqualTo(2L);
 
     SearchResponse fileSearch = prepareSearch()
-      .setQuery(QueryBuilders.termQuery(FIELD_FILE_UUID, "efgh"))
+      .setQuery(QueryBuilders.termQuery(FIELD_FILE_UUID, "F1"))
       .get();
     assertThat(fileSearch.getHits().getTotalHits()).isEqualTo(1L);
     Map<String, Object> fields = fileSearch.getHits().getHits()[0].sourceAsMap();
-    assertThat(fields).hasSize(9);
     assertThat(fields).contains(
-      entry(FIELD_PROJECT_UUID, "abcd"),
-      entry(FIELD_FILE_UUID, "efgh"),
+      entry(FIELD_PROJECT_UUID, "P1"),
+      entry(FIELD_FILE_UUID, "F1"),
       entry(FIELD_LINE, 1),
-      entry(FIELD_SCM_REVISION, "cafebabe"),
-      entry(FIELD_SCM_DATE, "2014-01-01T11:34:56.000Z"),
-      entry(FIELD_SCM_AUTHOR, "polop"),
-      entry(FIELD_SOURCE, "package org.sonar.server.source;"),
+      entry(FIELD_SCM_REVISION, "new_revision"),
+      entry(FIELD_SCM_AUTHOR, "new_author"),
+      entry(FIELD_SOURCE, "new source"),
       entry(FIELD_DUPLICATIONS, duplications)
       );
   }
 
   @Test
   public void delete_file_uuid() throws Exception {
-    addSource("line2.json");
-    addSource("line3.json");
-    addSource("line2_other_file.json");
+    indexLine("P1", "F1", 1);
+    indexLine("P1", "F1", 2);
+    indexLine("P1", "F2", 1);
 
-    indexer.deleteByFile("efgh");
+    indexer.deleteByFile("F1");
 
     List<SearchHit> hits = getDocuments();
     Map<String, Object> document = hits.get(0).getSource();
     assertThat(hits).hasSize(1);
-    assertThat(document.get(FIELD_LINE)).isEqualTo(2);
-    assertThat(document.get(FIELD_FILE_UUID)).isEqualTo("fdsq");
+    assertThat(document.get(FIELD_LINE)).isEqualTo(1);
+    assertThat(document.get(FIELD_FILE_UUID)).isEqualTo("F2");
   }
 
   @Test
   public void delete_by_project_uuid() throws Exception {
-    addSource("line2.json");
-    addSource("line3.json");
-    addSource("line2_other_file.json");
-    addSource("line3_other_project.json");
+    indexLine("P1", "F1", 1);
+    indexLine("P1", "F1", 2);
+    indexLine("P1", "F2", 1);
+    indexLine("P2", "F3", 1);
 
-    indexer.deleteByProject("abcd");
+    indexer.deleteByProject("P1");
 
     List<SearchHit> hits = getDocuments();
+    assertThat(hits).hasSize(1);
     Map<String, Object> document = hits.get(0).getSource();
     assertThat(hits).hasSize(1);
-    assertThat(document.get(FIELD_PROJECT_UUID)).isEqualTo("plmn");
+    assertThat(document.get(FIELD_PROJECT_UUID)).isEqualTo("P2");
   }
 
   @Test
   public void index_source_lines_with_big_test_data() throws Exception {
     Integer bigValue = Short.MAX_VALUE * 2;
-    SourceLineDoc line1 = new SourceLineDoc(ImmutableMap.<String, Object>builder()
-      .put(FIELD_PROJECT_UUID, "abcd")
-      .put(FIELD_FILE_UUID, "efgh")
-      .put(FIELD_LINE, 1)
-      .put(FIELD_SOURCE, "package org.sonar.server.source;")
-      .put(FIELD_UT_LINE_HITS, bigValue)
-      .put(FIELD_UT_CONDITIONS, bigValue)
-      .put(FIELD_UT_COVERED_CONDITIONS, bigValue)
-      .put(FIELD_IT_LINE_HITS, bigValue)
-      .put(FIELD_IT_CONDITIONS, bigValue)
-      .put(FIELD_IT_COVERED_CONDITIONS, bigValue)
-      .put(FIELD_OVERALL_LINE_HITS, bigValue)
-      .put(FIELD_OVERALL_CONDITIONS, bigValue)
-      .put(FIELD_OVERALL_COVERED_CONDITIONS, bigValue)
-      .put(FIELD_UPDATED_AT, new Date())
-      .build());
-    SourceLineResultSetIterator.SourceFile file = new SourceLineResultSetIterator.SourceFile("efgh", System.currentTimeMillis());
-    file.addLine(line1);
-    indexer.index(Iterators.singletonIterator(file));
+
+    FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
+    dataBuilder.addLinesBuilder()
+      .setLine(1)
+      .setScmRevision("cafebabe")
+      .setScmAuthor("polop")
+      .setScmDate(DateUtils.parseDateTime("2014-01-01T12:34:56+0100").getTime())
+      .setSource("package org.sonar.server.source;")
+      .setUtLineHits(bigValue)
+      .setUtConditions(bigValue)
+      .setUtCoveredConditions(bigValue)
+      .setItLineHits(bigValue)
+      .setItConditions(bigValue)
+      .setItCoveredConditions(bigValue)
+      .setOverallLineHits(bigValue)
+      .setOverallConditions(bigValue)
+      .setOverallCoveredConditions(bigValue)
+      .build();
+
+    SourceFileResultSetIterator.Row row = SourceFileResultSetIterator.toRow("P1", "F1", new Date(), dataBuilder.build());
+    indexer.index(Iterators.singletonIterator(row));
 
     List<SearchHit> hits = getDocuments();
     assertThat(hits).hasSize(1);
@@ -195,9 +190,11 @@ public class SourceLineIndexerTest {
     assertThat(document.get(FIELD_OVERALL_COVERED_CONDITIONS)).isEqualTo(bigValue);
   }
 
-  private void addSource(String fileName) throws Exception {
-    prepareIndex()
-      .setSource(IOUtils.toString(new FileInputStream(TestUtils.getResource(this.getClass(), fileName))))
+  private void indexLine(String projectUuid, String fileUuid, int line) throws IOException {
+    es.client().prepareIndex(INDEX, TYPE)
+      .setId(SourceLineIndexDefinition.docKey(fileUuid, line))
+      .setSource(FileUtils.readFileToString(TestUtils.getResource(this.getClass(), projectUuid + "_" + fileUuid + "_line" + line + ".json")))
+      .setRefresh(true)
       .get();
   }
 
@@ -206,10 +203,6 @@ public class SourceLineIndexerTest {
       .setTypes(TYPE);
   }
 
-  private IndexRequestBuilder prepareIndex() {
-    return es.client().prepareIndex(INDEX, TYPE);
-  }
-
   private List<SearchHit> getDocuments() {
     return es.getDocuments(INDEX, TYPE);
   }
diff --git a/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineResultSetIteratorTest.java b/server/sonar-server/src/test/java/org/sonar/server/source/index/SourceLineResultSetIteratorTest.java
deleted file mode 100644 (file)
index aaa10a7..0000000
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * SonarQube, open source software quality management tool.
- * Copyright (C) 2008-2014 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * SonarQube is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * SonarQube is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this program; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
- */
-package org.sonar.server.source.index;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.sonar.core.persistence.DbTester;
-import org.sonar.server.db.DbClient;
-import org.sonar.server.source.db.FileSourceTesting;
-import org.sonar.test.DbTests;
-
-import java.sql.Connection;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.fail;
-
-@Category(DbTests.class)
-public class SourceLineResultSetIteratorTest {
-
-  @ClassRule
-  public static DbTester db = new DbTester().schema(SourceLineResultSetIteratorTest.class, "schema.sql");
-
-  DbClient dbClient;
-
-  Connection connection;
-
-  @Before
-  public void setUp() throws Exception {
-    dbClient = new DbClient(db.database(), db.myBatis());
-    connection = db.openConnection();
-  }
-
-  @After
-  public void after() throws Exception {
-    connection.close();
-  }
-
-  @Test
-  public void parse_db_and_generate_source_line_documents() throws Exception {
-    db.prepareDbUnit(getClass(), "shared.xml");
-    FileSourceTesting.updateDataColumn(connection, "FILE_UUID", FileSourceTesting.newFakeData(3).build());
-
-    SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L);
-    assertThat(iterator.hasNext()).isTrue();
-    SourceLineResultSetIterator.SourceFile file = iterator.next();
-    assertThat(file.getLines()).hasSize(3);
-    SourceLineDoc firstLine = file.getLines().get(0);
-    assertThat(firstLine.projectUuid()).isEqualTo("PROJECT_UUID");
-    assertThat(firstLine.fileUuid()).isEqualTo("FILE_UUID");
-    assertThat(firstLine.line()).isEqualTo(1);
-    assertThat(firstLine.scmRevision()).isEqualTo("REVISION_1");
-    assertThat(firstLine.scmAuthor()).isEqualTo("AUTHOR_1");
-    assertThat(firstLine.highlighting()).isEqualTo("HIGHLIGHTING_1");
-    assertThat(firstLine.symbols()).isEqualTo("SYMBOLS_1");
-    assertThat(firstLine.source()).isEqualTo("SOURCE_1");
-    assertThat(firstLine.utLineHits()).isEqualTo(1);
-    assertThat(firstLine.utConditions()).isEqualTo(2);
-    assertThat(firstLine.utCoveredConditions()).isEqualTo(3);
-    assertThat(firstLine.itLineHits()).isEqualTo(4);
-    assertThat(firstLine.itConditions()).isEqualTo(5);
-    assertThat(firstLine.itCoveredConditions()).isEqualTo(6);
-    assertThat(firstLine.overallLineHits()).isEqualTo(7);
-    assertThat(firstLine.overallConditions()).isEqualTo(8);
-    assertThat(firstLine.overallCoveredConditions()).isEqualTo(9);
-    iterator.close();
-  }
-
-  @Test
-  public void should_ignore_lines_already_handled() throws Exception {
-    db.prepareDbUnit(getClass(), "shared.xml");
-
-    SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 2000000000000L);
-    assertThat(iterator.hasNext()).isFalse();
-    iterator.close();
-  }
-
-  @Test
-  public void should_fail_on_bad_data_format() throws Exception {
-    db.prepareDbUnit(getClass(), "shared.xml");
-
-    FileSourceTesting.updateDataColumn(connection, "FILE_UUID", "THIS_IS_NOT_PROTOBUF".getBytes());
-
-    SourceLineResultSetIterator iterator = SourceLineResultSetIterator.create(dbClient, connection, 0L);
-    try {
-      assertThat(iterator.hasNext()).isTrue();
-      iterator.next();
-      fail();
-    } catch (IllegalStateException e) {
-      // ok
-    }
-    iterator.close();
-  }
-}
diff --git a/server/sonar-server/src/test/java/org/sonar/server/util/DateCollectorTest.java b/server/sonar-server/src/test/java/org/sonar/server/util/DateCollectorTest.java
new file mode 100644 (file)
index 0000000..da7369e
--- /dev/null
@@ -0,0 +1,44 @@
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.sonar.server.util;
+
+import org.junit.Test;
+import org.sonar.api.utils.DateUtils;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class DateCollectorTest {
+
+  DateCollector collector = new DateCollector();
+
+  @Test
+  public void max_is_zero_if_no_dates() throws Exception {
+    assertThat(collector.getMax()).isEqualTo(0L);
+  }
+
+  @Test
+  public void max() throws Exception {
+    collector.add(DateUtils.parseDate("2013-06-01"));
+    collector.add(DateUtils.parseDate("2014-01-01"));
+    collector.add(DateUtils.parseDate("2013-08-01"));
+
+    assertThat(collector.getMax()).isEqualTo(DateUtils.parseDateQuietly("2014-01-01").getTime());
+  }
+}
index 0003d69b5e991444611f2c8245a22d54bb52d24f..ec0afe6966b849143e9330dec6f87f12a14bc47e 100644 (file)
@@ -38,12 +38,12 @@ public class ProgressLoggerTest {
     progress.start();
     Thread.sleep(80L);
     progress.stop();
-    verify(logger, atLeast(1)).info("42 rows processed");
+    verify(logger, atLeast(1)).info(startsWith("42 rows processed"));
 
     // ability to manual log, generally final status
     counter.incrementAndGet();
     progress.log();
-    verify(logger).info("43 rows processed");
+    verify(logger).info(startsWith("43 rows processed"));
   }
 
   @Test
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDatesTest/before.xml b/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDatesTest/before.xml
deleted file mode 100644 (file)
index ee5eb2d..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-<dataset>
-  <!-- new migration -->
-  <project_measures
-      id="1"
-      measure_date="2014-09-25"
-      measure_date_ms="[null]"
-      />
-
-  <!-- re-entrant migration - ignore the ones that are already fed with new dates -->
-  <project_measures
-      id="2"
-      measure_date="2014-09-25"
-      measure_date_ms="1500000000"
-      />
-
-  <!-- NULL dates -->
-  <project_measures
-      id="3"
-      measure_date="[null]"
-      measure_date_ms="[null]"
-      />
-</dataset>
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDatesTest/schema.sql b/server/sonar-server/src/test/resources/org/sonar/server/db/migrations/v51/FeedProjectMeasuresLongDatesTest/schema.sql
deleted file mode 100644 (file)
index 473af7b..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-CREATE TABLE "PROJECT_MEASURES" (
-  "ID" BIGINT NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
-  "MEASURE_DATE" TIMESTAMP,
-  "MEASURE_DATE_MS" BIGINT
-);
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceFileResultSetIteratorTest/schema.sql b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceFileResultSetIteratorTest/schema.sql
new file mode 100644 (file)
index 0000000..859eefe
--- /dev/null
@@ -0,0 +1,10 @@
+
+CREATE TABLE "FILE_SOURCES" (
+  "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
+  "PROJECT_UUID" VARCHAR(50) NOT NULL,
+  "FILE_UUID" VARCHAR(50) NOT NULL,
+  "BINARY_DATA" BINARY(167772150),
+  "DATA_HASH" VARCHAR(50) NOT NULL,
+  "CREATED_AT" BIGINT NOT NULL,
+  "UPDATED_AT" BIGINT NOT NULL
+);
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceFileResultSetIteratorTest/shared.xml b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceFileResultSetIteratorTest/shared.xml
new file mode 100644 (file)
index 0000000..f56c7f5
--- /dev/null
@@ -0,0 +1,6 @@
+<dataset>
+
+  <file_sources id="1" project_uuid="P1" file_uuid="F1" created_at="1416238020000" updated_at="1416239042000"
+    binary_data="" data_hash="" />
+
+</dataset>
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P1_F1_line1.json b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P1_F1_line1.json
new file mode 100644 (file)
index 0000000..82498c6
--- /dev/null
@@ -0,0 +1,19 @@
+{
+  "projectUuid": "P1",
+  "fileUuid": "F1",
+  "line": 1,
+  "scmAuthor": "polop",
+  "scmDate": "2014-01-01T12:34:56.7+01:00",
+  "scmRevision": "cafebabe",
+  "source": "// Empty",
+  "updatedAt": "2014-01-01T23:45:01.8+01:00",
+  "utLineHits": 0,
+  "utConditions": 0,
+  "utCoveredConditions": 0,
+  "itLineHits": 0,
+  "itConditions": 0,
+  "itCoveredConditions": 0,
+  "overallLineHits": 0,
+  "overallConditions": 0,
+  "overallCoveredConditions": 0
+}
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P1_F1_line2.json b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P1_F1_line2.json
new file mode 100644 (file)
index 0000000..e33192a
--- /dev/null
@@ -0,0 +1,19 @@
+{
+  "projectUuid": "P1",
+  "fileUuid": "F1",
+  "line": 2,
+  "scmAuthor": "polop",
+  "scmDate": "2014-01-01T12:34:56.7+01:00",
+  "scmRevision": "cafebabe",
+  "source": "// Empty",
+  "updatedAt": "2014-01-01T23:45:01.8+01:00",
+  "utLineHits": 0,
+  "utConditions": 0,
+  "utCoveredConditions": 0,
+  "itLineHits": 0,
+  "itConditions": 0,
+  "itCoveredConditions": 0,
+  "overallLineHits": 0,
+  "overallConditions": 0,
+  "overallCoveredConditions": 0
+}
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P1_F2_line1.json b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P1_F2_line1.json
new file mode 100644 (file)
index 0000000..7db71d2
--- /dev/null
@@ -0,0 +1,19 @@
+{
+  "projectUuid": "P1",
+  "fileUuid": "F2",
+  "line": 1,
+  "scmAuthor": "polop",
+  "scmDate": "2014-01-01T12:34:56.7+01:00",
+  "scmRevision": "cafebabe",
+  "source": "// Empty",
+  "updatedAt": "2014-01-01T23:45:01.8+01:00",
+  "utLineHits": 0,
+  "utConditions": 0,
+  "utCoveredConditions": 0,
+  "itLineHits": 0,
+  "itConditions": 0,
+  "itCoveredConditions": 0,
+  "overallLineHits": 0,
+  "overallConditions": 0,
+  "overallCoveredConditions": 0
+}
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P2_F3_line1.json b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/P2_F3_line1.json
new file mode 100644 (file)
index 0000000..e87ae84
--- /dev/null
@@ -0,0 +1,19 @@
+{
+  "projectUuid": "P2",
+  "fileUuid": "F3",
+  "line": 1,
+  "scmAuthor": "polop",
+  "scmDate": "2014-01-01T12:34:56.7+01:00",
+  "scmRevision": "cafebabe",
+  "source": "// Empty",
+  "updatedAt": "2014-01-01T23:45:01.8+01:00",
+  "utLineHits": 0,
+  "utConditions": 0,
+  "utCoveredConditions": 0,
+  "itLineHits": 0,
+  "itConditions": 0,
+  "itCoveredConditions": 0,
+  "overallLineHits": 0,
+  "overallConditions": 0,
+  "overallCoveredConditions": 0
+}
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line2.json b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line2.json
deleted file mode 100644 (file)
index a96a8cb..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "projectUuid": "abcd",
-  "fileUuid": "efgh",
-  "line": 2,
-  "scmAuthor": "polop",
-  "scmDate": "2014-01-01T12:34:56.7+01:00",
-  "scmRevision": "cafebabe",
-  "source": "// Empty",
-  "updatedAt": "2014-01-01T23:45:01.8+01:00",
-  "utLineHits": 0,
-  "utConditions": 0,
-  "utCoveredConditions": 0,
-  "itLineHits": 0,
-  "itConditions": 0,
-  "itCoveredConditions": 0,
-  "overallLineHits": 0,
-  "overallConditions": 0,
-  "overallCoveredConditions": 0
-}
\ No newline at end of file
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line2_other_file.json b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line2_other_file.json
deleted file mode 100644 (file)
index d558a7d..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "projectUuid": "abcd",
-  "fileUuid": "fdsq",
-  "line": 2,
-  "scmAuthor": "polop",
-  "scmDate": "2014-01-01T12:34:56.7+01:00",
-  "scmRevision": "cafebabe",
-  "source": "// Empty",
-  "updatedAt": "2014-01-01T23:45:01.8+01:00",
-  "utLineHits": 0,
-  "utConditions": 0,
-  "utCoveredConditions": 0,
-  "itLineHits": 0,
-  "itConditions": 0,
-  "itCoveredConditions": 0,
-  "overallLineHits": 0,
-  "overallConditions": 0,
-  "overallCoveredConditions": 0
-}
\ No newline at end of file
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line3.json b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line3.json
deleted file mode 100644 (file)
index a6899ae..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "projectUuid": "abcd",
-  "fileUuid": "efgh",
-  "line": 3,
-  "scmAuthor": "polop",
-  "scmDate": "2014-01-01T12:34:56.7+01:00",
-  "scmRevision": "cafebabe",
-  "source": "// Empty",
-  "updatedAt": "2014-01-01T23:45:01.8+01:00",
-  "utLineHits": 0,
-  "utConditions": 0,
-  "utCoveredConditions": 0,
-  "itLineHits": 0,
-  "itConditions": 0,
-  "itCoveredConditions": 0,
-  "overallLineHits": 0,
-  "overallConditions": 0,
-  "overallCoveredConditions": 0
-}
\ No newline at end of file
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line3_other_project.json b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineIndexerTest/line3_other_project.json
deleted file mode 100644 (file)
index 107ccb1..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "projectUuid": "plmn",
-  "fileUuid": "efgh",
-  "line": 3,
-  "scmAuthor": "polop",
-  "scmDate": "2014-01-01T12:34:56.7+01:00",
-  "scmRevision": "cafebabe",
-  "source": "// Empty",
-  "updatedAt": "2014-01-01T23:45:01.8+01:00",
-  "utLineHits": 0,
-  "utConditions": 0,
-  "utCoveredConditions": 0,
-  "itLineHits": 0,
-  "itConditions": 0,
-  "itCoveredConditions": 0,
-  "overallLineHits": 0,
-  "overallConditions": 0,
-  "overallCoveredConditions": 0
-}
\ No newline at end of file
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/schema.sql b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/schema.sql
deleted file mode 100644 (file)
index 859eefe..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-
-CREATE TABLE "FILE_SOURCES" (
-  "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
-  "PROJECT_UUID" VARCHAR(50) NOT NULL,
-  "FILE_UUID" VARCHAR(50) NOT NULL,
-  "BINARY_DATA" BINARY(167772150),
-  "DATA_HASH" VARCHAR(50) NOT NULL,
-  "CREATED_AT" BIGINT NOT NULL,
-  "UPDATED_AT" BIGINT NOT NULL
-);
diff --git a/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/shared.xml b/server/sonar-server/src/test/resources/org/sonar/server/source/index/SourceLineResultSetIteratorTest/shared.xml
deleted file mode 100644 (file)
index 521e0db..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-<dataset>
-
-  <file_sources id="1" project_uuid="PROJECT_UUID" file_uuid="FILE_UUID" created_at="1416238020000" updated_at="1416239042000"
-    binary_data="" data_hash="" />
-
-</dataset>
index d115fb1ebfabc1b64306cf163993d92f1ed44988..f69afee49ace9a3054971b6b83ca28e0ec8f34af 100644 (file)
@@ -29,10 +29,18 @@ class ManualMeasure < ActiveRecord::Base
   def created_at
     long_to_date(:created_at)
   end
+
+  def created_at=(date)
+    write_attribute(:created_at, date.to_i*1000)
+  end
   
   def updated_at
     long_to_date(:updated_at)
   end
+
+  def updated_at=(date)
+    write_attribute(:updated_at, date.to_i*1000)
+  end
   
   def long_to_date(attribute)
     date_in_long = read_attribute(attribute)
@@ -155,4 +163,11 @@ class ManualMeasure < ActiveRecord::Base
     end
   end
 
+  def before_save
+    self.updated_at=DateTime.now
+  end
+
+  def before_create
+    self.created_at=DateTime.now
+  end
 end
diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/787_add_manual_measures_long_dates.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/787_add_manual_measures_long_dates.rb
new file mode 100644 (file)
index 0000000..0a29ec8
--- /dev/null
@@ -0,0 +1,29 @@
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+#
+
+#
+# SonarQube 5.1
+#
+class AddManualMeasuresLongDates < ActiveRecord::Migration
+  def self.up
+    add_column 'manual_measures', :created_at_ms, :big_integer, :null => true
+    add_column 'manual_measures', :updated_at_ms, :big_integer, :null => true
+  end
+end
diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/787_add_measures_long_dates.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/787_add_measures_long_dates.rb
deleted file mode 100644 (file)
index 92cc241..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-#
-# SonarQube, open source software quality management tool.
-# Copyright (C) 2008-2014 SonarSource
-# mailto:contact AT sonarsource DOT com
-#
-# SonarQube is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3 of the License, or (at your option) any later version.
-#
-# SonarQube is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
-#
-
-#
-# SonarQube 5.1
-#
-class AddMeasuresLongDates < ActiveRecord::Migration
-  def self.up
-    add_column 'project_measures', :measure_date_ms, :big_integer, :null => true
-    add_column 'manual_measures', :created_at_ms, :big_integer, :null => true
-    add_column 'manual_measures', :updated_at_ms, :big_integer, :null => true
-  end
-end
diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/788_feed_manual_measures_long_dates.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/788_feed_manual_measures_long_dates.rb
new file mode 100644 (file)
index 0000000..23e42b9
--- /dev/null
@@ -0,0 +1,29 @@
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+#
+
+#
+# SonarQube 5.1
+#
+class FeedManualMeasuresLongDates < ActiveRecord::Migration
+  def self.up
+    execute_java_migration('org.sonar.server.db.migrations.v51.FeedManualMeasuresLongDates')
+  end
+end
+
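
Migration 788 delegates the data copy to a Java migration class. Roughly, such a "feed long dates" step reads the legacy TIMESTAMP values and writes them into the *_MS BIGINT columns added by migration 787. The JDBC code below is only an illustrative sketch of that idea under those assumptions; the real FeedManualMeasuresLongDates relies on the server's migration infrastructure rather than raw JDBC, and the class name here is made up.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Timestamp;
import java.sql.Types;

public class FeedManualMeasuresLongDatesSketch {
  void copyDatesToMilliseconds(Connection connection) throws Exception {
    try (PreparedStatement select = connection.prepareStatement(
           "SELECT id, created_at, updated_at FROM manual_measures");
         PreparedStatement update = connection.prepareStatement(
           "UPDATE manual_measures SET created_at_ms=?, updated_at_ms=? WHERE id=?");
         ResultSet rows = select.executeQuery()) {
      while (rows.next()) {
        // copy the legacy TIMESTAMP values into the new BIGINT columns as epoch milliseconds
        setMs(update, 1, rows.getTimestamp("created_at"));
        setMs(update, 2, rows.getTimestamp("updated_at"));
        update.setLong(3, rows.getLong("id"));
        update.addBatch();
      }
      update.executeBatch();
    }
  }

  private static void setMs(PreparedStatement statement, int index, Timestamp value) throws Exception {
    if (value == null) {
      statement.setNull(index, Types.BIGINT);
    } else {
      statement.setLong(index, value.getTime());
    }
  }
}
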
diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/788_feed_measures_long_dates.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/788_feed_measures_long_dates.rb
deleted file mode 100644 (file)
index 04ab9ac..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-#
-# SonarQube, open source software quality management tool.
-# Copyright (C) 2008-2014 SonarSource
-# mailto:contact AT sonarsource DOT com
-#
-# SonarQube is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3 of the License, or (at your option) any later version.
-#
-# SonarQube is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
-#
-
-#
-# SonarQube 5.1
-#
-class FeedMeasuresLongDates < ActiveRecord::Migration
-  def self.up
-    execute_java_migration('org.sonar.server.db.migrations.v51.FeedProjectMeasuresLongDates')
-    execute_java_migration('org.sonar.server.db.migrations.v51.FeedManualMeasuresLongDates')
-  end
-end
-
diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/789_rename_manual_measures_long_dates.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/789_rename_manual_measures_long_dates.rb
new file mode 100644 (file)
index 0000000..b13a340
--- /dev/null
@@ -0,0 +1,32 @@
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+#
+
+#
+# SonarQube 5.1
+#
+class RenameManualMeasuresLongDates < ActiveRecord::Migration
+  def self.up
+    remove_column 'manual_measures', 'created_at'
+    remove_column 'manual_measures', 'updated_at'
+    rename_column 'manual_measures', 'created_at_ms', 'created_at'
+    rename_column 'manual_measures', 'updated_at_ms', 'updated_at'
+  end
+end
+
diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/789_rename_measures_long_dates.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/789_rename_measures_long_dates.rb
deleted file mode 100644 (file)
index 34ad6b0..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# SonarQube, open source software quality management tool.
-# Copyright (C) 2008-2014 SonarSource
-# mailto:contact AT sonarsource DOT com
-#
-# SonarQube is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3 of the License, or (at your option) any later version.
-#
-# SonarQube is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
-#
-
-#
-# SonarQube 5.1
-#
-class RenameMeasuresLongDates < ActiveRecord::Migration
-  def self.up
-    remove_column 'project_measures', 'measure_date'
-    remove_column 'manual_measures', 'created_at'
-    remove_column 'manual_measures', 'updated_at'
-    rename_column 'manual_measures', 'created_at_ms', 'created_at'
-    rename_column 'manual_measures', 'updated_at_ms', 'updated_at'
-    rename_column 'project_measures', 'measure_date_ms', 'measure_date'
-  end
-end
-
index a722b4673ae650ec00c25b439f625a11b6a28ac6..cc273b5b07e682374f2d86c833a6cd9c0d3bbe7e 100644 (file)
@@ -227,7 +227,7 @@ CREATE TABLE "PROJECT_MEASURES" (
   "RULES_CATEGORY_ID" INTEGER,
   "TEXT_VALUE" VARCHAR(4000),
   "TENDENCY" INTEGER,
-  "MEASURE_DATE" BIGINT,
+  "MEASURE_DATE" TIMESTAMP,
   "PROJECT_ID" INTEGER,
   "ALERT_STATUS" VARCHAR(5),
   "ALERT_TEXT" VARCHAR(4000),
index dc5847f7564e60b4742f5fe66dc1bd20b156b508..8ab131f21688b43fb1d02e1c759bd85dcebea7fd 100644 (file)
@@ -27,13 +27,14 @@ import org.sonar.api.database.DatabaseSession;
 import org.sonar.api.measures.Metric;
 import org.sonar.api.rules.RulePriority;
 
-import javax.persistence.*;
-
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.EnumType;
+import javax.persistence.Enumerated;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.Table;
 import java.io.UnsupportedEncodingException;
-import java.util.Date;
-
-import static org.sonar.api.utils.DateUtils.dateToLong;
-import static org.sonar.api.utils.DateUtils.longToDate;
 
 /**
  * This class is the Hibernate model to store a measure in the DB
@@ -70,9 +71,6 @@ public class MeasureModel implements Cloneable {
   @Column(name = "description", updatable = true, nullable = true, length = 4000)
   private String description;
 
-  @Column(name = "measure_date", updatable = true, nullable = true)
-  private Long measureDate;
-
   @Column(name = "rule_id", updatable = true, nullable = true)
   private Integer ruleId;
 
@@ -305,40 +303,6 @@ public class MeasureModel implements Cloneable {
     this.projectId = projectId;
   }
 
-  /**
-   * @return the date of the measure
-   */
-  public Date getMeasureDate() {
-    return longToDate(measureDate);
-  }
-
-  /**
-   * Sets the date for the measure
-   *
-   * @return the current object
-   */
-  public MeasureModel setMeasureDate(Date measureDate) {
-    this.measureDate = dateToLong(measureDate);
-    return this;
-  }
-
-  /**
-   * @return the date of the measure
-   */
-  public Long getMeasureDateMs() {
-    return measureDate;
-  }
-
-  /**
-   * Sets the date for the measure
-   *
-   * @return the current object
-   */
-  public MeasureModel setMeasureDateMs(Long measureDate) {
-    this.measureDate = measureDate;
-    return this;
-  }
-
   /**
    * @return the alert status if there is one, null otherwise
    */
@@ -519,7 +483,6 @@ public class MeasureModel implements Cloneable {
     clone.setRulePriority(getRulePriority());
     clone.setRuleId(getRuleId());
     clone.setSnapshotId(getSnapshotId());
-    clone.setMeasureDate(getMeasureDate());
     clone.setUrl(getUrl());
     clone.setCharacteristicId(getCharacteristicId());
     clone.setPersonId(getPersonId());
index f8645826374765689aaa836792dc27f6054bc5dc..dae87d0522f30f758af5b606d7ae9a855ea0148e 100644 (file)
@@ -69,6 +69,11 @@ public class JsonWriter {
     return new JsonWriter(writer);
   }
 
+  public JsonWriter setSerializeNulls(boolean b) {
+    this.stream.setSerializeNulls(b);
+    return this;
+  }
+
   /**
    * Begins encoding a new array. Each call to this method must be paired with
    * a call to {@link #endArray}. Output is <code>[</code>.
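
The added setSerializeNulls(boolean) switches JsonWriter from its default behaviour, where null properties are silently skipped (see the ignore_null_values_by_default test below), to writing explicit JSON nulls. A short usage sketch follows, using only API visible in this diff and in JsonWriterTest; the field name is arbitrary and the package is assumed to be the usual org.sonar.api.utils.text.

import java.io.StringWriter;
import org.sonar.api.utils.text.JsonWriter;

public class JsonNullsSketch {
  public static void main(String[] args) {
    StringWriter output = new StringWriter();
    JsonWriter json = JsonWriter.of(output);
    // opt in to explicit nulls instead of dropping the property
    json.setSerializeNulls(true);
    json.beginObject()
      .prop("scmAuthor", (String) null)
      .endObject()
      .close();
    System.out.println(output); // prints {"scmAuthor":null}
  }
}
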
index 66b00baacae9f4ff21580a1d2f85eabde708b452..1bf6e453f64e2ec0e45fa4da16df5272edfe4135 100644 (file)
@@ -5,12 +5,12 @@
 
   <insert id="insert" parameterType="MeasureModel" useGeneratedKeys="false">
     INSERT INTO project_measures (
-      value, metric_id, snapshot_id, rule_id, text_value, tendency, measure_date,
+      value, metric_id, snapshot_id, rule_id, text_value, tendency,
       project_id, alert_status, alert_text, url, description, rule_priority, characteristic_id, variation_value_1,
       variation_value_2, variation_value_3, variation_value_4, variation_value_5, person_id, measure_data)
     VALUES (
       #{value, jdbcType=DOUBLE}, #{metricId, jdbcType=INTEGER}, #{snapshotId, jdbcType=INTEGER}, #{ruleId, jdbcType=INTEGER}, #{textValue, jdbcType=VARCHAR}, #{tendency, jdbcType=INTEGER},
-      #{measureDateMs, jdbcType=BIGINT}, #{projectId, jdbcType=INTEGER}, #{alertStatus, jdbcType=VARCHAR}, #{alertText, jdbcType=VARCHAR},
+      #{projectId, jdbcType=INTEGER}, #{alertStatus, jdbcType=VARCHAR}, #{alertText, jdbcType=VARCHAR},
       #{url, jdbcType=VARCHAR}, #{description, jdbcType=VARCHAR}, #{rulePriority.ordinal, jdbcType=INTEGER}, #{characteristicId, jdbcType=INTEGER}, #{variationValue1, jdbcType=DOUBLE},
       #{variationValue2, jdbcType=DOUBLE}, #{variationValue3, jdbcType=DOUBLE}, #{variationValue4, jdbcType=DOUBLE}, #{variationValue5, jdbcType=DOUBLE}, #{personId, jdbcType=INTEGER}, #{data}
     )
index 45280508abb0dde1faee84d9623b1287ed860f9f..f0de3de04342518713f93acb3f7787205d3fedfc 100644 (file)
@@ -101,7 +101,7 @@ public class JsonWriterTest {
   }
 
   @Test
-  public void ignore_null_values() throws Exception {
+  public void ignore_null_values_by_default() throws Exception {
     writer.beginObject()
       .prop("nullNumber", (Number) null)
       .prop("nullString", (String) null)
@@ -113,6 +113,20 @@ public class JsonWriterTest {
     expect("{}");
   }
 
+  @Test
+  public void serialize_null_values() throws Exception {
+    writer.setSerializeNulls(true);
+    writer.beginObject()
+      .prop("nullNumber", (Number) null)
+      .prop("nullString", (String) null)
+      .name("nullNumber").value((Number) null)
+      .name("nullString").value((String) null)
+      .name("nullDate").valueDate(null)
+      .name("nullDateTime").valueDate(null)
+      .endObject().close();
+    expect("{\"nullNumber\":null,\"nullString\":null,\"nullNumber\":null,\"nullString\":null,\"nullDate\":null,\"nullDateTime\":null}");
+  }
+
   @Test
   public void escape_values() throws Exception {
     writer.beginObject()