source.dussan.org Git - sonarqube.git/commitdiff
SONAR-11792 update ES compile dependency to 6.6.2
authorSébastien Lesaint <sebastien.lesaint@sonarsource.com>
Thu, 14 Feb 2019 08:39:25 +0000 (09:39 +0100)
committerSonarTech <sonartech@sonarsource.com>
Tue, 19 Mar 2019 19:21:24 +0000 (20:21 +0100)
29 files changed:
build.gradle
server/sonar-ce/src/main/java/org/sonar/ce/CeDistributedInformationImpl.java
server/sonar-main/src/main/java/org/sonar/application/es/EsConnectorImpl.java
server/sonar-server-common/build.gradle
server/sonar-server-common/src/main/java/org/sonar/server/es/BulkIndexer.java
server/sonar-server-common/src/main/java/org/sonar/server/es/EsClientProvider.java
server/sonar-server-common/src/main/java/org/sonar/server/es/EsUtils.java
server/sonar-server-common/src/main/java/org/sonar/server/es/IndexDefinitionHash.java
server/sonar-server-common/src/main/java/org/sonar/server/es/StickyFacetBuilder.java
server/sonar-server-common/src/main/java/org/sonar/server/es/metadata/MetadataIndex.java
server/sonar-server-common/src/main/java/org/sonar/server/es/newindex/DefaultIndexSettingsElement.java
server/sonar-server-common/src/main/java/org/sonar/server/es/newindex/NewIndex.java
server/sonar-server-common/src/main/java/org/sonar/server/es/request/ProxyCreateIndexRequestBuilder.java
server/sonar-server-common/src/main/java/org/sonar/server/es/request/ProxyPutMappingRequestBuilder.java
server/sonar-server-common/src/main/java/org/sonar/server/rule/index/RuleIndex.java
server/sonar-server-common/src/test/java/org/elasticsearch/transport/MockTcpTransport.java [new file with mode: 0644]
server/sonar-server-common/src/test/java/org/sonar/server/es/DefaultIndexSettingsTest.java
server/sonar-server-common/src/test/java/org/sonar/server/es/EsTester.java
server/sonar-server-common/src/test/java/org/sonar/server/es/EsUtilsTest.java
server/sonar-server-common/src/test/java/org/sonar/server/es/newindex/NewIndexTest.java
server/sonar-server/src/main/java/org/sonar/server/component/index/ComponentIndex.java
server/sonar-server/src/main/java/org/sonar/server/es/IndexCreator.java
server/sonar-server/src/main/java/org/sonar/server/issue/index/IssueIndex.java
server/sonar-server/src/main/java/org/sonar/server/measure/index/ProjectMeasuresIndex.java
server/sonar-server/src/main/java/org/sonar/server/platform/monitoring/EsStateSection.java
server/sonar-server/src/test/java/org/sonar/server/issue/index/IssueIndexTest.java
server/sonar-server/src/test/java/org/sonar/server/permission/index/FooIndex.java
server/sonar-server/src/test/java/org/sonar/server/user/UserUpdaterCreateTest.java
server/sonar-server/src/test/java/org/sonar/server/user/UserUpdaterUpdateTest.java

index a066df966dd0b92899b612016b0d77ea29ace680..8a6e42d37ae324cfc5fa68aee7b7f8ad3089bcc4 100644 (file)
@@ -201,12 +201,15 @@ subprojects {
         entry 'jetty-server'
         entry 'jetty-servlet'
       }
-      dependency('org.elasticsearch.client:transport:5.6.3') {
+      dependency('org.elasticsearch.client:transport:6.6.2') {
         exclude 'org.elasticsearch.plugin:lang-mustache-client'
-        exclude 'org.elasticsearch.plugin:transport-netty3-client'
         exclude 'commons-logging:commons-logging'
         exclude 'org.elasticsearch.plugin:reindex-client'
+        exclude 'org.elasticsearch.plugin:rank-eval-client'
       }
+      dependency('org.elasticsearch:mocksocket:1.0')
+      // as of this date, version 6.6.2 of this module has not been published to Maven Central
+      dependency('org.codelibs.elasticsearch.module:analysis-common:6.6.1')
       dependency 'org.freemarker:freemarker:2.3.20'
       dependency 'org.hamcrest:hamcrest-all:1.3'
       dependency 'org.mindrot:jbcrypt:0.4'
index f5963be29ad5a8cec8d65aee88678e831dc84d6b..0251f93ce5434850c75d1b622c765293899ecb12 100644 (file)
@@ -23,9 +23,9 @@ import com.hazelcast.spi.exception.RetryableHazelcastException;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.locks.Lock;
-import org.apache.logging.log4j.Logger;
-import org.elasticsearch.common.logging.Loggers;
 import org.picocontainer.Startable;
+import org.sonar.api.utils.log.Logger;
+import org.sonar.api.utils.log.Loggers;
 import org.sonar.ce.taskprocessor.CeWorker;
 import org.sonar.ce.taskprocessor.CeWorkerFactory;
 import org.sonar.process.cluster.hz.HazelcastMember;
@@ -38,7 +38,7 @@ import static org.sonar.process.cluster.hz.HazelcastObjects.WORKER_UUIDS;
  * Provide the set of worker's UUID in a clustered SonarQube instance
  */
 public class CeDistributedInformationImpl implements CeDistributedInformation, Startable {
-  private static final Logger LOGGER = Loggers.getLogger(CeDistributedInformationImpl.class);
+  private static final Logger LOGGER = Loggers.get(CeDistributedInformationImpl.class);
 
   private final HazelcastMember hazelcastMember;
   private final CeWorkerFactory ceCeWorkerFactory;
index 8b96e1bde6a7a85f9b0a325d2df2226c51468781..3ab89817001dd4c8ba6f0915b459eeee5255b987 100644 (file)
@@ -33,7 +33,6 @@ import org.elasticsearch.client.transport.TransportClient;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.common.network.NetworkModule;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.InetSocketTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.transport.Netty4Plugin;
 import org.slf4j.Logger;
@@ -106,7 +105,7 @@ public class EsConnectorImpl implements EsConnector {
       boolean connectedToOneHost = false;
       for (HostAndPort hostAndPort : hostAndPorts) {
         try {
-          addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(hostAndPort.getHostText()), hostAndPort.getPortOrDefault(9001)));
+          addTransportAddress(new TransportAddress(InetAddress.getByName(hostAndPort.getHostText()), hostAndPort.getPortOrDefault(9001)));
           connectedToOneHost = true;
         } catch (UnknownHostException e) {
           LOG.debug("Can not resolve host [" + hostAndPort.getHostText() + "]", e);
index baa6476d540364a2b0ca13282035d711deb8d0c7..bbf75de41b5834380613753c7a804a5920d098e9 100644 (file)
@@ -8,6 +8,8 @@ sonarqube {
 
 configurations {
   tests
+
+  testCompile.extendsFrom tests
 }
 
 dependencies {
@@ -28,6 +30,10 @@ dependencies {
 
   compileOnly 'com.google.code.findbugs:jsr305'
 
+  // "tests" dependencies are pulled by other modules which depend on the "tests" configuration; "testCompile" dependencies are not pulled
+  tests 'org.codelibs.elasticsearch.module:analysis-common'
+  tests 'org.elasticsearch:mocksocket'
+
   testCompile 'ch.qos.logback:logback-core'
   testCompile 'com.google.code.findbugs:jsr305'
   testCompile 'com.h2database:h2'
index 3db7df268b9dd75e7586cacfe81b13ced995604a..750c332431649514fcebeeaeaa49acda399b5ea4 100644 (file)
@@ -45,11 +45,11 @@ import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.SearchHitField;
 import org.elasticsearch.search.sort.SortOrder;
 import org.sonar.api.utils.log.Logger;
 import org.sonar.api.utils.log.Loggers;
@@ -156,7 +156,7 @@ public class BulkIndexer {
     while (true) {
       SearchHit[] hits = searchResponse.getHits().getHits();
       for (SearchHit hit : hits) {
-        SearchHitField routing = hit.getField("_routing");
+        DocumentField routing = hit.field("_routing");
         DeleteRequestBuilder deleteRequestBuilder = client.prepareDelete(hit.getIndex(), hit.getType(), hit.getId());
         if (routing != null) {
           deleteRequestBuilder.setRouting(routing.getValue());
index 4298bd52a7aac843f42b96be6f66677f439e1b7e..b84ab902146ae28139e865c81bf5203e2dd86801 100644 (file)
@@ -30,7 +30,6 @@ import java.util.stream.Collectors;
 import org.elasticsearch.client.transport.TransportClient;
 import org.elasticsearch.common.network.NetworkModule;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.InetSocketTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.join.ParentJoinPlugin;
 import org.elasticsearch.percolator.PercolatorPlugin;
@@ -89,7 +88,7 @@ public class EsClientProvider extends ProviderAdapter {
 
   private static void addHostToClient(HostAndPort host, TransportClient client) {
     try {
-      client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(host.getHostText()), host.getPortOrDefault(9001)));
+      client.addTransportAddress(new TransportAddress(InetAddress.getByName(host.getHostText()), host.getPortOrDefault(9001)));
     } catch (UnknownHostException e) {
       throw new IllegalStateException("Can not resolve host [" + host + "]", e);
     }
index e21f9655d2fef5fabf06a6e4c781930c4d472c6f..189178a86709b821992f80420fdfea238ddeec9e 100644 (file)
@@ -60,7 +60,7 @@ public class EsUtils {
   public static <D extends BaseDoc> List<D> convertToDocs(SearchHits hits, Function<Map<String, Object>, D> converter) {
     List<D> docs = new ArrayList<>();
     for (SearchHit hit : hits.getHits()) {
-      docs.add(converter.apply(hit.getSource()));
+      docs.add(converter.apply(hit.getSourceAsMap()));
     }
     return docs;
   }
index 8be1677ede3df1fd57393607035f8e7749ea22b7..2465e70cbd27f2743286d74fe580493701034036 100644 (file)
@@ -43,21 +43,21 @@ class IndexDefinitionHash {
   static String of(BuiltIndex<?> index) {
     IndexType.IndexMainType mainType = index.getMainType();
     return of(
+      index.getSettings().toString(),
       ImmutableMap.of(mainType.getIndex(), mainType),
       index.getRelationTypes().stream().collect(uniqueIndex(IndexType.IndexRelationType::getName, t -> t)),
-      index.getSettings().getAsMap(),
       index.getAttributes());
   }
 
-  private static String of(Map... maps) {
-    StringBuilder sb = new StringBuilder();
-    for (Map map : maps) {
+  private static String of(String str, Map<?,?>... maps) {
+    StringBuilder sb = new StringBuilder(str);
+    for (Map<?,?> map : maps) {
       appendMap(sb, map);
     }
     return DigestUtils.sha256Hex(sb.toString());
   }
 
-  private static void appendMap(StringBuilder sb, Map attributes) {
+  private static void appendMap(StringBuilder sb, Map<?,?> attributes) {
     for (Object entry : sort(attributes).entrySet()) {
       sb.append(((Map.Entry) entry).getKey());
       sb.append(DELIMITER);
@@ -78,9 +78,9 @@ class IndexDefinitionHash {
     }
   }
 
-  private static SortedMap sort(Map map) {
+  private static SortedMap<?,?> sort(Map<?,?> map) {
     if (map instanceof ImmutableSortedMap) {
-      return (ImmutableSortedMap) map;
+      return (ImmutableSortedMap<?,?>) map;
     }
     return ImmutableSortedMap.copyOf(map);
   }
index fb340b734c3e12efdb0336c91296dd9a18c34e30..fee87d9647b30d794778c32bc06c84de9386a553 100644 (file)
@@ -32,11 +32,10 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
+import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
 
 import static java.lang.Math.max;
 import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
@@ -45,7 +44,7 @@ public class StickyFacetBuilder {
 
   private static final int FACET_DEFAULT_MIN_DOC_COUNT = 1;
   private static final int FACET_DEFAULT_SIZE = 10;
-  private static final Order FACET_DEFAULT_ORDER = Terms.Order.count(false);
+  private static final BucketOrder FACET_DEFAULT_ORDER = BucketOrder.count(false);
   /** In some cases the user selects >15 items for one facet. In that case, we want to calculate the doc count for all of them (not just the first 15 items, which would be the
    * default for the TermsAggregation). */
   private static final int MAXIMUM_NUMBER_OF_SELECTED_ITEMS_WHOSE_DOC_COUNT_WILL_BE_CALCULATED = 50;
@@ -54,13 +53,13 @@ public class StickyFacetBuilder {
   private final QueryBuilder query;
   private final Map<String, QueryBuilder> filters;
   private final AbstractAggregationBuilder subAggregation;
-  private final Order order;
+  private final BucketOrder order;
 
   public StickyFacetBuilder(QueryBuilder query, Map<String, QueryBuilder> filters) {
     this(query, filters, null, FACET_DEFAULT_ORDER);
   }
 
-  public StickyFacetBuilder(QueryBuilder query, Map<String, QueryBuilder> filters, @Nullable AbstractAggregationBuilder subAggregation, @Nullable Order order) {
+  public StickyFacetBuilder(QueryBuilder query, Map<String, QueryBuilder> filters, @Nullable AbstractAggregationBuilder subAggregation, @Nullable BucketOrder order) {
     this.query = query;
     this.filters = filters;
     this.subAggregation = subAggregation;
index 1aaccbc132e8434bbabf272d6ff9cca4198d05f7..2254b368911a0c0f6f18fd66a4f490c73e5584cf 100644 (file)
@@ -22,7 +22,7 @@ package org.sonar.server.es.metadata;
 import java.util.Optional;
 import org.elasticsearch.action.get.GetRequestBuilder;
 import org.elasticsearch.action.get.GetResponse;
-import org.elasticsearch.index.get.GetField;
+import org.elasticsearch.common.document.DocumentField;
 import org.sonar.server.es.EsClient;
 import org.sonar.server.es.Index;
 import org.sonar.server.es.IndexType;
@@ -88,9 +88,8 @@ public class MetadataIndex {
       .setStoredFields(MetadataIndexDefinition.FIELD_VALUE);
     GetResponse response = request.get();
     if (response.isExists()) {
-      GetField field = response.getField(MetadataIndexDefinition.FIELD_VALUE);
-      String value = String.valueOf(field.getValue());
-      return Optional.of(value);
+      DocumentField field = response.getField(MetadataIndexDefinition.FIELD_VALUE);
+      return Optional.of(field.getValue());
     }
     return Optional.empty();
   }
index c05173149b358f180962e58582f94539ff60777e..4500d1176f742f4746f4ea626376d6ea21c37e7c 100644 (file)
@@ -81,7 +81,7 @@ public enum DefaultIndexSettingsElement {
       set(TYPE, "nGram");
       set(MIN_GRAM, MINIMUM_NGRAM_LENGTH);
       set(MAX_GRAM, MAXIMUM_NGRAM_LENGTH);
-      setArray("token_chars", "letter", "digit", "punctuation", "symbol");
+      setList("token_chars", "letter", "digit", "punctuation", "symbol");
     }
   },
 
@@ -94,7 +94,7 @@ public enum DefaultIndexSettingsElement {
       set(TYPE, "nGram");
       set(MIN_GRAM, MINIMUM_NGRAM_LENGTH);
       set(MAX_GRAM, MAXIMUM_NGRAM_LENGTH);
-      setArray("token_chars", "letter", "digit", "punctuation", "symbol");
+      setList("token_chars", "letter", "digit", "punctuation", "symbol");
     }
   },
   PREFIX_TOKENIZER(TOKENIZER) {
@@ -122,7 +122,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, KEYWORD);
-      setArray(FILTER, TRIM, LOWERCASE);
+      setList(FILTER, TRIM, LOWERCASE);
     }
 
     @Override
@@ -139,7 +139,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, GRAM_TOKENIZER);
-      setArray(FILTER, TRIM, LOWERCASE);
+      setList(FILTER, TRIM, LOWERCASE);
     }
   },
   SEARCH_GRAMS_ANALYZER(ANALYZER) {
@@ -147,7 +147,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, WHITESPACE);
-      setArray(FILTER, TRIM, LOWERCASE);
+      setList(FILTER, TRIM, LOWERCASE);
     }
 
     @Override
@@ -164,7 +164,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, PREFIX_TOKENIZER);
-      setArray(FILTER, TRIM);
+      setList(FILTER, TRIM);
     }
   },
   SEARCH_PREFIX_ANALYZER(ANALYZER) {
@@ -172,7 +172,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, WHITESPACE);
-      setArray(FILTER, TRIM);
+      setList(FILTER, TRIM);
     }
 
     @Override
@@ -189,7 +189,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, PREFIX_TOKENIZER);
-      setArray(FILTER, TRIM, LOWERCASE);
+      setList(FILTER, TRIM, LOWERCASE);
     }
   },
   SEARCH_PREFIX_CASE_INSENSITIVE_ANALYZER(ANALYZER) {
@@ -197,7 +197,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, WHITESPACE);
-      setArray(FILTER, TRIM, LOWERCASE);
+      setList(FILTER, TRIM, LOWERCASE);
     }
 
     @Override
@@ -214,7 +214,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, WHITESPACE);
-      setArray(FILTER, TRIM, LOWERCASE, NGRAM_FILTER.getName());
+      setList(FILTER, TRIM, LOWERCASE, NGRAM_FILTER.getName());
     }
   },
   USER_SEARCH_GRAMS_ANALYZER(ANALYZER) {
@@ -222,7 +222,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, WHITESPACE);
-      setArray(FILTER, TRIM, LOWERCASE);
+      setList(FILTER, TRIM, LOWERCASE);
     }
 
     @Override
@@ -239,7 +239,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, STANDARD);
-      setArray(FILTER, STANDARD, "word_filter", LOWERCASE, STOP, ASCIIFOLDING, PORTER_STEM);
+      setList(FILTER, STANDARD, "word_filter", LOWERCASE, STOP, ASCIIFOLDING, PORTER_STEM);
     }
   },
   SEARCH_WORDS_ANALYZER(ANALYZER) {
@@ -247,7 +247,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, STANDARD);
-      setArray(FILTER, STANDARD, LOWERCASE, STOP, ASCIIFOLDING, PORTER_STEM);
+      setList(FILTER, STANDARD, LOWERCASE, STOP, ASCIIFOLDING, PORTER_STEM);
     }
 
     @Override
@@ -264,8 +264,8 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, STANDARD);
-      setArray(FILTER, STANDARD, LOWERCASE, STOP, ASCIIFOLDING, PORTER_STEM);
-      setArray(CHAR_FILTER, HTML_STRIP);
+      setList(FILTER, STANDARD, LOWERCASE, STOP, ASCIIFOLDING, PORTER_STEM);
+      setList(CHAR_FILTER, HTML_STRIP);
     }
 
     @Override
@@ -288,7 +288,7 @@ public enum DefaultIndexSettingsElement {
     @Override
     protected void setup() {
       set(TOKENIZER, UUID_MODULE_TOKENIZER);
-      setArray(FILTER, TRIM);
+      setList(FILTER, TRIM);
     }
   },
 
@@ -321,20 +321,20 @@ public enum DefaultIndexSettingsElement {
     put(localName(settingSuffix), value);
   }
 
-  protected void setArray(String settingSuffix, String... values) {
-    putArray(localName(settingSuffix), values);
+  protected void setList(String settingSuffix, String... values) {
+    putList(localName(settingSuffix), values);
   }
 
-  protected void setArray(String settingSuffix, DefaultIndexSettingsElement... values) {
-    putArray(localName(settingSuffix), Arrays.stream(values).map(DefaultIndexSettingsElement::getName).toArray(String[]::new));
+  protected void setList(String settingSuffix, DefaultIndexSettingsElement... values) {
+    putList(localName(settingSuffix), Arrays.stream(values).map(DefaultIndexSettingsElement::getName).toArray(String[]::new));
   }
 
   private void put(String setting, String value) {
     builder = builder.put(setting, value);
   }
 
-  private void putArray(String setting, String... values) {
-    builder = builder.putArray(setting, values);
+  private void putList(String setting, String... values) {
+    builder = builder.putList(setting, values);
   }
 
   private String localName(String settingSuffix) {
index 6246e8ab345c2e989f766ee68afd3b6ce5370657..f608b9b316386f92c46ef8950c96571d7fbebb51 100644 (file)
@@ -57,7 +57,6 @@ public abstract class NewIndex<T extends NewIndex<T>> {
   private void applySettingsConfiguration(SettingsConfiguration settingsConfiguration) {
     settings.put("index.mapper.dynamic", valueOf(false));
     settings.put("index.refresh_interval", refreshInterval(settingsConfiguration));
-    settings.put("mapping.single_type", valueOf(true));
 
     Configuration config = settingsConfiguration.getConfiguration();
     boolean clusterMode = config.getBoolean(CLUSTER_ENABLED.getKey()).orElse(false);
index 51ee8240080aff38a5ca2d9fb31aa74e6e6e835f..27a6288e7e591cb016928ca3d6dd1539313dd39c 100644 (file)
@@ -79,7 +79,7 @@ public class ProxyCreateIndexRequestBuilder extends CreateIndexRequestBuilder {
       request.settings().toXContent(builder, ToXContent.EMPTY_PARAMS);
       builder.endObject().endObject();
       builder.prettyPrint();
-      return builder.string();
+      return builder.toString();
     } catch (IOException e) {
       throw new RuntimeException(e);
     }
index 68be3a47d9ba7bae347de996084d2a3a2476bc59..73ffeb4e9320fd773cfab9baedb12425761f0816 100644 (file)
 package org.sonar.server.es.request;
 
 import org.apache.commons.lang.StringUtils;
-import org.elasticsearch.action.ListenableActionFuture;
+import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder;
-import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.unit.TimeValue;
 import org.sonar.api.utils.log.Profiler;
@@ -36,7 +36,7 @@ public class ProxyPutMappingRequestBuilder extends PutMappingRequestBuilder {
   }
 
   @Override
-  public PutMappingResponse get() {
+  public AcknowledgedResponse get() {
     Profiler profiler = Profiler.createIfTrace(EsClient.LOGGER).start();
     try {
       return super.execute().actionGet();
@@ -50,17 +50,17 @@ public class ProxyPutMappingRequestBuilder extends PutMappingRequestBuilder {
   }
 
   @Override
-  public PutMappingResponse get(TimeValue timeout) {
+  public AcknowledgedResponse get(TimeValue timeout) {
     throw new IllegalStateException("Not yet implemented");
   }
 
   @Override
-  public PutMappingResponse get(String timeout) {
+  public AcknowledgedResponse get(String timeout) {
     throw new IllegalStateException("Not yet implemented");
   }
 
   @Override
-  public ListenableActionFuture<PutMappingResponse> execute() {
+  public ActionFuture<AcknowledgedResponse> execute() {
     throw new UnsupportedOperationException("execute() should not be called as it's used for asynchronous");
   }
 
index b3a121d62007c439dca8c9322fde9f78c8a0daa3..d0e3c43107f01a4d835d3031380309e181fd4a15 100644 (file)
@@ -45,10 +45,10 @@ import org.elasticsearch.join.query.HasParentQueryBuilder;
 import org.elasticsearch.join.query.JoinQueryBuilders;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms;
+import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
 import org.elasticsearch.search.sort.FieldSortBuilder;
 import org.elasticsearch.search.sort.SortBuilders;
 import org.elasticsearch.search.sort.SortOrder;
@@ -502,7 +502,7 @@ public class RuleIndex {
   }
 
   private static StickyFacetBuilder stickyFacetBuilder(QueryBuilder query, Map<String, QueryBuilder> filters) {
-    return new StickyFacetBuilder(query, filters, null, Terms.Order.compound(Terms.Order.count(false), Terms.Order.term(true)));
+    return new StickyFacetBuilder(query, filters, null, BucketOrder.compound(BucketOrder.count(false), BucketOrder.key(true)));
   }
 
   private static void setSorting(RuleQuery query, SearchRequestBuilder esSearch) {
@@ -556,7 +556,7 @@ public class RuleIndex {
     TermsAggregationBuilder termsAggregation = AggregationBuilders.terms(AGGREGATION_NAME_FOR_TAGS)
       .field(FIELD_RULE_EXTENSION_TAGS)
       .size(size)
-      .order(Terms.Order.term(true))
+      .order(BucketOrder.key(true))
       .minDocCount(1);
     ofNullable(query)
       .map(EsUtils::escapeSpecialRegexChars)
diff --git a/server/sonar-server-common/src/test/java/org/elasticsearch/transport/MockTcpTransport.java b/server/sonar-server-common/src/test/java/org/elasticsearch/transport/MockTcpTransport.java
new file mode 100644 (file)
index 0000000..b5d89ef
--- /dev/null
@@ -0,0 +1,495 @@
+/*
+ * SonarQube
+ * Copyright (C) 2009-2019 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+ */
+package org.elasticsearch.transport;
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.Closeable;
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.SocketException;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executor;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.Version;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.cli.SuppressForbidden;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.concurrent.CompletableContext;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.InputStreamStreamInput;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.network.NetworkService;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.CancellableThreads;
+import org.elasticsearch.common.util.PageCacheRecycler;
+import org.elasticsearch.common.util.concurrent.AbstractRunnable;
+import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.core.internal.io.IOUtils;
+import org.elasticsearch.indices.breaker.CircuitBreakerService;
+import org.elasticsearch.mocksocket.MockServerSocket;
+import org.elasticsearch.mocksocket.MockSocket;
+import org.elasticsearch.threadpool.ThreadPool;
+
+/**
+ * This is a socket based blocking TcpTransport implementation that is used for tests
+ * that need real networking. This implementation is a test only implementation that implements
+ * the networking layer in the worst possible way since it blocks and uses a thread per request model.
+ */
+public class MockTcpTransport extends TcpTransport {
+  private static final Logger logger = LogManager.getLogger(MockTcpTransport.class);
+
+  /**
+   * A pre-built light connection profile that shares a single connection across all
+   * types.
+   */
+  static final ConnectionProfile LIGHT_PROFILE;
+
+  private final Set<MockChannel> openChannels = new HashSet<>();
+
+  static {
+    ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
+    builder.addConnections(1,
+      TransportRequestOptions.Type.BULK,
+      TransportRequestOptions.Type.PING,
+      TransportRequestOptions.Type.RECOVERY,
+      TransportRequestOptions.Type.REG,
+      TransportRequestOptions.Type.STATE);
+    LIGHT_PROFILE = builder.build();
+  }
+
+  private final ExecutorService executor;
+
+  public MockTcpTransport(Settings settings, ThreadPool threadPool, BigArrays bigArrays,
+                          CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry,
+                          NetworkService networkService) {
+    this(settings, threadPool, bigArrays, circuitBreakerService, namedWriteableRegistry, networkService, Version.CURRENT);
+  }
+
+  public MockTcpTransport(Settings settings, ThreadPool threadPool, BigArrays bigArrays,
+                          CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry,
+                          NetworkService networkService, Version mockVersion) {
+    super("mock-tcp-transport", settings, mockVersion, threadPool, PageCacheRecycler.NON_RECYCLING_INSTANCE, circuitBreakerService,
+      namedWriteableRegistry, networkService);
+    // we have our own crazy cached thread pool; this one is not bounded at all...
+    // using the ES thread factory here is crucial for tests; otherwise disruption tests won't block that thread
+    executor = Executors.newCachedThreadPool(EsExecutors.daemonThreadFactory(settings, Transports.TEST_MOCK_TRANSPORT_THREAD_PREFIX));
+  }
+
+  /**
+   * Binds a server socket at {@code address} and spawns an accept loop on {@link #executor}.
+   * Blocks (via the latch) until the accept task has actually started, so callers can rely on
+   * the server channel being live; re-interrupts the current thread if the wait is interrupted.
+   *
+   * @param name profile name associated with the resulting server channel
+   */
+  @Override
+  protected MockChannel bind(final String name, InetSocketAddress address) throws IOException {
+    MockServerSocket socket = new MockServerSocket();
+    socket.setReuseAddress(TransportSettings.TCP_REUSE_ADDRESS.get(settings));
+    ByteSizeValue tcpReceiveBufferSize = TransportSettings.TCP_RECEIVE_BUFFER_SIZE.get(settings);
+    if (tcpReceiveBufferSize.getBytes() > 0) {
+      socket.setReceiveBufferSize(tcpReceiveBufferSize.bytesAsInt());
+    }
+    socket.bind(address);
+    MockChannel serverMockChannel = new MockChannel(socket, name);
+    CountDownLatch started = new CountDownLatch(1);
+    executor.execute(new AbstractRunnable() {
+      @Override
+      public void onFailure(Exception e) {
+        onException(serverMockChannel, e);
+      }
+
+      @Override
+      protected void doRun() throws Exception {
+        // Signal readiness before entering the (blocking) accept loop.
+        started.countDown();
+        serverMockChannel.accept(executor);
+      }
+    });
+    try {
+      started.await();
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+    }
+    return serverMockChannel;
+  }
+
+  /**
+   * Reads one framed message (marker bytes + length header + payload) from the channel's input
+   * stream and forwards the raw bytes to {@code consumeNetworkReads}. Blocks until a complete
+   * frame is available.
+   *
+   * @throws IOException on EOF (reported as "Connection reset by peer") or corrupted framing
+   */
+  private void readMessage(MockChannel mockChannel, StreamInput input) throws IOException {
+    Socket socket = mockChannel.activeChannel;
+    byte[] minimalHeader = new byte[TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE];
+    try {
+      input.readFully(minimalHeader);
+    } catch (EOFException eof) {
+      throw new IOException("Connection reset by peer");
+    }
+
+    // Reading the message length will throw a stream-corrupted exception if the marker bytes are incorrect.
+    int msgSize = TcpTransport.readMessageLength(new BytesArray(minimalHeader));
+    if (msgSize == -1) {
+      // NOTE(review): -1 presumably denotes an internal ping frame with no payload, so there is
+      // nothing to dispatch — just flush; confirm against TcpTransport.readMessageLength.
+      socket.getOutputStream().flush();
+    } else {
+      final byte[] buffer = new byte[msgSize];
+      input.readFully(buffer);
+      int expectedSize = TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE + msgSize;
+      // Re-assemble header + payload into a single BytesReference for the consumer.
+      try (BytesStreamOutput output = new ReleasableBytesStreamOutput(expectedSize, bigArrays)) {
+        output.write(minimalHeader);
+        output.write(buffer);
+        consumeNetworkReads(mockChannel, output.bytes());
+      }
+    }
+  }
+
+  /**
+   * Opens an outbound connection to {@code node}. The actual socket connect happens
+   * asynchronously on {@link #executor}; {@code channel.connectFuture} is completed (possibly
+   * exceptionally) once the connect finishes, after which the read loop is started.
+   */
+  @Override
+  @SuppressForbidden(reason = "real socket for mocking remote connections")
+  protected MockChannel initiateChannel(DiscoveryNode node) throws IOException {
+    InetSocketAddress address = node.getAddress().address();
+    final MockSocket socket = new MockSocket();
+    final MockChannel channel = new MockChannel(socket, address, false, "none");
+
+    boolean success = false;
+    try {
+      configureSocket(socket);
+      success = true;
+    } finally {
+      // Release the socket if configuration threw; the channel was never handed out.
+      if (success == false) {
+        IOUtils.close(socket);
+      }
+    }
+
+    executor.submit(() -> {
+      try {
+        socket.connect(address);
+        socket.setSoLinger(false, 0);
+        channel.connectFuture.complete(null);
+        channel.loopRead(executor);
+      } catch (Exception ex) {
+        channel.connectFuture.completeExceptionally(ex);
+      }
+    });
+
+    return channel;
+  }
+
+  /**
+   * Rewrites the given profile so that every request type that had at least one dedicated
+   * connection shares a single channel (types without any connection keep none), while
+   * preserving the original handshake/connect timeouts, ping interval and compression flag.
+   */
+  @Override
+  protected ConnectionProfile maybeOverrideConnectionProfile(ConnectionProfile connectionProfile) {
+    ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
+    Set<TransportRequestOptions.Type> allTypesWithConnection = new HashSet<>();
+    Set<TransportRequestOptions.Type> allTypesWithoutConnection = new HashSet<>();
+    for (ConnectionProfile.ConnectionTypeHandle handle : connectionProfile.getHandles()) {
+      Set<TransportRequestOptions.Type> types = handle.getTypes();
+      if (handle.length > 0) {
+        allTypesWithConnection.addAll(types);
+      } else {
+        allTypesWithoutConnection.addAll(types);
+      }
+    }
+    // make sure we maintain at least the types that are supported by this profile even if we only use a single channel for them.
+    builder.addConnections(1, allTypesWithConnection.toArray(new TransportRequestOptions.Type[0]));
+    if (allTypesWithoutConnection.isEmpty() == false) {
+      builder.addConnections(0, allTypesWithoutConnection.toArray(new TransportRequestOptions.Type[0]));
+    }
+    builder.setHandshakeTimeout(connectionProfile.getHandshakeTimeout());
+    builder.setConnectTimeout(connectionProfile.getConnectTimeout());
+    builder.setPingInterval(connectionProfile.getPingInterval());
+    builder.setCompressionEnabled(connectionProfile.getCompressionEnabled());
+    return builder.build();
+  }
+
+  /**
+   * Applies the configured TCP options (no-delay, send/receive buffer sizes, reuse-address)
+   * from {@link TransportSettings} to the given socket.
+   */
+  private void configureSocket(Socket socket) throws SocketException {
+    socket.setTcpNoDelay(TransportSettings.TCP_NO_DELAY.get(settings));
+    ByteSizeValue tcpSendBufferSize = TransportSettings.TCP_SEND_BUFFER_SIZE.get(settings);
+    if (tcpSendBufferSize.getBytes() > 0) {
+      socket.setSendBufferSize(tcpSendBufferSize.bytesAsInt());
+    }
+    ByteSizeValue tcpReceiveBufferSize = TransportSettings.TCP_RECEIVE_BUFFER_SIZE.get(settings);
+    if (tcpReceiveBufferSize.getBytes() > 0) {
+      socket.setReceiveBufferSize(tcpReceiveBufferSize.bytesAsInt());
+    }
+    socket.setReuseAddress(TransportSettings.TCP_REUSE_ADDRESS.get(settings));
+  }
+
+  /**
+   * Blocking-socket channel used by the mock transport. An instance wraps either a client
+   * {@code Socket} (traffic channel) or a {@code ServerSocket} (accept channel); exactly one of
+   * {@code activeChannel} / {@code serverSocket} is non-null, depending on the constructor used.
+   * Every instance registers itself in {@code openChannels} and deregisters on close.
+   */
+  public final class MockChannel implements Closeable, TcpChannel, TcpServerChannel {
+    private final AtomicBoolean isOpen = new AtomicBoolean(true);
+    private final InetSocketAddress localAddress;
+    private final ServerSocket serverSocket;
+    // Channels accepted by this server channel; removed via close listeners and closed in close0().
+    private final Set<MockChannel> workerChannels = Collections.newSetFromMap(new ConcurrentHashMap<>());
+    private final Socket activeChannel;
+    private final boolean isServer;
+    private final String profile;
+    private final CancellableThreads cancellableThreads = new CancellableThreads();
+    private final CompletableContext<Void> closeFuture = new CompletableContext<>();
+    private final CompletableContext<Void> connectFuture = new CompletableContext<>();
+    private final ChannelStats stats = new ChannelStats();
+
+    /**
+     * Constructs a new MockChannel instance intended for handling the actual incoming / outgoing traffic.
+     *
+     * @param socket The client socket. Must not be null.
+     * @param localAddress Address associated with the corresponding local server socket. Must not be null.
+     * @param isServer Whether this channel was accepted by a server socket (see {@link #accept})
+     *                 as opposed to initiated locally.
+     * @param profile The associated profile name.
+     */
+    MockChannel(Socket socket, InetSocketAddress localAddress, boolean isServer, String profile) {
+      this.localAddress = localAddress;
+      this.activeChannel = socket;
+      this.isServer = isServer;
+      this.serverSocket = null;
+      synchronized (openChannels) {
+        openChannels.add(this);
+      }
+    }
+
+    /**
+     * Constructs a new MockChannel instance intended for accepting requests.
+     *
+     * @param serverSocket The associated server socket. Must not be null.
+     * @param profile The associated profile name.
+     */
+    MockChannel(ServerSocket serverSocket, String profile) {
+      this.localAddress = (InetSocketAddress) serverSocket.getLocalSocketAddress();
+      this.serverSocket = serverSocket;
+      this.profile = profile;
+      // NOTE(review): the listening channel itself reports isServerChannel() == false; only
+      // accepted worker channels pass isServer = true (see accept()) — confirm this is intended.
+      this.isServer = false;
+      this.activeChannel = null;
+      synchronized (openChannels) {
+        openChannels.add(this);
+      }
+    }
+
+    /**
+     * Accept loop for a server channel: blocks on {@code serverSocket.accept()}, wraps each
+     * incoming socket in a worker {@link MockChannel}, registers it and starts its read loop.
+     * Runs until this channel is closed.
+     */
+    public void accept(Executor executor) throws IOException {
+      while (isOpen.get()) {
+        Socket incomingSocket = serverSocket.accept();
+        MockChannel incomingChannel = null;
+        try {
+          configureSocket(incomingSocket);
+          synchronized (this) {
+            if (isOpen.get()) {
+              InetSocketAddress localAddress = new InetSocketAddress(incomingSocket.getLocalAddress(),
+                incomingSocket.getPort());
+              incomingChannel = new MockChannel(incomingSocket, localAddress, true, profile);
+              MockChannel finalIncomingChannel = incomingChannel;
+              // Deregister the worker channel from this server channel once it closes.
+              incomingChannel.addCloseListener(new ActionListener<Void>() {
+                @Override
+                public void onResponse(Void aVoid) {
+                  workerChannels.remove(finalIncomingChannel);
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                  workerChannels.remove(finalIncomingChannel);
+                }
+              });
+              serverAcceptedChannel(incomingChannel);
+              //establish a happens-before edge between closing and accepting a new connection
+              workerChannels.add(incomingChannel);
+
+              // this spawns a new thread immediately, so OK under lock
+              incomingChannel.loopRead(executor);
+              // the channel is properly registered and will be cleared by the close code.
+              incomingSocket = null;
+              incomingChannel = null;
+            }
+          }
+        } finally {
+          // ensure we don't leak sockets and channels in the failure case. Note that we null both
+          // if there are no exceptions so this becomes a no op.
+          IOUtils.closeWhileHandlingException(incomingSocket, incomingChannel);
+        }
+      }
+    }
+
+    /**
+     * Starts the read loop for a traffic channel on the given executor: repeatedly reads framed
+     * messages (via {@link #readMessage}) until the channel closes or the thread is interrupted.
+     * Exceptions are routed through {@code onException} while the channel is still open.
+     */
+    void loopRead(Executor executor) {
+      executor.execute(new AbstractRunnable() {
+        @Override
+        public void onFailure(Exception e) {
+          if (isOpen.get()) {
+            try {
+              onException(MockChannel.this, e);
+            } catch (Exception ex) {
+              logger.warn("failed on handling exception", ex);
+              IOUtils.closeWhileHandlingException(MockChannel.this); // pure paranoia
+            }
+          }
+        }
+
+        @Override
+        protected void doRun() throws Exception {
+          StreamInput input = new InputStreamStreamInput(new BufferedInputStream(activeChannel.getInputStream()));
+          // There is a (slim) chance that we get interrupted right after a loop iteration, so check explicitly
+          while (isOpen.get() && !Thread.currentThread().isInterrupted()) {
+            cancellableThreads.executeIO(() -> readMessage(MockChannel.this, input));
+          }
+        }
+      });
+    }
+
+    synchronized void close0() throws IOException {
+      // establish a happens-before edge between closing and accepting a new connection
+      // we have to sync this entire block to ensure that our openChannels checks work correctly.
+      // The close block below will close all worker channels but if one of the worker channels runs into an exception
+      // for instance due to a disconnect the handling of this exception might be executed concurrently.
+      // now if we are in-turn concurrently call close we might not wait for the actual close to happen and that will, down the road
+      // make the assertion trip that not all channels are closed.
+      if (isOpen.compareAndSet(true, false)) {
+        final boolean removedChannel;
+        synchronized (openChannels) {
+          removedChannel = openChannels.remove(this);
+        }
+        IOUtils.close(serverSocket, activeChannel, () -> IOUtils.close(workerChannels),
+          () -> cancellableThreads.cancel("channel closed"));
+        assert removedChannel: "Channel was not removed or removed twice?";
+      }
+    }
+
+    @Override
+    public String toString() {
+      return "MockChannel{" +
+        "profile='" + profile + '\'' +
+        ", isOpen=" + isOpen +
+        ", localAddress=" + localAddress +
+        ", isServerSocket=" + (serverSocket != null) +
+        '}';
+    }
+
+    @Override
+    public void close() {
+      try {
+        close0();
+        closeFuture.complete(null);
+      } catch (IOException e) {
+        closeFuture.completeExceptionally(e);
+      }
+    }
+
+    @Override
+    public String getProfile() {
+      return profile;
+    }
+
+    @Override
+    public boolean isServerChannel() {
+      return isServer;
+    }
+
+    @Override
+    public void addCloseListener(ActionListener<Void> listener) {
+      closeFuture.addListener(ActionListener.toBiConsumer(listener));
+    }
+
+    @Override
+    public void addConnectListener(ActionListener<Void> listener) {
+      connectFuture.addListener(ActionListener.toBiConsumer(listener));
+    }
+
+    @Override
+    public ChannelStats getChannelStats() {
+      return stats;
+    }
+
+    @Override
+    public boolean isOpen() {
+      return isOpen.get();
+    }
+
+    @Override
+    public InetSocketAddress getLocalAddress() {
+      return localAddress;
+    }
+
+    @Override
+    public InetSocketAddress getRemoteAddress() {
+      // NOTE(review): null for a pure server (accept) channel, where activeChannel is null.
+      return (InetSocketAddress) activeChannel.getRemoteSocketAddress();
+    }
+
+    @Override
+    public void sendMessage(BytesReference reference, ActionListener<Void> listener) {
+      try {
+        // Synchronize on the channel so concurrent senders cannot interleave frame bytes.
+        synchronized (this) {
+          OutputStream outputStream = new BufferedOutputStream(activeChannel.getOutputStream());
+          reference.writeTo(outputStream);
+          outputStream.flush();
+        }
+        listener.onResponse(null);
+      } catch (IOException e) {
+        listener.onFailure(e);
+        onException(this, e);
+      }
+    }
+  }
+
+
+  /**
+   * Starts the transport: binds one server channel per configured profile (when acting as a
+   * network server), then delegates to the superclass. If anything fails part-way, the
+   * transport is stopped again so no half-started state leaks.
+   */
+  @Override
+  protected void doStart() {
+    boolean success = false;
+    try {
+      if (NetworkService.NETWORK_SERVER.get(settings)) {
+        // loop through all profiles and start them up, special handling for default one
+        for (ProfileSettings profileSettings : profileSettings) {
+          bindServer(profileSettings);
+        }
+      }
+      super.doStart();
+      success = true;
+    } finally {
+      if (success == false) {
+        doStop();
+      }
+    }
+  }
+
+  /**
+   * Shuts down the worker pool (waiting up to 10s) and asserts that every channel was closed
+   * and deregistered from {@code openChannels}.
+   */
+  @Override
+  protected void stopInternal() {
+    ThreadPool.terminate(executor, 10, TimeUnit.SECONDS);
+    synchronized (openChannels) {
+      assert openChannels.isEmpty() : "there are still open channels: " + openChannels;
+    }
+  }
+}
index 3d3e91b6ae206e8859af1408cb964e3471f61d31..51863c4babb4d1bfdab4bfa67a2043ee2d68968a 100644 (file)
@@ -19,7 +19,7 @@
  */
 package org.sonar.server.es;
 
-import java.util.Map;
+import org.elasticsearch.common.settings.Settings;
 import org.junit.Test;
 import org.sonar.server.es.newindex.DefaultIndexSettings;
 import org.sonar.test.TestUtils;
@@ -31,12 +31,11 @@ public class DefaultIndexSettingsTest {
 
   @Test
   public void defaults() {
-    Map<String, String> map = DefaultIndexSettings.defaults().build().getAsMap();
-    assertThat(map).isNotEmpty();
+    Settings settings = DefaultIndexSettings.defaults().build();
 
     // test some values
-    assertThat(map.get("index.number_of_shards")).isEqualTo("1");
-    assertThat(map.get("index.analysis.analyzer." + SORTABLE_ANALYZER.getName() + ".tokenizer")).isEqualTo("keyword");
+    assertThat(settings.get("index.number_of_shards")).isEqualTo("1");
+    assertThat(settings.get("index.analysis.analyzer." + SORTABLE_ANALYZER.getName() + ".tokenizer")).isEqualTo("keyword");
   }
 
   @Test
index a8253881b6da34c1e6cc8ba8a5700a22e60f987c..9d69c0413f313cf6ea2134d77a96d0fe3a97b1b2 100644 (file)
@@ -27,40 +27,51 @@ import com.google.common.collect.Iterables;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.apache.commons.lang.reflect.ConstructorUtils;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
-import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
-import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.analysis.common.CommonAnalysisPlugin;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.network.NetworkModule;
+import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.discovery.DiscoveryModule;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.indices.recovery.RecoverySettings;
 import org.elasticsearch.join.ParentJoinPlugin;
 import org.elasticsearch.node.InternalSettingsPreparer;
 import org.elasticsearch.node.Node;
+import org.elasticsearch.plugins.NetworkPlugin;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.MockTcpTransport;
+import org.elasticsearch.transport.Transport;
 import org.junit.rules.ExternalResource;
 import org.sonar.server.component.index.ComponentIndexDefinition;
 import org.sonar.server.es.IndexDefinition.IndexDefinitionContext;
@@ -204,7 +215,7 @@ public class EsTester extends ExternalResource {
     List<SearchHit> hits = getDocuments(indexType);
     return new ArrayList<>(Collections2.transform(hits, input -> {
       try {
-        return (E) ConstructorUtils.invokeConstructor(docClass, input.getSource());
+        return (E) ConstructorUtils.invokeConstructor(docClass, input.getSourceAsMap());
       } catch (Exception e) {
         throw Throwables.propagate(e);
       }
@@ -287,7 +298,7 @@ public class EsTester extends ExternalResource {
   }
 
   private void setIndexSettings(String index, Map<String, Object> settings) {
-    UpdateSettingsResponse response = SHARED_NODE.client().admin().indices()
+    AcknowledgedResponse response = SHARED_NODE.client().admin().indices()
       .prepareUpdateSettings(index)
       .setSettings(settings)
       .get();
@@ -296,7 +307,7 @@ public class EsTester extends ExternalResource {
 
   private static void deleteIndexIfExists(String name) {
     try {
-      DeleteIndexResponse response = SHARED_NODE.client().admin().indices().prepareDelete(name).get();
+      AcknowledgedResponse response = SHARED_NODE.client().admin().indices().prepareDelete(name).get();
       checkState(response.isAcknowledged(), "Fail to drop the index " + name);
     } catch (IndexNotFoundException e) {
       // ignore
@@ -328,7 +339,7 @@ public class EsTester extends ExternalResource {
 
       // create types
       String typeName = index.getMainType().getType();
-      PutMappingResponse mappingResponse = SHARED_NODE.client().admin().indices().preparePutMapping(indexName)
+      AcknowledgedResponse mappingResponse = SHARED_NODE.client().admin().indices().preparePutMapping(indexName)
         .setType(typeName)
         .setSource(index.getAttributes())
         .get();
@@ -355,26 +366,39 @@ public class EsTester extends ExternalResource {
         // from failing on nodes without enough disk space
         .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b")
         .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b")
+        .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "1b")
         // always reduce this - it can make tests really slow
         .put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), TimeValue.timeValueMillis(20))
         .put(NetworkModule.TRANSPORT_TYPE_KEY, "local")
         .put(NetworkModule.HTTP_ENABLED.getKey(), false)
         .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "single-node")
         .build();
-      Node node = new TesterNode(settings);
+      Node node = new Node(InternalSettingsPreparer.prepareEnvironment(settings, null),
+        ImmutableList.of(
+          CommonAnalysisPlugin.class,
+          // mock local transport plugin
+          MockTcpTransportPlugin.class,
+          // install ParentJoin plugin required to create field of type "join"
+          ParentJoinPlugin.class),
+        true) {
+        @Override
+        protected void registerDerivedNodeNameWithLogger(String nodeName) {
+          // nothing to do
+        }
+      };
       return node.start();
     } catch (Exception e) {
       throw new IllegalStateException("Fail to start embedded Elasticsearch", e);
     }
   }
 
-  private static class TesterNode extends Node {
-    public TesterNode(Settings preparedSettings) {
-      super(
-        InternalSettingsPreparer.prepareEnvironment(preparedSettings, null),
-        ImmutableList.of(
-          // install ParentJoin plugin required to create field of type "join"
-          ParentJoinPlugin.class));
+  public static final class MockTcpTransportPlugin extends Plugin implements NetworkPlugin {
+    @Override
+    public Map<String, Supplier<Transport>> getTransports(Settings settings, ThreadPool threadPool, PageCacheRecycler pageCacheRecycler,
+      CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, NetworkService networkService) {
+      return Collections.singletonMap(
+        "local",
+        () -> new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService, namedWriteableRegistry, networkService));
     }
   }
 }
index 23efb9a5107e76e21e8999e684f6d23575c35107..9d68813c3c0451b3f0c7975dfb40fb218c415d5b 100644 (file)
@@ -24,28 +24,24 @@ import java.util.List;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
 import org.junit.Test;
-import org.mockito.Mockito;
 import org.sonar.server.issue.index.IssueDoc;
 import org.sonar.test.TestUtils;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 import static org.sonar.server.es.EsUtils.escapeSpecialRegexChars;
 
 public class EsUtilsTest {
 
   @Test
   public void convertToDocs_empty() {
-    SearchHits hits = mock(SearchHits.class, Mockito.RETURNS_MOCKS);
+    SearchHits hits = new SearchHits(new SearchHit[] {}, 0, 0);
     List<BaseDoc> docs = EsUtils.convertToDocs(hits, IssueDoc::new);
     assertThat(docs).isEmpty();
   }
 
   @Test
   public void convertToDocs() {
-    SearchHits hits = mock(SearchHits.class, Mockito.RETURNS_MOCKS);
-    when(hits.getHits()).thenReturn(new SearchHit[] {mock(SearchHit.class)});
+    SearchHits hits = new SearchHits(new SearchHit[] {new SearchHit(16)}, 1, 1);
     List<BaseDoc> docs = EsUtils.convertToDocs(hits, IssueDoc::new);
     assertThat(docs).hasSize(1);
   }
index e11262c2e520ed4f1210e554e7bb46d92dabafa8..960ac5657589c42d5582f2c46d2e4f4fe56573dc 100644 (file)
@@ -70,6 +70,8 @@ public class NewIndexTest {
     assertThat(underTest.get("index.number_of_shards")).isNotEmpty();
     assertThat(underTest.get("index.mapper.dynamic")).isEqualTo("false");
     assertThat(underTest.get("index.refresh_interval")).isEqualTo("30s");
+    // setting "mapping.single_type" has been dropped in 6.X because multi type indices are not supported anymore
+    assertThat(underTest.get("mapping.single_type")).isNull();
     assertThat(underTest.get("index.number_of_shards")).isEqualTo("1");
     assertThat(underTest.get("index.number_of_replicas")).isEqualTo("0");
   }
@@ -83,6 +85,8 @@ public class NewIndexTest {
     assertThat(underTest.get("index.number_of_shards")).isNotEmpty();
     assertThat(underTest.get("index.mapper.dynamic")).isEqualTo("false");
     assertThat(underTest.get("index.refresh_interval")).isEqualTo("30s");
+    // setting "mapping.single_type" has been dropped in 6.X because multi type indices are not supported anymore
+    assertThat(underTest.get("mapping.single_type")).isNull();
     assertThat(underTest.get("index.number_of_shards")).isEqualTo("1");
     assertThat(underTest.get("index.number_of_replicas")).isEqualTo("1");
   }
index 6b382ebe4523c028f94e2c6ec9a0fe30afdffb11..b4feffd78636302b6340bb1c7a6dd7298c7bb5e4 100644 (file)
@@ -33,10 +33,10 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
-import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
-import org.elasticsearch.search.aggregations.bucket.filters.InternalFilters;
-import org.elasticsearch.search.aggregations.bucket.filters.InternalFilters.InternalBucket;
+import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter;
+import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters;
+import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters.InternalBucket;
 import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits;
 import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
index 7112f77b28a19021035619eefa5db853e845c4fc..7a5a03fe3513143c677d0dcd91acb370f8f7a56b 100644 (file)
@@ -26,7 +26,7 @@ import java.util.Set;
 import java.util.stream.Collectors;
 import org.apache.commons.lang.StringUtils;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
-import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.common.settings.Settings;
 import org.picocontainer.Startable;
@@ -131,7 +131,7 @@ public class IndexCreator implements Startable {
 
     // create types
     LOGGER.info("Create type {}", builtIndex.getMainType().format());
-    PutMappingResponse mappingResponse = client.preparePutMapping(index)
+    AcknowledgedResponse mappingResponse = client.preparePutMapping(index)
       .setType(builtIndex.getMainType().getType())
       .setSource(builtIndex.getAttributes())
       .get();
index 8e5976d5f4d99b4a60217fe6bb2f06f6a961953f..28b2bacd4d305a752638c6efc61d4d017856bfa8 100644 (file)
@@ -47,6 +47,7 @@ import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.indices.TermsLookup;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.HasAggregations;
 import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
@@ -55,9 +56,8 @@ import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
 import org.elasticsearch.search.aggregations.bucket.terms.InternalTerms;
 import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
+import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
 import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
 import org.elasticsearch.search.aggregations.metrics.min.Min;
 import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
@@ -243,7 +243,7 @@ public class IssueIndex {
   private static final String FACET_SUFFIX_MISSING = "_missing";
   private static final String IS_ASSIGNED_FILTER = "__isAssigned";
   private static final SumAggregationBuilder EFFORT_AGGREGATION = AggregationBuilders.sum(FACET_MODE_EFFORT).field(FIELD_ISSUE_EFFORT);
-  private static final Order EFFORT_AGGREGATION_ORDER = Order.aggregation(FACET_MODE_EFFORT, false);
+  private static final BucketOrder EFFORT_AGGREGATION_ORDER = BucketOrder.aggregation(FACET_MODE_EFFORT, false);
   private static final Duration TWENTY_DAYS = Duration.standardDays(20L);
   private static final Duration TWENTY_WEEKS = Duration.standardDays(20L * 7L);
   private static final Duration TWENTY_MONTHS = Duration.standardDays(20L * 30L);
@@ -700,21 +700,21 @@ public class IssueIndex {
   }
 
   public List<String> searchTags(IssueQuery query, @Nullable String textQuery, int size) {
-    Terms terms = listTermsMatching(FIELD_ISSUE_TAGS, query, textQuery, Terms.Order.term(true), size);
+    Terms terms = listTermsMatching(FIELD_ISSUE_TAGS, query, textQuery, BucketOrder.key(true), size);
     return EsUtils.termsKeys(terms);
   }
 
   public Map<String, Long> countTags(IssueQuery query, int maxNumberOfTags) {
-    Terms terms = listTermsMatching(FIELD_ISSUE_TAGS, query, null, Terms.Order.count(false), maxNumberOfTags);
+    Terms terms = listTermsMatching(FIELD_ISSUE_TAGS, query, null, BucketOrder.count(false), maxNumberOfTags);
     return EsUtils.termsToMap(terms);
   }
 
   public List<String> searchAuthors(IssueQuery query, @Nullable String textQuery, int maxNumberOfAuthors) {
-    Terms terms = listTermsMatching(FIELD_ISSUE_AUTHOR_LOGIN, query, textQuery, Terms.Order.term(true), maxNumberOfAuthors);
+    Terms terms = listTermsMatching(FIELD_ISSUE_AUTHOR_LOGIN, query, textQuery, BucketOrder.key(true), maxNumberOfAuthors);
     return EsUtils.termsKeys(terms);
   }
 
-  private Terms listTermsMatching(String fieldName, IssueQuery query, @Nullable String textQuery, Terms.Order termsOrder, int size) {
+  private Terms listTermsMatching(String fieldName, IssueQuery query, @Nullable String textQuery, BucketOrder termsOrder, int size) {
     SearchRequestBuilder requestBuilder = client
       .prepareSearch(TYPE_ISSUE.getMainType())
       // Avoids returning search hits
index 2839c4acdf2375029fc5ef5b5b6b601ec513b1bd..2323fb87ddc658d6545156f730d25f281fec5996 100644 (file)
@@ -38,14 +38,15 @@ import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket;
 import org.elasticsearch.search.aggregations.bucket.filter.Filter;
-import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
+import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter;
 import org.elasticsearch.search.aggregations.bucket.nested.Nested;
 import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
 import org.elasticsearch.search.aggregations.metrics.sum.Sum;
 import org.elasticsearch.search.sort.FieldSortBuilder;
 import org.sonar.api.server.ServerSide;
@@ -192,13 +193,13 @@ public class ProjectMeasuresIndex {
       .field(FIELD_LANGUAGES)
       .size(MAX_PAGE_SIZE)
       .minDocCount(1)
-      .order(Terms.Order.count(false)));
+      .order(BucketOrder.count(false)));
     request.addAggregation(AggregationBuilders.nested(FIELD_NCLOC_LANGUAGE_DISTRIBUTION, FIELD_NCLOC_LANGUAGE_DISTRIBUTION)
       .subAggregation(AggregationBuilders.terms(FIELD_NCLOC_LANGUAGE_DISTRIBUTION + "_terms")
         .field(FIELD_DISTRIB_LANGUAGE)
         .size(MAX_PAGE_SIZE)
         .minDocCount(1)
-        .order(Terms.Order.count(false))
+        .order(BucketOrder.count(false))
         .subAggregation(sum(FIELD_DISTRIB_NCLOC).field(FIELD_DISTRIB_NCLOC))));
 
     request.addAggregation(AggregationBuilders.nested(NCLOC_KEY, FIELD_MEASURES)
@@ -336,7 +337,7 @@ public class ProjectMeasuresIndex {
   }
 
   private static AbstractAggregationBuilder createQualityGateFacet() {
-    return AggregationBuilders.filters(
+    return filters(
       ALERT_STATUS_KEY,
       QUALITY_GATE_STATUS.entrySet().stream()
         .map(entry -> new KeyedFilter(entry.getKey(), termQuery(FIELD_QUALITY_GATE_STATUS, entry.getValue())))
@@ -425,7 +426,7 @@ public class ProjectMeasuresIndex {
       .field(FIELD_TAGS)
       .size(size)
       .minDocCount(1)
-      .order(Terms.Order.term(true));
+      .order(BucketOrder.key(true));
     if (textQuery != null) {
       tagFacet.includeExclude(new IncludeExclude(".*" + escapeSpecialRegexChars(textQuery) + ".*", null));
     }
index bea5a1a2c53fa58fdba5653d46c37c3f80092349..5edc97c0342ad152befbad564b8aff3c86d8ea34 100644 (file)
@@ -81,7 +81,6 @@ public class EsStateSection implements SystemInfoSection {
     setAttribute(protobuf, "Store Size", byteCountToDisplaySize(stats.getIndices().getStore().getSizeInBytes()));
     setAttribute(protobuf, "Open File Descriptors", stats.getProcess().getOpenFileDescriptors());
     setAttribute(protobuf, "Max File Descriptors", stats.getProcess().getMaxFileDescriptors());
-    setAttribute(protobuf, "Spinning", stats.getFs().getTotal().getSpins());
     setAttribute(protobuf, "JVM Heap Usage", formatPercent(stats.getJvm().getMem().getHeapUsedPercent()));
     setAttribute(protobuf, "JVM Heap Used", byteCountToDisplaySize(stats.getJvm().getMem().getHeapUsed().getBytes()));
     setAttribute(protobuf, "JVM Heap Max", byteCountToDisplaySize(stats.getJvm().getMem().getHeapMax().getBytes()));
index 184daed2564a8a8ca67b84d5d9598aeb251491af..c390ad4e31bca1d245174b61bb262c709acf06a7 100644 (file)
@@ -103,15 +103,15 @@ public class IssueIndexTest {
     IssueQuery.Builder query = IssueQuery.builder();
     // There are 12 issues in total, with 10 issues per page, the page 2 should only contain 2 elements
     SearchResponse result = underTest.search(query.build(), new SearchOptions().setPage(2, 10));
-    assertThat(result.getHits().hits()).hasSize(2);
+    assertThat(result.getHits().getHits()).hasSize(2);
     assertThat(result.getHits().getTotalHits()).isEqualTo(12);
 
     result = underTest.search(IssueQuery.builder().build(), new SearchOptions().setOffset(0).setLimit(5));
-    assertThat(result.getHits().hits()).hasSize(5);
+    assertThat(result.getHits().getHits()).hasSize(5);
     assertThat(result.getHits().getTotalHits()).isEqualTo(12);
 
     result = underTest.search(IssueQuery.builder().build(), new SearchOptions().setOffset(2).setLimit(0));
-    assertThat(result.getHits().hits()).hasSize(10);
+    assertThat(result.getHits().getHits()).hasSize(10);
     assertThat(result.getHits().getTotalHits()).isEqualTo(12);
   }
 
@@ -128,7 +128,7 @@ public class IssueIndexTest {
 
     IssueQuery.Builder query = IssueQuery.builder();
     SearchResponse result = underTest.search(query.build(), new SearchOptions().setLimit(Integer.MAX_VALUE));
-    assertThat(result.getHits().hits()).hasSize(SearchOptions.MAX_LIMIT);
+    assertThat(result.getHits().getHits()).hasSize(SearchOptions.MAX_LIMIT);
   }
 
   @Test
index 0deceacf733443a1d0e7116c9da86bdbf4a7aa26..22deb1a535b7004c718b87324d288d0a56110815 100644 (file)
@@ -47,8 +47,8 @@ public class FooIndex {
         .filter(authorizationTypeSupport.createQueryFilter()))
       .get()
       .getHits();
-    List<String> names = Arrays.stream(hits.hits())
-      .map(h -> h.getSource().get(FooIndexDefinition.FIELD_NAME).toString())
+    List<String> names = Arrays.stream(hits.getHits())
+      .map(h -> h.getSourceAsMap().get(FooIndexDefinition.FIELD_NAME).toString())
       .collect(MoreCollectors.toList());
     return names.size() == 2 && names.contains("bar") && names.contains("baz");
   }
index 56bf5052cc757743442efb5eda0a055cd9062454..4b5e8c53f3971dc612e5a7ef6ead18f067d541a3 100644 (file)
@@ -123,7 +123,7 @@ public class UserUpdaterCreateTest {
     assertThat(dbClient.userDao().selectByLogin(session, "user").getId()).isEqualTo(dto.getId());
     List<SearchHit> indexUsers = es.getDocuments(UserIndexDefinition.TYPE_USER);
     assertThat(indexUsers).hasSize(1);
-    assertThat(indexUsers.get(0).getSource())
+    assertThat(indexUsers.get(0).getSourceAsMap())
       .contains(
         entry("login", "user"),
         entry("name", "User"),
index dd76ec4f82410ae0805c121e3ae8ea680da84c94..cba3394ac1923ad5de4c8ed4840bf9ae239911c5 100644 (file)
@@ -108,7 +108,7 @@ public class UserUpdaterUpdateTest {
 
     List<SearchHit> indexUsers = es.getDocuments(UserIndexDefinition.TYPE_USER);
     assertThat(indexUsers).hasSize(1);
-    assertThat(indexUsers.get(0).getSource())
+    assertThat(indexUsers.get(0).getSourceAsMap())
       .contains(
         entry("login", DEFAULT_LOGIN),
         entry("name", "Marius2"),
@@ -230,7 +230,7 @@ public class UserUpdaterUpdateTest {
 
     List<SearchHit> indexUsers = es.getDocuments(UserIndexDefinition.TYPE_USER);
     assertThat(indexUsers).hasSize(1);
-    assertThat(indexUsers.get(0).getSource())
+    assertThat(indexUsers.get(0).getSourceAsMap())
       .contains(entry("login", "new_login"));
   }