Browse source code

Changing cassandra version and using datastax driver

pull/62/head
Martin Stockhammer 2 years ago
parent
commit
d925697678
14 changed files with 1891 additions and 2269 deletions
  1. +19 -6      archiva-modules/metadata/metadata-repository-api/src/test/java/org/apache/archiva/metadata/repository/AbstractMetadataRepositoryTest.java
  2. +127 -18    archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/pom.xml
  3. +0 -0       archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/cassandra/cql/load.cql
  4. +15 -6      archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraArchivaManager.java
  5. +1290 -1659 archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraMetadataRepository.java
  6. +1 -97      archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraUtils.java
  7. +343 -412   archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/DefaultCassandraArchivaManager.java
  8. +2 -0       archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/model/ColumnNames.java
  9. +57 -37     archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/CassandraMetadataRepositoryTest.java
  10. +8 -7      archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/RepositoriesNamespaceTest.java
  11. +1 -1      archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/test/resources/log4j2-test.xml
  12. +15 -16    archiva-modules/metadata/metadata-store-provider/metadata-store-file/src/test/java/org/apache/archiva/metadata/repository/file/FileMetadataRepositoryTest.java
  13. +7 -10     archiva-modules/metadata/metadata-store-provider/oak-jcr/metadata-store-jcr/src/test/java/org/apache/archiva/metadata/repository/jcr/JcrMetadataRepositoryTest.java
  14. +6 -0      pom.xml

+ 19  - 6   archiva-modules/metadata/metadata-repository-api/src/test/java/org/apache/archiva/metadata/repository/AbstractMetadataRepositoryTest.java

@@ -19,7 +19,6 @@ package org.apache.archiva.metadata.repository;
* under the License.
*/

import junit.framework.TestCase;
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.metadata.QueryParameter;
import org.apache.archiva.metadata.generic.GenericMetadataFacet;
@@ -37,12 +36,13 @@ import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.model.Scm;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import java.text.SimpleDateFormat;
import java.time.ZoneId;
@@ -62,11 +62,12 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.*;

@RunWith( SpringJUnit4ClassRunner.class )
@ExtendWith( SpringExtension.class )
@TestInstance( TestInstance.Lifecycle.PER_CLASS )
@ContextConfiguration( locations = {"classpath*:/META-INF/spring-context.xml", "classpath*:/spring-context.xml"} )
public abstract class AbstractMetadataRepositoryTest
extends TestCase
{
protected static final String OTHER_REPO_ID = "other-repo";

@@ -108,6 +109,14 @@ public abstract class AbstractMetadataRepositoryTest
protected int assertMaxTries =10;
protected int assertRetrySleepMs=500;

protected void setUp() throws Exception {

}

protected void tearDown() throws Exception {

}

/*
* Used by tryAssert to allow to throw exceptions in the lambda expression.
*/
@@ -452,19 +461,23 @@ public abstract class AbstractMetadataRepositoryTest
getRepository( ).updateProjectVersion( session, TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, metadata );

metadata = getRepository( ).getProjectVersion( session, TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION );
assertNotNull( metadata );
assertEquals( TEST_PROJECT_VERSION, metadata.getId( ) );
assertEquals( TEST_PROJECT_VERSION, metadata.getVersion( ) );
assertEquals( "project name", metadata.getName( ) );
assertEquals( "project description", metadata.getDescription( ) );
assertEquals( "the url", metadata.getUrl( ) );

assertNotNull( metadata.getScm( ) );
assertEquals( "connection", metadata.getScm( ).getConnection( ) );
assertEquals( "dev conn", metadata.getScm( ).getDeveloperConnection( ) );
assertEquals( "url", metadata.getScm( ).getUrl( ) );

assertNotNull( metadata.getCiManagement( ) );
assertEquals( "system", metadata.getCiManagement( ).getSystem( ) );
assertEquals( "ci url", metadata.getCiManagement( ).getUrl( ) );

assertNotNull( metadata.getIssueManagement( ) );
assertEquals( "system", metadata.getIssueManagement( ).getSystem( ) );
assertEquals( "issue tracker url", metadata.getIssueManagement( ).getUrl( ) );
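
Note: the hunks above move AbstractMetadataRepositoryTest from JUnit 4 (TestCase, @RunWith(SpringJUnit4ClassRunner.class)) to JUnit 5 with SpringExtension, and turn setUp()/tearDown() into plain protected methods. A minimal, self-contained sketch of that wiring, assuming JUnit Jupiter and spring-test are on the test classpath (the pom.xml below adds the Jupiter artifacts); the class name and context location are illustrative and not part of the commit:

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import static org.junit.jupiter.api.Assertions.assertNotNull;

@ExtendWith( SpringExtension.class )                  // replaces @RunWith( SpringJUnit4ClassRunner.class )
@TestInstance( TestInstance.Lifecycle.PER_CLASS )     // one test instance per class, as in the diff
@ContextConfiguration( locations = {"classpath*:/spring-context.xml"} )
class ExampleJupiterSpringTest
{
    @Test
    void contextLoads( )
    {
        // setUp()/tearDown() are no longer TestCase overrides; subclasses call them from their own hooks
        assertNotNull( this );
    }
}

Subclasses keep invoking setUp()/tearDown() explicitly, as CassandraMetadataRepositoryTest does from its @BeforeEach/@AfterEach methods further down.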


+ 127  - 18   archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/pom.xml

@@ -31,7 +31,8 @@

<properties>
<site.staging.base>${project.parent.parent.basedir}</site.staging.base>
<cassandraVersion>3.11.11</cassandraVersion>
<cassandraVersion>4.0.0</cassandraVersion>
<datastax.driver.version>4.13.0</datastax.driver.version>
</properties>

<dependencies>
@@ -102,16 +103,18 @@
<artifactId>modelmapper</artifactId>
</dependency>

<!--
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.27</version>
</dependency>
-->
<dependency>
<groupId>org.apache.cassandra</groupId>
<artifactId>cassandra-all</artifactId>
<version>${cassandraVersion}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
@@ -166,14 +169,36 @@
<groupId>com.addthis.metrics</groupId>
<artifactId>reporter-config3</artifactId>
</exclusion>
<!-- Version upgrade, see below -->
<exclusion>
<groupId>org.apache.tika</groupId>
<artifactId>tika-core</artifactId>
<groupId>net.openhft</groupId>
<artifactId>chronicle-wire</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>net.openhft</groupId>
<artifactId>chronicle-wire</artifactId>
<version>2.21.89</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.datastax.oss</groupId>
<artifactId>java-driver-core</artifactId>
<version>${datastax.driver.version}</version>
</dependency>
<dependency>
<groupId>com.datastax.oss</groupId>
<artifactId>java-driver-query-builder</artifactId>
<version>${datastax.driver.version}</version>
</dependency>
<dependency>
<groupId>com.datastax.oss</groupId>
<artifactId>java-driver-mapper-runtime</artifactId>
<version>${datastax.driver.version}</version>
</dependency>

<!--
<dependency>
<groupId>org.hectorclient</groupId>
<artifactId>hector-core</artifactId>
@@ -197,6 +222,8 @@
</exclusion>
</exclusions>
</dependency>
-->
<!--
<dependency>
<groupId>org.apache.cassandra</groupId>
<artifactId>cassandra-thrift</artifactId>
@@ -212,7 +239,9 @@
</exclusion>
</exclusions>
</dependency>
-->
<!-- Transient dependencies of cassandra that are selected to use a higher version -->
<!--
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libthrift</artifactId>
@@ -234,25 +263,35 @@
<artifactId>tika-core</artifactId>
<version>1.26</version>
</dependency>
-->
<!-- Transitive dependency. Declared here to increase the version. -->
<!--
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
<version>${netty.version}</version>
</dependency>
-->
<!--
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
-->
<!-- Is a dependency of cassandra -> hibernate-validator and replaced by new version -->
<!--
<dependency>
<groupId>org.jboss.logging</groupId>
<artifactId>jboss-logging</artifactId>
</dependency>
-->

<!-- TEST Scope -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-test-utils</artifactId>
@@ -297,6 +336,12 @@
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>


</dependencies>

@@ -320,10 +365,11 @@
</goals>
<configuration>
<portNames>
<portName>cassandraPort</portName>
<portName>cassandra.rpcPort</portName>
<portName>cassandra.storagePort</portName>
<portName>cassandra.stopPort</portName>
<portName>cassandra.jmxPort</portName>
<portName>cassandra.nativeTransportPort</portName>
</portNames>
</configuration>
</execution>
@@ -332,27 +378,37 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cassandra-maven-plugin</artifactId>
<version>2.0.0-1</version>
<version>3.6</version>
<executions>
<execution>
<id>start-cassandra</id>
<phase>process-test-classes</phase>
<phase>pre-integration-test</phase>
<goals>
<goal>start</goal>
</goals>
<configuration>
<rpcPort>${cassandraPort}</rpcPort>
<nativeTransportPort>${cassandra.nativeTransportPort}</nativeTransportPort>
<rpcPort>${cassandra.rpcPort}</rpcPort>
<storagePort>${cassandra.storagePort}</storagePort>
<stopPort>${cassandra.stopPort}</stopPort>
<jmxPort>${cassandra.jmxPort}</jmxPort>
<addMainClasspath>false</addMainClasspath>
<addTestClasspath>false</addTestClasspath>
<startWaitSeconds>500</startWaitSeconds>
<startNativeTransport>true</startNativeTransport>
<logLevel>DEBUG</logLevel>
<loadAfterFirstStart>false</loadAfterFirstStart>
<yaml>
broadcast_rpc_address: 127.0.0.1
</yaml>
<systemPropertyVariables>
<java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
</systemPropertyVariables>
</configuration>
</execution>
<execution>
<id>stop-cassandra</id>
<phase>test</phase>
<phase>post-integration-test</phase>
<goals>
<goal>stop</goal>
</goals>
@@ -364,21 +420,74 @@
<artifactId>slf4j-simple</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>net.java.dev.jna</groupId>
<artifactId>jna</artifactId>
<version>4.2.2</version>
</dependency>
<dependency>
<groupId>org.apache.cassandra</groupId>
<artifactId>cassandra-all</artifactId>
<version>3.11.10</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>3.0.0-M5</version>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
<configuration>
<includes>
<include>**/*Test.java</include>
</includes>
<systemPropertyVariables>
<cassandra.port>${cassandra.nativeTransportPort}</cassandra.port>
<cassandra.host>127.0.0.1</cassandra.host>
<archiva.repositorySessionFactory.id>cassandra</archiva.repositorySessionFactory.id>
<appserver.base>${project.build.directory}/appserver-base</appserver.base>
<java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
</systemPropertyVariables>
<trimStackTrace>false</trimStackTrace>
<skip>false</skip>
</configuration>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit.jupiter.version}</version>
</dependency>
</dependencies>
</plugin>

</plugins>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<executions>

</executions>
<configuration>
<skip>true</skip>
</configuration>
</plugin>

<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<cassandra.port>${cassandraPort}</cassandra.port>
<archiva.repositorySessionFactory.id>cassandra</archiva.repositorySessionFactory.id>
<appserver.base>${project.build.directory}/appserver-base</appserver.base>
</systemPropertyVariables>
<trimStackTrace>false</trimStackTrace>
<excludes>
<exclude>src/cassandra/**</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
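
Note: with the Hector client commented out and cassandra-all bumped to 4.0.0, the three com.datastax.oss artifacts above (core, query-builder, mapper-runtime) are the driver the module now builds against, and the failsafe configuration passes the native transport port to the tests as cassandra.port. A minimal sketch of bootstrapping that driver on its own; the contact point, datacenter name and probe query are illustrative assumptions, and the commit itself configures the session through DriverConfigLoader instead:

import com.datastax.oss.driver.api.core.CqlSession;

import java.net.InetSocketAddress;

public class DriverBootstrapSketch
{
    public static void main( String[] args )
    {
        // Placeholder contact point; in the build the port comes from the reserved cassandra.nativeTransportPort.
        try ( CqlSession session = CqlSession.builder( )
            .addContactPoint( new InetSocketAddress( "127.0.0.1", 9042 ) )
            .withLocalDatacenter( "datacenter1" )
            .build( ) )
        {
            // Smoke query against a system table to verify connectivity.
            String version = session.execute( "SELECT release_version FROM system.local" )
                .one( ).getString( "release_version" );
            System.out.println( "Connected to Cassandra " + version );
        }
    }
}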

+ 0  - 0   archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/cassandra/cql/load.cql


+ 15  - 6   archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraArchivaManager.java

@@ -19,8 +19,9 @@ package org.apache.archiva.metadata.repository.cassandra;
* under the License.
*/

import me.prettyprint.hector.api.Cluster;
import me.prettyprint.hector.api.Keyspace;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.CqlSessionBuilder;
import com.datastax.oss.driver.api.core.config.DriverConfigLoader;

/**
* @author Olivier Lamy
@@ -28,16 +29,14 @@ import me.prettyprint.hector.api.Keyspace;
*/
public interface CassandraArchivaManager
{
String DEFAULT_PRIMARY_KEY = "key";

void start();

void shutdown();

boolean started();

Keyspace getKeyspace();

Cluster getCluster();

String getRepositoryFamilyName();

String getNamespaceFamilyName();
@@ -46,6 +45,8 @@ public interface CassandraArchivaManager

String getProjectVersionMetadataFamilyName();

String[] getProjectVersionMetadataColumns();

String getArtifactMetadataFamilyName();

String getMetadataFacetFamilyName();
@@ -58,4 +59,12 @@ public interface CassandraArchivaManager

String getChecksumFamilyName();

DriverConfigLoader getConfigLoader();

CqlSessionBuilder getSessionBuilder( );

CqlSession getSession();

String getKeyspaceName();

}
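
Note: the interface now exposes the DataStax CqlSession (plus the session builder, config loader and keyspace name) instead of Hector's Cluster/Keyspace. A hypothetical sketch of how a caller could combine it with the query builder added in the pom.xml; the column name repositoryName is assumed from the ColumnNames enum, and the real query code lives in CassandraMetadataRepository, whose diff is suppressed below:

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.Row;
import org.apache.archiva.metadata.repository.cassandra.CassandraArchivaManager;

import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;
import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom;

// Illustrative caller, not part of the commit.
class NamespaceLookupSketch
{
    Row firstNamespaceOf( CassandraArchivaManager manager, String repositoryId )
    {
        CqlSession session = manager.getSession( );
        // Relies on the secondary index on repositoryName created in DefaultCassandraArchivaManager below.
        return session.execute(
            selectFrom( manager.getKeyspaceName( ), manager.getNamespaceFamilyName( ) )
                .all( )
                .whereColumn( "repositoryName" ).isEqualTo( literal( repositoryId ) )
                .build( ) )
            .one( );
    }
}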

+ 1290  - 1659   archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraMetadataRepository.java
(File diff not shown because of its large size)


+ 1  - 97   archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraUtils.java

@@ -9,8 +9,7 @@ package org.apache.archiva.metadata.repository.cassandra;
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,18 +18,6 @@ package org.apache.archiva.metadata.repository.cassandra;
* under the License.
*/

import me.prettyprint.cassandra.serializers.LongSerializer;
import me.prettyprint.cassandra.serializers.SerializerTypeInferer;
import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.cassandra.service.template.ColumnFamilyUpdater;
import me.prettyprint.hector.api.Serializer;
import me.prettyprint.hector.api.beans.ColumnSlice;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.mutation.Mutator;
import org.apache.archiva.metadata.repository.cassandra.model.ColumnNames;
import org.apache.commons.lang3.StringUtils;

/**
* @author Olivier Lamy
* @since 2.0.0
@@ -69,90 +56,7 @@ public class CassandraUtils
return builder.toString();
}

public static <A, B> HColumn<A, B> column( final A name, final B value )
{

return HFactory.createColumn( name, //
value, //
SerializerTypeInferer.getSerializer( name ), //
SerializerTypeInferer.getSerializer( value ) );
}

public static String getStringValue( ColumnSlice<String, String> columnSlice, ColumnNames columnName )
{
return getStringValue( columnSlice, columnName.toString() );
}

public static String getStringValue( ColumnSlice<String, String> columnSlice, String columnName )
{
if ( StringUtils.isEmpty( columnName ) )
{
return null;
}

HColumn<String, String> hColumn = columnSlice.getColumnByName( columnName );
return hColumn == null ? null : hColumn.getValue();
}

public static Long getLongValue( ColumnSlice<String, Long> columnSlice, String columnName )
{
if ( StringUtils.isEmpty( columnName ) )
{
return null;
}

HColumn<String, Long> hColumn = columnSlice.getColumnByName( columnName );
return hColumn == null ? null : hColumn.getValue();
}

public static <T> String getAsStringValue( ColumnSlice<String, T> columnSlice, String columnName )
{
StringSerializer ss = StringSerializer.get();
if ( StringUtils.isEmpty( columnName ) )
{
return null;
}

HColumn<String, T> hColumn = columnSlice.getColumnByName( columnName );
return hColumn == null ? null : ss.fromByteBuffer( hColumn.getValueBytes() );
}

public static Long getAsLongValue( ColumnSlice<String, String> columnSlice, String columnName )
{
LongSerializer ls = LongSerializer.get();
if ( StringUtils.isEmpty( columnName ) )
{
return null;
}

HColumn<String, String> hColumn = columnSlice.getColumnByName( columnName );
return hColumn == null ? null : ls.fromByteBuffer( hColumn.getValueBytes() );
}

public static void addInsertion( Mutator<String> mutator, String key, String columnFamily, String columnName,
String value )
{
if ( value != null )
{
mutator.addInsertion( key, columnFamily, column( columnName, value ) );
}
}

/**
* null check on the value to prevent {@link java.lang.IllegalArgumentException}
* @param updater
* @param columnName
* @param value
*/
public static void addUpdateStringValue(ColumnFamilyUpdater<String,String> updater, String columnName, String value )
{
if (value == null)
{
return;
}
updater.setString( columnName, value );

}

private CassandraUtils()
{
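
Note: the removed helpers above were thin wrappers around Hector's ColumnSlice/HColumn access and mutators. Reads against the new driver go through com.datastax.oss.driver.api.core.cql.Row instead; a hedged sketch of equivalent null-safe accessors (the names mirror the deleted methods but are not taken from the commit, the actual replacement sits in the suppressed CassandraMetadataRepository diff above):

import com.datastax.oss.driver.api.core.cql.Row;

class RowValueSketch
{
    // Row.getString already returns null for unset columns, so only the row itself needs a null check.
    static String getStringValue( Row row, String columnName )
    {
        return row == null ? null : row.getString( columnName );
    }

    static Long getLongValue( Row row, String columnName )
    {
        return row == null || row.isNull( columnName ) ? null : row.getLong( columnName );
    }
}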

+ 343  - 412   archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/DefaultCassandraArchivaManager.java

@@ -19,18 +19,14 @@ package org.apache.archiva.metadata.repository.cassandra;
* under the License.
*/

import me.prettyprint.cassandra.model.BasicColumnDefinition;
import me.prettyprint.cassandra.model.ConfigurableConsistencyLevel;
import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.cassandra.service.CassandraHostConfigurator;
import me.prettyprint.cassandra.service.ThriftKsDef;
import me.prettyprint.hector.api.Cluster;
import me.prettyprint.hector.api.HConsistencyLevel;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.ddl.ColumnFamilyDefinition;
import me.prettyprint.hector.api.ddl.ColumnIndexType;
import me.prettyprint.hector.api.ddl.ComparatorType;
import me.prettyprint.hector.api.factory.HFactory;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.CqlSessionBuilder;
import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
import com.datastax.oss.driver.api.core.config.DriverConfigLoader;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.querybuilder.schema.CreateIndex;
import com.datastax.oss.driver.api.querybuilder.schema.CreateKeyspace;
import com.datastax.oss.driver.api.querybuilder.schema.CreateTableWithOptions;
import org.apache.archiva.metadata.repository.RepositorySessionFactoryBean;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
@@ -43,8 +39,12 @@ import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;
import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.*;
import static org.apache.archiva.metadata.repository.cassandra.model.ColumnNames.*;

/**
@@ -53,12 +53,12 @@ import static org.apache.archiva.metadata.repository.cassandra.model.ColumnNames
* @author Olivier Lamy
* @since 2.0.0
*/
@Service("archivaEntityManagerFactory#cassandra")
@Service( "archivaEntityManagerFactory#cassandra" )
public class DefaultCassandraArchivaManager
implements CassandraArchivaManager
{

private Logger logger = LoggerFactory.getLogger( getClass() );
private Logger logger = LoggerFactory.getLogger( getClass( ) );

@Inject
private ApplicationContext applicationContext;
@@ -69,16 +69,12 @@ public class DefaultCassandraArchivaManager

private boolean started;

private Cluster cluster;

private Keyspace keyspace;

// configurable???
private String repositoryFamilyName = "repository";

private String namespaceFamilyName = "namespace";

private String projectFamilyName = PROJECT.toString();
private String projectFamilyName = PROJECT.toString( );

private String projectVersionMetadataFamilyName = "projectversionmetadata";

@@ -94,513 +90,448 @@ public class DefaultCassandraArchivaManager

private String checksumFamilyName = "checksum";

@Value("${cassandra.host}")

private static String[] projectVersionMetadataColumns;


static
{
projectVersionMetadataColumns = new String[]{
DEFAULT_PRIMARY_KEY,
NAMESPACE_ID.toString( ),
REPOSITORY_NAME.toString( ),
PROJECT_VERSION.toString( ),
PROJECT_ID.toString( ),
DESCRIPTION.toString( ),
URL.toString( ),
NAME.toString( ),
VERSION.toString( ),
VERSION_PROPERTIES.toString( ),
"incomplete",
"ciManagement.system",
"ciManagement.url",
"issueManagement.system",
"issueManagement.url",
"organization.name",
"organization.url",
"scm.url",
"scm.connection",
"scm.developerConnection"
};
Arrays.sort( projectVersionMetadataColumns );
}

@Value( "${cassandra.host}" )
private String cassandraHost;

@Value("${cassandra.port}")
@Value( "${cassandra.port}" )
private String cassandraPort;

@Value("${cassandra.maxActive}")
@Value( "${cassandra.maxActive}" )
private int maxActive;

@Value("${cassandra.readConsistencyLevel}")
@Value( "${cassandra.readConsistencyLevel}" )
private String readConsistencyLevel;

@Value("${cassandra.writeConsistencyLevel}")
@Value( "${cassandra.writeConsistencyLevel}" )
private String writeConsistencyLevel;

@Value("${cassandra.replicationFactor}")
@Value( "${cassandra.replicationFactor}" )
private int replicationFactor;

@Value("${cassandra.keyspace.name}")
@Value( "${cassandra.keyspace.name}" )
private String keyspaceName;

@Value("${cassandra.cluster.name}")
@Value( "${cassandra.cluster.name}" )
private String clusterName;

@Inject
private RepositorySessionFactoryBean repositorySessionFactoryBean;

@PostConstruct
public void initialize()
{
// skip initialisation if not cassandra
if ( !StringUtils.equals( repositorySessionFactoryBean.getId(), "cassandra" ) )
{
return;
}
final CassandraHostConfigurator configurator =
new CassandraHostConfigurator( cassandraHost + ":" + cassandraPort );
configurator.setMaxActive( maxActive );
//configurator.setCassandraThriftSocketTimeout( );

cluster = HFactory.getOrCreateCluster( clusterName, configurator );

final ConfigurableConsistencyLevel consistencyLevelPolicy = new ConfigurableConsistencyLevel();
consistencyLevelPolicy.setDefaultReadConsistencyLevel( HConsistencyLevel.valueOf( readConsistencyLevel ) );
consistencyLevelPolicy.setDefaultWriteConsistencyLevel( HConsistencyLevel.valueOf( writeConsistencyLevel ) );
keyspace = HFactory.createKeyspace( keyspaceName, cluster, consistencyLevelPolicy );
DriverConfigLoader configLoader;

List<ColumnFamilyDefinition> cfds = new ArrayList<>();
CqlSession cqlSession;

// namespace table
{

final ColumnFamilyDefinition namespace =
HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
getNamespaceFamilyName(), //
ComparatorType.UTF8TYPE );
cfds.add( namespace );

// creating indexes for cql query

BasicColumnDefinition nameColumn = new BasicColumnDefinition();
nameColumn.setName( StringSerializer.get().toByteBuffer( NAME.toString() ) );
nameColumn.setIndexName( NAME.toString() );
nameColumn.setIndexType( ColumnIndexType.KEYS );
nameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
namespace.addColumnDefinition( nameColumn );

BasicColumnDefinition repositoryIdColumn = new BasicColumnDefinition();
repositoryIdColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
repositoryIdColumn.setIndexName( REPOSITORY_NAME.toString() );
repositoryIdColumn.setIndexType( ColumnIndexType.KEYS );
repositoryIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
namespace.addColumnDefinition( repositoryIdColumn );
}
@Override
public CqlSessionBuilder getSessionBuilder( )
{
return CqlSession.builder( ).withConfigLoader( configLoader ).withKeyspace( keyspaceName ).withLocalDatacenter( "datacenter1" );
}

// repository table
{
final ColumnFamilyDefinition repository =
HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
getRepositoryFamilyName(), //
ComparatorType.UTF8TYPE );

cfds.add( repository );

BasicColumnDefinition nameColumn = new BasicColumnDefinition();
nameColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
nameColumn.setIndexName( REPOSITORY_NAME.toString() );
nameColumn.setIndexType( ColumnIndexType.KEYS );
nameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
repository.addColumnDefinition( nameColumn );
@Override
public CqlSession getSession( )
{
if (cqlSession==null || cqlSession.isClosed()) {
this.cqlSession = getSessionBuilder( ).build( );
}
return this.cqlSession;
}

// project table
@PostConstruct
public void initialize( )
{
// skip initialisation if not cassandra
if ( !StringUtils.equals( repositorySessionFactoryBean.getId( ), "cassandra" ) )
{

final ColumnFamilyDefinition project = HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
getProjectFamilyName(), //
ComparatorType.UTF8TYPE );
cfds.add( project );

// creating indexes for cql query

BasicColumnDefinition projectIdColumn = new BasicColumnDefinition();
projectIdColumn.setName( StringSerializer.get().toByteBuffer( PROJECT_ID.toString() ) );
projectIdColumn.setIndexName( PROJECT_ID.toString() );
projectIdColumn.setIndexType( ColumnIndexType.KEYS );
projectIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
project.addColumnDefinition( projectIdColumn );

BasicColumnDefinition repositoryIdColumn = new BasicColumnDefinition();
repositoryIdColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
repositoryIdColumn.setIndexName( REPOSITORY_NAME.toString() );
repositoryIdColumn.setIndexType( ColumnIndexType.KEYS );
repositoryIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
project.addColumnDefinition( repositoryIdColumn );

BasicColumnDefinition namespaceIdColumn = new BasicColumnDefinition();
namespaceIdColumn.setName( StringSerializer.get().toByteBuffer( NAMESPACE_ID.toString() ) );
namespaceIdColumn.setIndexName( NAMESPACE_ID.toString() );
namespaceIdColumn.setIndexType( ColumnIndexType.KEYS );
namespaceIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
project.addColumnDefinition( namespaceIdColumn );
return;
}

//projectversionmetadatamodel
{

final ColumnFamilyDefinition projectVersionMetadataModel =
HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
getProjectVersionMetadataFamilyName(), //
ComparatorType.UTF8TYPE );
cfds.add( projectVersionMetadataModel );

// creating indexes for cql query

BasicColumnDefinition namespaceIdColumn = new BasicColumnDefinition();
namespaceIdColumn.setName( StringSerializer.get().toByteBuffer( NAMESPACE_ID.toString() ) );
namespaceIdColumn.setIndexName( NAMESPACE_ID.toString() );
namespaceIdColumn.setIndexType( ColumnIndexType.KEYS );
namespaceIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
projectVersionMetadataModel.addColumnDefinition( namespaceIdColumn );

BasicColumnDefinition repositoryNameColumn = new BasicColumnDefinition();
repositoryNameColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
repositoryNameColumn.setIndexName( REPOSITORY_NAME.toString() );
repositoryNameColumn.setIndexType( ColumnIndexType.KEYS );
repositoryNameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
projectVersionMetadataModel.addColumnDefinition( repositoryNameColumn );

BasicColumnDefinition idColumn = new BasicColumnDefinition();
idColumn.setName( StringSerializer.get().toByteBuffer( ID.toString() ) );
idColumn.setIndexName( ID.toString() );
idColumn.setIndexType( ColumnIndexType.KEYS );
idColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
projectVersionMetadataModel.addColumnDefinition( idColumn );

BasicColumnDefinition projectIdColumn = new BasicColumnDefinition();
projectIdColumn.setName( StringSerializer.get().toByteBuffer( PROJECT_ID.toString() ) );
projectIdColumn.setIndexName( PROJECT_ID.toString() );
projectIdColumn.setIndexType( ColumnIndexType.KEYS );
projectIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
projectVersionMetadataModel.addColumnDefinition( projectIdColumn );
List<String> hostNames = new ArrayList<>( );
hostNames.add( cassandraHost + ":" + cassandraPort );
System.out.println( "Contact point: " + cassandraHost + ":" + cassandraPort );
configLoader =
DriverConfigLoader.programmaticBuilder( )

}
.withStringList( DefaultDriverOption.CONTACT_POINTS, hostNames )
.withInt( DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE, maxActive )
.withInt( DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE, maxActive )
//.withInt( DefaultDriverOption.CONNECTION_MAX_REQUESTS, maxActive )
.withString( DefaultDriverOption.REQUEST_CONSISTENCY, readConsistencyLevel )
.build( );

// artifactmetadatamodel table
{

final ColumnFamilyDefinition artifactMetadataModel =
HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
getArtifactMetadataFamilyName(), //
ComparatorType.UTF8TYPE );
cfds.add( artifactMetadataModel );

// creating indexes for cql query

BasicColumnDefinition idColumn = new BasicColumnDefinition();
idColumn.setName( StringSerializer.get().toByteBuffer( ID.toString() ) );
idColumn.setIndexName( ID.toString() );
idColumn.setIndexType( ColumnIndexType.KEYS );
idColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
artifactMetadataModel.addColumnDefinition( idColumn );

BasicColumnDefinition repositoryNameColumn = new BasicColumnDefinition();
repositoryNameColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
repositoryNameColumn.setIndexName( REPOSITORY_NAME.toString() );
repositoryNameColumn.setIndexType( ColumnIndexType.KEYS );
repositoryNameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
artifactMetadataModel.addColumnDefinition( repositoryNameColumn );

BasicColumnDefinition namespaceIdColumn = new BasicColumnDefinition();
namespaceIdColumn.setName( StringSerializer.get().toByteBuffer( NAMESPACE_ID.toString() ) );
namespaceIdColumn.setIndexName( NAMESPACE_ID.toString() );
namespaceIdColumn.setIndexType( ColumnIndexType.KEYS );
namespaceIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
artifactMetadataModel.addColumnDefinition( namespaceIdColumn );

BasicColumnDefinition projectColumn = new BasicColumnDefinition();
projectColumn.setName( StringSerializer.get().toByteBuffer( PROJECT.toString() ) );
projectColumn.setIndexName( PROJECT.toString() );
projectColumn.setIndexType( ColumnIndexType.KEYS );
projectColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
artifactMetadataModel.addColumnDefinition( projectColumn );

BasicColumnDefinition projectVersionColumn = new BasicColumnDefinition();
projectVersionColumn.setName( StringSerializer.get().toByteBuffer( PROJECT_VERSION.toString() ) );
projectVersionColumn.setIndexName( PROJECT_VERSION.toString() );
projectVersionColumn.setIndexType( ColumnIndexType.KEYS );
projectVersionColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
artifactMetadataModel.addColumnDefinition( projectVersionColumn );

BasicColumnDefinition versionColumn = new BasicColumnDefinition();
versionColumn.setName( StringSerializer.get().toByteBuffer( VERSION.toString() ) );
versionColumn.setIndexName( VERSION.toString() );
versionColumn.setIndexType( ColumnIndexType.KEYS );
versionColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
artifactMetadataModel.addColumnDefinition( versionColumn );

BasicColumnDefinition whenGatheredColumn = new BasicColumnDefinition();
whenGatheredColumn.setName( StringSerializer.get().toByteBuffer( WHEN_GATHERED.toString() ) );
whenGatheredColumn.setIndexName( WHEN_GATHERED.toString() );
whenGatheredColumn.setIndexType( ColumnIndexType.KEYS );
whenGatheredColumn.setValidationClass( ComparatorType.LONGTYPE.getClassName() );
artifactMetadataModel.addColumnDefinition( whenGatheredColumn );

BasicColumnDefinition sha1Column = new BasicColumnDefinition();
sha1Column.setName( StringSerializer.get().toByteBuffer( SHA1.toString() ) );
sha1Column.setIndexName( SHA1.toString() );
sha1Column.setIndexType( ColumnIndexType.KEYS );
sha1Column.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
artifactMetadataModel.addColumnDefinition( sha1Column );

BasicColumnDefinition md5Column = new BasicColumnDefinition();
md5Column.setName( StringSerializer.get().toByteBuffer( MD5.toString() ) );
md5Column.setIndexName( MD5.toString() );
md5Column.setIndexType( ColumnIndexType.KEYS );
md5Column.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
artifactMetadataModel.addColumnDefinition( md5Column );


CreateKeyspace cKeySpace = createKeyspace( keyspaceName ).ifNotExists( ).withSimpleStrategy( replicationFactor );
CqlSession.builder( ).withConfigLoader( configLoader ).withLocalDatacenter( "datacenter1" ).build().execute( cKeySpace.build( ) );
}

// metadatafacetmodel table
{
final ColumnFamilyDefinition metadataFacetModel =
HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
getMetadataFacetFamilyName(), //
ComparatorType.UTF8TYPE );
cfds.add( metadataFacetModel );

// creating indexes for cql query

BasicColumnDefinition facetIdColumn = new BasicColumnDefinition();
facetIdColumn.setName( StringSerializer.get().toByteBuffer( FACET_ID.toString() ) );
facetIdColumn.setIndexName( FACET_ID.toString() );
facetIdColumn.setIndexType( ColumnIndexType.KEYS );
facetIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
metadataFacetModel.addColumnDefinition( facetIdColumn );

BasicColumnDefinition repositoryNameColumn = new BasicColumnDefinition();
repositoryNameColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
repositoryNameColumn.setIndexName( REPOSITORY_NAME.toString() );
repositoryNameColumn.setIndexType( ColumnIndexType.KEYS );
repositoryNameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
metadataFacetModel.addColumnDefinition( repositoryNameColumn );

BasicColumnDefinition nameColumn = new BasicColumnDefinition();
nameColumn.setName( StringSerializer.get().toByteBuffer( NAME.toString() ) );
nameColumn.setIndexName( NAME.toString() );
nameColumn.setIndexType( ColumnIndexType.KEYS );
nameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
metadataFacetModel.addColumnDefinition( nameColumn );

BasicColumnDefinition namespaceColumn = new BasicColumnDefinition();
namespaceColumn.setName( StringSerializer.get().toByteBuffer( NAMESPACE_ID.toString() ) );
namespaceColumn.setIndexName( NAMESPACE_ID.toString() );
namespaceColumn.setIndexType( ColumnIndexType.KEYS );
namespaceColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
metadataFacetModel.addColumnDefinition( namespaceColumn );

BasicColumnDefinition projectIdColumn = new BasicColumnDefinition();
projectIdColumn.setName( StringSerializer.get().toByteBuffer( PROJECT_ID.toString() ) );
projectIdColumn.setIndexName( PROJECT_ID.toString() );
projectIdColumn.setIndexType( ColumnIndexType.KEYS );
projectIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
metadataFacetModel.addColumnDefinition( projectIdColumn );

BasicColumnDefinition projectVersionColumn = new BasicColumnDefinition();
projectVersionColumn.setName( StringSerializer.get().toByteBuffer( PROJECT_VERSION.toString() ) );
projectVersionColumn.setIndexName( PROJECT_VERSION.toString() );
projectVersionColumn.setIndexType( ColumnIndexType.KEYS );
projectVersionColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
metadataFacetModel.addColumnDefinition( projectVersionColumn );

}
CqlSession session = getSession( );

// Checksum table
{
final ColumnFamilyDefinition checksumCf =
HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
getChecksumFamilyName(), //
ComparatorType.UTF8TYPE );

BasicColumnDefinition artifactMetatadaModel_key = new BasicColumnDefinition();
artifactMetatadaModel_key.setName( StringSerializer.get().toByteBuffer( "artifactMetadataModel.key" ) );
artifactMetatadaModel_key.setIndexName( "artifactMetadataModel_key" );
artifactMetatadaModel_key.setIndexType( ColumnIndexType.KEYS );
artifactMetatadaModel_key.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
checksumCf.addColumnDefinition( artifactMetatadaModel_key );


BasicColumnDefinition checksumAlgorithmColumn = new BasicColumnDefinition();
checksumAlgorithmColumn.setName( StringSerializer.get().toByteBuffer( CHECKSUM_ALG.toString() ) );
checksumAlgorithmColumn.setIndexName( CHECKSUM_ALG.toString() );
checksumAlgorithmColumn.setIndexType( ColumnIndexType.KEYS );
checksumAlgorithmColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
checksumCf.addColumnDefinition( checksumAlgorithmColumn );

BasicColumnDefinition checksumValueColumn = new BasicColumnDefinition();
checksumValueColumn.setName( StringSerializer.get().toByteBuffer( CHECKSUM_VALUE.toString() ) );
checksumValueColumn.setIndexName( CHECKSUM_VALUE.toString() );
checksumValueColumn.setIndexType( ColumnIndexType.KEYS );
checksumValueColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
checksumCf.addColumnDefinition( checksumValueColumn );

BasicColumnDefinition repositoryNameColumn = new BasicColumnDefinition();
repositoryNameColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
repositoryNameColumn.setIndexName( REPOSITORY_NAME.toString() );
repositoryNameColumn.setIndexType( ColumnIndexType.KEYS );
repositoryNameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
checksumCf.addColumnDefinition( repositoryNameColumn );


cfds.add( checksumCf );

// creating indexes for cql query

}

// mailinglist table
{
final ColumnFamilyDefinition mailingListCf =
HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
getMailingListFamilyName(), //
ComparatorType.UTF8TYPE );
// namespace table
{
String tableName = getNamespaceFamilyName( );
CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
.withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
.withColumn( NAME.toString( ), DataTypes.TEXT )
.withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
.withCompactStorage( );
session.execute( table.build( ) );
CreateIndex index = createIndex( NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAME.toString( ) );
session.execute( index.build( ) );
index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
session.execute( index.build( ) );
}

BasicColumnDefinition projectVersionMetadataModel_key = new BasicColumnDefinition();
projectVersionMetadataModel_key.setName( StringSerializer.get().toByteBuffer( "projectVersionMetadataModel.key" ) );
projectVersionMetadataModel_key.setIndexName( "projectVersionMetadataModel_key" );
projectVersionMetadataModel_key.setIndexType( ColumnIndexType.KEYS );
projectVersionMetadataModel_key.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
mailingListCf.addColumnDefinition( projectVersionMetadataModel_key );
// Repository Table
{
String tableName = getRepositoryFamilyName( );
CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
.withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
.withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
.withCompactStorage( );
session.execute( table.build( ) );
CreateIndex index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
session.execute( index.build( ) );

cfds.add( mailingListCf );
}

// creating indexes for cql query
// Project table
{
String tableName = getProjectFamilyName( );
CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
.withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
.withColumn( PROJECT_ID.toString( ), DataTypes.TEXT )
.withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
.withColumn( NAMESPACE_ID.toString( ), DataTypes.TEXT )
.withColumn( PROJECT_PROPERTIES.toString( ), DataTypes.frozenMapOf( DataTypes.TEXT, DataTypes.TEXT ) )
.withCompactStorage( );
session.execute( table.build( ) );
CreateIndex index = createIndex( PROJECT_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_ID.toString( ) );
session.execute( index.build( ) );
index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
session.execute( index.build( ) );
index = createIndex( NAMESPACE_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAMESPACE_ID.toString( ) );
session.execute( index.build( ) );

}
}

// license table
{
final ColumnFamilyDefinition licenseCf =
HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
getLicenseFamilyName(), //
ComparatorType.UTF8TYPE );
// Project Version Metadata Model
{
String tableName = getProjectVersionMetadataFamilyName( );
CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
.withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
.withColumn( NAMESPACE_ID.toString( ), DataTypes.TEXT )
.withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
.withColumn( PROJECT_VERSION.toString( ), DataTypes.TEXT )
.withColumn( PROJECT_ID.toString( ), DataTypes.TEXT )
.withColumn( DESCRIPTION.toString( ), DataTypes.TEXT )
.withColumn( URL.toString( ), DataTypes.TEXT )
.withColumn( NAME.toString(), DataTypes.TEXT )
.withColumn( VERSION.toString(), DataTypes.TEXT )
.withColumn( VERSION_PROPERTIES.toString(), DataTypes.mapOf( DataTypes.TEXT, DataTypes.TEXT ) )
.withColumn( "incomplete", DataTypes.BOOLEAN )
.withColumn( "\"ciManagement.system\"", DataTypes.TEXT )
.withColumn( "\"ciManagement.url\"", DataTypes.TEXT )
.withColumn( "\"issueManagement.system\"", DataTypes.TEXT )
.withColumn( "\"issueManagement.url\"", DataTypes.TEXT )
.withColumn( "\"organization.name\"", DataTypes.TEXT )
.withColumn( "\"organization.url\"", DataTypes.TEXT )
.withColumn( "\"scm.url\"", DataTypes.TEXT )
.withColumn( "\"scm.connection\"", DataTypes.TEXT )
.withColumn( "\"scm.developerConnection\"", DataTypes.TEXT );
session.execute( table.build( ) );
CreateIndex index = createIndex( NAMESPACE_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAMESPACE_ID.toString( ) );
session.execute( index.build( ) );
index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
session.execute( index.build( ) );
index = createIndex( PROJECT_VERSION.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_VERSION.toString( ) );
session.execute( index.build( ) );
index = createIndex( PROJECT_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_ID.toString( ) );
session.execute( index.build( ) );
index = createIndex( VERSION_PROPERTIES.toString( ) + "_idx" ).ifNotExists( ).onTable( tableName ).andColumnEntries( VERSION_PROPERTIES.toString( ) );
session.execute( index.build( ) );
}

BasicColumnDefinition projectVersionMetadataModel_key = new BasicColumnDefinition();
projectVersionMetadataModel_key.setName( StringSerializer.get().toByteBuffer( "projectVersionMetadataModel.key" ) );
projectVersionMetadataModel_key.setIndexName( "projectVersionMetadataModel_key" );
projectVersionMetadataModel_key.setIndexType( ColumnIndexType.KEYS );
projectVersionMetadataModel_key.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
licenseCf.addColumnDefinition( projectVersionMetadataModel_key );
// Artifact Metadata Model
{
String tableName = getArtifactMetadataFamilyName( );
CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
.withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
.withColumn( ID.toString( ), DataTypes.TEXT )
.withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
.withColumn( NAMESPACE_ID.toString( ), DataTypes.TEXT )
.withColumn( PROJECT_ID.toString( ), DataTypes.TEXT )
.withColumn( PROJECT_VERSION.toString( ), DataTypes.TEXT )
.withColumn( VERSION.toString( ), DataTypes.TEXT )
.withColumn( WHEN_GATHERED.toString( ), DataTypes.BIGINT )
.withColumn( SHA1.toString( ), DataTypes.TEXT )
.withColumn( MD5.toString( ), DataTypes.TEXT )
.withColumn( FILE_LAST_MODIFIED.toString(), DataTypes.BIGINT)
.withColumn( SIZE.toString(), DataTypes.BIGINT )
.withCompactStorage( );
session.execute( table.build( ) );

CreateIndex index = createIndex( ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( ID.toString( ) );
session.execute( index.build( ) );
index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
session.execute( index.build( ) );
index = createIndex( NAMESPACE_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAMESPACE_ID.toString( ) );
session.execute( index.build( ) );
index = createIndex( PROJECT_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_ID.toString( ) );
session.execute( index.build( ) );
index = createIndex( PROJECT_VERSION.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_VERSION.toString( ) );
session.execute( index.build( ) );
index = createIndex( VERSION.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( VERSION.toString( ) );
session.execute( index.build( ) );
index = createIndex( WHEN_GATHERED.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( WHEN_GATHERED.toString( ) );
session.execute( index.build( ) );
index = createIndex( SHA1.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( SHA1.toString( ) );
session.execute( index.build( ) );
index = createIndex( MD5.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( MD5.toString( ) );
session.execute( index.build( ) );

cfds.add( licenseCf );
}
// Metadata Facet Model
{
String tableName = getMetadataFacetFamilyName( );
CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
.withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
.withColumn( FACET_ID.toString( ), DataTypes.TEXT )
.withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
.withColumn( NAME.toString( ), DataTypes.TEXT )
.withColumn( NAMESPACE_ID.toString( ), DataTypes.TEXT )
.withColumn( PROJECT_ID.toString( ), DataTypes.TEXT )
.withColumn( PROJECT_VERSION.toString( ), DataTypes.TEXT )
.withColumn( KEY.toString(), DataTypes.TEXT )
.withColumn( VALUE.toString(), DataTypes.TEXT)
.withColumn( WHEN_GATHERED.toString(), DataTypes.BIGINT )
.withCompactStorage( );
session.execute( table.build( ) );

CreateIndex index = createIndex( FACET_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( FACET_ID.toString( ) );
session.execute( index.build( ) );
index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
session.execute( index.build( ) );
index = createIndex( NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAME.toString( ) );
session.execute( index.build( ) );
index = createIndex( NAMESPACE_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAMESPACE_ID.toString( ) );
session.execute( index.build( ) );
index = createIndex( PROJECT_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_ID.toString( ) );
session.execute( index.build( ) );
index = createIndex( PROJECT_VERSION.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_VERSION.toString( ) );
session.execute( index.build( ) );
}
// Checksum Table
{
String tableName = getChecksumFamilyName( );
CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
.withPartitionKey( DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
.withColumn( "\"artifactMetadataModel.key\"", DataTypes.TEXT )
.withColumn( CHECKSUM_ALG.toString( ), DataTypes.TEXT )
.withColumn( CHECKSUM_VALUE.toString( ), DataTypes.TEXT )
.withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
.withCompactStorage( );
session.execute( table.build( ) );

CreateIndex index = createIndex( CHECKSUM_ALG.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( CHECKSUM_ALG.toString( ) );
session.execute( index.build( ) );
index = createIndex( CHECKSUM_VALUE.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( CHECKSUM_VALUE.toString( ) );
session.execute( index.build( ) );
index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
session.execute( index.build( ) );
}
// Mailinglist Table
{
String tableName = getMailingListFamilyName( );
CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
.withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
.withColumn( NAME.toString(), DataTypes.TEXT )
.withColumn( "\"projectVersionMetadataModel.key\"", DataTypes.TEXT )
.withColumn( "mainArchiveUrl", DataTypes.TEXT )
.withColumn( "postAddress", DataTypes.TEXT )
.withColumn( "subscribeAddress", DataTypes.TEXT )
.withColumn( "unsubscribeAddress", DataTypes.TEXT )
.withColumn( "otherArchive", DataTypes.frozenListOf( DataTypes.TEXT ) )
.withCompactStorage( );
session.execute( table.build( ) );

CreateIndex index = createIndex( "\"projectVersionMetadataModel_key\"" ).ifNotExists( ).onTable( tableName ).andColumn( "\"\"projectVersionMetadataModel.key\"\"" );
session.execute( index.build( ) );
}

// creating indexes for cql query
// License Table
{
String tableName = getLicenseFamilyName( );
CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
.withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
.withColumn( "\"projectVersionMetadataModel.key\"", DataTypes.TEXT )
.withColumn( NAME.toString(), DataTypes.TEXT )
.withColumn( URL.toString(), DataTypes.TEXT )
.withCompactStorage( );
session.execute( table.build( ) );

CreateIndex index = createIndex( "\"projectVersionMetadataModel_key\"" ).ifNotExists( ).onTable( tableName ).andColumn( "\"\"projectVersionMetadataModel.key\"\"" );
session.execute( index.build( ) );
}

}
// Dependency Table
{
String tableName = getDependencyFamilyName( );
CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
.withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
.withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
.withColumn( GROUP_ID.toString( ), DataTypes.TEXT )
.withColumn( ARTIFACT_ID.toString( ), DataTypes.TEXT )
.withColumn( VERSION.toString( ), DataTypes.TEXT )
.withColumn( "\"projectVersionMetadataModel.key\"", DataTypes.TEXT )
.withColumn( "classifier", DataTypes.TEXT )
.withColumn( "optional", DataTypes.TEXT )
.withColumn( "scope", DataTypes.TEXT )
.withColumn( "systemPath", DataTypes.TEXT )
.withColumn( "type", DataTypes.TEXT )
.withCompactStorage( );

session.execute( table.build( ) );

CreateIndex index = createIndex( "groupIdIdx" ).ifNotExists( ).onTable( tableName ).andColumn( GROUP_ID.toString( ) );
session.execute( index.build( ) );
index = createIndex( "\"projectVersionMetadataModel_key\"" ).ifNotExists( ).onTable( tableName ).andColumn( "\"\"projectVersionMetadataModel.key\"\"" );
session.execute( index.build( ) );

// dependency table
{
final ColumnFamilyDefinition dependencyCf =
HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
getDependencyFamilyName(), //
ComparatorType.UTF8TYPE );
cfds.add( dependencyCf );

// creating indexes for cql query

BasicColumnDefinition groupIdColumn = new BasicColumnDefinition();
groupIdColumn.setName( StringSerializer.get().toByteBuffer( GROUP_ID.toString() ) );
groupIdColumn.setIndexName( "groupIdIdx" );
groupIdColumn.setIndexType( ColumnIndexType.KEYS );
groupIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
dependencyCf.addColumnDefinition( groupIdColumn );

BasicColumnDefinition projectVersionMetadataModel_key = new BasicColumnDefinition();
projectVersionMetadataModel_key.setName( StringSerializer.get().toByteBuffer( "projectVersionMetadataModel.key" ) );
projectVersionMetadataModel_key.setIndexName( "projectVersionMetadataModel_key" );
projectVersionMetadataModel_key.setIndexType( ColumnIndexType.KEYS );
projectVersionMetadataModel_key.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
dependencyCf.addColumnDefinition( projectVersionMetadataModel_key );
}

}

// TODO take care of update new table!!
{ // ensure keyspace exists, here if the keyspace doesn't exist we suppose nothing exist
if ( cluster.describeKeyspace( keyspaceName ) == null )
{
logger.info( "Creating Archiva Cassandra '{}' keyspace.", keyspaceName );
cluster.addKeyspace( HFactory.createKeyspaceDefinition( keyspaceName, //
ThriftKsDef.DEF_STRATEGY_CLASS, //
replicationFactor, //
cfds )
);
}
}

}

@Override
public void start()
public void start( )
{
}

@PreDestroy
@Override
public void shutdown()
public void shutdown( )
{
if (this.cqlSession!=null) {
this.cqlSession.close( );
}
}


@Override
public boolean started()
public boolean started( )
{
return started;
}


@Override
public Keyspace getKeyspace()
{
return keyspace;
}

@Override
public Cluster getCluster()
{
return cluster;
}

@Override
public String getRepositoryFamilyName()
public String getRepositoryFamilyName( )
{
return repositoryFamilyName;
}

@Override
public String getNamespaceFamilyName()
public String getNamespaceFamilyName( )
{
return namespaceFamilyName;
}

@Override
public String getProjectFamilyName()
public String getProjectFamilyName( )
{
return projectFamilyName;
}

@Override
public String getProjectVersionMetadataFamilyName()
public String getProjectVersionMetadataFamilyName( )
{
return projectVersionMetadataFamilyName;
}

public String[] getProjectVersionMetadataColumns() {
return projectVersionMetadataColumns;
}

@Override
public String getArtifactMetadataFamilyName()
public String getArtifactMetadataFamilyName( )
{
return artifactMetadataFamilyName;
}

@Override
public String getMetadataFacetFamilyName()
public String getMetadataFacetFamilyName( )
{
return metadataFacetFamilyName;
}

@Override
public String getMailingListFamilyName()
public String getMailingListFamilyName( )
{
return mailingListFamilyName;
}

@Override
public String getLicenseFamilyName()
public String getLicenseFamilyName( )
{
return licenseFamilyName;
}

@Override
public String getDependencyFamilyName()
public String getDependencyFamilyName( )
{
return dependencyFamilyName;
}

@Override
public String getChecksumFamilyName() {
public String getChecksumFamilyName( )
{
return checksumFamilyName;
}

@Override
public DriverConfigLoader getConfigLoader( )
{
return configLoader;
}

@Override
public String getKeyspaceName( )
{
return keyspaceName;
}
}
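
Note: initialize() above now creates the keyspace, tables and secondary indexes through the driver's SchemaBuilder (createKeyspace/createTable/createIndex) instead of Hector ColumnFamilyDefinitions, and getSession() lazily reopens a CqlSession bound to that keyspace. A small sketch that previews the CQL such a builder chain produces for the namespace table; the keyspace name "archiva" and the plain column names are assumptions for illustration:

import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.querybuilder.schema.CreateTableWithOptions;

import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.createTable;

public class SchemaPreviewSketch
{
    public static void main( String[] args )
    {
        // Mirrors the namespace-table block in initialize(); build() would hand the statement to a session,
        // asCql() just renders it (a CREATE TABLE IF NOT EXISTS ... WITH COMPACT STORAGE statement).
        CreateTableWithOptions table = createTable( "archiva", "namespace" ).ifNotExists( )
            .withPartitionKey( "key", DataTypes.TEXT )
            .withColumn( "name", DataTypes.TEXT )
            .withColumn( "repositoryName", DataTypes.TEXT )
            .withCompactStorage( );
        System.out.println( table.asCql( ) );
    }
}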

+ 2  - 0   archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/model/ColumnNames.java

@@ -32,6 +32,7 @@ public enum ColumnNames
NAMESPACE_ID( "namespaceId" ),
PROJECT_ID( "projectId" ),
PROJECT_VERSION( "projectVersion" ),
PROJECT_PROPERTIES("projectProperties"),
KEY( "facetKey" ),
VALUE( "value" ),
ID( "id" ),
@@ -41,6 +42,7 @@ public enum ColumnNames
PROJECT( "project" ),
FILE_LAST_MODIFIED( "fileLastModified" ),
VERSION( "version" ),
VERSION_PROPERTIES("versionProperties"),
GROUP_ID( "groupId" ),
ARTIFACT_ID( "artifactId" ),
DESCRIPTION( "description" ),

+ 57  - 37    archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/CassandraMetadataRepositoryTest.java

@@ -19,6 +19,7 @@ package org.apache.archiva.metadata.repository.cassandra;
* under the License.
*/

import com.datastax.oss.driver.api.core.CqlSession;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
import org.apache.archiva.metadata.repository.MetadataRepository;
@@ -28,22 +29,32 @@ import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.cassandra.model.ProjectVersionMetadataModel;
import org.easymock.EasyMock;
import org.easymock.IMocksControl;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import javax.inject.Inject;
import javax.inject.Named;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.truncate;
import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.dropTable;
import static org.assertj.core.api.Assertions.assertThat;

/**
* @author Olivier Lamy
*/
@ExtendWith( SpringExtension.class )
@TestInstance( TestInstance.Lifecycle.PER_CLASS )
public class CassandraMetadataRepositoryTest
extends AbstractMetadataRepositoryTest
{
@@ -59,6 +70,9 @@ public class CassandraMetadataRepositoryTest
IMocksControl sessionControl;
RepositorySession session;

long cTime;
int testNum = 0;
AtomicBoolean clearedTables = new AtomicBoolean( false );


@Override
@@ -73,13 +87,14 @@ public class CassandraMetadataRepositoryTest
return cmr;
}

@Before
@Override
public void setUp()
@BeforeEach
public void setUp( TestInfo testInfo )
throws Exception
{

cTime = System.currentTimeMillis( );
System.err.println( "Setting up "+(testNum++) + " - " + testInfo.getDisplayName() );
super.setUp();
System.err.println( "Setting up 2 " + testInfo.getDisplayName( ) + " - " + (System.currentTimeMillis( ) - cTime) );
assertMaxTries =1;
assertRetrySleepMs=10;

@@ -104,7 +119,12 @@ public class CassandraMetadataRepositoryTest

sessionFactoryControl.replay();

clearReposAndNamespace( cassandraArchivaManager );
if (!clearedTables.get())
{
clearReposAndNamespace( cassandraArchivaManager );
clearedTables.set( true );
}
System.err.println( "Finished setting up "+testInfo.getDisplayName() + " - " + (System.currentTimeMillis( ) - cTime) );
}

/**
@@ -139,44 +159,44 @@ public class CassandraMetadataRepositoryTest
}


@After
public void shutdown()
@AfterEach
public void shutdown(TestInfo testInfo)
throws Exception
{
System.err.println( "Shutting down " + testInfo.getDisplayName( ) + " - " + ( System.currentTimeMillis( ) - cTime ) );
clearReposAndNamespace( cassandraArchivaManager );
clearedTables.set( true );
super.tearDown();
System.err.println( "Shutting down finished" + testInfo.getDisplayName( ) + " - " + ( System.currentTimeMillis( ) - cTime ) );
}

static void clearReposAndNamespace( CassandraArchivaManager cassandraArchivaManager )
throws Exception
{
cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
cassandraArchivaManager.getProjectFamilyName() );

cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
cassandraArchivaManager.getNamespaceFamilyName() );

cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
cassandraArchivaManager.getRepositoryFamilyName() );

cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
cassandraArchivaManager.getProjectVersionMetadataFamilyName() );

cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
cassandraArchivaManager.getArtifactMetadataFamilyName() );

cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
cassandraArchivaManager.getMetadataFacetFamilyName() );

cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
cassandraArchivaManager.getMailingListFamilyName() );

cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
cassandraArchivaManager.getLicenseFamilyName() );

cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
cassandraArchivaManager.getDependencyFamilyName() );

if (cassandraArchivaManager!=null)
{
CqlSession session = cassandraArchivaManager.getSession( );
{
List<String> tables = Arrays.asList(
cassandraArchivaManager.getProjectFamilyName( ),
cassandraArchivaManager.getNamespaceFamilyName( ),
cassandraArchivaManager.getRepositoryFamilyName( ),
cassandraArchivaManager.getProjectVersionMetadataFamilyName( ),
cassandraArchivaManager.getArtifactMetadataFamilyName( ),
cassandraArchivaManager.getMetadataFacetFamilyName( ),
cassandraArchivaManager.getMailingListFamilyName( ),
cassandraArchivaManager.getLicenseFamilyName( ),
cassandraArchivaManager.getDependencyFamilyName( )
);
for ( String table : tables )
{
session.execute( truncate( table ).build( ) );
}

}
} else {
System.err.println( "cassandraArchivaManager is null" );
}
}

}
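For context, the query-builder call used in the rewritten clearReposAndNamespace( ) produces a plain CQL TRUNCATE statement that runs against the keyspace the session is bound to. A minimal sketch, with an illustrative table name rather than one taken from this change:

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.SimpleStatement;

import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.truncate;

public class TruncateSketch
{
    // The test above iterates over the family names supplied by CassandraArchivaManager;
    // the table name here is only an example.
    static void clearTable( CqlSession session )
    {
        SimpleStatement stmt = truncate( "projectversionmetadata" ).build( );
        // stmt.getQuery( ) yields "TRUNCATE projectversionmetadata"; executing it empties the
        // table without dropping its schema, so the tests can keep reusing the tables.
        session.execute( stmt );
    }
}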

+ 8  - 7    archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/RepositoriesNamespaceTest.java

@@ -23,13 +23,14 @@ import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.repository.cassandra.model.Namespace;
import org.apache.archiva.metadata.repository.cassandra.model.Repository;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import javax.inject.Inject;
import javax.inject.Named;
@@ -39,7 +40,7 @@ import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Olivier Lamy
*/
@RunWith( ArchivaSpringJUnit4ClassRunner.class )
@ExtendWith( SpringExtension.class )
@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath*:/spring-context.xml" } )
public class RepositoriesNamespaceTest
{
@@ -53,7 +54,7 @@ public class RepositoriesNamespaceTest

CassandraMetadataRepository cmr;

@Before
@BeforeEach
public void setup()
throws Exception
{
@@ -65,7 +66,7 @@ public class RepositoriesNamespaceTest
CassandraMetadataRepositoryTest.clearReposAndNamespace( cassandraArchivaManager );
}

@After
@AfterEach
public void shutdown()
throws Exception
{

+ 1  - 1    archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/test/resources/log4j2-test.xml

@@ -35,7 +35,7 @@

<logger name="org.apache.archiva.metadata.repository.cassandra" level="debug"/>

<root level="info" includeLocation="true">
<root level="debug" includeLocation="true">
<appender-ref ref="console"/>
</root>
</loggers>

+ 15  - 16    archiva-modules/metadata/metadata-store-provider/metadata-store-file/src/test/java/org/apache/archiva/metadata/repository/file/FileMetadataRepositoryTest.java

@@ -27,14 +27,13 @@ import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataService;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -58,7 +57,7 @@ public class FileMetadataRepositoryTest
return this.sessionFactory;
}

@Before
@BeforeEach
@Override
public void setUp()
throws Exception
@@ -81,7 +80,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testGetArtifactsByProjectVersionMetadata()
throws Exception
{
@@ -89,7 +88,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testGetArtifactsByProjectVersionMetadataNoRepository()
throws Exception
{
@@ -97,7 +96,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testGetArtifactsByProjectVersionMetadataAllRepositories()
throws Exception
{
@@ -105,7 +104,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testGetArtifactsByMetadataAllRepositories()
throws Exception
{
@@ -113,7 +112,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testGetArtifactsByPropertySingleResult()
throws Exception
{
@@ -121,7 +120,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testSearchArtifactsByKey()
throws Exception
{
@@ -129,7 +128,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testSearchArtifactsByKeyExact()
throws Exception
{
@@ -137,7 +136,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testSearchArtifactsFullText()
throws Exception
{
@@ -145,7 +144,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testSearchArtifactsFullTextExact()
throws Exception
{
@@ -153,7 +152,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testSearchArtifactsByFacetKeyAllRepos()
throws Exception
{
@@ -161,7 +160,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testSearchArtifactsByFacetKey()
throws Exception
{
@@ -169,7 +168,7 @@ public class FileMetadataRepositoryTest
}

@Override
@Ignore
@Disabled
public void testSearchArtifactsFullTextByFacet()
throws Exception
{

+ 7  - 10    archiva-modules/metadata/metadata-store-provider/oak-jcr/metadata-store-jcr/src/test/java/org/apache/archiva/metadata/repository/jcr/JcrMetadataRepositoryTest.java

@@ -23,17 +23,14 @@ import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
import org.apache.archiva.metadata.repository.DefaultMetadataResolver;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataService;
import org.apache.archiva.metadata.repository.MetadataSessionException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import javax.jcr.RepositoryException;
import javax.jcr.Session;
import java.io.IOException;
import java.nio.file.Files;
@@ -67,7 +64,7 @@ public class JcrMetadataRepositoryTest
return sessionFactory;
}

@BeforeClass
@BeforeAll
public static void setupSpec( ) throws IOException, InvalidFileStoreVersionException
{
Path directory = Paths.get( "target/test-repositories" );
@@ -89,7 +86,7 @@ public class JcrMetadataRepositoryTest

}

@Before
@BeforeEach
@Override
public void setUp() throws Exception
{
@@ -106,7 +103,7 @@ public class JcrMetadataRepositoryTest
}
}

@AfterClass
@AfterAll
public static void stopSpec( )
throws Exception
{

+ 6  - 0    pom.xml

@@ -1800,6 +1800,12 @@
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.easymock</groupId>

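The hunk above only adds the managed junit-jupiter-api dependency; the DataStax driver artifacts that back the new com.datastax.oss.driver imports are not shown in this hunk. For orientation, a typical Maven declaration looks like the following — the version property name is an assumption and the exact modules are not taken from this commit:

<!-- Illustrative only: the version property and module selection are not from this change -->
<dependency>
  <groupId>com.datastax.oss</groupId>
  <artifactId>java-driver-core</artifactId>
  <version>${cassandra.driver.version}</version>
</dependency>
<dependency>
  <groupId>com.datastax.oss</groupId>
  <artifactId>java-driver-query-builder</artifactId>
  <version>${cassandra.driver.version}</version>
</dependency>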