source.dussan.org Git - archiva.git/commitdiff
Changing cassandra version and using datastax driver
author Martin Stockhammer <martin_s@apache.org>
Sat, 28 Aug 2021 18:43:10 +0000 (20:43 +0200)
committer Martin Stockhammer <martin_s@apache.org>
Sat, 28 Aug 2021 18:43:10 +0000 (20:43 +0200)
14 files changed:
archiva-modules/metadata/metadata-repository-api/src/test/java/org/apache/archiva/metadata/repository/AbstractMetadataRepositoryTest.java
archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/pom.xml
archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/cassandra/cql/load.cql [new file with mode: 0644]
archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraArchivaManager.java
archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraMetadataRepository.java
archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraUtils.java
archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/DefaultCassandraArchivaManager.java
archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/model/ColumnNames.java
archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/CassandraMetadataRepositoryTest.java
archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/RepositoriesNamespaceTest.java
archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/test/resources/log4j2-test.xml
archiva-modules/metadata/metadata-store-provider/metadata-store-file/src/test/java/org/apache/archiva/metadata/repository/file/FileMetadataRepositoryTest.java
archiva-modules/metadata/metadata-store-provider/oak-jcr/metadata-store-jcr/src/test/java/org/apache/archiva/metadata/repository/jcr/JcrMetadataRepositoryTest.java
pom.xml

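Note: this commit replaces the Thrift-based Hector client with the DataStax java-driver 4.x (CQL), raises the embedded Cassandra used for testing to 4.0, migrates the shared test base class from JUnit 4 to JUnit Jupiter, and moves the Cassandra tests from surefire to failsafe. The following sketch is not part of the commit; it only illustrates the query-builder pattern the rewritten repository code uses (select, check for an existing row, insert when missing). Table and column names here are placeholders; the real code takes them from CassandraArchivaManager and ColumnNames.

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.Row;
import com.datastax.oss.driver.api.querybuilder.select.Select;

import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.insertInto;
import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;
import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom;

public class RepositoryLookupSketch
{
    // Look up a repository row by name and create it when missing, mirroring
    // the getOrCreateRepository() rewrite further down in this diff.
    public static String getOrCreate( CqlSession session, String table, String repositoryId )
    {
        Select query = selectFrom( table )
            .column( "repositoryName" )                      // placeholder column name
            .whereColumn( "repositoryName" ).isEqualTo( literal( repositoryId ) )
            .allowFiltering( );
        Row row = session.execute( query.build( ) ).one( );
        if ( row == null )
        {
            session.execute( insertInto( table )
                .value( "key", literal( repositoryId ) )     // "key" as the single primary key column
                .value( "repositoryName", literal( repositoryId ) )
                .build( ) );
            return repositoryId;
        }
        return row.getString( "repositoryName" );
    }
}
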
diff --git a/archiva-modules/metadata/metadata-repository-api/src/test/java/org/apache/archiva/metadata/repository/AbstractMetadataRepositoryTest.java b/archiva-modules/metadata/metadata-repository-api/src/test/java/org/apache/archiva/metadata/repository/AbstractMetadataRepositoryTest.java
index abcd2495522cef1ca10f56aa467f3f0ab2497bce..52eb42195310d0f58d3419cef31cc2769c2b9d91 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.archiva.metadata.repository;
  * under the License.
  */
 
-import junit.framework.TestCase;
 import org.apache.archiva.checksum.ChecksumAlgorithm;
 import org.apache.archiva.metadata.QueryParameter;
 import org.apache.archiva.metadata.generic.GenericMetadataFacet;
@@ -37,12 +36,13 @@ import org.apache.archiva.metadata.model.ProjectMetadata;
 import org.apache.archiva.metadata.model.ProjectVersionMetadata;
 import org.apache.archiva.metadata.model.ProjectVersionReference;
 import org.apache.archiva.metadata.model.Scm;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
 
 import java.text.SimpleDateFormat;
 import java.time.ZoneId;
@@ -62,11 +62,12 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.*;
 
-@RunWith( SpringJUnit4ClassRunner.class )
+@ExtendWith( SpringExtension.class )
+@TestInstance( TestInstance.Lifecycle.PER_CLASS )
 @ContextConfiguration( locations = {"classpath*:/META-INF/spring-context.xml", "classpath*:/spring-context.xml"} )
 public abstract class AbstractMetadataRepositoryTest
-    extends TestCase
 {
     protected static final String OTHER_REPO_ID = "other-repo";
 
@@ -108,6 +109,14 @@ public abstract class AbstractMetadataRepositoryTest
     protected int assertMaxTries =10;
     protected int assertRetrySleepMs=500;
 
+    protected void setUp() throws Exception {
+
+    }
+
+    protected void tearDown() throws Exception {
+
+    }
+
     /*
      * Used by tryAssert to allow to throw exceptions in the lambda expression.
      */
@@ -452,19 +461,23 @@ public abstract class AbstractMetadataRepositoryTest
             getRepository( ).updateProjectVersion( session, TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, metadata );
 
             metadata = getRepository( ).getProjectVersion( session, TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION );
+            assertNotNull( metadata );
             assertEquals( TEST_PROJECT_VERSION, metadata.getId( ) );
             assertEquals( TEST_PROJECT_VERSION, metadata.getVersion( ) );
             assertEquals( "project name", metadata.getName( ) );
             assertEquals( "project description", metadata.getDescription( ) );
             assertEquals( "the url", metadata.getUrl( ) );
 
+            assertNotNull( metadata.getScm( ) );
             assertEquals( "connection", metadata.getScm( ).getConnection( ) );
             assertEquals( "dev conn", metadata.getScm( ).getDeveloperConnection( ) );
             assertEquals( "url", metadata.getScm( ).getUrl( ) );
 
+            assertNotNull( metadata.getCiManagement( ) );
             assertEquals( "system", metadata.getCiManagement( ).getSystem( ) );
             assertEquals( "ci url", metadata.getCiManagement( ).getUrl( ) );
 
+            assertNotNull( metadata.getIssueManagement( ) );
             assertEquals( "system", metadata.getIssueManagement( ).getSystem( ) );
             assertEquals( "issue tracker url", metadata.getIssueManagement( ).getUrl( ) );
 
diff --git a/archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/pom.xml b/archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/pom.xml
index 116125850a51d3ab30549ab5dc10f040baa53fba..bb9ae94fa9e5c9a356564369bd37a8baaf64e0a2 100644 (file)
@@ -31,7 +31,8 @@
 
   <properties>
     <site.staging.base>${project.parent.parent.basedir}</site.staging.base>
-    <cassandraVersion>3.11.11</cassandraVersion>
+    <cassandraVersion>4.0.0</cassandraVersion>
+    <datastax.driver.version>4.13.0</datastax.driver.version>
   </properties>
 
   <dependencies>
       <artifactId>modelmapper</artifactId>
     </dependency>
 
+    <!--
     <dependency>
       <groupId>org.yaml</groupId>
       <artifactId>snakeyaml</artifactId>
       <version>1.27</version>
     </dependency>
-
+-->
     <dependency>
       <groupId>org.apache.cassandra</groupId>
       <artifactId>cassandra-all</artifactId>
       <version>${cassandraVersion}</version>
+      <scope>test</scope>
       <exclusions>
         <exclusion>
           <groupId>log4j</groupId>
           <groupId>com.addthis.metrics</groupId>
           <artifactId>reporter-config3</artifactId>
         </exclusion>
-        <!-- Version upgrade, see below -->
         <exclusion>
-          <groupId>org.apache.tika</groupId>
-          <artifactId>tika-core</artifactId>
+          <groupId>net.openhft</groupId>
+          <artifactId>chronicle-wire</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
+    <dependency>
+      <groupId>net.openhft</groupId>
+      <artifactId>chronicle-wire</artifactId>
+      <version>2.21.89</version>
+      <scope>test</scope>
+    </dependency>
 
+    <dependency>
+      <groupId>com.datastax.oss</groupId>
+      <artifactId>java-driver-core</artifactId>
+      <version>${datastax.driver.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.datastax.oss</groupId>
+      <artifactId>java-driver-query-builder</artifactId>
+      <version>${datastax.driver.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.datastax.oss</groupId>
+      <artifactId>java-driver-mapper-runtime</artifactId>
+      <version>${datastax.driver.version}</version>
+    </dependency>
+
+    <!--
     <dependency>
       <groupId>org.hectorclient</groupId>
       <artifactId>hector-core</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
+    -->
+    <!--
     <dependency>
       <groupId>org.apache.cassandra</groupId>
       <artifactId>cassandra-thrift</artifactId>
           </exclusion>
       </exclusions>
     </dependency>
+    -->
     <!-- Transient dependencies of cassandra that are selected to use a higher version -->
+    <!--
     <dependency>
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
       <artifactId>tika-core</artifactId>
       <version>1.26</version>
     </dependency>
-
+-->
     <!-- Transitive dependency. Declared here to increase the version. -->
+    <!--
     <dependency>
       <groupId>io.netty</groupId>
       <artifactId>netty-all</artifactId>
       <version>${netty.version}</version>
     </dependency>
+    -->
+    <!--
     <dependency>
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-core</artifactId>
     </dependency>
-
+-->
     <!-- Is a dependency of cassandra -> hibernate-validator and replaced by new version -->
+    <!--
     <dependency>
       <groupId>org.jboss.logging</groupId>
       <artifactId>jboss-logging</artifactId>
     </dependency>
+    -->
 
     <!-- TEST Scope -->
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-test-utils</artifactId>
       <scope>test</scope>
     </dependency>
 
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
 
   </dependencies>
 
             </goals>
             <configuration>
               <portNames>
-                <portName>cassandraPort</portName>
+                <portName>cassandra.rpcPort</portName>
                 <portName>cassandra.storagePort</portName>
                 <portName>cassandra.stopPort</portName>
                 <portName>cassandra.jmxPort</portName>
+                <portName>cassandra.nativeTransportPort</portName>
               </portNames>
             </configuration>
           </execution>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>cassandra-maven-plugin</artifactId>
-        <version>2.0.0-1</version>
+        <version>3.6</version>
         <executions>
           <execution>
             <id>start-cassandra</id>
-            <phase>process-test-classes</phase>
+            <phase>pre-integration-test</phase>
             <goals>
               <goal>start</goal>
             </goals>
             <configuration>
-              <rpcPort>${cassandraPort}</rpcPort>
+              <nativeTransportPort>${cassandra.nativeTransportPort}</nativeTransportPort>
+              <rpcPort>${cassandra.rpcPort}</rpcPort>
               <storagePort>${cassandra.storagePort}</storagePort>
               <stopPort>${cassandra.stopPort}</stopPort>
               <jmxPort>${cassandra.jmxPort}</jmxPort>
               <addMainClasspath>false</addMainClasspath>
               <addTestClasspath>false</addTestClasspath>
               <startWaitSeconds>500</startWaitSeconds>
+              <startNativeTransport>true</startNativeTransport>
+              <logLevel>DEBUG</logLevel>
+              <loadAfterFirstStart>false</loadAfterFirstStart>
+              <yaml>
+                broadcast_rpc_address: 127.0.0.1
+              </yaml>
+              <systemPropertyVariables>
+                <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
+              </systemPropertyVariables>
             </configuration>
           </execution>
           <execution>
             <id>stop-cassandra</id>
-            <phase>test</phase>
+            <phase>post-integration-test</phase>
             <goals>
               <goal>stop</goal>
             </goals>
             <artifactId>slf4j-simple</artifactId>
             <version>${slf4j.version}</version>
           </dependency>
+          <dependency>
+            <groupId>net.java.dev.jna</groupId>
+            <artifactId>jna</artifactId>
+            <version>4.2.2</version>
+          </dependency>
+            <dependency>
+              <groupId>org.apache.cassandra</groupId>
+              <artifactId>cassandra-all</artifactId>
+              <version>3.11.10</version>
+            </dependency>
         </dependencies>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-failsafe-plugin</artifactId>
+        <version>3.0.0-M5</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>integration-test</goal>
+              <goal>verify</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <includes>
+            <include>**/*Test.java</include>
+          </includes>
+          <systemPropertyVariables>
+            <cassandra.port>${cassandra.nativeTransportPort}</cassandra.port>
+            <cassandra.host>127.0.0.1</cassandra.host>
+            <archiva.repositorySessionFactory.id>cassandra</archiva.repositorySessionFactory.id>
+            <appserver.base>${project.build.directory}/appserver-base</appserver.base>
+            <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
+          </systemPropertyVariables>
+          <trimStackTrace>false</trimStackTrace>
+          <skip>false</skip>
+        </configuration>
+        <dependencies>
+          <dependency>
+            <groupId>org.junit.jupiter</groupId>
+            <artifactId>junit-jupiter-engine</artifactId>
+            <version>${junit.jupiter.version}</version>
+          </dependency>
+        </dependencies>
+      </plugin>
+
     </plugins>
     <pluginManagement>
       <plugins>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-surefire-plugin</artifactId>
+          <executions>
+
+          </executions>
+          <configuration>
+            <skip>true</skip>
+          </configuration>
+        </plugin>
+
+        <plugin>
+          <groupId>org.apache.rat</groupId>
+          <artifactId>apache-rat-plugin</artifactId>
           <configuration>
-            <systemPropertyVariables>
-              <cassandra.port>${cassandraPort}</cassandra.port>
-              <archiva.repositorySessionFactory.id>cassandra</archiva.repositorySessionFactory.id>
-              <appserver.base>${project.build.directory}/appserver-base</appserver.base>
-            </systemPropertyVariables>
-            <trimStackTrace>false</trimStackTrace>
+            <excludes>
+              <exclude>src/cassandra/**</exclude>
+            </excludes>
           </configuration>
         </plugin>
       </plugins>
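
Note (not part of the commit): the failsafe configuration above passes the embedded node's coordinates to the tests as system properties (cassandra.host, cassandra.port). A minimal sketch of how a test could build its CqlSession from them; the datacenter name "datacenter1" is Cassandra's default (SimpleSnitch) and is an assumption here.

import com.datastax.oss.driver.api.core.CqlSession;
import java.net.InetSocketAddress;

public class EmbeddedCassandraSessionSketch
{
    public static CqlSession connect( )
    {
        // Coordinates of the embedded node started by the cassandra-maven-plugin,
        // as handed down by the failsafe configuration above.
        String host = System.getProperty( "cassandra.host", "127.0.0.1" );
        int port = Integer.getInteger( "cassandra.port", 9042 );

        return CqlSession.builder( )
            .addContactPoint( new InetSocketAddress( host, port ) )
            .withLocalDatacenter( "datacenter1" ) // assumed default datacenter name
            .build( );
    }
}
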
diff --git a/archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/cassandra/cql/load.cql b/archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/cassandra/cql/load.cql
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraArchivaManager.java b/archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraArchivaManager.java
index 41ac95a2441e182915dba90d701ccda698d0927f..bbee4bbf7ed40178a606ffbac6644ed332ff3d07 100644 (file)
@@ -19,8 +19,9 @@ package org.apache.archiva.metadata.repository.cassandra;
  * under the License.
  */
 
-import me.prettyprint.hector.api.Cluster;
-import me.prettyprint.hector.api.Keyspace;
+import com.datastax.oss.driver.api.core.CqlSession;
+import com.datastax.oss.driver.api.core.CqlSessionBuilder;
+import com.datastax.oss.driver.api.core.config.DriverConfigLoader;
 
 /**
  * @author Olivier Lamy
@@ -28,16 +29,14 @@ import me.prettyprint.hector.api.Keyspace;
  */
 public interface CassandraArchivaManager
 {
+    String DEFAULT_PRIMARY_KEY = "key";
+
     void start();
 
     void shutdown();
 
     boolean started();
 
-    Keyspace getKeyspace();
-
-    Cluster getCluster();
-
     String getRepositoryFamilyName();
 
     String getNamespaceFamilyName();
@@ -46,6 +45,8 @@ public interface CassandraArchivaManager
 
     String getProjectVersionMetadataFamilyName();
 
+    String[] getProjectVersionMetadataColumns();
+
     String getArtifactMetadataFamilyName();
 
     String getMetadataFacetFamilyName();
@@ -58,4 +59,12 @@ public interface CassandraArchivaManager
 
     String getChecksumFamilyName();
 
+    DriverConfigLoader getConfigLoader();
+
+    CqlSessionBuilder getSessionBuilder( );
+
+    CqlSession getSession();
+
+    String getKeyspaceName();
+
 }
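
Note (not part of the commit): the interface now exposes a DriverConfigLoader, a CqlSessionBuilder and a shared CqlSession instead of the Hector Keyspace/Cluster. A hedged sketch of how an implementation might wire these together; option values, contact point and keyspace handling are illustrative only and not taken from DefaultCassandraArchivaManager.

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.CqlSessionBuilder;
import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
import com.datastax.oss.driver.api.core.config.DriverConfigLoader;
import java.net.InetSocketAddress;
import java.time.Duration;

public class ArchivaManagerWiringSketch
{
    public static CqlSession openSession( String keyspaceName )
    {
        // Programmatic driver configuration instead of an application.conf file.
        DriverConfigLoader configLoader = DriverConfigLoader.programmaticBuilder( )
            .withDuration( DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds( 10 ) )
            .build( );

        CqlSessionBuilder builder = CqlSession.builder( )
            .withConfigLoader( configLoader )
            .addContactPoint( new InetSocketAddress( "127.0.0.1", 9042 ) )
            .withLocalDatacenter( "datacenter1" );

        // The keyspace itself is expected to exist already (created elsewhere,
        // e.g. via CREATE KEYSPACE IF NOT EXISTS) before the session is bound to it.
        return builder.withKeyspace( keyspaceName ).build( );
    }
}
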
diff --git a/archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraMetadataRepository.java b/archiva-modules/metadata/metadata-store-provider/metadata-store-cassandra/src/main/java/org/apache/archiva/metadata/repository/cassandra/CassandraMetadataRepository.java
index bc8d7da66cb0a052ca904b18b2e946bc2d8f8c6a..09a9f04aa8c96120750bf6f3dd8cf19b879ea557 100644 (file)
@@ -19,22 +19,19 @@ package org.apache.archiva.metadata.repository.cassandra;
  * under the License.
  */
 
-import me.prettyprint.cassandra.serializers.LongSerializer;
-import me.prettyprint.cassandra.serializers.StringSerializer;
-import me.prettyprint.cassandra.service.template.ColumnFamilyResult;
-import me.prettyprint.cassandra.service.template.ColumnFamilyTemplate;
-import me.prettyprint.cassandra.service.template.ColumnFamilyUpdater;
-import me.prettyprint.cassandra.service.template.ThriftColumnFamilyTemplate;
-import me.prettyprint.hector.api.Keyspace;
-import me.prettyprint.hector.api.beans.ColumnSlice;
-import me.prettyprint.hector.api.beans.OrderedRows;
-import me.prettyprint.hector.api.beans.Row;
-import me.prettyprint.hector.api.exceptions.HInvalidRequestException;
-import me.prettyprint.hector.api.factory.HFactory;
-import me.prettyprint.hector.api.mutation.MutationResult;
-import me.prettyprint.hector.api.mutation.Mutator;
-import me.prettyprint.hector.api.query.QueryResult;
-import me.prettyprint.hector.api.query.RangeSlicesQuery;
+
+import com.datastax.oss.driver.api.core.CqlSession;
+import com.datastax.oss.driver.api.core.cql.ColumnDefinition;
+import com.datastax.oss.driver.api.core.cql.ResultSet;
+import com.datastax.oss.driver.api.core.cql.Row;
+import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder;
+import com.datastax.oss.driver.api.querybuilder.delete.Delete;
+import com.datastax.oss.driver.api.querybuilder.insert.Insert;
+import com.datastax.oss.driver.api.querybuilder.insert.RegularInsert;
+import com.datastax.oss.driver.api.querybuilder.select.Select;
+import com.datastax.oss.driver.api.querybuilder.update.Update;
+import com.datastax.oss.driver.api.querybuilder.update.UpdateStart;
+import com.datastax.oss.driver.api.querybuilder.update.UpdateWithAssignments;
 import org.apache.archiva.checksum.ChecksumAlgorithm;
 import org.apache.archiva.metadata.QueryParameter;
 import org.apache.archiva.metadata.model.ArtifactMetadata;
@@ -71,15 +68,14 @@ import org.slf4j.LoggerFactory;
 import java.time.Instant;
 import java.time.ZonedDateTime;
 import java.util.ArrayList;
-import java.util.Collections;
+import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
+import java.util.Properties;
 import java.util.Spliterator;
 import java.util.UUID;
 import java.util.function.BiFunction;
@@ -88,8 +84,9 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.StreamSupport;
 
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.*;
 import static org.apache.archiva.metadata.model.ModelInfo.STORAGE_TZ;
-import static org.apache.archiva.metadata.repository.cassandra.CassandraUtils.*;
+import static org.apache.archiva.metadata.repository.cassandra.CassandraArchivaManager.DEFAULT_PRIMARY_KEY;
 import static org.apache.archiva.metadata.repository.cassandra.model.ColumnNames.*;
 
 /**
@@ -100,84 +97,17 @@ public class CassandraMetadataRepository
     extends AbstractMetadataRepository implements MetadataRepository
 {
 
-    private static final String ARTIFACT_METADATA_MODEL_KEY = "artifactMetadataModel.key";
-    private Logger logger = LoggerFactory.getLogger( getClass() );
+    private static final String ARTIFACT_METADATA_MODEL_KEY = "\"artifactMetadataModel.key\"";
+    private Logger logger = LoggerFactory.getLogger( getClass( ) );
 
     private final CassandraArchivaManager cassandraArchivaManager;
 
-    private final ColumnFamilyTemplate<String, String> projectVersionMetadataTemplate;
-
-    private final ColumnFamilyTemplate<String, String> projectTemplate;
-
-    private final ColumnFamilyTemplate<String, String> artifactMetadataTemplate;
-
-    private final ColumnFamilyTemplate<String, String> metadataFacetTemplate;
-
-    private final ColumnFamilyTemplate<String, String> mailingListTemplate;
-
-    private final ColumnFamilyTemplate<String, String> licenseTemplate;
-
-    private final ColumnFamilyTemplate<String, String> dependencyTemplate;
-
-    private final ColumnFamilyTemplate<String, String> checksumTemplate;
-
-    private final Keyspace keyspace;
-
-    private final StringSerializer ss = StringSerializer.get();
 
     public CassandraMetadataRepository( MetadataService metadataService,
                                         CassandraArchivaManager cassandraArchivaManager )
     {
         super( metadataService );
         this.cassandraArchivaManager = cassandraArchivaManager;
-        this.keyspace = cassandraArchivaManager.getKeyspace();
-
-        this.projectVersionMetadataTemplate =
-            new ThriftColumnFamilyTemplate<>( cassandraArchivaManager.getKeyspace(), //
-                                              cassandraArchivaManager.getProjectVersionMetadataFamilyName(), //
-                                              StringSerializer.get(), //
-                                              StringSerializer.get() );
-
-        this.projectTemplate = new ThriftColumnFamilyTemplate<>( cassandraArchivaManager.getKeyspace(), //
-                                                                 cassandraArchivaManager.getProjectFamilyName(), //
-                                                                 //
-                                                                 StringSerializer.get(), //
-                                                                 StringSerializer.get() );
-
-        this.artifactMetadataTemplate = new ThriftColumnFamilyTemplate<>( cassandraArchivaManager.getKeyspace(), //
-                                                                          cassandraArchivaManager.getArtifactMetadataFamilyName(),
-                                                                          StringSerializer.get(), //
-                                                                          StringSerializer.get() );
-
-        this.metadataFacetTemplate = new ThriftColumnFamilyTemplate<>( cassandraArchivaManager.getKeyspace(), //
-                                                                       cassandraArchivaManager.getMetadataFacetFamilyName(),
-                                                                       //
-                                                                       StringSerializer.get(), //
-                                                                       StringSerializer.get() );
-
-        this.mailingListTemplate = new ThriftColumnFamilyTemplate<>( cassandraArchivaManager.getKeyspace(), //
-                                                                     cassandraArchivaManager.getMailingListFamilyName(),
-                                                                     //
-                                                                     StringSerializer.get(), //
-                                                                     StringSerializer.get() );
-
-        this.licenseTemplate = new ThriftColumnFamilyTemplate<>( cassandraArchivaManager.getKeyspace(), //
-                                                                 cassandraArchivaManager.getLicenseFamilyName(),
-                                                                 //
-                                                                 StringSerializer.get(), //
-                                                                 StringSerializer.get() );
-
-        this.dependencyTemplate = new ThriftColumnFamilyTemplate<>( cassandraArchivaManager.getKeyspace(), //
-                                                                    cassandraArchivaManager.getDependencyFamilyName(),
-                                                                    //
-                                                                    StringSerializer.get(), //
-                                                                    StringSerializer.get() );
-
-        this.checksumTemplate = new ThriftColumnFamilyTemplate<>( cassandraArchivaManager.getKeyspace(), //
-                cassandraArchivaManager.getChecksumFamilyName(),
-                //
-                StringSerializer.get(), //
-                StringSerializer.get() );
     }
 
 
@@ -190,40 +120,26 @@ public class CassandraMetadataRepository
     public Repository getOrCreateRepository( String repositoryId )
         throws MetadataRepositoryException
     {
-        String cf = cassandraArchivaManager.getRepositoryFamilyName();
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, StringSerializer.get(), StringSerializer.get(),
-                                     StringSerializer.get() ) //
-            .setColumnFamily( cf ) //
-            .setColumnNames( REPOSITORY_NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .execute();
+        String cf = cassandraArchivaManager.getRepositoryFamilyName( );
 
-        if ( result.get().getCount() < 1 )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            // we need to create the repository
-            Repository repository = new Repository( repositoryId );
-
-            try
+            Select query = selectFrom( cf ).column( REPOSITORY_NAME.toString( ) ).whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) ).allowFiltering();
+            ResultSet qResult = session.execute( query.build( ) );
+            Row row = qResult.one( );
+            if ( row == null )
             {
-                MutationResult mutationResult = HFactory.createMutator( keyspace, StringSerializer.get() ) //
-                    .addInsertion( repositoryId, cf,
-                                   CassandraUtils.column( REPOSITORY_NAME.toString(), repository.getName() ) ) //
-                    .execute();
-                logger.debug( "time to insert repository: {}", mutationResult.getExecutionTimeMicro() );
+                Repository repository = new Repository( repositoryId );
+                RegularInsert insert = insertInto( cf )
+                    .value( DEFAULT_PRIMARY_KEY, literal( repositoryId ) )
+                    .value( REPOSITORY_NAME.toString( ), literal( repository.getName( ) ) );
+                session.execute( insert.build( ) );
                 return repository;
             }
-            catch ( HInvalidRequestException e )
-            {
-                logger.error( e.getMessage(), e );
-                throw new MetadataRepositoryException( e.getMessage(), e );
-            }
+            return new Repository( row.get( REPOSITORY_NAME.toString( ), String.class ) );
 
         }
 
-        return new Repository(
-            result.get().getList().get( 0 ).getColumnSlice().getColumnByName( REPOSITORY_NAME.toString() ).getValue() );
     }
 
 
@@ -231,14 +147,14 @@ public class CassandraMetadataRepository
         throws MetadataRepositoryException
     {
 
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, StringSerializer.get(), StringSerializer.get(),
-                                     StringSerializer.get() ) //
-            .setColumnFamily( cassandraArchivaManager.getRepositoryFamilyName() ) //
-            .setColumnNames( REPOSITORY_NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .execute();
-        return ( result.get().getCount() > 0 ) ? new Repository( repositoryId ) : null;
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            Select query = selectFrom( cassandraArchivaManager.getRepositoryFamilyName( ) ).column( REPOSITORY_NAME.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .allowFiltering();
+            Row row = session.execute( query.build( ) ).one( );
+            return row != null ? new Repository( repositoryId ) : null;
+        }
     }
 
     @Override
@@ -251,476 +167,391 @@ public class CassandraMetadataRepository
     private Namespace updateOrAddNamespace( String repositoryId, String namespaceId )
         throws MetadataRepositoryException
     {
-        try
-        {
-            Repository repository = getOrCreateRepository( repositoryId );
+        Repository repository = getOrCreateRepository( repositoryId );
 
-            String key =
-                new Namespace.KeyBuilder().withNamespace( namespaceId ).withRepositoryId( repositoryId ).build();
+        String key =
+            new Namespace.KeyBuilder( ).withNamespace( namespaceId ).withRepositoryId( repositoryId ).build( );
 
-            Namespace namespace = getNamespace( repositoryId, namespaceId );
-            if ( namespace == null )
+        Namespace namespace = getNamespace( repositoryId, namespaceId );
+        if ( namespace == null )
+        {
+            String cf = cassandraArchivaManager.getNamespaceFamilyName( );
+            namespace = new Namespace( namespaceId, repository );
+
+            CqlSession session = cassandraArchivaManager.getSession( );
             {
-                String cf = cassandraArchivaManager.getNamespaceFamilyName();
-                namespace = new Namespace( namespaceId, repository );
-                HFactory.createMutator( keyspace, StringSerializer.get() )
-                    //  values
-                    .addInsertion( key, cf, CassandraUtils.column( NAME.toString(), namespace.getName() ) ) //
-                    .addInsertion( key, cf, CassandraUtils.column( REPOSITORY_NAME.toString(), repository.getName() ) ) //
-                    .execute();
+                RegularInsert insert = insertInto( cf )
+                    .value( DEFAULT_PRIMARY_KEY, literal( key ) )
+                    .value( NAME.toString( ), literal( namespace.getName( ) ) )
+                    .value( REPOSITORY_NAME.toString( ), literal( repository.getName( ) ) );
+                session.execute( insert.build( ) );
             }
 
-            return namespace;
-        }
-        catch ( HInvalidRequestException e )
-        {
-            logger.error( e.getMessage(), e );
-            throw new MetadataRepositoryException( e.getMessage(), e );
         }
+
+        return namespace;
     }
 
     protected Namespace getNamespace( String repositoryId, String namespaceId )
     {
 
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
-            .setColumnNames( REPOSITORY_NAME.toString(), NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( NAME.toString(), namespaceId ) //
-            .execute();
-        if ( result.get().getCount() > 0 )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            ColumnSlice<String, String> columnSlice = result.get().getList().get( 0 ).getColumnSlice();
-            return new Namespace( getStringValue( columnSlice, NAME.toString() ), //
-                                  new Repository( getStringValue( columnSlice, REPOSITORY_NAME.toString() ) ) );
-
+            String table = cassandraArchivaManager.getNamespaceFamilyName( );
+            String key =
+                new Namespace.KeyBuilder( ).withNamespace( namespaceId ).withRepositoryId( repositoryId ).build( );
+            Select query = selectFrom( table )
+                .columns( REPOSITORY_NAME.toString( ), NAME.toString( ) )
+                .whereColumn(  DEFAULT_PRIMARY_KEY ).isEqualTo(  literal( key ) );
+            Row row = session.execute( query.build( ) ).one( );
+            if ( row != null )
+            {
+                return new Namespace( row.get( NAME.toString( ), String.class ),
+                    new Repository( row.get( REPOSITORY_NAME.toString( ), String.class ) ) );
+            }
+            return null;
         }
-        return null;
     }
 
 
     @Override
-    public void removeNamespace( RepositorySession session, String repositoryId, String namespaceId )
+    public void removeNamespace( RepositorySession repositorySession, String repositoryId, String namespaceId )
         throws MetadataRepositoryException
     {
 
-        try
-        {
-            String key = new Namespace.KeyBuilder() //
-                .withNamespace( namespaceId ) //
-                .withRepositoryId( repositoryId ) //
-                .build();
-
-            HFactory.createMutator( cassandraArchivaManager.getKeyspace(), new StringSerializer() ) //
-                .addDeletion( key, cassandraArchivaManager.getNamespaceFamilyName() ) //
-                .execute();
-
-            QueryResult<OrderedRows<String, String, String>> result = HFactory //
-                .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getProjectFamilyName() ) //
-                .setColumnNames( REPOSITORY_NAME.toString() ) //
-                .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-                .addEqualsExpression( NAMESPACE_ID.toString(), namespaceId ) //
-                .execute();
-
-            for ( Row<String, String, String> row : result.get() )
-            {
-                this.projectTemplate.deleteRow( row.getKey() );
-            }
-
-            result = HFactory //
-                .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
-                .setColumnNames( REPOSITORY_NAME.toString() ) //
-                .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-                .addEqualsExpression( NAMESPACE_ID.toString(), namespaceId ) //
-                .execute();
-
-            for ( Row<String, String, String> row : result.get() )
-            {
-                this.projectVersionMetadataTemplate.deleteRow( row.getKey() );
-                removeMailingList( row.getKey() );
-            }
+        String key = new Namespace.KeyBuilder( ) //
+            .withNamespace( namespaceId ) //
+            .withRepositoryId( repositoryId ) //
+            .build( );
 
-            result = HFactory //
-                .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
-                .setColumnNames( REPOSITORY_NAME.toString() ) //
-                .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-                .addEqualsExpression( NAMESPACE_ID.toString(), namespaceId ) //
-                .execute();
 
-            for ( Row<String, String, String> row : result.get() )
-            {
-                this.artifactMetadataTemplate.deleteRow( row.getKey() );
-            }
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            String pTable = cassandraArchivaManager.getNamespaceFamilyName( );
+            Delete delete = deleteFrom( pTable ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+            session.execute( delete.build( ) );
 
-            result = HFactory //
-                .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
-                .setColumnNames( REPOSITORY_NAME.toString() ) //
-                .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-                .addEqualsExpression( NAMESPACE_ID.toString(), namespaceId ) //
-                .execute();
+            List<String> tables = Arrays.asList(
+                cassandraArchivaManager.getProjectFamilyName( ),
+                cassandraArchivaManager.getProjectVersionMetadataFamilyName( ),
+                cassandraArchivaManager.getArtifactMetadataFamilyName( ),
+                cassandraArchivaManager.getMetadataFacetFamilyName( ) );
 
-            for ( Row<String, String, String> row : result.get() )
+            for ( String table : tables )
             {
-                this.metadataFacetTemplate.deleteRow( row.getKey() );
+                Select deleteRows = selectFrom( table )
+                    .column( DEFAULT_PRIMARY_KEY )
+                    .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                    .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespaceId ) )
+                    .allowFiltering();
+                ResultSet result = session.execute( deleteRows.build( ) );
+                StreamSupport.stream( result.spliterator( ), false ).map( row -> row.getString( DEFAULT_PRIMARY_KEY ) )
+                    .distinct( ).forEach( delKey ->
+                        session.execute( deleteFrom( table ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( delKey ) ).build( ) ) );
             }
 
         }
-        catch ( HInvalidRequestException e )
-        {
-            logger.error( e.getMessage(), e );
-            throw new MetadataRepositoryException( e.getMessage(), e );
-        }
     }
 
 
     @Override
-    public void removeRepository( RepositorySession session, final String repositoryId )
+    public void removeRepository( RepositorySession repositorySession, final String repositoryId )
         throws MetadataRepositoryException
     {
 
-        // TODO use cql queries to delete all
-        List<String> namespacesKey = new ArrayList<>();
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
-            .setColumnNames( REPOSITORY_NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .execute();
-
-        for ( Row<String, String, String> row : result.get().getList() )
-        {
-            namespacesKey.add( row.getKey() );
-        }
-
-        HFactory.createMutator( cassandraArchivaManager.getKeyspace(), ss ) //
-            .addDeletion( namespacesKey, cassandraArchivaManager.getNamespaceFamilyName() ) //
-            .execute();
-
-        //delete repositoryId
-        HFactory.createMutator( cassandraArchivaManager.getKeyspace(), ss ) //
-            .addDeletion( repositoryId, cassandraArchivaManager.getRepositoryFamilyName() ) //
-            .execute();
-
-        result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectFamilyName() ) //
-            .setColumnNames( REPOSITORY_NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .execute();
-
-        for ( Row<String, String, String> row : result.get() )
-        {
-            this.projectTemplate.deleteRow( row.getKey() );
-        }
-
-        result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
-            .setColumnNames( REPOSITORY_NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .execute();
-
-        for ( Row<String, String, String> row : result.get() )
-        {
-            this.projectVersionMetadataTemplate.deleteRow( row.getKey() );
-            removeMailingList( row.getKey() );
-        }
-
-        result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
-            .setColumnNames( REPOSITORY_NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .execute();
-
-        for ( Row<String, String, String> row : result.get() )
-        {
-            this.artifactMetadataTemplate.deleteRow( row.getKey() );
-        }
-
-        result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
-            .setColumnNames( REPOSITORY_NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .execute();
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            final String table = cassandraArchivaManager.getNamespaceFamilyName( );
+            Select deleteRows = selectFrom( table )
+                .column( DEFAULT_PRIMARY_KEY )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) );
+            ResultSet result = session.execute( deleteRows.build( ) );
+            StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( DEFAULT_PRIMARY_KEY ) )
+                .distinct( )
+                .forEach(
+                    delKey ->
+                        session.execute( deleteFrom( table ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( delKey ) ).build( ) )
+                );
+
+            String deleteTable = cassandraArchivaManager.getRepositoryFamilyName( );
+            Delete delete = deleteFrom( deleteTable ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( repositoryId ) );
+            session.execute( delete.build( ) );
+
+            List<String> tables = Arrays.asList(
+                cassandraArchivaManager.getProjectFamilyName( ),
+                cassandraArchivaManager.getProjectVersionMetadataFamilyName( ),
+                cassandraArchivaManager.getArtifactMetadataFamilyName( ),
+                cassandraArchivaManager.getMetadataFacetFamilyName( )
+            );
+
+            for ( String dTable : tables )
+            {
+                deleteRows = selectFrom( dTable )
+                    .column( DEFAULT_PRIMARY_KEY )
+                    .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                    .allowFiltering();
+                result = session.execute( deleteRows.build( ) );
+                StreamSupport.stream( result.spliterator(), false )
+                    .map(row -> row.getString( DEFAULT_PRIMARY_KEY ))
+                    .distinct()
+                    .forEach( delKey ->
+                        session.execute( deleteFrom( dTable ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal(delKey) ).build(  ) ));
+            }
 
-        for ( Row<String, String, String> row : result.get() )
-        {
-            this.metadataFacetTemplate.deleteRow( row.getKey() );
         }
 
-
     }
 
-    // FIXME this one need peformance improvement maybe a cache?
     @Override
-    public List<String> getRootNamespaces( RepositorySession session, final String repoId )
+    public List<String> getRootNamespaces( RepositorySession repositorySession, final String repoId )
         throws MetadataResolutionException
     {
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
-            .setColumnNames( NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .execute();
-
-        Set<String> namespaces = new HashSet<>( result.get( ).getCount( ) );
-
-        for ( Row<String, String, String> row : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            namespaces.add( StringUtils.substringBefore( getStringValue( row.getColumnSlice(), NAME.toString() ), "." ) );
+            String table = cassandraArchivaManager.getNamespaceFamilyName( );
+            Select query = selectFrom( table ).column( NAME.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repoId ) );
+            return StreamSupport.stream( session.execute( query.build( ) ).spliterator( ), false )
+                .map( row ->
+                    StringUtils.substringBefore( row.get( NAME.toString( ), String.class ), "." ) )
+                .distinct( )
+                .collect( Collectors.toList( ) );
         }
-
-        return new ArrayList<>( namespaces );
     }
 
     // FIXME this one need peformance improvement maybe a cache?
     @Override
-    public List<String> getChildNamespaces( RepositorySession session, final String repoId, final String namespaceId )
+    public List<String> getChildNamespaces( RepositorySession repositorySession, final String repoId, final String namespaceId )
         throws MetadataResolutionException
     {
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
-            .setColumnNames( NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .execute();
-
-        List<String> namespaces = new ArrayList<>( result.get().getCount() );
-
-        for ( Row<String, String, String> row : result.get() )
+        final String calledNs = namespaceId.endsWith( "." ) ? namespaceId : namespaceId + ".";
+        final int nslen = calledNs.length( );
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            String currentNamespace = getStringValue( row.getColumnSlice(), NAME.toString() );
-            if ( StringUtils.startsWith( currentNamespace, namespaceId ) //
-                && ( StringUtils.length( currentNamespace ) > StringUtils.length( namespaceId ) ) )
-            {
-                // store after namespaceId '.' but before next '.'
-                // call org namespace org.apache.maven.shared -> stored apache
-
-                String calledNamespace = StringUtils.endsWith( namespaceId, "." ) ? namespaceId : namespaceId + ".";
-                String storedNamespace = StringUtils.substringAfter( currentNamespace, calledNamespace );
-
-                storedNamespace = StringUtils.substringBefore( storedNamespace, "." );
-
-                namespaces.add( storedNamespace );
-            }
+            String table = cassandraArchivaManager.getNamespaceFamilyName( );
+            Select query = selectFrom( table ).column( NAME.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repoId ) );
+            return StreamSupport.stream( session.execute( query.build( ) ).spliterator( ), false )
+                .map( row -> row.get( NAME.toString( ), String.class ) )
+                .filter( namespace -> namespace.length( ) > nslen && namespace.startsWith( calledNs ) )
+                .map( namespace -> StringUtils.substringBefore( StringUtils.substringAfter( namespace, calledNs ), "." ) )
+                .distinct( )
+                .collect( Collectors.toList( ) );
         }
-
-        return namespaces;
-
     }
 
     // only use for testing purpose
     protected List<String> getNamespaces( final String repoId )
         throws MetadataResolutionException
     {
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
-            .setColumnNames( NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .execute();
-
-        List<String> namespaces = new ArrayList<>( result.get().getCount() );
-
-        for ( Row<String, String, String> row : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            namespaces.add( getStringValue( row.getColumnSlice(), NAME.toString() ) );
+            String table = cassandraArchivaManager.getNamespaceFamilyName( );
+            Select query = selectFrom( table ).column( NAME.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repoId ) );
+            return StreamSupport.stream( session.execute( query.build( ) ).spliterator( ), false )
+                .map( row ->
+                    row.get( NAME.toString( ), String.class ) )
+                .distinct( )
+                .collect( Collectors.toList( ) );
         }
-
-        return namespaces;
     }
 
 
     @Override
-    public void updateProject( RepositorySession session, String repositoryId, ProjectMetadata projectMetadata )
+    public void updateProject( RepositorySession repositorySession, String repositoryId, ProjectMetadata projectMetadata )
         throws MetadataRepositoryException
     {
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectFamilyName() ) //
-            .setColumnNames( PROJECT_ID.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), projectMetadata.getNamespace() ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectMetadata.getId() ) //
-            .execute();
-
-        // project exists ? if yes return nothing to update here
-        if ( result.get( ).getCount( ) <= 0 )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            Namespace namespace = updateOrAddNamespace( repositoryId, projectMetadata.getNamespace() );
+            String table = cassandraArchivaManager.getProjectFamilyName( );
+            Select query = selectFrom( table ).column( PROJECT_ID.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( projectMetadata.getNamespace( ) ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectMetadata.getId( ) ) ).allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            if ( result.one( ) == null )
+            {
+                Namespace namespace = updateOrAddNamespace( repositoryId, projectMetadata.getNamespace( ) );
+                String key =
+                    new Project.KeyBuilder( ).withProjectId( projectMetadata.getId( ) ).withNamespace( namespace ).build( );
+                RegularInsert insert = insertInto( table )
+                    .value( DEFAULT_PRIMARY_KEY, literal( key ) )
+                    .value( PROJECT_ID.toString( ), literal( projectMetadata.getId( ) ) )
+                    .value( REPOSITORY_NAME.toString( ), literal( repositoryId ) )
+                    .value( NAMESPACE_ID.toString( ), literal( projectMetadata.getNamespace( ) ) );
+                session.execute( insert.build( ) );
+            }
+            if ( projectMetadata.hasProperties( ) )
+            {
+                UpdateStart update = update( table );
+                UpdateWithAssignments newUpdat = null;
+                final Properties props = projectMetadata.getProperties( );
+                for ( String propKey : props.stringPropertyNames( ) )
+                {
+                    newUpdat = update.setMapValue( PROJECT_PROPERTIES.toString( ), literal( propKey ), literal( props.getProperty( propKey, "" ) ) );
+                }
+                Update finalUpdate = newUpdat
+                    .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                    .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( projectMetadata.getNamespace( ) ) )
+                    .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectMetadata.getId( ) ) );
+                session.execute( finalUpdate.build( ) );
+            }
+
 
-            String key =
-                new Project.KeyBuilder().withProjectId( projectMetadata.getId() ).withNamespace( namespace ).build();
-
-            String cf = cassandraArchivaManager.getProjectFamilyName();
-            projectTemplate.createMutator()
-                //  values
-                .addInsertion( key, cf, CassandraUtils.column( PROJECT_ID.toString(), projectMetadata.getId() ) ) //
-                .addInsertion( key, cf, CassandraUtils.column( REPOSITORY_NAME.toString(), repositoryId ) ) //
-                .addInsertion( key, cf, CassandraUtils.column( NAMESPACE_ID.toString(), projectMetadata.getNamespace() ) )//
-                .execute();
         }
+
     }
 
     @Override
-    public List<String> getProjects( RepositorySession session, final String repoId, final String namespace )
+    public List<String> getProjects( RepositorySession repositorySession, final String repoId, final String namespace )
         throws MetadataResolutionException
     {
 
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectFamilyName() ) //
-            .setColumnNames( PROJECT_ID.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-            .execute();
-
-        final Set<String> projects = new HashSet<>( result.get( ).getCount( ) );
-
-        for ( Row<String, String, String> row : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            projects.add( getStringValue( row.getColumnSlice(), PROJECT_ID.toString() ) );
+            String table = cassandraArchivaManager.getProjectFamilyName( );
+            Select query = selectFrom( table ).column( PROJECT_ID.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repoId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespace ) )
+                .allowFiltering();
+            return StreamSupport.stream( session.execute( query.build( ) ).spliterator( ), false )
+                .map( row ->
+                    row.get( PROJECT_ID.toString( ), String.class ) )
+                .distinct( )
+                .collect( Collectors.toList( ) );
         }
 
-        return new ArrayList<>( projects );
     }
 
     @Override
-    public void removeProject( RepositorySession session, final String repositoryId, final String namespaceId, final String projectId )
+    public void removeProject( RepositorySession repositorySession, final String repositoryId, final String namespaceId, final String projectId )
         throws MetadataRepositoryException
     {
 
-        String key = new Project.KeyBuilder() //
+        String key = new Project.KeyBuilder( ) //
             .withProjectId( projectId ) //
             .withNamespace( new Namespace( namespaceId, new Repository( repositoryId ) ) ) //
-            .build();
+            .build( );
 
-        this.projectTemplate.deleteRow( key );
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            String table = cassandraArchivaManager.getProjectFamilyName( );
+            Delete delete = deleteFrom( table ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+            session.execute( delete.build( ) );
 
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
-            .setColumnNames( ID.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespaceId ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
-            .execute();
+            table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+            Select query = selectFrom( table ).columns( DEFAULT_PRIMARY_KEY, PROJECT_ID.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespaceId ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .allowFiltering();
 
-        for ( Row<String, String, String> row : result.get() )
-        {
-            this.projectVersionMetadataTemplate.deleteRow( row.getKey() );
-            removeMailingList( row.getKey() );
-        }
+            ResultSet result = session.execute( query.build( ) );
+            result.forEach( row -> removeMailingList( row.get( DEFAULT_PRIMARY_KEY, String.class ) ) );
 
-        result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
-            .setColumnNames( PROJECT_ID.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespaceId ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
-            .execute();
 
-        for ( Row<String, String, String> row : result.get() )
-        {
-            this.artifactMetadataTemplate.deleteRow( row.getKey() );
+            List<String> tables = Arrays.asList(
+                cassandraArchivaManager.getProjectVersionMetadataFamilyName( ),
+                cassandraArchivaManager.getArtifactMetadataFamilyName( )
+            );
+
+            for ( String dTable : tables )
+            {
+                Select deleteRows = selectFrom( dTable ).column( DEFAULT_PRIMARY_KEY )
+                    .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                    .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespaceId ) )
+                    .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                    .allowFiltering();
+                result = session.execute( deleteRows.build( ) );
+                StreamSupport.stream( result.spliterator( ), false )
+                    .map( row -> row.getString( DEFAULT_PRIMARY_KEY ) )
+                    .forEach( delKey -> session.execute( deleteFrom( dTable ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( delKey ) ).build( ) ) );
+            }
         }
+
     }
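Because a CQL DELETE can only address rows by their primary key, the old delete-row-by-secondary-criteria calls turn into a select of the matching keys followed by one DELETE per key. A small sketch of that two-step pattern (table and column names, including "key" for the primary key, are placeholders):

    import com.datastax.oss.driver.api.core.CqlSession;
    import java.util.stream.StreamSupport;
    import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.deleteFrom;
    import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;
    import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom;

    public class DeleteByCriteriaSketch
    {
        // Deletes every row of "table" whose "column" equals "value".
        public void deleteWhere( CqlSession session, String table, String column, String value )
        {
            StreamSupport.stream(
                    session.execute( selectFrom( table ).column( "key" )
                        .whereColumn( column ).isEqualTo( literal( value ) )
                        .allowFiltering()
                        .build() ).spliterator(), false )
                .map( row -> row.getString( "key" ) )
                .distinct()
                .forEach( rowKey -> session.execute(
                    deleteFrom( table ).whereColumn( "key" ).isEqualTo( literal( rowKey ) ).build() ) );
        }
    }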
 
     @Override
-    public List<String> getProjectVersions( RepositorySession session, final String repoId, final String namespace, final String projectId )
+    public List<String> getProjectVersions( RepositorySession repositorySession, final String repositoryId, final String namespaceId, final String projectId )
         throws MetadataResolutionException
     {
 
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
-            .setColumnNames( PROJECT_VERSION.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
-            .execute();
-
-        int count = result.get().getCount();
-
-        if ( count < 1 )
-        {
-            return Collections.emptyList();
-        }
-
-        Set<String> versions = new HashSet<>( count );
-
-        for ( Row<String, String, String> orderedRows : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            versions.add( getStringValue( orderedRows.getColumnSlice(), PROJECT_VERSION.toString() ) );
+            String table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+            Select query = selectFrom( table ).column( PROJECT_ID.toString( ) )
+                .column( PROJECT_VERSION.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespaceId ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            return StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.get( PROJECT_VERSION.toString( ), String.class ) )
+                .distinct( )
+                .collect( Collectors.toList( ) );
         }
-
-        return new ArrayList<>( versions );
-
     }
 
     @Override
-    public ProjectMetadata getProject( RepositorySession session, final String repoId, final String namespace, final String id )
+    public ProjectMetadata getProject( RepositorySession repositorySession, final String repositoryId, final String namespaceId, final String id )
         throws MetadataResolutionException
     {
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectFamilyName() ) //
-            .setColumnNames( PROJECT_ID.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-            .addEqualsExpression( PROJECT_ID.toString(), id ) //
-            .execute();
-
-        int count = result.get().getCount();
-
-        if ( count < 1 )
-        {
-            return null;
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            String table = cassandraArchivaManager.getProjectFamilyName( );
+            Select query = selectFrom( table ).column( PROJECT_ID.toString( ) )
+                .column( PROJECT_PROPERTIES.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespaceId ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( id ) ).allowFiltering();
+            Row result = session.execute( query.build( ) ).one( );
+            if ( result == null )
+            {
+                return null;
+            }
+            else
+            {
+                ProjectMetadata projectMetadata = new ProjectMetadata( );
+                projectMetadata.setId( id );
+                projectMetadata.setNamespace( namespaceId );
+                Map<String, String> props = result.getMap( PROJECT_PROPERTIES.toString( ), String.class, String.class );
+                Properties pProps = new Properties( );
+                if ( props != null )
+                {
+                    pProps.putAll( props );
+                }
+                projectMetadata.setProperties( pProps );
+                return projectMetadata;
+            }
         }
-
-        ProjectMetadata projectMetadata = new ProjectMetadata();
-        projectMetadata.setId( id );
-        projectMetadata.setNamespace( namespace );
-
-        logger.debug( "getProject repoId: {}, namespace: {}, projectId: {} -> {}", repoId, namespace, id,
-                      projectMetadata );
-
-        return projectMetadata;
     }
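Column values come back through the driver's typed getters, so the string parsing of the Hector version (for example Boolean.parseBoolean on a text column) is replaced by direct boolean, map and list reads. A sketch of reading a map<text, text> column into java.util.Properties, assuming a hypothetical projectproperties column:

    import com.datastax.oss.driver.api.core.cql.Row;
    import java.util.Map;
    import java.util.Properties;

    public class RowMappingSketch
    {
        // "projectproperties" is a placeholder for a map<text, text> column.
        static Properties readProperties( Row row )
        {
            Properties props = new Properties();
            Map<String, String> stored = row.getMap( "projectproperties", String.class, String.class );
            if ( stored != null )
            {
                props.putAll( stored );
            }
            return props;
        }
    }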
 
-    protected ProjectVersionMetadataModel mapProjectVersionMetadataModel( ColumnSlice<String, String> columnSlice )
+    protected ProjectVersionMetadataModel mapProjectVersionMetadataModel( Row row )
     {
-        ProjectVersionMetadataModel projectVersionMetadataModel = new ProjectVersionMetadataModel();
-        projectVersionMetadataModel.setId( getStringValue( columnSlice, ID.toString() ) );
-        projectVersionMetadataModel.setDescription( getStringValue( columnSlice, DESCRIPTION.toString() ) );
-        projectVersionMetadataModel.setName( getStringValue( columnSlice, NAME.toString() ) );
-        Namespace namespace = new Namespace( getStringValue( columnSlice, NAMESPACE_ID.toString() ), //
-                                             new Repository( getStringValue( columnSlice, REPOSITORY_NAME.toString() ) ) );
+        ProjectVersionMetadataModel projectVersionMetadataModel = new ProjectVersionMetadataModel( );
+        projectVersionMetadataModel.setId( row.get( VERSION.toString( ), String.class ) );
+        projectVersionMetadataModel.setDescription( row.get( DESCRIPTION.toString( ), String.class ) );
+        projectVersionMetadataModel.setName( row.get( NAME.toString( ), String.class ) );
+        Namespace namespace = new Namespace( row.get( NAMESPACE_ID.toString( ), String.class ), //
+            new Repository( row.get( REPOSITORY_NAME.toString( ), String.class ) ) );
         projectVersionMetadataModel.setNamespace( namespace );
-        projectVersionMetadataModel.setIncomplete(
-            Boolean.parseBoolean( getStringValue( columnSlice, "incomplete" ) ) );
-        projectVersionMetadataModel.setProjectId( getStringValue( columnSlice, PROJECT_ID.toString() ) );
-        projectVersionMetadataModel.setUrl( getStringValue( columnSlice, URL.toString() ) );
+        projectVersionMetadataModel.setIncomplete( row.getBoolean( "incomplete" ) );
+        projectVersionMetadataModel.setProjectId( row.get( PROJECT_ID.toString( ), String.class ) );
+        projectVersionMetadataModel.setUrl( row.get( URL.toString( ), String.class ) );
         return projectVersionMetadataModel;
     }
 
+    protected UpdateWithAssignments addUpdate( UpdateWithAssignments update, String column, Object value )
+    {
+        return update.setColumn( column, literal( value ) );
+    }
 
     @Override
-    public void updateProjectVersion( RepositorySession session, String repositoryId, String namespaceId, String projectId,
+    public void updateProjectVersion( RepositorySession repositorySession, String repositoryId, String namespaceId, String projectId,
                                       ProjectVersionMetadata versionMetadata )
         throws MetadataRepositoryException
     {
@@ -733,546 +564,464 @@ public class CassandraMetadataRepository
                 updateOrAddNamespace( repositoryId, namespaceId );
             }
 
-            if ( getProject( session, repositoryId, namespaceId, projectId ) == null )
+            if ( getProject( repositorySession, repositoryId, namespaceId, projectId ) == null )
             {
-                ProjectMetadata projectMetadata = new ProjectMetadata();
+                ProjectMetadata projectMetadata = new ProjectMetadata( );
                 projectMetadata.setNamespace( namespaceId );
                 projectMetadata.setId( projectId );
-                updateProject( session, repositoryId, projectMetadata );
+                updateProject( repositorySession, repositoryId, projectMetadata );
             }
 
         }
         catch ( MetadataResolutionException e )
         {
-            throw new MetadataRepositoryException( e.getMessage(), e );
+            throw new MetadataRepositoryException( e.getMessage( ), e );
         }
 
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
-            .setColumnNames( PROJECT_VERSION.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespaceId ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
-            .addEqualsExpression( PROJECT_VERSION.toString(), versionMetadata.getId() ) //
-            .execute();
-
-        ProjectVersionMetadataModel projectVersionMetadataModel;
-        boolean creation = true;
-        if ( result.get().getCount() > 0 )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            projectVersionMetadataModel =
-                mapProjectVersionMetadataModel( result.get().getList().get( 0 ).getColumnSlice() );
-            creation = false;
-        }
-        else
-        {
-            projectVersionMetadataModel = getModelMapper().map( versionMetadata, ProjectVersionMetadataModel.class );
-        }
-
-        projectVersionMetadataModel.setProjectId( projectId );
-        projectVersionMetadataModel.setNamespace( new Namespace( namespaceId, new Repository( repositoryId ) ) );
-
-        projectVersionMetadataModel.setCiManagement( versionMetadata.getCiManagement() );
-        projectVersionMetadataModel.setIssueManagement( versionMetadata.getIssueManagement() );
-        projectVersionMetadataModel.setOrganization( versionMetadata.getOrganization() );
-        projectVersionMetadataModel.setScm( versionMetadata.getScm() );
-
-        projectVersionMetadataModel.setMailingLists( versionMetadata.getMailingLists() );
-        projectVersionMetadataModel.setDependencies( versionMetadata.getDependencies() );
-        projectVersionMetadataModel.setLicenses( versionMetadata.getLicenses() );
-
-        // we don't test of repository and namespace really exist !
-        String key = new ProjectVersionMetadataModel.KeyBuilder() //
-            .withRepository( repositoryId ) //
-            .withNamespace( namespaceId ) //
-            .withProjectId( projectId ) //
-            .withProjectVersion( versionMetadata.getVersion() ) //
-            .withId( versionMetadata.getId() ) //
-            .build();
+            String table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+            Select query = selectFrom( table )
+                .all( )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespaceId ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .whereColumn( PROJECT_VERSION.toString( ) ).isEqualTo( literal( versionMetadata.getId( ) ) ).allowFiltering();
+            ProjectVersionMetadataModel projectVersionMetadataModel;
+            boolean create = true;
+            Row result = session.execute( query.build( ) ).one( );
+            if ( result != null )
+            {
+                projectVersionMetadataModel = mapProjectVersionMetadataModel( result );
+                create = false;
+            }
+            else
+            {
+                projectVersionMetadataModel = getModelMapper( ).map( versionMetadata, ProjectVersionMetadataModel.class );
+            }
+            projectVersionMetadataModel.setProjectId( projectId );
+            projectVersionMetadataModel.setNamespace( new Namespace( namespaceId, new Repository( repositoryId ) ) );
 
-        // FIXME nested objects to store!!!
-        if ( creation )
-        {
-            String cf = cassandraArchivaManager.getProjectVersionMetadataFamilyName();
-            Mutator<String> mutator = projectVersionMetadataTemplate.createMutator()
-                //  values
-                .addInsertion( key, cf, column( PROJECT_ID.toString(), projectId ) ) //
-                .addInsertion( key, cf, column( REPOSITORY_NAME.toString(), repositoryId ) ) //
-                .addInsertion( key, cf, column( NAMESPACE_ID.toString(), namespaceId ) )//
-                .addInsertion( key, cf, column( PROJECT_VERSION.toString(), versionMetadata.getVersion() ) ); //
+            projectVersionMetadataModel.setCiManagement( versionMetadata.getCiManagement( ) );
+            projectVersionMetadataModel.setIssueManagement( versionMetadata.getIssueManagement( ) );
+            projectVersionMetadataModel.setOrganization( versionMetadata.getOrganization( ) );
+            projectVersionMetadataModel.setScm( versionMetadata.getScm( ) );
 
-            addInsertion( mutator, key, cf, DESCRIPTION.toString(), versionMetadata.getDescription() );
+            projectVersionMetadataModel.setMailingLists( versionMetadata.getMailingLists( ) );
+            projectVersionMetadataModel.setDependencies( versionMetadata.getDependencies( ) );
+            projectVersionMetadataModel.setLicenses( versionMetadata.getLicenses( ) );
 
-            addInsertion( mutator, key, cf, NAME.toString(), versionMetadata.getName() );
+            // we don't verify that the repository and namespace actually exist
+            String key = new ProjectVersionMetadataModel.KeyBuilder( ) //
+                .withRepository( repositoryId ) //
+                .withNamespace( namespaceId ) //
+                .withProjectId( projectId ) //
+                .withProjectVersion( versionMetadata.getVersion( ) ) //
+                .withId( versionMetadata.getId( ) ) //
+                .build( );
+
+            // UPDATE in CQL is an upsert, so the same statement covers both creation and update
+            table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+            UpdateWithAssignments update = update( table )
+                .setColumn( PROJECT_ID.toString( ), literal( projectId ) )
+                .setColumn( REPOSITORY_NAME.toString( ), literal( repositoryId ) )
+                .setColumn( NAMESPACE_ID.toString( ), literal( namespaceId ) )
+                .setColumn( PROJECT_VERSION.toString( ), literal( versionMetadata.getVersion( ) ) )
+                .setColumn( DESCRIPTION.toString( ), literal( versionMetadata.getDescription( ) ) )
+                .setColumn( NAME.toString( ), literal( versionMetadata.getName( ) ) )
+                .setColumn( "incomplete", literal( versionMetadata.isIncomplete( ) ) )
+                .setColumn( URL.toString( ), literal( versionMetadata.getUrl( ) ) );
 
-            addInsertion( mutator, key, cf, "incomplete", Boolean.toString( versionMetadata.isIncomplete() ) );
 
-            addInsertion( mutator, key, cf, URL.toString(), versionMetadata.getUrl() );
             {
-                CiManagement ci = versionMetadata.getCiManagement();
+                CiManagement ci = versionMetadata.getCiManagement( );
                 if ( ci != null )
                 {
-                    addInsertion( mutator, key, cf, "ciManagement.system", ci.getSystem() );
-                    addInsertion( mutator, key, cf, "ciManagement.url", ci.getUrl() );
+                    update = update.setColumn( "\"ciManagement.system\"", literal( ci.getSystem( ) ) )
+                        .setColumn( "\"ciManagement.url\"", literal( ci.getUrl( ) ) );
                 }
             }
 
             {
-                IssueManagement issueManagement = versionMetadata.getIssueManagement();
+                IssueManagement issueManagement = versionMetadata.getIssueManagement( );
 
                 if ( issueManagement != null )
                 {
-                    addInsertion( mutator, key, cf, "issueManagement.system", issueManagement.getSystem() );
-                    addInsertion( mutator, key, cf, "issueManagement.url", issueManagement.getUrl() );
+                    update = update.setColumn( "\"issueManagement.system\"", literal( issueManagement.getSystem( ) ) )
+                        .setColumn( "\"issueManagement.url\"", literal( issueManagement.getUrl( ) ) );
                 }
             }
 
             {
-                Organization organization = versionMetadata.getOrganization();
+                Organization organization = versionMetadata.getOrganization( );
                 if ( organization != null )
                 {
-                    addInsertion( mutator, key, cf, "organization.name", organization.getName() );
-                    addInsertion( mutator, key, cf, "organization.url", organization.getUrl() );
+                    update = update.setColumn( "\"organization.name\"", literal( organization.getName( ) ) )
+                        .setColumn( "\"organization.url\"", literal( organization.getUrl( ) ) );
                 }
             }
 
             {
-                Scm scm = versionMetadata.getScm();
+                Scm scm = versionMetadata.getScm( );
                 if ( scm != null )
                 {
-                    addInsertion( mutator, key, cf, "scm.url", scm.getUrl() );
-                    addInsertion( mutator, key, cf, "scm.connection", scm.getConnection() );
-                    addInsertion( mutator, key, cf, "scm.developerConnection", scm.getDeveloperConnection() );
+                    update = update.setColumn( "\"scm.url\"", literal( scm.getUrl( ) ) )
+                        .setColumn( "\"scm.connection\"", literal( scm.getConnection( ) ) )
+                        .setColumn( "\"scm.developerConnection\"", literal( scm.getDeveloperConnection( ) ) );
+                }
+            }
+            if ( versionMetadata.getProperties( ) != null && !versionMetadata.getProperties( ).isEmpty( ) )
+            {
+                for ( Map.Entry<String, String> entry : versionMetadata.getProperties( ).entrySet( ) )
+                {
+                    update = update.setMapValue( VERSION_PROPERTIES.toString( ), literal( entry.getKey( ) ), literal( entry.getValue( ) ) );
                 }
             }
 
-            recordMailingList( key, versionMetadata.getMailingLists() );
-
-            recordLicenses( key, versionMetadata.getLicenses() );
-
-            recordDependencies( key, versionMetadata.getDependencies(), repositoryId );
-
-            mutator.execute();
-        }
-        else
-        {
-            ColumnFamilyUpdater<String, String> updater = projectVersionMetadataTemplate.createUpdater( key );
-            addUpdateStringValue( updater, PROJECT_ID.toString(), projectId );
-            addUpdateStringValue( updater, REPOSITORY_NAME.toString(), repositoryId );
-            addUpdateStringValue( updater, NAMESPACE_ID.toString(), namespaceId );
-            addUpdateStringValue( updater, PROJECT_VERSION.toString(), versionMetadata.getVersion() );
-            addUpdateStringValue( updater, DESCRIPTION.toString(), versionMetadata.getDescription() );
-
-            addUpdateStringValue( updater, NAME.toString(), versionMetadata.getName() );
+            Update finalUpdate = update.whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+            session.execute( finalUpdate.build( ) );
 
-            updater.setString( "incomplete", Boolean.toString( versionMetadata.isIncomplete() ) );
-            addUpdateStringValue( updater, URL.toString(), versionMetadata.getUrl() );
 
+            if ( !create )
             {
-                CiManagement ci = versionMetadata.getCiManagement();
-                if ( ci != null )
-                {
-                    addUpdateStringValue( updater, "ciManagement.system", ci.getSystem() );
-                    addUpdateStringValue( updater, "ciManagement.url", ci.getUrl() );
-                }
-            }
-            {
-                IssueManagement issueManagement = versionMetadata.getIssueManagement();
-                if ( issueManagement != null )
-                {
-                    addUpdateStringValue( updater, "issueManagement.system", issueManagement.getSystem() );
-                    addUpdateStringValue( updater, "issueManagement.url", issueManagement.getUrl() );
-                }
-            }
-            {
-                Organization organization = versionMetadata.getOrganization();
-                if ( organization != null )
-                {
-                    addUpdateStringValue( updater, "organization.name", organization.getName() );
-                    addUpdateStringValue( updater, "organization.url", organization.getUrl() );
-                }
-            }
-            {
-                Scm scm = versionMetadata.getScm();
-                if ( scm != null )
-                {
-                    addUpdateStringValue( updater, "scm.url", scm.getUrl() );
-                    addUpdateStringValue( updater, "scm.connection", scm.getConnection() );
-                    addUpdateStringValue( updater, "scm.developerConnection", scm.getDeveloperConnection() );
-                }
+                removeMailingList( key );
+                removeLicenses( key );
+                removeDependencies( key );
             }
+            recordMailingList( key, versionMetadata.getMailingLists( ) );
+            recordLicenses( key, versionMetadata.getLicenses( ) );
+            recordDependencies( key, versionMetadata.getDependencies( ), repositoryId );
 
-            // update is a delete record
-            removeMailingList( key );
-            recordMailingList( key, versionMetadata.getMailingLists() );
+            ArtifactMetadataModel artifactMetadataModel = new ArtifactMetadataModel( );
+            artifactMetadataModel.setRepositoryId( repositoryId );
+            artifactMetadataModel.setNamespace( namespaceId );
+            artifactMetadataModel.setProject( projectId );
+            artifactMetadataModel.setProjectVersion( versionMetadata.getVersion( ) );
+            artifactMetadataModel.setVersion( versionMetadata.getVersion( ) );
+            updateFacets( versionMetadata, artifactMetadataModel );
 
-            removeLicenses( key );
-            recordLicenses( key, versionMetadata.getLicenses() );
-
-            removeDependencies( key );
-            recordDependencies( key, versionMetadata.getDependencies(), repositoryId );
-
-            projectVersionMetadataTemplate.update( updater );
 
         }
 
-        ArtifactMetadataModel artifactMetadataModel = new ArtifactMetadataModel();
-        artifactMetadataModel.setRepositoryId( repositoryId );
-        artifactMetadataModel.setNamespace( namespaceId );
-        artifactMetadataModel.setProject( projectId );
-        artifactMetadataModel.setProjectVersion( versionMetadata.getVersion() );
-        artifactMetadataModel.setVersion( versionMetadata.getVersion() );
-        updateFacets( versionMetadata, artifactMetadataModel );
-
     }
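The create/update split of the Hector code collapses into a single statement here because a CQL UPDATE is an upsert: it writes the row whether or not it already exists. A sketch of that idea, including setMapValue for a map column (table and column names are placeholders):

    import com.datastax.oss.driver.api.core.CqlSession;
    import com.datastax.oss.driver.api.querybuilder.update.Update;
    import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;
    import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.update;

    public class UpsertSketch
    {
        // Placeholder names; "versionproperties" stands for a map<text, text> column.
        public void upsert( CqlSession session, String key, String name, String propKey, String propValue )
        {
            Update stmt = update( "projectversionmetadata" )
                .setColumn( "name", literal( name ) )
                .setMapValue( "versionproperties", literal( propKey ), literal( propValue ) )
                .whereColumn( "key" ).isEqualTo( literal( key ) );
            // The row is created if it does not exist yet (CQL upsert semantics).
            session.execute( stmt.build() );
        }
    }

Note also that column names containing a dot or mixed case, such as ciManagement.system, are not valid unquoted CQL identifiers, which is why the patch passes them double-quoted.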
 
 
     @Override
-    public ProjectVersionMetadata getProjectVersion( RepositorySession session, final String repoId, final String namespace,
+    public ProjectVersionMetadata getProjectVersion( RepositorySession repositorySession, final String repositoryId, final String namespaceId,
                                                      final String projectId, final String projectVersion )
         throws MetadataResolutionException
     {
 
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
-            .setColumnNames( PROJECT_VERSION.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
-            .addEqualsExpression( PROJECT_VERSION.toString(), projectVersion ) //
-            .execute();
-
-        if ( result.get().getCount() < 1 )
-        {
-            return null;
-        }
-
-        String key = result.get().iterator().next().getKey();
-
-        ColumnFamilyResult<String, String> columnFamilyResult = this.projectVersionMetadataTemplate.queryColumns( key );
-
-        if ( !columnFamilyResult.hasResults() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            return null;
-        }
-
-        ProjectVersionMetadata projectVersionMetadata = new ProjectVersionMetadata();
-        projectVersionMetadata.setId( columnFamilyResult.getString( PROJECT_VERSION.toString() ) );
-        projectVersionMetadata.setDescription( columnFamilyResult.getString( DESCRIPTION.toString() ) );
-        projectVersionMetadata.setName( columnFamilyResult.getString( NAME.toString() ) );
-
-        projectVersionMetadata.setIncomplete( Boolean.parseBoolean( columnFamilyResult.getString( "incomplete" ) ) );
+            String table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+            Select query = selectFrom( table )
+                .all( )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespaceId ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .whereColumn( PROJECT_VERSION.toString( ) ).isEqualTo( literal( projectVersion ) )
+                .allowFiltering();
+            Row result = session.execute( query.build( ) ).one( );
+            if ( result == null )
+            {
+                return null;
+            }
+            String key = result.getString( DEFAULT_PRIMARY_KEY );
+            ProjectVersionMetadata projectVersionMetadata = new ProjectVersionMetadata( );
+            projectVersionMetadata.setId( result.getString( PROJECT_VERSION.toString( ) ) );
+            projectVersionMetadata.setDescription( result.getString( DESCRIPTION.toString( ) ) );
+            projectVersionMetadata.setName( result.getString( NAME.toString( ) ) );
 
-        projectVersionMetadata.setUrl( columnFamilyResult.getString( URL.toString() ) );
-        {
-            String ciUrl = columnFamilyResult.getString( "ciManagement.url" );
-            String ciSystem = columnFamilyResult.getString( "ciManagement.system" );
+            projectVersionMetadata.setIncomplete( result.getBoolean( "incomplete" ) );
 
-            if ( StringUtils.isNotEmpty( ciSystem ) || StringUtils.isNotEmpty( ciUrl ) )
+            projectVersionMetadata.setUrl( result.getString( URL.toString( ) ) );
             {
-                projectVersionMetadata.setCiManagement( new CiManagement( ciSystem, ciUrl ) );
+                String ciUrl = result.getString( "\"ciManagement.url\"" );
+                String ciSystem = result.getString( "\"ciManagement.system\"" );
+
+                if ( StringUtils.isNotEmpty( ciSystem ) || StringUtils.isNotEmpty( ciUrl ) )
+                {
+                    projectVersionMetadata.setCiManagement( new CiManagement( ciSystem, ciUrl ) );
+                }
             }
-        }
-        {
-            String issueUrl = columnFamilyResult.getString( "issueManagement.url" );
-            String issueSystem = columnFamilyResult.getString( "issueManagement.system" );
-            if ( StringUtils.isNotEmpty( issueSystem ) || StringUtils.isNotEmpty( issueUrl ) )
             {
-                projectVersionMetadata.setIssueManagement( new IssueManagement( issueSystem, issueUrl ) );
+                String issueUrl = result.getString( "\"issueManagement.url\"" );
+                String issueSystem = result.getString( "\"issueManagement.system\"" );
+                if ( StringUtils.isNotEmpty( issueSystem ) || StringUtils.isNotEmpty( issueUrl ) )
+                {
+                    projectVersionMetadata.setIssueManagement( new IssueManagement( issueSystem, issueUrl ) );
+                }
             }
-        }
-        {
-            String organizationUrl = columnFamilyResult.getString( "organization.url" );
-            String organizationName = columnFamilyResult.getString( "organization.name" );
-            if ( StringUtils.isNotEmpty( organizationUrl ) || StringUtils.isNotEmpty( organizationName ) )
             {
-                projectVersionMetadata.setOrganization( new Organization( organizationName, organizationUrl ) );
+                String organizationUrl = result.getString( "\"organization.url\"" );
+                String organizationName = result.getString( "\"organization.name\"" );
+                if ( StringUtils.isNotEmpty( organizationUrl ) || StringUtils.isNotEmpty( organizationName ) )
+                {
+                    projectVersionMetadata.setOrganization( new Organization( organizationName, organizationUrl ) );
+                }
             }
-        }
-        {
-            String devConn = columnFamilyResult.getString( "scm.developerConnection" );
-            String conn = columnFamilyResult.getString( "scm.connection" );
-            String url = columnFamilyResult.getString( "scm.url" );
-            if ( StringUtils.isNotEmpty( devConn ) || StringUtils.isNotEmpty( conn ) || StringUtils.isNotEmpty( url ) )
             {
-                projectVersionMetadata.setScm( new Scm( conn, devConn, url ) );
+                String devConn = result.getString( "\"scm.developerConnection\"" );
+                String conn = result.getString( "\"scm.connection\"" );
+                String url = result.getString( "\"scm.url\"" );
+                if ( StringUtils.isNotEmpty( devConn ) || StringUtils.isNotEmpty( conn ) || StringUtils.isNotEmpty( url ) )
+                {
+                    projectVersionMetadata.setScm( new Scm( conn, devConn, url ) );
+                }
             }
-        }
-        projectVersionMetadata.setMailingLists( getMailingLists( key ) );
-        projectVersionMetadata.setLicenses( getLicenses( key ) );
-        projectVersionMetadata.setDependencies( getDependencies( key ) );
-        // facets
-
-        result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
-            .setColumnNames( FACET_ID.toString(), KEY.toString(), VALUE.toString(), NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
-            .addEqualsExpression( PROJECT_VERSION.toString(), projectVersion ) //
-            .execute();
-
-        Map<String, Map<String, String>> metadataFacetsPerFacetIds = new HashMap<>();
-
-        for ( Row<String, String, String> row : result.get() )
-        {
-            ColumnSlice<String, String> columnSlice = row.getColumnSlice();
-            String facetId = getStringValue( columnSlice, FACET_ID.toString() );
-            Map<String, String> metaValues = metadataFacetsPerFacetIds.computeIfAbsent( facetId, k -> new HashMap<>( ) );
-            metaValues.put( getStringValue( columnSlice, KEY.toString() ), getStringValue( columnSlice, VALUE.toString() ) );
-        }
-
-        if ( !metadataFacetsPerFacetIds.isEmpty() )
-        {
-            for ( Map.Entry<String, Map<String, String>> entry : metadataFacetsPerFacetIds.entrySet() )
+            projectVersionMetadata.setMailingLists( getMailingLists( key ) );
+            projectVersionMetadata.setLicenses( getLicenses( key ) );
+            projectVersionMetadata.setDependencies( getDependencies( key ) );
+
+
+            // Facets
+            table = cassandraArchivaManager.getMetadataFacetFamilyName( );
+            query = selectFrom( table ).column( PROJECT_ID.toString( ) )
+                .column( FACET_ID.toString( ) )
+                .column( KEY.toString( ) )
+                .column( VALUE.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespaceId ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .whereColumn( PROJECT_VERSION.toString( ) ).isEqualTo( literal( projectVersion ) )
+                .allowFiltering();
+            ResultSet rows = session.execute( query.build( ) );
+            Map<String, Map<String, String>> metadataFacetsPerFacetIds = StreamSupport.stream( rows.spliterator( ), false )
+                .collect(
+                    Collectors.groupingBy(
+                        row -> row.getString( FACET_ID.toString( ) ),
+                        Collectors.toMap(
+                            row -> row.getString( KEY.toString( ) ),
+                            row -> row.getString( VALUE.toString( ) )
+                        )
+                    )
+                );
+            if ( !metadataFacetsPerFacetIds.isEmpty( ) )
             {
-                MetadataFacetFactory<?> metadataFacetFactory = getFacetFactory( entry.getKey() );
-                if ( metadataFacetFactory != null )
+                for ( Map.Entry<String, Map<String, String>> entry : metadataFacetsPerFacetIds.entrySet( ) )
                 {
-                    MetadataFacet metadataFacet = metadataFacetFactory.createMetadataFacet();
-                    metadataFacet.fromProperties( entry.getValue() );
-                    projectVersionMetadata.addFacet( metadataFacet );
+                    MetadataFacetFactory<?> metadataFacetFactory = getFacetFactory( entry.getKey( ) );
+                    if ( metadataFacetFactory != null )
+                    {
+                        MetadataFacet metadataFacet = metadataFacetFactory.createMetadataFacet( );
+                        metadataFacet.fromProperties( entry.getValue( ) );
+                        projectVersionMetadata.addFacet( metadataFacet );
+                    }
                 }
             }
-        }
 
-        return projectVersionMetadata;
+            return projectVersionMetadata;
+        }
     }
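The facet rows, each carrying a facet id plus one key/value pair, are regrouped in memory into one property map per facet id. A sketch of that grouping step; unlike the patch it passes a merge function to Collectors.toMap, since toMap without one throws on duplicate keys:

    import com.datastax.oss.driver.api.core.cql.ResultSet;
    import java.util.Map;
    import java.util.stream.Collectors;
    import java.util.stream.StreamSupport;

    public class FacetGroupingSketch
    {
        // Groups rows carrying (facetid, key, value) into one key/value map per facet id.
        // Column names are placeholders.
        static Map<String, Map<String, String>> groupFacets( ResultSet rows )
        {
            return StreamSupport.stream( rows.spliterator(), false )
                .collect( Collectors.groupingBy(
                    row -> row.getString( "facetid" ),
                    Collectors.toMap(
                        row -> row.getString( "key" ),
                        row -> row.getString( "value" ),
                        ( first, second ) -> second ) ) );
        }
    }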
 
-    protected void recordChecksums( String repositoryId, String artifactMetadataKey, Map<String, String> checksums)
+    protected void recordChecksums( String repositoryId, String artifactMetadataKey, Map<String, String> checksums )
     {
-        if ( checksums == null || checksums.isEmpty() )
+        if ( checksums == null || checksums.isEmpty( ) )
         {
             return;
         }
-        Mutator<String> checksumMutator = this.checksumTemplate.createMutator();
-        for ( Map.Entry<String, String> entry : checksums.entrySet())
-        {
-            // we don't care about the key as the real used one with the projectVersionMetadata
-            String keyChecksums = UUID.randomUUID().toString();
-            String cfChecksums = cassandraArchivaManager.getChecksumFamilyName();
 
-            addInsertion( checksumMutator, keyChecksums, cfChecksums, ARTIFACT_METADATA_MODEL_KEY,
-                    artifactMetadataKey );
-            addInsertion( checksumMutator, keyChecksums, cfChecksums, CHECKSUM_ALG.toString(), entry.getKey());
-            addInsertion( checksumMutator, keyChecksums, cfChecksums, CHECKSUM_VALUE.toString(),
-                    entry.getValue() );
-            addInsertion(checksumMutator, keyChecksums, cfChecksums, REPOSITORY_NAME.toString(), repositoryId);
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            String table = cassandraArchivaManager.getChecksumFamilyName( );
+            for ( Map.Entry<String, String> entry : checksums.entrySet( ) )
+            {
+                String key = getChecksumKey( artifactMetadataKey, entry.getKey( ) );
+                RegularInsert insert = insertInto( table )
+                    .value( DEFAULT_PRIMARY_KEY, literal( key ) )
+                    .value( ARTIFACT_METADATA_MODEL_KEY, literal( artifactMetadataKey ) )
+                    .value( CHECKSUM_ALG.toString( ), literal( entry.getKey( ) ) )
+                    .value( CHECKSUM_VALUE.toString( ), literal( entry.getValue( ) ) )
+                    .value( REPOSITORY_NAME.toString( ), literal( repositoryId ) );
+                session.execute( insert.build( ) );
 
+            }
         }
-        checksumMutator.execute();
+    }
+
+    private String getChecksumKey( String metadataKey, String checksumAlg )
+    {
+        return metadataKey + "." + checksumAlg;
     }
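Writes of dependent records (checksums, mailing lists, licenses, dependencies) become plain INSERTs: each child row gets its own key plus a back-reference column pointing at the parent key, which is what the matching remove* and get* methods later filter on. A sketch with placeholder names:

    import com.datastax.oss.driver.api.core.CqlSession;
    import com.datastax.oss.driver.api.querybuilder.insert.RegularInsert;
    import java.util.UUID;
    import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.insertInto;
    import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;

    public class ChildInsertSketch
    {
        // Placeholder names: "key" is the child row's own (arbitrary) key,
        // "parentkey" is the back-reference used by the remove*/get* methods.
        public void insertChild( CqlSession session, String table, String parentKey, String name )
        {
            RegularInsert insert = insertInto( table )
                .value( "key", literal( UUID.randomUUID().toString() ) )
                .value( "parentkey", literal( parentKey ) )
                .value( "name", literal( name ) );
            session.execute( insert.build() );
        }
    }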
 
     protected void removeChecksums( String artifactMetadataKey )
     {
-
-        QueryResult<OrderedRows<String, String, String>> result =
-                HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
-                        .setColumnFamily( cassandraArchivaManager.getChecksumFamilyName() ) //
-                        .setColumnNames( CHECKSUM_ALG.toString() ) //
-                        .setRowCount( Integer.MAX_VALUE ) //
-                        .addEqualsExpression(ARTIFACT_METADATA_MODEL_KEY, artifactMetadataKey ) //
-                        .execute();
-
-        if ( result.get().getCount() < 1 )
-        {
-            return;
-        }
-
-        for ( Row<String, String, String> row : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            this.checksumTemplate.deleteRow( row.getKey() );
+            String table = cassandraArchivaManager.getChecksumFamilyName( );
+            Select deleteRows = selectFrom( table )
+                .column( DEFAULT_PRIMARY_KEY )
+                .whereColumn( ARTIFACT_METADATA_MODEL_KEY ).isEqualTo( literal( artifactMetadataKey ) )
+                .allowFiltering();
+            ResultSet result = session.execute( deleteRows.build( ) );
+            StreamSupport.stream( result.spliterator(), false )
+                .map(row -> row.getString( DEFAULT_PRIMARY_KEY ))
+                .distinct()
+                .forEach(
+                    delKey -> session.execute( deleteFrom( table ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal(delKey) ).build(  ) )
+                );
         }
-
     }
 
     protected Map<String, String> getChecksums( String artifactMetadataKey )
     {
-        Map<String, String> checksums = new HashMap<>();
-
-        QueryResult<OrderedRows<String, String, String>> result =
-                HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
-                        .setColumnFamily( cassandraArchivaManager.getChecksumFamilyName() ) //
-                        .setColumnNames( ARTIFACT_METADATA_MODEL_KEY, REPOSITORY_NAME.toString(),
-                                CHECKSUM_ALG.toString(), CHECKSUM_VALUE.toString() ) //
-                        .setRowCount( Integer.MAX_VALUE ) //
-                        .addEqualsExpression(ARTIFACT_METADATA_MODEL_KEY, artifactMetadataKey) //
-                        .execute();
-        for ( Row<String, String, String> row : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            ColumnFamilyResult<String, String> columnFamilyResult =
-                    this.checksumTemplate.queryColumns( row.getKey() );
-
-            checksums.put(columnFamilyResult.getString(CHECKSUM_ALG.toString()),
-                    columnFamilyResult.getString(CHECKSUM_VALUE.toString()));
+            String table = cassandraArchivaManager.getChecksumFamilyName( );
+            Select query = selectFrom( table )
+                .all( )
+                .whereColumn( ARTIFACT_METADATA_MODEL_KEY ).isEqualTo( literal( artifactMetadataKey ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            return StreamSupport.stream( result.spliterator( ), false )
+                .collect(
+                    Collectors.toMap(
+                        row -> row.getString( CHECKSUM_ALG.toString( ) ),
+                        row -> row.getString( CHECKSUM_VALUE.toString( ) )
+                    )
+                );
         }
-
-        return checksums;
     }
 
     protected void recordMailingList( String projectVersionMetadataKey, List<MailingList> mailingLists )
     {
-        if ( mailingLists == null || mailingLists.isEmpty() )
+        if ( mailingLists == null || mailingLists.isEmpty( ) )
         {
             return;
         }
-        Mutator<String> mailingMutator = this.mailingListTemplate.createMutator();
-        for ( MailingList mailingList : mailingLists )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            // we don't care about the key as the real used one with the projectVersionMetadata
-            String keyMailingList = UUID.randomUUID().toString();
-            String cfMailingList = cassandraArchivaManager.getMailingListFamilyName();
-
-            addInsertion( mailingMutator, keyMailingList, cfMailingList, "projectVersionMetadataModel.key",
-                          projectVersionMetadataKey );
-            addInsertion( mailingMutator, keyMailingList, cfMailingList, NAME.toString(), mailingList.getName() );
-            addInsertion( mailingMutator, keyMailingList, cfMailingList, "mainArchiveUrl",
-                          mailingList.getMainArchiveUrl() );
-            addInsertion( mailingMutator, keyMailingList, cfMailingList, "postAddress", mailingList.getPostAddress() );
-            addInsertion( mailingMutator, keyMailingList, cfMailingList, "subscribeAddress",
-                          mailingList.getSubscribeAddress() );
-            addInsertion( mailingMutator, keyMailingList, cfMailingList, "unsubscribeAddress",
-                          mailingList.getUnsubscribeAddress() );
-            int idx = 0;
-            for ( String otherArchive : mailingList.getOtherArchives() )
+            String table = cassandraArchivaManager.getMailingListFamilyName( );
+            for ( MailingList mailingList : mailingLists )
             {
-                addInsertion( mailingMutator, keyMailingList, cfMailingList, "otherArchive." + idx, otherArchive );
-                idx++;
+                // the row key itself is arbitrary; rows are looked up via the projectVersionMetadata key column
+                String keyMailingList = UUID.randomUUID( ).toString( );
+                RegularInsert insert = insertInto( table )
+                    .value( DEFAULT_PRIMARY_KEY, literal( keyMailingList ) );
+                insert = insert.value( "\"projectVersionMetadataModel.key\"", literal( projectVersionMetadataKey ) )
+                    .value( NAME.toString( ), literal( mailingList.getName( ) ) )
+                    .value( "mainArchiveUrl", literal( mailingList.getMainArchiveUrl( ) ) )
+                    .value( "postAddress", literal( mailingList.getPostAddress( ) ) )
+                    .value( "subscribeAddress", literal( mailingList.getSubscribeAddress( ) ) )
+                    .value( "unsubscribeAddress", literal( mailingList.getUnsubscribeAddress( ) ) )
+                    .value( "otherArchive", literal( mailingList.getOtherArchives( ) ) );
+                session.execute( insert.build( ) );
             }
-
         }
-        mailingMutator.execute();
     }
 
     protected void removeMailingList( String projectVersionMetadataKey )
     {
-
-        QueryResult<OrderedRows<String, String, String>> result =
-            HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getMailingListFamilyName() ) //
-                .setColumnNames( NAME.toString() ) //
-                .setRowCount( Integer.MAX_VALUE ) //
-                .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
-                .execute();
-
-        if ( result.get().getCount() < 1 )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            return;
-        }
-
-        for ( Row<String, String, String> row : result.get() )
-        {
-            this.mailingListTemplate.deleteRow( row.getKey() );
+            String table = cassandraArchivaManager.getMailingListFamilyName( );
+            Select deleteRows = selectFrom( table )
+                .column( DEFAULT_PRIMARY_KEY )
+                .whereColumn( "\"projectVersionMetadataModel.key\"" ).isEqualTo( literal( projectVersionMetadataKey ) );
+            ResultSet result = session.execute( deleteRows.build( ) );
+            StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( DEFAULT_PRIMARY_KEY ) )
+                .distinct( )
+                .forEach(
+                    delKey ->
+                        session.execute( deleteFrom( table ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( delKey ) ).build( ) )
+                );
         }
+    }
 
+    protected MailingList getMailingList( Row row )
+    {
+        MailingList mailingList = new MailingList( );
+        mailingList.setName( row.getString( NAME.toString( ) ) );
+        mailingList.setMainArchiveUrl( row.getString( "mainArchiveUrl" ) );
+        mailingList.setPostAddress( row.getString( "postAddress" ) );
+        mailingList.setSubscribeAddress( row.getString( "subscribeAddress" ) );
+        mailingList.setUnsubscribeAddress( row.getString( "unsubscribeAddress" ) );
+        mailingList.setOtherArchives( row.getList( "otherArchive", String.class ) );
+        return mailingList;
     }
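The Hector code spread a mailing list's archives over dynamically named columns (otherArchive.0, otherArchive.1, ...); with CQL the whole list fits in a single list<text> column. A sketch of writing and reading such a column, with placeholder names:

    import com.datastax.oss.driver.api.core.CqlSession;
    import com.datastax.oss.driver.api.core.cql.Row;
    import java.util.List;
    import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.insertInto;
    import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;

    public class ListColumnSketch
    {
        // "mailinglist", "key" and "otherarchive" are placeholder names;
        // "otherarchive" stands for a list<text> column.
        static void write( CqlSession session, String key, List<String> archives )
        {
            session.execute( insertInto( "mailinglist" )
                .value( "key", literal( key ) )
                .value( "otherarchive", literal( archives ) )
                .build() );
        }

        static List<String> read( Row row )
        {
            return row.getList( "otherarchive", String.class );
        }
    }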
 
     protected List<MailingList> getMailingLists( String projectVersionMetadataKey )
     {
-        List<MailingList> mailingLists = new ArrayList<>();
+        List<MailingList> mailingLists = new ArrayList<>( );
 
-        QueryResult<OrderedRows<String, String, String>> result =
-            HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getMailingListFamilyName() ) //
-                .setColumnNames( NAME.toString() ) //
-                .setRowCount( Integer.MAX_VALUE ) //
-                .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
-                .execute();
-        for ( Row<String, String, String> row : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            ColumnFamilyResult<String, String> columnFamilyResult =
-                this.mailingListTemplate.queryColumns( row.getKey() );
-
-            MailingList mailingList = new MailingList();
-            mailingList.setName( columnFamilyResult.getString( NAME.toString() ) );
-            mailingList.setMainArchiveUrl( columnFamilyResult.getString( "mainArchiveUrl" ) );
-            mailingList.setPostAddress( columnFamilyResult.getString( "postAddress" ) );
-            mailingList.setSubscribeAddress( columnFamilyResult.getString( "subscribeAddress" ) );
-            mailingList.setUnsubscribeAddress( columnFamilyResult.getString( "unsubscribeAddress" ) );
-
-            List<String> otherArchives = new ArrayList<>();
-
-            for ( String columnName : columnFamilyResult.getColumnNames() )
-            {
-                if ( StringUtils.startsWith( columnName, "otherArchive." ) )
-                {
-                    otherArchives.add( columnFamilyResult.getString( columnName ) );
-                }
-            }
-
-            mailingList.setOtherArchives( otherArchives );
-            mailingLists.add( mailingList );
+            String table = cassandraArchivaManager.getMailingListFamilyName( );
+            Select query = selectFrom( table )
+                .all( )
+                .whereColumn( "\"projectVersionMetadataModel.key\"" ).isEqualTo( literal( projectVersionMetadataKey ) );
+            ResultSet result = session.execute( query.build( ) );
+            return StreamSupport.stream( result.spliterator( ), false )
+                .map( this::getMailingList )
+                .collect( Collectors.toList( ) );
         }
-
-        return mailingLists;
     }
 
     protected void recordLicenses( String projectVersionMetadataKey, List<License> licenses )
     {
 
-        if ( licenses == null || licenses.isEmpty() )
+        if ( licenses == null || licenses.isEmpty( ) )
         {
             return;
         }
-        Mutator<String> licenseMutator = this.licenseTemplate.createMutator();
-
-        for ( License license : licenses )
+        String table = cassandraArchivaManager.getLicenseFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            // we don't care about the key as the real used one with the projectVersionMetadata
-            String keyLicense = UUID.randomUUID().toString();
-            String cfLicense = cassandraArchivaManager.getLicenseFamilyName();
-
-            addInsertion( licenseMutator, keyLicense, cfLicense, "projectVersionMetadataModel.key",
-                          projectVersionMetadataKey );
 
-            addInsertion( licenseMutator, keyLicense, cfLicense, NAME.toString(), license.getName() );
-
-            addInsertion( licenseMutator, keyLicense, cfLicense, URL.toString(), license.getUrl() );
+            for ( License license : licenses )
+            {
+                // the row key itself is arbitrary; rows are looked up via the projectVersionMetadata key column
+                String keyLicense = UUID.randomUUID( ).toString( );
+                RegularInsert insert = insertInto( table )
+                    .value( DEFAULT_PRIMARY_KEY, literal( keyLicense ) )
+                    .value( "\"projectVersionMetadataModel.key\"", literal( projectVersionMetadataKey ) )
+                    .value( NAME.toString( ), literal( license.getName( ) ) )
+                    .value( URL.toString( ), literal( license.getUrl( ) ) );
+                session.execute( insert.build( ) );
 
+            }
         }
-        licenseMutator.execute();
     }
 
     protected void removeLicenses( String projectVersionMetadataKey )
     {
-
-        QueryResult<OrderedRows<String, String, String>> result =
-            HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getLicenseFamilyName() ) //
-                .setColumnNames( NAME.toString() ) //
-                .setRowCount( Integer.MAX_VALUE ) //
-                .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
-                .execute();
-        for ( Row<String, String, String> row : result.get() )
+        String table = cassandraArchivaManager.getLicenseFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            this.licenseTemplate.deleteRow( row.getKey() );
+            Select deleteRows = selectFrom( table )
+                .column( DEFAULT_PRIMARY_KEY )
+                .whereColumn( "\"projectVersionMetadataModel.key\"" ).isEqualTo( literal( projectVersionMetadataKey ) )
+                .allowFiltering();
+            ResultSet result = session.execute( deleteRows.build( ) );
+            StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( DEFAULT_PRIMARY_KEY ) )
+                .distinct( )
+                .forEach(
+                    delKey ->
+                        session.execute( deleteFrom( table ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( delKey ) ).build( ) )
+                );
+
         }
     }
 
     protected List<License> getLicenses( String projectVersionMetadataKey )
-    {
-        List<License> licenses = new ArrayList<>();
-
-        QueryResult<OrderedRows<String, String, String>> result =
-            HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getLicenseFamilyName() ) //
-                .setColumnNames( "projectVersionMetadataModel.key" ) //
-                .setRowCount( Integer.MAX_VALUE ) //
-                .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
-                .execute();
-
-        for ( Row<String, String, String> row : result.get() )
+    {
+        String table = cassandraArchivaManager.getLicenseFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            ColumnFamilyResult<String, String> columnFamilyResult = this.licenseTemplate.queryColumns( row.getKey() );
-
-            licenses.add(
-                new License( columnFamilyResult.getString( NAME.toString() ), columnFamilyResult.getString( URL.toString() ) ) );
+            Select query = selectFrom( table )
+                .column( NAME.toString( ) )
+                .column( URL.toString( ) )
+                .whereColumn( "\"projectVersionMetadataModel.key\"" ).isEqualTo( literal( projectVersionMetadataKey ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            return StreamSupport.stream( result.spliterator( ), false )
+                .map(
+                    row ->
+                        new License( row.getString( NAME.toString( ) ), row.getString( URL.toString( ) ) )
+                )
+                .collect( Collectors.toList( ) );
         }
 
-        return licenses;
     }
 
 
@@ -1280,114 +1029,111 @@ public class CassandraMetadataRepository
                                        String repositoryId )
     {
 
-        if ( dependencies == null || dependencies.isEmpty() )
+        if ( dependencies == null || dependencies.isEmpty( ) )
         {
             return;
         }
-        Mutator<String> dependencyMutator = this.dependencyTemplate.createMutator();
-
-        for ( Dependency dependency : dependencies )
+        String table = cassandraArchivaManager.getDependencyFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            // we don't care about the key as the real used one with the projectVersionMetadata
-            String keyDependency = UUID.randomUUID().toString();
-            String cfDependency = cassandraArchivaManager.getDependencyFamilyName();
-
-            addInsertion( dependencyMutator, keyDependency, cfDependency, "projectVersionMetadataModel.key",
-                          projectVersionMetadataKey );
-
-            addInsertion( dependencyMutator, keyDependency, cfDependency, REPOSITORY_NAME.toString(), repositoryId );
-
-            addInsertion( dependencyMutator, keyDependency, cfDependency, "classifier", dependency.getClassifier() );
-
-            addInsertion( dependencyMutator, keyDependency, cfDependency, "optional",
-                          Boolean.toString( dependency.isOptional() ) );
-
-            addInsertion( dependencyMutator, keyDependency, cfDependency, "scope", dependency.getScope() );
-
-            addInsertion( dependencyMutator, keyDependency, cfDependency, "systemPath", dependency.getSystemPath() );
-
-            addInsertion( dependencyMutator, keyDependency, cfDependency, "type", dependency.getType() );
-
-            addInsertion( dependencyMutator, keyDependency, cfDependency, ARTIFACT_ID.toString(), dependency.getArtifactId() );
-
-            addInsertion( dependencyMutator, keyDependency, cfDependency, GROUP_ID.toString(), dependency.getNamespace() );
-
-            addInsertion( dependencyMutator, keyDependency, cfDependency, VERSION.toString(), dependency.getVersion() );
-
+            for ( Dependency dependency : dependencies )
+            {
+                // the generated row key is arbitrary; dependency rows are looked up via projectVersionMetadataModel.key
+                String keyDependency = UUID.randomUUID( ).toString( );
+                RegularInsert insert = insertInto( table )
+                    .value( DEFAULT_PRIMARY_KEY, literal( keyDependency ) )
+                    .value( "\"projectVersionMetadataModel.key\"", literal( projectVersionMetadataKey ) )
+                    .value( REPOSITORY_NAME.toString( ), literal( repositoryId ) )
+                    .value( "classifier", literal( dependency.getClassifier( ) ) )
+                    .value( "optional", literal( Boolean.toString( dependency.isOptional( ) ) ) )
+                    .value( "scope", literal( dependency.getScope( ) ) )
+                    .value( "systemPath", literal( dependency.getSystemPath( ) ) )
+                    .value( "type", literal( dependency.getType( ) ) )
+                    .value( ARTIFACT_ID.toString( ), literal( dependency.getArtifactId( ) ) )
+                    .value( GROUP_ID.toString( ), literal( dependency.getNamespace( ) ) )
+                    .value( VERSION.toString( ), literal( dependency.getVersion( ) ) );
+                session.execute( insert.build( ) );
+            }
         }
-        dependencyMutator.execute();
     }
 
     protected void removeDependencies( String projectVersionMetadataKey )
     {
 
-        QueryResult<OrderedRows<String, String, String>> result =
-            HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getDependencyFamilyName() ) //
-                .setColumnNames( GROUP_ID.toString() ) //
-                .setRowCount( Integer.MAX_VALUE ) //
-                .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
-                .execute();
-        for ( Row<String, String, String> row : result.get() )
+        String table = cassandraArchivaManager.getDependencyFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            this.dependencyTemplate.deleteRow( row.getKey() );
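+            // same scan-then-delete pattern as removeLicenses, applied to the dependency table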
+            Select deleteRows = selectFrom( table )
+                .column( DEFAULT_PRIMARY_KEY )
+                .whereColumn( "\"projectVersionMetadataModel.key\"" ).isEqualTo( literal( projectVersionMetadataKey ) )
+                .allowFiltering();
+            ResultSet result = session.execute( deleteRows.build( ) );
+            StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( DEFAULT_PRIMARY_KEY ) )
+                .distinct( )
+                .forEach(
+                    delKey ->
+                        session.execute( deleteFrom( table ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( delKey ) ).build( ) )
+                );
+
         }
     }
 
-    protected List<Dependency> getDependencies( String projectVersionMetadataKey )
+    protected Dependency newDependency( Row row )
     {
-        List<Dependency> dependencies = new ArrayList<>();
+        Dependency dependency = new Dependency( );
+        dependency.setClassifier( row.getString( "classifier" ) );
 
-        QueryResult<OrderedRows<String, String, String>> result =
-            HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getDependencyFamilyName() ) //
-                .setColumnNames( "projectVersionMetadataModel.key" ) //
-                .setRowCount( Integer.MAX_VALUE ) //
-                .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
-                .execute();
-
-        for ( Row<String, String, String> row : result.get() )
-        {
-            ColumnFamilyResult<String, String> columnFamilyResult =
-                this.dependencyTemplate.queryColumns( row.getKey() );
+        dependency.setOptional( Boolean.parseBoolean( row.getString( "optional" ) ) );
 
-            Dependency dependency = new Dependency();
-            dependency.setClassifier( columnFamilyResult.getString( "classifier" ) );
+        dependency.setScope( row.getString( "scope" ) );
 
-            dependency.setOptional( Boolean.parseBoolean( columnFamilyResult.getString( "optional" ) ) );
+        dependency.setSystemPath( row.getString( "systemPath" ) );
 
-            dependency.setScope( columnFamilyResult.getString( "scope" ) );
+        dependency.setType( row.getString( "type" ) );
 
-            dependency.setSystemPath( columnFamilyResult.getString( "systemPath" ) );
+        dependency.setArtifactId( row.getString( ARTIFACT_ID.toString( ) ) );
 
-            dependency.setType( columnFamilyResult.getString( "type" ) );
+        dependency.setNamespace( row.getString( GROUP_ID.toString( ) ) );
 
-            dependency.setArtifactId( columnFamilyResult.getString( ARTIFACT_ID.toString() ) );
+        dependency.setVersion( row.getString( VERSION.toString( ) ) );
 
-            dependency.setNamespace( columnFamilyResult.getString( GROUP_ID.toString() ) );
+        return dependency;
+    }
 
-            dependency.setVersion( columnFamilyResult.getString( VERSION.toString() ) );
+    protected List<Dependency> getDependencies( String projectVersionMetadataKey )
+    {
 
-            dependencies.add( dependency );
+        String table = cassandraArchivaManager.getDependencyFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            Select query = selectFrom( table )
+                .all( )
+                .whereColumn( "\"projectVersionMetadataModel.key\"" ).isEqualTo( literal( projectVersionMetadataKey ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            return StreamSupport.stream( result.spliterator( ), false )
+                .map( this::newDependency )
+                .collect( Collectors.toList( ) );
         }
-
-        return dependencies;
     }
 
-    private Map<String, String> mapChecksums(Map<ChecksumAlgorithm,String> checksums) {
-        return checksums.entrySet().stream().collect(Collectors.toMap(
-                e -> e.getKey().name(), Map.Entry::getValue
-        ));
+    private Map<String, String> mapChecksums( Map<ChecksumAlgorithm, String> checksums )
+    {
+        return checksums.entrySet( ).stream( ).collect( Collectors.toMap(
+            e -> e.getKey( ).name( ), Map.Entry::getValue
+        ) );
     }
 
-    private Map<ChecksumAlgorithm, String> mapChecksumsReverse(Map<String,String> checksums) {
-        return checksums.entrySet().stream().collect(Collectors.toMap(
-                e -> ChecksumAlgorithm.valueOf(e.getKey()), Map.Entry::getValue
-        ));
+    private Map<ChecksumAlgorithm, String> mapChecksumsReverse( Map<String, String> checksums )
+    {
+        return checksums.entrySet( ).stream( ).collect( Collectors.toMap(
+            e -> ChecksumAlgorithm.valueOf( e.getKey( ) ), Map.Entry::getValue
+        ) );
     }
 
     @Override
-    public void updateArtifact( RepositorySession session, String repositoryId, String namespaceId, String projectId, String projectVersion,
+    public void updateArtifact( RepositorySession repositorySession, String repositoryId, String namespaceId, String projectId, String projectVersion,
                                 ArtifactMetadata artifactMeta )
         throws MetadataRepositoryException
     {
@@ -1398,94 +1144,63 @@ public class CassandraMetadataRepository
             namespace = updateOrAddNamespace( repositoryId, namespaceId );
         }
 
-        ProjectMetadata projectMetadata = new ProjectMetadata();
+        ProjectMetadata projectMetadata = new ProjectMetadata( );
         projectMetadata.setId( projectId );
         projectMetadata.setNamespace( namespaceId );
-        updateProject( session, repositoryId, projectMetadata );
-
-        String key = new ArtifactMetadataModel.KeyBuilder().withNamespace( namespace ).withProject( projectId ).withId(
-            artifactMeta.getId() ).withProjectVersion( projectVersion ).build();
-
-        // exists?
-
-        boolean exists = this.artifactMetadataTemplate.isColumnsExist( key );
-
-        if ( exists )
-        {
-            // updater
-            ColumnFamilyUpdater<String, String> updater = this.artifactMetadataTemplate.createUpdater( key );
-            updater.setLong( FILE_LAST_MODIFIED.toString(), artifactMeta.getFileLastModified().toInstant().toEpochMilli());
-            updater.setLong( WHEN_GATHERED.toString(), artifactMeta.getWhenGathered().toInstant().toEpochMilli() );
-            updater.setLong( SIZE.toString(), artifactMeta.getSize() );
-            addUpdateStringValue( updater, VERSION.toString(), artifactMeta.getVersion() );
-            removeChecksums(key);
-            recordChecksums(repositoryId, key, mapChecksums(artifactMeta.getChecksums()));
-            this.artifactMetadataTemplate.update( updater );
-        }
-        else
-        {
-            String cf = this.cassandraArchivaManager.getArtifactMetadataFamilyName();
-            // create
-            this.artifactMetadataTemplate.createMutator() //
-                .addInsertion( key, cf, column( ID.toString(), artifactMeta.getId() ) )//
-                .addInsertion( key, cf, column( REPOSITORY_NAME.toString(), repositoryId ) ) //
-                .addInsertion( key, cf, column( NAMESPACE_ID.toString(), namespaceId ) ) //
-                .addInsertion( key, cf, column( PROJECT.toString(), artifactMeta.getProject() ) ) //
-                .addInsertion( key, cf, column( PROJECT_VERSION.toString(), projectVersion ) ) //
-                .addInsertion( key, cf, column( VERSION.toString(), artifactMeta.getVersion() ) ) //
-                .addInsertion( key, cf, column( FILE_LAST_MODIFIED.toString(), artifactMeta.getFileLastModified().toInstant().toEpochMilli() ) ) //
-                .addInsertion( key, cf, column( SIZE.toString(), artifactMeta.getSize() ) ) //
-                .addInsertion( key, cf, column( WHEN_GATHERED.toString(), artifactMeta.getWhenGathered().toInstant().toEpochMilli() ) )//
-                .execute();
-            recordChecksums(repositoryId, key, mapChecksums(artifactMeta.getChecksums()));
-        }
-
-        key = new ProjectVersionMetadataModel.KeyBuilder() //
-            .withRepository( repositoryId ) //
-            .withNamespace( namespace ) //
-            .withProjectId( projectId ) //
-            .withProjectVersion( projectVersion ) //
-            .withId( artifactMeta.getId() ) //
-            .build();
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
-            .setColumnNames( VERSION.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespaceId ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
-            .addEqualsExpression( PROJECT_VERSION.toString(), projectVersion ) //
-            .addEqualsExpression( VERSION.toString(), artifactMeta.getVersion() ) //
-            .execute();
-
-        exists = result.get().getCount() > 0;
-
-        if ( !exists )
-        {
-            String cf = this.cassandraArchivaManager.getProjectVersionMetadataFamilyName();
-
-            projectVersionMetadataTemplate.createMutator() //
-                .addInsertion( key, cf, column( NAMESPACE_ID.toString(), namespace.getName() ) ) //
-                .addInsertion( key, cf, column( REPOSITORY_NAME.toString(), repositoryId ) ) //
-                .addInsertion( key, cf, column( PROJECT_VERSION.toString(), projectVersion ) ) //
-                .addInsertion( key, cf, column( PROJECT_ID.toString(), projectId ) ) //
-                .addInsertion( key, cf, column( VERSION.toString(), artifactMeta.getVersion() ) ) //
-                .execute();
-
-        }
-
-        ArtifactMetadataModel artifactMetadataModel = new ArtifactMetadataModel();
+        updateProject( repositorySession, repositoryId, projectMetadata );
+
+        String key = new ArtifactMetadataModel.KeyBuilder( ).withNamespace( namespace ).withProject( projectId ).withId(
+            artifactMeta.getId( ) ).withProjectVersion( projectVersion ).build( );
+
+
+        String table = this.cassandraArchivaManager.getArtifactMetadataFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
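+            // a CQL UPDATE is an upsert: it writes the row whether or not it already exists,
+            // so the separate exists-check of the previous implementation is no longer needed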
+            Update update = update( table )
+                .setColumn( ID.toString( ), literal( artifactMeta.getId( ) ) )//
+                .setColumn( REPOSITORY_NAME.toString( ), literal( repositoryId ) ) //
+                .setColumn( NAMESPACE_ID.toString( ), literal( namespaceId ) ) //
+                .setColumn( PROJECT_ID.toString( ), literal( artifactMeta.getProject( ) ) ) //
+                .setColumn( PROJECT_VERSION.toString( ), literal( projectVersion ) ) //
+                .setColumn( VERSION.toString( ), literal( artifactMeta.getVersion( ) ) ) //
+                .setColumn( FILE_LAST_MODIFIED.toString( ), literal( artifactMeta.getFileLastModified( ).toInstant( ).toEpochMilli( ) ) ) //
+                .setColumn( SIZE.toString( ), literal( artifactMeta.getSize( ) ) ) //
+                .setColumn( WHEN_GATHERED.toString( ), literal( artifactMeta.getWhenGathered( ).toInstant( ).toEpochMilli( ) ) )
+                .whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+            session.execute( update.build( ) );
+            removeChecksums( key );
+            recordChecksums( repositoryId, key, mapChecksums( artifactMeta.getChecksums( ) ) );
+
+            key = new ProjectVersionMetadataModel.KeyBuilder( ) //
+                .withRepository( repositoryId ) //
+                .withNamespace( namespace ) //
+                .withProjectId( projectId ) //
+                .withProjectVersion( projectVersion ) //
+                .withId( artifactMeta.getId( ) ) //
+                .build( );
+            table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+
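+            // IF NOT EXISTS makes this a lightweight transaction, so an existing project version row is left untouched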
+            Insert insert = insertInto( table )
+                .value( DEFAULT_PRIMARY_KEY, literal( key ) )
+                .value( REPOSITORY_NAME.toString( ), literal( repositoryId ) )
+                .value( NAMESPACE_ID.toString( ), literal( namespaceId ) )
+                .value( PROJECT_ID.toString( ), literal( projectId ) )
+                .value( PROJECT_VERSION.toString( ), literal( projectVersion ) )
+                .value( VERSION.toString( ), literal( artifactMeta.getVersion( ) ) )
+                .ifNotExists( );
+            session.execute( insert.build( ) );
+        }
+        ArtifactMetadataModel artifactMetadataModel = new ArtifactMetadataModel( );
 
         artifactMetadataModel.setRepositoryId( repositoryId );
         artifactMetadataModel.setNamespace( namespaceId );
         artifactMetadataModel.setProject( projectId );
         artifactMetadataModel.setProjectVersion( projectVersion );
-        artifactMetadataModel.setVersion( artifactMeta.getVersion() );
-        artifactMetadataModel.setFileLastModified( artifactMeta.getFileLastModified() == null
-                                                       ? ZonedDateTime.now().toInstant().toEpochMilli()
-                                                       : artifactMeta.getFileLastModified().toInstant().toEpochMilli() );
-        artifactMetadataModel.setChecksums(mapChecksums(artifactMeta.getChecksums()));
+        artifactMetadataModel.setVersion( artifactMeta.getVersion( ) );
+        artifactMetadataModel.setFileLastModified( artifactMeta.getFileLastModified( ) == null
+            ? ZonedDateTime.now( ).toInstant( ).toEpochMilli( )
+            : artifactMeta.getFileLastModified( ).toInstant( ).toEpochMilli( ) );
+        artifactMetadataModel.setChecksums( mapChecksums( artifactMeta.getChecksums( ) ) );
 
         // now facets
         updateFacets( artifactMeta, artifactMetadataModel );
@@ -1493,30 +1208,26 @@ public class CassandraMetadataRepository
     }
 
     @Override
-    public List<String> getArtifactVersions( RepositorySession session, final String repoId, final String namespace, final String projectId,
+    public List<String> getArtifactVersions( RepositorySession repositorySession, final String repoId, final String namespace, final String projectId,
                                              final String projectVersion )
         throws MetadataResolutionException
     {
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
-            .setColumnNames( VERSION.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
-            .addEqualsExpression( PROJECT_VERSION.toString(), projectVersion ) //
-            .execute();
-
-        final Set<String> versions = new HashSet<>();
-
-        for ( Row<String, String, String> row : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            versions.add( getStringValue( row.getColumnSlice(), VERSION.toString() ) );
+            String table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+            Select query = selectFrom( table )
+                .column( VERSION.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repoId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespace ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .whereColumn( PROJECT_VERSION.toString( ) ).isEqualTo( literal( projectVersion ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
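+            // distinct() replaces the HashSet that previously de-duplicated the version values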
+            return StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( VERSION.toString( ) ) )
+                .distinct()
+                .collect( Collectors.toList( ) );
         }
-
-        return new ArrayList<>( versions );
-
     }
 
     /*
@@ -1528,86 +1239,81 @@ public class CassandraMetadataRepository
     private void updateFacets( final FacetedMetadata facetedMetadata,
                                final ArtifactMetadataModel artifactMetadataModel )
     {
-
-        String cf = cassandraArchivaManager.getMetadataFacetFamilyName();
-
-        for ( final String facetId : getSupportedFacets() )
+        String table = cassandraArchivaManager.getMetadataFacetFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            MetadataFacet metadataFacet = facetedMetadata.getFacet( facetId );
-            if ( metadataFacet == null )
-            {
-                continue;
-            }
-            // clean first
-
-            QueryResult<OrderedRows<String, String, String>> result =
-                HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-                    .setColumnFamily( cf ) //
-                    .setColumnNames( REPOSITORY_NAME.toString() ) //
-                    .addEqualsExpression( REPOSITORY_NAME.toString(), artifactMetadataModel.getRepositoryId() ) //
-                    .addEqualsExpression( NAMESPACE_ID.toString(), artifactMetadataModel.getNamespace() ) //
-                    .addEqualsExpression( PROJECT_ID.toString(), artifactMetadataModel.getProject() ) //
-                    .addEqualsExpression( PROJECT_VERSION.toString(), artifactMetadataModel.getProjectVersion() ) //
-                    .addEqualsExpression( FACET_ID.toString(), facetId ) //
-                    .execute();
-
-            for ( Row<String, String, String> row : result.get().getList() )
-            {
-                this.metadataFacetTemplate.deleteRow( row.getKey() );
-            }
-
-            Map<String, String> properties = metadataFacet.toProperties();
-
-            for ( Map.Entry<String, String> entry : properties.entrySet() )
+            for ( final String facetId : getSupportedFacets( ) )
             {
-                String key = new MetadataFacetModel.KeyBuilder().withKey( entry.getKey() ).withArtifactMetadataModel(
-                    artifactMetadataModel ).withFacetId( facetId ).withName( metadataFacet.getName() ).build();
-                Mutator<String> mutator = metadataFacetTemplate.createMutator() //
-                    .addInsertion( key, cf, column( REPOSITORY_NAME.toString(), artifactMetadataModel.getRepositoryId() ) ) //
-                    .addInsertion( key, cf, column( NAMESPACE_ID.toString(), artifactMetadataModel.getNamespace() ) ) //
-                    .addInsertion( key, cf, column( PROJECT_ID.toString(), artifactMetadataModel.getProject() ) ) //
-                    .addInsertion( key, cf, column( PROJECT_VERSION.toString(), artifactMetadataModel.getProjectVersion() ) ) //
-                    .addInsertion( key, cf, column( FACET_ID.toString(), facetId ) ) //
-                    .addInsertion( key, cf, column( KEY.toString(), entry.getKey() ) ) //
-                    .addInsertion( key, cf, column( VALUE.toString(), entry.getValue() ) );
-
-                if ( metadataFacet.getName() != null )
+                MetadataFacet metadataFacet = facetedMetadata.getFacet( facetId );
+                if ( metadataFacet != null )
                 {
-                    mutator.addInsertion( key, cf, column( NAME.toString(), metadataFacet.getName() ) );
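+                    // remove all rows previously stored for this facet before writing the new property values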
+                    Select deleteRows = selectFrom( table )
+                        .column( DEFAULT_PRIMARY_KEY )
+                        .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( artifactMetadataModel.getRepositoryId( ) ) )
+                        .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( artifactMetadataModel.getNamespace( ) ) )
+                        .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( artifactMetadataModel.getProject( ) ) )
+                        .whereColumn( PROJECT_VERSION.toString( ) ).isEqualTo( literal( artifactMetadataModel.getProjectVersion( ) ) )
+                        .whereColumn( FACET_ID.toString( ) ).isEqualTo( literal( facetId ) )
+                        .allowFiltering( );
+                    ResultSet resultSet = session.execute( deleteRows.build( ) );
+                    StreamSupport.stream( resultSet.spliterator( ), false ).map( row -> row.getString( DEFAULT_PRIMARY_KEY ) ).distinct( ).forEach( key ->
+                        {
+                            Delete delete = deleteFrom( table )
+                                .whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+                            session.execute( delete.build( ) );
+                        }
+                    );
+                    Map<String, String> properties = metadataFacet.toProperties( );
+
+                    for ( Map.Entry<String, String> entry : properties.entrySet( ) )
+                    {
+                        String key = new MetadataFacetModel.KeyBuilder( ).withKey( entry.getKey( ) ).withArtifactMetadataModel(
+                            artifactMetadataModel ).withFacetId( facetId ).withName( metadataFacet.getName( ) ).build( );
+                        Update update = update( table )
+                            .setColumn( REPOSITORY_NAME.toString( ), literal( artifactMetadataModel.getRepositoryId( ) ) )
+                            .setColumn( NAMESPACE_ID.toString( ), literal( artifactMetadataModel.getNamespace( ) ) )
+                            .setColumn( PROJECT_ID.toString( ), literal( artifactMetadataModel.getProject( ) ) )
+                            .setColumn( PROJECT_VERSION.toString( ), literal( artifactMetadataModel.getProjectVersion( ) ) )
+                            .setColumn( FACET_ID.toString( ), literal( facetId ) )
+                            .setColumn( KEY.toString( ), literal( entry.getKey( ) ) )
+                            .setColumn( VALUE.toString( ), literal( entry.getValue( ) ) )
+                            .whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+                        session.execute( update.build( ) );
+                    }
                 }
 
-                mutator.execute();
+
             }
         }
     }
 
 
     @Override
-    public List<String> getMetadataFacets( RepositorySession session, final String repositoryId, final String facetId )
+    public List<String> getMetadataFacets( RepositorySession repositorySession, final String repositoryId, final String facetId )
         throws MetadataRepositoryException
     {
 
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
-            .setColumnNames( NAME.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( FACET_ID.toString(), facetId ) //
-            .execute();
+        String table = cassandraArchivaManager.getMetadataFacetFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            Select query = selectFrom( table )
+                .column( NAME.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( FACET_ID.toString( ) ).isEqualTo( literal( facetId ) )
+                .allowFiltering();
 
-        final List<String> facets = new ArrayList<>();
+            ResultSet result = session.execute( query.build( ) );
+            return StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( NAME.toString( ) ) )
+                .distinct( )
+                .collect( Collectors.toList( ) );
 
-        for ( Row<String, String, String> row : result.get() )
-        {
-            facets.add( getStringValue( row.getColumnSlice(), NAME.toString() ) );
         }
-        return facets;
     }
 
-    private <T> Spliterator<T> createResultSpliterator( QueryResult<OrderedRows<String, String, String>> result, BiFunction<Row<String, String, String>, T, T> converter) throws MetadataRepositoryException
+    private <T> Spliterator<T> createResultSpliterator( ResultSet result, BiFunction<Row, T, T> converter ) throws MetadataRepositoryException
     {
-        final int size = result.get().getCount();
-        final Iterator<Row<String, String, String>> it = result.get( ).iterator( );
+        final Iterator<Row> it = result.iterator( );
 
         return new Spliterator<T>( )
         {
@@ -1616,29 +1322,27 @@ public class CassandraMetadataRepository
             @Override
             public boolean tryAdvance( Consumer<? super T> action )
             {
-                if (size>=1)
+                if ( it.hasNext( ) )
                 {
-                    if(it.hasNext())
+                    while ( it.hasNext( ) )
                     {
-                        while ( it.hasNext( ) )
+                        Row row = it.next( );
+                        T item = converter.apply( row, lastItem );
+                        if ( item != null && lastItem != null && item != lastItem )
                         {
-                            Row<String, String, String> row = it.next( );
-                            T item = converter.apply( row, lastItem );
-                            if ( item != null && lastItem !=null && item != lastItem )
-                            {
-                                action.accept( lastItem );
-                                lastItem = item;
-                                return true;
-                            }
+                            action.accept( lastItem );
                             lastItem = item;
+                            return true;
                         }
-                        action.accept( lastItem );
-                        return true;
-                    } else {
-                        return false;
+                        lastItem = item;
                     }
+                    action.accept( lastItem );
+                    return true;
+                }
+                else
+                {
+                    return false;
                 }
-                return false;
             }
 
             @Override
@@ -1650,24 +1354,34 @@ public class CassandraMetadataRepository
             @Override
             public long estimateSize( )
             {
-                return size;
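+                // the driver fetches result pages lazily, so the total number of rows is not known up front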
+                return Long.MAX_VALUE;
             }
 
             @Override
             public int characteristics( )
             {
-                return ORDERED+NONNULL+SIZED;
+                return ORDERED | NONNULL;
             }
         };
     }
 
+    <T extends MetadataFacet> Comparator<T> getFacetComparator( boolean ascending )
+    {
+        // sort by facet name; null names go last, matching the tolerance of the previous inline comparator
+        Comparator<T> comparator = Comparator.comparing( MetadataFacet::getName, Comparator.nullsLast( Comparator.naturalOrder( ) ) );
+        return ascending ? comparator : comparator.reversed( );
+    }
 
     /**
      * Implementation is not very performant, because sorting is done in the stream; the backend query does not
      * support specifying a sort order.
-     * 
+     *
      * @param <T>
-     * @param session
+     * @param repositorySession
      * @param repositoryId
      * @param facetClazz
      * @param queryParameter
@@ -1675,96 +1389,97 @@ public class CassandraMetadataRepository
      * @throws MetadataRepositoryException
      */
     @Override
-    public <T extends MetadataFacet> Stream<T> getMetadataFacetStream(RepositorySession session, String repositoryId, Class<T> facetClazz, QueryParameter queryParameter) throws MetadataRepositoryException
+    public <T extends MetadataFacet> Stream<T> getMetadataFacetStream( RepositorySession repositorySession, String repositoryId, Class<T> facetClazz, QueryParameter queryParameter ) throws MetadataRepositoryException
     {
         final MetadataFacetFactory<T> metadataFacetFactory = getFacetFactory( facetClazz );
         final String facetId = metadataFacetFactory.getFacetId( );
+        String table = cassandraArchivaManager.getMetadataFacetFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            Select query = selectFrom( table )
+                .columns( NAME.toString( ), KEY.toString( ), VALUE.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( FACET_ID.toString( ) ).isEqualTo( literal( facetId ) )
+                .allowFiltering();
+
+            ResultSet result = session.execute( query.build( ) );
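+            // each row holds a single facet property; the spliterator merges consecutive rows with the
+            // same NAME into one facet instance before it is emitted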
+            return StreamSupport.stream( createResultSpliterator( result, ( Row row, T lastItem ) -> {
+                String name = row.getString( NAME.toString( ) );
+                String key = row.getString( KEY.toString( ) );
+                String value = row.getString( VALUE.toString( ) );
+                T updateItem;
+                if ( lastItem != null && lastItem.getName( ).equals( name ) )
+                {
+                    updateItem = lastItem;
+                }
+                else
+                {
+                    updateItem = metadataFacetFactory.createMetadataFacet( repositoryId, name );
+                }
+                if ( StringUtils.isNotEmpty( key ) )
+                {
+                    Map<String, String> map = new HashMap<>( );
+                    map.put( key, value );
+                    updateItem.fromProperties( map );
+                }
+                return updateItem;
 
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName( ) ) //
-            .setColumnNames( NAME.toString( ), KEY.toString( ), VALUE.toString( ) ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString( ), repositoryId ) //
-            .addEqualsExpression( FACET_ID.toString( ), facetId ) //
-            .setRange( null, null, false, Integer.MAX_VALUE )
-            .setRowCount( Integer.MAX_VALUE )
-            .execute( );
-
-
-
-        return StreamSupport.stream( createResultSpliterator( result, ( Row<String, String, String> row, T lastItem)-> {
-            ColumnSlice<String, String> columnSlice = row.getColumnSlice();
-            String name = getStringValue( columnSlice, NAME.toString( ) );
-            T updateItem;
-            if (lastItem!=null && lastItem.getName().equals(name))
-            {
-                updateItem = lastItem;
-            } else
-            {
-                updateItem = metadataFacetFactory.createMetadataFacet( repositoryId, name );
-            }
-            String key = getStringValue( columnSlice, KEY.toString() );
-            if (StringUtils.isNotEmpty( key ))
-            {
-                Map<String, String> map = new HashMap<>( );
-                map.put( key , getStringValue( columnSlice, VALUE.toString( ) ) );
-                updateItem.fromProperties( map );
-            }
-            return updateItem;
-
-        }), false ).sorted( (f1, f2) -> f1.getName()!=null ? f1.getName().compareTo( f2.getName() ) : 1 ).skip( queryParameter.getOffset()).limit( queryParameter.getLimit());
+            } ), false )
+                .sorted( getFacetComparator( queryParameter.isAscending() ) )
+                .skip( queryParameter.getOffset( ) ).limit( queryParameter.getLimit( ) );
+        }
     }
 
     @Override
     public boolean hasMetadataFacet( RepositorySession session, String repositoryId, String facetId )
         throws MetadataRepositoryException
     {
-        return !getMetadataFacets( session, repositoryId, facetId ).isEmpty();
+        return !getMetadataFacets( session, repositoryId, facetId ).isEmpty( );
     }
 
     @Override
-    public <T extends MetadataFacet> T getMetadataFacet( RepositorySession session, final String repositoryId, final Class<T> facetClazz, final String name )
+    public <T extends MetadataFacet> T getMetadataFacet( RepositorySession repositorySession, final String repositoryId, final Class<T> facetClazz, final String name )
         throws MetadataRepositoryException
     {
         final MetadataFacetFactory<T> metadataFacetFactory = getFacetFactory( facetClazz );
-        if (metadataFacetFactory==null) {
+        if ( metadataFacetFactory == null )
+        {
             return null;
         }
         final String facetId = metadataFacetFactory.getFacetId( );
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
-            .setColumnNames( KEY.toString(), VALUE.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( FACET_ID.toString(), facetId ) //
-            .addEqualsExpression( NAME.toString(), name ) //
-            .execute();
-
+        final String table = cassandraArchivaManager.getMetadataFacetFamilyName( );
         T metadataFacet = metadataFacetFactory.createMetadataFacet( repositoryId, name );
-        int size = result.get().getCount();
-        if ( size < 1 )
-        {
-            return null;
-        }
-        Map<String, String> map = new HashMap<>( size );
-        for ( Row<String, String, String> row : result.get() )
-        {
-            ColumnSlice<String, String> columnSlice = row.getColumnSlice();
-            map.put( getStringValue( columnSlice, KEY.toString() ), getStringValue( columnSlice, VALUE.toString() ) );
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+
+            Select query = selectFrom( table )
+                .column( KEY.toString( ) )
+                .column( VALUE.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( FACET_ID.toString( ) ).isEqualTo( literal( facetId ) )
+                .whereColumn( NAME.toString( ) ).isEqualTo( literal( name ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
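+            // an empty first page is treated as "facet not found"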
+            if ( result.getAvailableWithoutFetching( ) == 0 )
+            {
+                return null;
+            }
+            Map<String, String> props = StreamSupport.stream( result.spliterator( ), false )
+                .filter( row -> !row.isNull( KEY.toString( ) ) )
+                .collect( Collectors.toMap( row -> row.getString( KEY.toString( ) ), row -> row.getString( VALUE.toString( ) ) ) );
+            metadataFacet.fromProperties( props );
+            return metadataFacet;
         }
-        metadataFacet.fromProperties( map );
-        return metadataFacet;
     }
 
     @Override
-    public MetadataFacet getMetadataFacet( RepositorySession session, String repositoryId, String facetId, String name ) throws MetadataRepositoryException
+    public MetadataFacet getMetadataFacet( RepositorySession repositorySession, String repositoryId, String facetId, String name ) throws MetadataRepositoryException
     {
-        return getMetadataFacet( session, repositoryId, getFactoryClassForId( facetId ), name );
+        return getMetadataFacet( repositorySession, repositoryId, getFactoryClassForId( facetId ), name );
     }
 
     @Override
-    public void addMetadataFacet( RepositorySession session, String repositoryId, MetadataFacet metadataFacet )
+    public void addMetadataFacet( RepositorySession repositorySession, String repositoryId, MetadataFacet metadataFacet )
         throws MetadataRepositoryException
     {
 
@@ -1772,251 +1487,191 @@ public class CassandraMetadataRepository
         {
             return;
         }
-
-        if ( metadataFacet.toProperties().isEmpty() )
+        final String table = this.cassandraArchivaManager.getMetadataFacetFamilyName( );
+        if ( metadataFacet.toProperties( ).isEmpty( ) )
         {
-            String key = new MetadataFacetModel.KeyBuilder().withRepositoryId( repositoryId ).withFacetId(
-                metadataFacet.getFacetId() ).withName( metadataFacet.getName() ).build();
+            String key = new MetadataFacetModel.KeyBuilder( ).withRepositoryId( repositoryId ).withFacetId(
+                metadataFacet.getFacetId( ) ).withName( metadataFacet.getName( ) ).build( );
 
-            boolean exists = this.metadataFacetTemplate.isColumnsExist( key );
-
-            if ( exists )
-            {
-                ColumnFamilyUpdater<String, String> updater = this.metadataFacetTemplate.createUpdater( key );
-                addUpdateStringValue( updater, FACET_ID.toString(), metadataFacet.getFacetId() );
-                addUpdateStringValue( updater, NAME.toString(), metadataFacet.getName() );
-                this.metadataFacetTemplate.update( updater );
-            }
-            else
+            CqlSession session = cassandraArchivaManager.getSession( );
             {
-                String cf = this.cassandraArchivaManager.getMetadataFacetFamilyName();
-                this.metadataFacetTemplate.createMutator() //
-                    .addInsertion( key, cf, column( REPOSITORY_NAME.toString(), repositoryId ) ) //
-                    .addInsertion( key, cf, column( FACET_ID.toString(), metadataFacet.getFacetId() ) ) //
-                    .addInsertion( key, cf, column( NAME.toString(), metadataFacet.getName() ) ) //
-                    .execute();
+                Update update = update( table )
+                    .setColumn( REPOSITORY_NAME.toString( ), literal( repositoryId ) )
+                    .setColumn( FACET_ID.toString( ), literal( metadataFacet.getFacetId( ) ) )
+                    .setColumn( NAME.toString( ), literal( metadataFacet.getName( ) ) )
+                    .whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+                session.execute( update.build( ) );
             }
-
         }
         else
         {
-            for ( Map.Entry<String, String> entry : metadataFacet.toProperties().entrySet() )
+            CqlSession session = cassandraArchivaManager.getSession( );
             {
-                String key = new MetadataFacetModel.KeyBuilder().withRepositoryId( repositoryId ).withFacetId(
-                    metadataFacet.getFacetId() ).withName( metadataFacet.getName() ).withKey( entry.getKey() ).build();
-
-                boolean exists = this.metadataFacetTemplate.isColumnsExist( key );
-                if ( !exists )
-                {
-                    String cf = this.cassandraArchivaManager.getMetadataFacetFamilyName();
-                    this.metadataFacetTemplate.createMutator() //
-                        .addInsertion( key, cf, column( REPOSITORY_NAME.toString(), repositoryId ) ) //
-                        .addInsertion( key, cf, column( FACET_ID.toString(), metadataFacet.getFacetId() ) ) //
-                        .addInsertion( key, cf, column( NAME.toString(), metadataFacet.getName() ) ) //
-                        .addInsertion( key, cf, column( KEY.toString(), entry.getKey() ) ) //
-                        .addInsertion( key, cf, column( VALUE.toString(), entry.getValue() ) ) //
-                        .execute();
-                }
-                else
+                for ( Map.Entry<String, String> entry : metadataFacet.toProperties( ).entrySet( ) )
                 {
-                    ColumnFamilyUpdater<String, String> updater = this.metadataFacetTemplate.createUpdater( key );
-                    addUpdateStringValue( updater, VALUE.toString(), entry.getValue() );
-                    this.metadataFacetTemplate.update( updater );
+                    String key = new MetadataFacetModel.KeyBuilder( ).withRepositoryId( repositoryId ).withFacetId(
+                        metadataFacet.getFacetId( ) ).withName( metadataFacet.getName( ) ).withKey( entry.getKey( ) ).build( );
+                    Update update = update( table )
+                        .setColumn( REPOSITORY_NAME.toString( ), literal( repositoryId ) )
+                        .setColumn( FACET_ID.toString( ), literal( metadataFacet.getFacetId( ) ) )
+                        .setColumn( NAME.toString( ), literal( metadataFacet.getName( ) ) )
+                        .setColumn( KEY.toString( ), literal( entry.getKey( ) ) )
+                        .setColumn( VALUE.toString( ), literal( entry.getValue( ) ) )
+                        .whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+                    session.execute( update.build( ) );
                 }
             }
         }
     }
 
     @Override
-    public void removeMetadataFacets( RepositorySession session, final String repositoryId, final String facetId )
+    public void removeMetadataFacets( RepositorySession repositorySession, final String repositoryId, final String facetId )
         throws MetadataRepositoryException
     {
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
-            .setColumnNames( KEY.toString(), VALUE.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( FACET_ID.toString(), facetId ) //
-            .execute();
-
-        for ( Row<String, String, String> row : result.get() )
+        final String table = cassandraArchivaManager.getMetadataFacetFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            this.metadataFacetTemplate.deleteRow( row.getKey() );
+            Select deleteRows = selectFrom( table )
+                .column( DEFAULT_PRIMARY_KEY )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( FACET_ID.toString( ) ).isEqualTo( literal( facetId ) )
+                .allowFiltering( );
+            ResultSet result = session.execute( deleteRows.build( ) );
+            StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( DEFAULT_PRIMARY_KEY ) )
+                .distinct( )
+                .forEach( delKey ->
+                    session.execute( deleteFrom( table ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( delKey ) ).build( ) )
+                );
         }
 
     }
 
     @Override
-    public void removeMetadataFacet( RepositorySession session, final String repositoryId, final String facetId, final String name )
+    public void removeMetadataFacet( RepositorySession repositorySession, final String repositoryId, final String facetId, final String name )
         throws MetadataRepositoryException
     {
-
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
-            .setColumnNames( KEY.toString(), VALUE.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( FACET_ID.toString(), facetId ) //
-            .addEqualsExpression( NAME.toString(), name ) //
-            .execute();
-
-        for ( Row<String, String, String> row : result.get() )
+        final String table = cassandraArchivaManager.getMetadataFacetFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            this.metadataFacetTemplate.deleteRow( row.getKey() );
+            Select deleteRows = selectFrom( table )
+                .column( DEFAULT_PRIMARY_KEY )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( FACET_ID.toString( ) ).isEqualTo( literal( facetId ) )
+                .whereColumn( NAME.toString( ) ).isEqualTo( literal( name ) )
+                .allowFiltering( );
+            ResultSet result = session.execute( deleteRows.build( ) );
+            StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( DEFAULT_PRIMARY_KEY ) )
+                .distinct( )
+                .forEach(
+                    delKey ->
+                        session.execute( deleteFrom( table ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( delKey ) ).build( ) )
+                );
+
         }
+
     }
 
     @Override
-    public List<ArtifactMetadata> getArtifactsByDateRange( RepositorySession session, final String repositoryId, final ZonedDateTime startTime,
+    public List<ArtifactMetadata> getArtifactsByDateRange( RepositorySession repositorySession, final String repositoryId, final ZonedDateTime startTime,
                                                            final ZonedDateTime endTime, QueryParameter queryParameter )
         throws MetadataRepositoryException
     {
-
-        LongSerializer ls = LongSerializer.get();
-        RangeSlicesQuery<String, String, Long> query = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ls ) //
-            .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
-            .setColumnNames( ArtifactMetadataModel.COLUMNS ); //
-
-
-        if ( startTime != null )
-        {
-            query = query.addGteExpression( WHEN_GATHERED.toString(), startTime.toInstant().toEpochMilli() );
-        }
-        if ( endTime != null )
+        final String table = cassandraArchivaManager.getArtifactMetadataFamilyName();
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            query = query.addLteExpression( WHEN_GATHERED.toString(), endTime.toInstant().toEpochMilli() );
+            long start = startTime == null ? Long.MIN_VALUE : startTime.toInstant( ).toEpochMilli( );
+            long end = endTime == null ? Long.MAX_VALUE : endTime.toInstant( ).toEpochMilli( );
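+            // open-ended bounds are mapped to Long.MIN_VALUE / Long.MAX_VALUE so a single range query handles both cases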
+            Select query = selectFrom( table )
+                .all( )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( WHEN_GATHERED.toString( ) ).isGreaterThanOrEqualTo( literal( start ) )
+                .whereColumn( WHEN_GATHERED.toString( ) ).isLessThanOrEqualTo( literal( end ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            return StreamSupport.stream( result.spliterator( ), false )
+                .map( this::mapArtifactMetadata )
+                .collect( Collectors.toList( ) );
         }
-        QueryResult<OrderedRows<String, String, Long>> result = query.execute();
-
-        List<ArtifactMetadata> artifactMetadatas = new ArrayList<>( result.get().getCount() );
-        Iterator<Row<String, String, Long>> keyIter = result.get().iterator();
-        if (keyIter.hasNext()) {
-            String key = keyIter.next().getKey();
-            for (Row<String, String, Long> row : result.get()) {
-                ColumnSlice<String, Long> columnSlice = row.getColumnSlice();
-                String repositoryName = getAsStringValue(columnSlice, REPOSITORY_NAME.toString());
-                if (StringUtils.equals(repositoryName, repositoryId)) {
-
-                    artifactMetadatas.add(mapArtifactMetadataLongColumnSlice(key, columnSlice));
-                }
-            }
-        }
-
-        return artifactMetadatas;
     }
 
     /**
      * For documentation see {@link MetadataRepository#getArtifactByDateRangeStream(RepositorySession, String, ZonedDateTime, ZonedDateTime, QueryParameter)}
-     *
+     * <p>
      * This implementation orders the stream. It does not order the query in the backend.
      *
-     * @param session The repository session
-     * @param repositoryId The repository id
-     * @param startTime The start time, can be <code>null</code>
-     * @param endTime The end time, can be <code>null</code>
+     * @param session        The repository session
+     * @param repositoryId   The repository id
+     * @param startTime      The start time, can be <code>null</code>
+     * @param endTime        The end time, can be <code>null</code>
      * @param queryParameter Additional parameters for the query that affect ordering and number of returned results.
      * @return
      * @throws MetadataRepositoryException
      * @see MetadataRepository#getArtifactByDateRangeStream
      */
     @Override
-    public Stream<ArtifactMetadata> getArtifactByDateRangeStream( RepositorySession session, String repositoryId, ZonedDateTime startTime, ZonedDateTime endTime, QueryParameter queryParameter) throws MetadataRepositoryException
+    public Stream<ArtifactMetadata> getArtifactByDateRangeStream( RepositorySession session, String repositoryId, ZonedDateTime startTime, ZonedDateTime endTime, QueryParameter queryParameter ) throws MetadataRepositoryException
     {
-        Comparator<ArtifactMetadata> comp = getArtifactMetadataComparator(queryParameter, "whenGathered");
-        return getArtifactsByDateRange(session, repositoryId, startTime, endTime, queryParameter).stream().sorted(comp).skip(queryParameter.getOffset()).limit(queryParameter.getLimit());
+        Comparator<ArtifactMetadata> comp = getArtifactMetadataComparator( queryParameter, "whenGathered" );
+        return getArtifactsByDateRange( session, repositoryId, startTime, endTime, queryParameter ).stream( ).sorted( comp ).skip( queryParameter.getOffset( ) ).limit( queryParameter.getLimit( ) );
     }
 
 
-    protected ArtifactMetadata mapArtifactMetadataLongColumnSlice( String key, ColumnSlice<String, Long> columnSlice )
+    protected ArtifactMetadata mapArtifactMetadata( Row row )
     {
-        ArtifactMetadata artifactMetadata = new ArtifactMetadata();
-        artifactMetadata.setNamespace( getAsStringValue( columnSlice, NAMESPACE_ID.toString() ) );
-        artifactMetadata.setSize( getLongValue( columnSlice, SIZE.toString() ) );
-        artifactMetadata.setId( getAsStringValue( columnSlice, ID.toString() ) );
-        artifactMetadata.setFileLastModified( getLongValue( columnSlice, FILE_LAST_MODIFIED.toString() ) );
-        artifactMetadata.setMd5( getAsStringValue( columnSlice, MD5.toString() ) );
-        artifactMetadata.setProject( getAsStringValue( columnSlice, PROJECT.toString() ) );
-        artifactMetadata.setProjectVersion( getAsStringValue( columnSlice, PROJECT_VERSION.toString() ) );
-        artifactMetadata.setRepositoryId( getAsStringValue( columnSlice, REPOSITORY_NAME.toString() ) );
-        artifactMetadata.setSha1( getAsStringValue( columnSlice, SHA1.toString() ) );
-        artifactMetadata.setVersion( getAsStringValue( columnSlice, VERSION.toString() ) );
-        Long whenGathered = getLongValue( columnSlice, WHEN_GATHERED.toString() );
-        if ( whenGathered != null )
-        {
-            artifactMetadata.setWhenGathered(ZonedDateTime.ofInstant(Instant.ofEpochMilli(whenGathered), STORAGE_TZ));
-        }
-        artifactMetadata.setChecksums(mapChecksumsReverse(getChecksums(key)));
-        return artifactMetadata;
-    }
-
-    protected ArtifactMetadata mapArtifactMetadataStringColumnSlice( String key, ColumnSlice<String, String> columnSlice )
-    {
-        ArtifactMetadata artifactMetadata = new ArtifactMetadata();
-        artifactMetadata.setNamespace( getStringValue( columnSlice, NAMESPACE_ID.toString() ) );
-        artifactMetadata.setSize( getAsLongValue( columnSlice, SIZE.toString() ) );
-        artifactMetadata.setId( getStringValue( columnSlice, ID.toString() ) );
-        artifactMetadata.setFileLastModified( getAsLongValue( columnSlice, FILE_LAST_MODIFIED.toString() ) );
-        artifactMetadata.setMd5( getStringValue( columnSlice, MD5.toString() ) );
-        artifactMetadata.setProject( getStringValue( columnSlice, PROJECT.toString() ) );
-        artifactMetadata.setProjectVersion( getStringValue( columnSlice, PROJECT_VERSION.toString() ) );
-        artifactMetadata.setRepositoryId( getStringValue( columnSlice, REPOSITORY_NAME.toString() ) );
-        artifactMetadata.setSha1( getStringValue( columnSlice, SHA1.toString() ) );
-        artifactMetadata.setVersion( getStringValue( columnSlice, VERSION.toString() ) );
-        Long whenGathered = getAsLongValue( columnSlice, WHEN_GATHERED.toString() );
+        ArtifactMetadata artifactMetadata = new ArtifactMetadata( );
+        artifactMetadata.setNamespace( row.getString( NAMESPACE_ID.toString( ) ) );
+        artifactMetadata.setSize( row.getLong( SIZE.toString( ) ) );
+        artifactMetadata.setId( row.getString( ID.toString( ) ) );
+        artifactMetadata.setFileLastModified( row.getLong( FILE_LAST_MODIFIED.toString( ) ) );
+        artifactMetadata.setMd5( row.getString( MD5.toString( ) ) );
+        artifactMetadata.setProject( row.getString( PROJECT_ID.toString( ) ) );
+        artifactMetadata.setProjectVersion( row.getString( PROJECT_VERSION.toString( ) ) );
+        artifactMetadata.setRepositoryId( row.getString( REPOSITORY_NAME.toString( ) ) );
+        artifactMetadata.setSha1( row.getString( SHA1.toString( ) ) );
+        artifactMetadata.setVersion( row.getString( VERSION.toString( ) ) );
+        Long whenGathered = row.getLong( WHEN_GATHERED.toString( ) );
         if ( whenGathered != null )
         {
-            artifactMetadata.setWhenGathered(ZonedDateTime.ofInstant(Instant.ofEpochMilli(whenGathered), STORAGE_TZ));
+            artifactMetadata.setWhenGathered( ZonedDateTime.ofInstant( Instant.ofEpochMilli( whenGathered ), STORAGE_TZ ) );
         }
-        artifactMetadata.setChecksums(mapChecksumsReverse(getChecksums(key)));
+        artifactMetadata.setChecksums( mapChecksumsReverse( getChecksums( row.getString( DEFAULT_PRIMARY_KEY ) ) ) );
         return artifactMetadata;
     }
 
     @Override
-    public List<ArtifactMetadata> getArtifactsByChecksum(RepositorySession session, final String repositoryId, final String checksum )
+    public List<ArtifactMetadata> getArtifactsByChecksum( RepositorySession repositorySession, final String repositoryId, final String checksum )
         throws MetadataRepositoryException
     {
+        String table = cassandraArchivaManager.getChecksumFamilyName( );
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            Select query = selectFrom( table )
+                .column( ARTIFACT_METADATA_MODEL_KEY.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( CHECKSUM_VALUE.toString( ) ).isEqualTo( literal( checksum ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            List<String> artifactKeys = StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( ARTIFACT_METADATA_MODEL_KEY.toString( ) ) )
+                .distinct( )
+                .collect( Collectors.toList( ) );
+            List<ArtifactMetadata> metadataList = new ArrayList<>( );
+            for ( String key : artifactKeys )
+            {
+                table = cassandraArchivaManager.getArtifactMetadataFamilyName( );
+                query = selectFrom( table )
+                    .all( )
+                    .whereColumn( DEFAULT_PRIMARY_KEY.toString( ) ).isEqualTo( literal( key ) );
+                Row row = session.execute( query.build( ) ).one( );
+                if ( row != null )
+                {
+                    metadataList.add( mapArtifactMetadata( row ) );
+                }
 
-        // cql cannot run or in queries so running twice the query
-        Map<String, ArtifactMetadata> artifactMetadataMap = new HashMap<>();
-
-        RangeSlicesQuery<String, String, String> query = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getChecksumFamilyName()) //
-            .setColumnNames(ARTIFACT_METADATA_MODEL_KEY); //
-
-        query = query.addEqualsExpression( CHECKSUM_VALUE.toString(), checksum )
-                .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId );
-
-        QueryResult<OrderedRows<String, String, String>> result = query.execute();
-
-        List<String> artifactKeys = new ArrayList<>();
-        for ( Row<String, String, String> row : result.get() )
-        {
-            ColumnSlice<String, String> columnSlice = row.getColumnSlice();
-
-            artifactKeys.add(columnSlice.getColumnByName(ARTIFACT_METADATA_MODEL_KEY).getValue());
-
-        }
-
-        for (String key : artifactKeys) {
-            query = HFactory //
-                    .createRangeSlicesQuery(keyspace, ss, ss, ss) //
-                    .setColumnFamily(cassandraArchivaManager.getArtifactMetadataFamilyName()) //
-                    .setColumnNames(NAMESPACE_ID.toString(), SIZE.toString(), ID.toString(), FILE_LAST_MODIFIED.toString(), MD5.toString(), PROJECT.toString(), PROJECT_VERSION.toString(),
-                            REPOSITORY_NAME.toString(), VERSION.toString(), WHEN_GATHERED.toString(), SHA1.toString())
-                    .setKeys(key, key);
-            result = query.execute();
-
-            for (Row<String, String, String> row : result.get()) {
-                ColumnSlice<String, String> columnSlice = row.getColumnSlice();
-
-                artifactMetadataMap.put(row.getKey(), mapArtifactMetadataStringColumnSlice(key, columnSlice));
             }
-        }
+            return metadataList;
 
-        return new ArrayList<>(artifactMetadataMap.values());
+        }
     }
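
The new driver code keeps the old two-step lookup: the checksum table is queried for the keys of matching artifacts, and each key is then fetched from the artifact metadata table. A short, hypothetical caller sketch; the repository and session references, the repository id, the digest value and the log reference are assumptions for illustration:

    // Hypothetical usage: find every artifact in a repository carrying a given digest.
    // Only the checksum value is filtered on, so any algorithm with that value matches.
    String digest = "0000000000000000000000000000000000000000"; // placeholder value
    List<ArtifactMetadata> matches =
        repository.getArtifactsByChecksum( repositorySession, "internal", digest );
    for ( ArtifactMetadata match : matches )
    {
        log.info( "checksum match {}/{}/{}", match.getNamespace( ), match.getProject( ), match.getId( ) );
    }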
 
     /**
@@ -2030,349 +1685,329 @@ public class CassandraMetadataRepository
         return this.getArtifactsByAttribute( session, key, value, repositoryId );
     }
 
+    MetadataFacetModel mapMetadataFacet( Row row )
+    {
+        MetadataFacetModel metadataFacetModel = new MetadataFacetModel( );
+        metadataFacetModel.setFacetId( row.getString( FACET_ID.toString( ) ) );
+        metadataFacetModel.setName( row.getString( NAME.toString( ) ) );
+        metadataFacetModel.setValue( row.getString( VALUE.toString( ) ) );
+        metadataFacetModel.setKey( row.getString( KEY.toString( ) ) );
+        metadataFacetModel.setProjectVersion( row.getString( PROJECT_VERSION.toString( ) ) );
+        return metadataFacetModel;
+    }
+
     @Override
-    public List<ArtifactMetadata> getArtifactsByAttribute( RepositorySession session, String key, String value, String repositoryId )
+    public List<ArtifactMetadata> getArtifactsByAttribute( RepositorySession repositorySession, String key, String value, String repositoryId )
         throws MetadataRepositoryException
     {
-        RangeSlicesQuery<String, String, String> query =
-            HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
-            .setColumnNames( MetadataFacetModel.COLUMNS ) //
-            .addEqualsExpression( VALUE.toString(), value );
-
-        if ( key != null )
-        {
-            query.addEqualsExpression( KEY.toString(), key ); //
-        }
-        if ( repositoryId != null )
-        {
-            query.addEqualsExpression( "repositoryName", repositoryId );
-        }
 
-        QueryResult<OrderedRows<String, String, String>> metadataFacetResult = query.execute();
-        if ( metadataFacetResult.get() == null || metadataFacetResult.get().getCount() < 1 )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            return Collections.emptyList();
-        }
-
-        List<ArtifactMetadata> artifactMetadatas = new LinkedList<>( );
-
-        // TODO doing multiple queries, there should be a way to get all the artifactMetadatas for any number of
-        // projects
-        for ( Row<String, String, String> row : metadataFacetResult.get() )
-        {
-            QueryResult<OrderedRows<String, String, String>> artifactMetadataResult =
-                HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
-                .setColumnNames( ArtifactMetadataModel.COLUMNS ) //
-                .setRowCount( Integer.MAX_VALUE ) //
-                .addEqualsExpression( REPOSITORY_NAME.toString(),
-                                      getStringValue( row.getColumnSlice(), REPOSITORY_NAME ) ) //
-                .addEqualsExpression( NAMESPACE_ID.toString(), getStringValue( row.getColumnSlice(), NAMESPACE_ID ) ) //
-                .addEqualsExpression( PROJECT.toString(), getStringValue( row.getColumnSlice(), PROJECT_ID ) ) //
-                .addEqualsExpression( PROJECT_VERSION.toString(),
-                                      getStringValue( row.getColumnSlice(), PROJECT_VERSION ) ) //
-                .execute();
-
-            if ( artifactMetadataResult.get() == null || artifactMetadataResult.get().getCount() < 1 )
+            String table = cassandraArchivaManager.getMetadataFacetFamilyName( );
+            Select query = selectFrom( table )
+                .all( )
+                .whereColumn( VALUE.toString( ) ).isEqualTo( literal( value ) )
+                .allowFiltering();
+            if ( key != null )
             {
-                return Collections.emptyList();
+                query = query.whereColumn( KEY.toString( ) ).isEqualTo( literal( key ) );
+            }
+            if ( repositoryId != null )
+            {
+                query = query.whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) );
             }
 
-            for ( Row<String, String, String> artifactMetadataRow : artifactMetadataResult.get() )
+
+            final List<ArtifactMetadata> artifactMetadatas = new LinkedList<>( );
+            final List<MetadataFacetModel> metadataFacetModels = new ArrayList<>( );
+            table = cassandraArchivaManager.getArtifactMetadataFamilyName( );
+            ResultSet result = session.execute( query.build( ) );
+            Iterator<Row> iterator = result.iterator( );
+            while ( iterator.hasNext( ) )
             {
-                String artifactKey = artifactMetadataRow.getKey();
-                artifactMetadatas.add( mapArtifactMetadataStringColumnSlice( artifactKey, artifactMetadataRow.getColumnSlice() ) );
+                Row row = iterator.next( );
+                metadataFacetModels.add( mapMetadataFacet( row ) );
+
+                query = selectFrom( table )
+                    .all( )
+                    .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( row.getString( REPOSITORY_NAME.toString( ) ) ) )
+                    .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( row.getString( NAMESPACE_ID.toString( ) ) ) )
+                    .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( row.getString( PROJECT_ID.toString( ) ) ) )
+                    .whereColumn( PROJECT_VERSION.toString( ) ).isEqualTo( literal( row.getString( PROJECT_VERSION.toString( ) ) ) )
+                    .allowFiltering();
+
+                ResultSet subResult = session.execute( query.build( ) );
+                subResult.forEach( sRow ->
+                    artifactMetadatas.add( mapArtifactMetadata( sRow ) ) );
+
             }
-        }
 
-        return mapArtifactFacetToArtifact( metadataFacetResult, artifactMetadatas );
+            return mapArtifactFacetToArtifact( metadataFacetModels, artifactMetadatas );
+
+        }
     }
 
     @Override
-    public List<ArtifactMetadata> getArtifactsByProjectVersionAttribute( RepositorySession session, String key, String value, String repositoryId )
+    public List<ArtifactMetadata> getArtifactsByProjectVersionAttribute( RepositorySession repositorySession, String key, String value, String repositoryId )
         throws MetadataRepositoryException
     {
-        QueryResult<OrderedRows<String, String, String>> result =
-            HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
-            .setColumnNames( PROJECT_ID.toString(), REPOSITORY_NAME.toString(), NAMESPACE_ID.toString(),
-                             PROJECT_VERSION.toString() ) //
-            .addEqualsExpression( key, value ) //
-            .execute();
-
-        int count = result.get().getCount();
-
-        if ( count < 1 )
-        {
-            return Collections.emptyList();
-        }
-
-        List<ArtifactMetadata> artifacts = new LinkedList<>( );
-
-        for ( Row<String, String, String> row : result.get() )
-        {
-            // TODO doing multiple queries, there should be a way to get all the artifactMetadatas for any number of
-            // projects
-            try
-            {
-                artifacts.addAll( getArtifacts( session,
-                    getStringValue( row.getColumnSlice(), REPOSITORY_NAME ),
-                    getStringValue( row.getColumnSlice(), NAMESPACE_ID ),
-                    getStringValue( row.getColumnSlice(), PROJECT_ID ), getStringValue( row.getColumnSlice(), PROJECT_VERSION ) ) );
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            String searchKey = StringUtils.wrapIfMissing( key, '"' );
+            String table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+            Select query = selectFrom( table )
+                .columns( PROJECT_ID.toString( ), REPOSITORY_NAME.toString( ), NAMESPACE_ID.toString( ),
+                    PROJECT_VERSION.toString( ) ).allowFiltering();
+            if (Arrays.binarySearch( cassandraArchivaManager.getProjectVersionMetadataColumns(), key )>=0){
+                query = query.whereColumn( searchKey ).isEqualTo( literal( value ) );
+            } else {
+                query = query.whereMapValue( VERSION_PROPERTIES.toString( ), literal( key ) ).isEqualTo( literal( value ) );
             }
-            catch ( MetadataResolutionException e )
+            ResultSet result = session.execute( query.build( ) );
+            List<ArtifactMetadata> artifacts = new LinkedList<>( );
+            Iterator<Row> iterator = result.iterator( );
+            while ( iterator.hasNext( ) )
             {
-                // never raised
-                throw new IllegalStateException( e );
+                Row row = iterator.next( );
+                try
+                {
+                    artifacts.addAll( getArtifacts( repositorySession,
+                        row.getString( REPOSITORY_NAME.toString( ) ),
+                        row.getString( NAMESPACE_ID.toString( ) ),
+                        row.getString( PROJECT_ID.toString( ) ), row.getString( PROJECT_VERSION.toString( ) ) ) );
+                }
+                catch ( MetadataResolutionException e )
+                {
+                    // never raised
+                    throw new IllegalStateException( e );
+                }
             }
+            return artifacts;
+
         }
-        return artifacts;
     }
 
     @Override
-    public void removeArtifact( RepositorySession session, final String repositoryId, final String namespace, final String project,
+    public void removeArtifact( RepositorySession repositorySession, final String repositoryId, final String namespace, final String project,
                                 final String version, final String id )
         throws MetadataRepositoryException
     {
         logger.debug( "removeTimestampedArtifact repositoryId: '{}', namespace: '{}', project: '{}', version: '{}', id: '{}'",
-                      repositoryId, namespace, project, version, id );
-        String key =
-            new ArtifactMetadataModel.KeyBuilder().withRepositoryId( repositoryId ).withNamespace( namespace ).withId(
-                id ).withProjectVersion( version ).withProject( project ).build();
+            repositoryId, namespace, project, version, id );
+
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            String key =
+                new ArtifactMetadataModel.KeyBuilder( ).withRepositoryId( repositoryId ).withNamespace( namespace ).withId(
+                    id ).withProjectVersion( version ).withProject( project ).build( );
+            String table = cassandraArchivaManager.getArtifactMetadataFamilyName( );
+            Delete delete = deleteFrom( table )
+                .whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+            session.execute( delete.build( ) );
 
-        this.artifactMetadataTemplate.deleteRow( key );
+            key = new ProjectVersionMetadataModel.KeyBuilder( ) //
+                .withRepository( repositoryId ) //
+                .withNamespace( namespace ) //
+                .withProjectId( project ) //
+                .withProjectVersion( version ) //
+                .withId( id ) //
+                .build( );
+            table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+            delete = deleteFrom( table )
+                .whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+            session.execute( delete.build( ) );
 
-        key = new ProjectVersionMetadataModel.KeyBuilder() //
-            .withRepository( repositoryId ) //
-            .withNamespace( namespace ) //
-            .withProjectId( project ) //
-            .withProjectVersion( version ) //
-            .withId( id ) //
-            .build();
+        }
 
-        this.projectVersionMetadataTemplate.deleteRow( key );
     }
 
     @Override
-    public void removeTimestampedArtifact( RepositorySession session, ArtifactMetadata artifactMetadata, String baseVersion )
+    public void removeTimestampedArtifact( RepositorySession repositorySession, ArtifactMetadata artifactMetadata, String baseVersion )
         throws MetadataRepositoryException
     {
         logger.debug( "removeTimestampedArtifact repositoryId: '{}', namespace: '{}', project: '{}', version: '{}', id: '{}'",
-                      artifactMetadata.getRepositoryId(), artifactMetadata.getNamespace(),
-                      artifactMetadata.getProject(), baseVersion, artifactMetadata.getId() );
-        String key =
-            new ArtifactMetadataModel.KeyBuilder().withRepositoryId( artifactMetadata.getRepositoryId() ).withNamespace(
-                artifactMetadata.getNamespace() ).withId( artifactMetadata.getId() ).withProjectVersion(
-                baseVersion ).withProject( artifactMetadata.getProject() ).build();
-
-        this.artifactMetadataTemplate.deleteRow( key );
+            artifactMetadata.getRepositoryId( ), artifactMetadata.getNamespace( ),
+            artifactMetadata.getProject( ), baseVersion, artifactMetadata.getId( ) );
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            String key =
+                new ArtifactMetadataModel.KeyBuilder( ).withRepositoryId( artifactMetadata.getRepositoryId( ) ).withNamespace(
+                    artifactMetadata.getNamespace( ) ).withId( artifactMetadata.getId( ) ).withProjectVersion(
+                    baseVersion ).withProject( artifactMetadata.getProject( ) ).build( );
+            String table = cassandraArchivaManager.getArtifactMetadataFamilyName( );
+            Delete delete = deleteFrom( table )
+                .whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+            session.execute( delete.build( ) );
+        }
 
     }
 
     @Override
-    public void removeFacetFromArtifact( RepositorySession session, final String repositoryId, final String namespace, final String project,
+    public void removeFacetFromArtifact( RepositorySession repositorySession, final String repositoryId, final String namespace, final String project,
                                          final String version, final MetadataFacet metadataFacet )
         throws MetadataRepositoryException
     {
-
-        RangeSlicesQuery<String, String, String> query = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
-            .setColumnNames( NAMESPACE_ID.toString() ); //
-
-        query = query.addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-            .addEqualsExpression( PROJECT.toString(), project ) //
-            .addEqualsExpression( VERSION.toString(), version );
-
-        QueryResult<OrderedRows<String, String, String>> result = query.execute();
-
-        for ( Row<String, String, String> row : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            this.artifactMetadataTemplate.deleteRow( row.getKey() );
+            String table = cassandraArchivaManager.getArtifactMetadataFamilyName( );
+            Delete delete = deleteFrom( table )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespace ) )
+                .whereColumn( PROJECT.toString( ) ).isEqualTo( literal( project ) )
+                .whereColumn( VERSION.toString( ) ).isEqualTo( literal( version ) );
+            session.execute( delete.build( ) );
         }
     }
 
 
     @Override
-    public List<ArtifactMetadata> getArtifacts( RepositorySession session, final String repositoryId )
+    public List<ArtifactMetadata> getArtifacts( RepositorySession repositorySession, final String repositoryId )
         throws MetadataRepositoryException
     {
-
-        RangeSlicesQuery<String, String, String> query = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
-            .setColumnNames( ArtifactMetadataModel.COLUMNS ); //
-
-        query = query.addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId );
-
-        QueryResult<OrderedRows<String, String, String>> result = query.execute();
-
-
-
-        List<ArtifactMetadata> artifactMetadatas = new ArrayList<>( result.get().getCount() );
-
-        for ( Row<String, String, String> row : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            String key = row.getKey();
-            ColumnSlice<String, String> columnSlice = row.getColumnSlice();
-            artifactMetadatas.add( mapArtifactMetadataStringColumnSlice( key, columnSlice ) );
-
+            String table = cassandraArchivaManager.getArtifactMetadataFamilyName( );
+            Select query = selectFrom( table )
+                .all( )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            return StreamSupport.stream( result.spliterator( ), false )
+                .map( this::mapArtifactMetadata )
+                .collect( Collectors.toList( ) );
         }
-
-        return artifactMetadatas;
     }
 
 
     @Override
-    public List<ProjectVersionReference> getProjectReferences( RepositorySession session, String repoId, String namespace, String projectId,
+    public List<ProjectVersionReference> getProjectReferences( RepositorySession repositorySession, String repoId, String namespace, String projectId,
                                                                String projectVersion )
         throws MetadataResolutionException
     {
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getDependencyFamilyName() ) //
-            .setColumnNames( "projectVersionMetadataModel.key" ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .addEqualsExpression( GROUP_ID.toString(), namespace ) //
-            .addEqualsExpression( ARTIFACT_ID.toString(), projectId ) //
-            .addEqualsExpression( VERSION.toString(), projectVersion ) //
-            .execute();
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            String table = cassandraArchivaManager.getDependencyFamilyName( );
+            Select query = selectFrom( table )
+                .column( "\"projectVersionMetadataModel.key\"" )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repoId ) )
+                .whereColumn( GROUP_ID.toString( ) ).isEqualTo( literal( namespace ) )
+                .whereColumn( ARTIFACT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .whereColumn( VERSION.toString( ) ).isEqualTo( literal( projectVersion ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            List<String> dependenciesIds = StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( "\"projectVersionMetadataModel.key\"" ) )
+                .collect( Collectors.toList( ) );
 
-        List<String> dependenciesIds = new ArrayList<>( result.get().getCount() );
+            List<ProjectVersionReference> references = new ArrayList<>( );
 
-        for ( Row<String, String, String> row : result.get().getList() )
-        {
-            dependenciesIds.add( getStringValue( row.getColumnSlice(), "projectVersionMetadataModel.key" ) );
-        }
 
-        List<ProjectVersionReference> references = new ArrayList<>( result.get().getCount() );
+            table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+            for ( String key : dependenciesIds )
+            {
+                query = selectFrom( table )
+                    .columns( PROJECT_ID.toString( ), NAMESPACE_ID.toString( ), PROJECT_VERSION.toString( ) )
+                    .whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) );
+                Row rowResult = session.execute( query.build( ) ).one( );
+                if ( rowResult != null )
+                {
+                    references.add( new ProjectVersionReference( ProjectVersionReference.ReferenceType.DEPENDENCY,
+                        rowResult.getString( PROJECT_ID.toString( ) ),
+                        rowResult.getString( NAMESPACE_ID.toString( ) ),
+                        rowResult.getString( PROJECT_VERSION.toString( ) )
+                    ) );
+                }
 
-        for ( String key : dependenciesIds )
-        {
-            ColumnFamilyResult<String, String> columnFamilyResult =
-                this.projectVersionMetadataTemplate.queryColumns( key );
-            references.add( new ProjectVersionReference( ProjectVersionReference.ReferenceType.DEPENDENCY, //
-                                                         columnFamilyResult.getString( PROJECT_ID.toString() ), //
-                                                         columnFamilyResult.getString( NAMESPACE_ID.toString() ), //
-                                                         columnFamilyResult.getString( PROJECT_VERSION.toString() ) ) );
+            }
+            return references;
         }
-
-        return references;
     }
 
     @Override
-    public void removeProjectVersion( RepositorySession session, final String repoId, final String namespace, final String projectId,
+    public void removeProjectVersion( RepositorySession repositorySession, final String repoId, final String namespace, final String projectId,
                                       final String projectVersion )
         throws MetadataRepositoryException
     {
+        CqlSession session = cassandraArchivaManager.getSession( );
+        {
+            String table = cassandraArchivaManager.getProjectVersionMetadataFamilyName( );
+            Select query = selectFrom( table )
+                .columns( DEFAULT_PRIMARY_KEY, VERSION.toString( ) )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repoId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespace ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .whereColumn( PROJECT_VERSION.toString( ) ).isEqualTo( literal( projectVersion ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            Iterator<Row> iterator = result.iterator( );
+            while ( iterator.hasNext( ) )
+            {
+                Row row = iterator.next( );
+                String key = row.getString( DEFAULT_PRIMARY_KEY );
+                session.execute( deleteFrom( table ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( key ) ).build( ) );
+                removeMailingList( key );
+                removeLicenses( key );
+                removeDependencies( key );
+            }
 
-        QueryResult<OrderedRows<String, String, String>> result = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
-            .setColumnNames( VERSION.toString() ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
-            .addEqualsExpression( PROJECT_VERSION.toString(), projectVersion ) //
-            .execute();
-
-        for ( Row<String, String, String> row : result.get().getList() )
-        {
-            this.projectVersionMetadataTemplate.deleteRow( row.getKey() );
-            removeMailingList( row.getKey() );
-            removeLicenses( row.getKey() );
-            removeDependencies( row.getKey() );
-        }
-
-        RangeSlicesQuery<String, String, String> query = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
-            .setColumnNames( NAMESPACE_ID.toString() ); //
-
-        query = query.addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-            .addEqualsExpression( PROJECT.toString(), projectId ) //
-            .addEqualsExpression( PROJECT_VERSION.toString(), projectVersion );
-
-        result = query.execute();
-
-        for ( Row<String, String, String> row : result.get() )
-        {
-            this.artifactMetadataTemplate.deleteRow( row.getKey() );
+            final String deleteTable = cassandraArchivaManager.getArtifactMetadataFamilyName( );
+            Select deleteRows = selectFrom( deleteTable )
+                .column( DEFAULT_PRIMARY_KEY )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repoId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespace ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .whereColumn( PROJECT_VERSION.toString( ) ).isEqualTo( literal( projectVersion ) )
+                .allowFiltering();
+            result = session.execute( deleteRows.build( ) );
+            StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> row.getString( DEFAULT_PRIMARY_KEY ) )
+                .distinct( )
+                .forEach( delKey ->
+                    session.execute( deleteFrom( deleteTable ).whereColumn( DEFAULT_PRIMARY_KEY ).isEqualTo( literal( delKey ) ).build( ) ) );
 
         }
     }
 
     @Override
-    public List<ArtifactMetadata> getArtifacts( RepositorySession session, final String repoId, final String namespace,
+    public List<ArtifactMetadata> getArtifacts( RepositorySession repositorySession, final String repoId, final String namespace,
                                                 final String projectId, final String projectVersion )
         throws MetadataResolutionException
     {
-
-        QueryResult<OrderedRows<String, String, String>> result =
-            HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-                .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
-                .setColumnNames( ArtifactMetadataModel.COLUMNS )//
-                .setRowCount( Integer.MAX_VALUE ) //
-                .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-                .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-                .addEqualsExpression( PROJECT.toString(), projectId ) //
-                .addEqualsExpression( PROJECT_VERSION.toString(), projectVersion ) //
-                .execute();
-
-        if ( result.get() == null || result.get().getCount() < 1 )
-        {
-            return Collections.emptyList();
-        }
-
-        List<ArtifactMetadata> artifactMetadatas = new ArrayList<>( result.get().getCount() );
-
-        for ( Row<String, String, String> row : result.get() )
+        CqlSession session = cassandraArchivaManager.getSession( );
         {
-            String key = row.getKey();
-            artifactMetadatas.add( mapArtifactMetadataStringColumnSlice( key, row.getColumnSlice() ) );
-        }
-
-        result = HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
-            .setColumnNames( MetadataFacetModel.COLUMNS ) //
-            .setRowCount( Integer.MAX_VALUE ) //
-            .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
-            .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
-            .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
-            .addEqualsExpression( PROJECT_VERSION.toString(), projectVersion ) //
-            .execute();
-
-        return mapArtifactFacetToArtifact(result, artifactMetadatas);
-    }
+            String table = cassandraArchivaManager.getArtifactMetadataFamilyName( );
+            Select query = selectFrom( table )
+                .all( )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repoId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespace ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .whereColumn( PROJECT_VERSION.toString( ) ).isEqualTo( literal( projectVersion ) )
+                .allowFiltering();
+            ResultSet result = session.execute( query.build( ) );
+            List<ArtifactMetadata> artifactMetadatas = StreamSupport.stream( result.spliterator( ), false )
+                .map( this::mapArtifactMetadata )
+                .collect( Collectors.toList( ) );
 
-    /**
-     * Attach metadata to each of the  ArtifactMetadata objects
-     */
-    private List<ArtifactMetadata> mapArtifactFacetToArtifact( QueryResult<OrderedRows<String, String, String>> result, List<ArtifactMetadata> artifactMetadatas) {
-        if ( result.get() == null || result.get().getCount() < 1 )
-        {
-            return artifactMetadatas;
-        }
 
-        final List<MetadataFacetModel> metadataFacetModels = new ArrayList<>( result.get().getCount() );
+            table = cassandraArchivaManager.getMetadataFacetFamilyName( );
+            query = selectFrom( table )
+                .all( )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repoId ) )
+                .whereColumn( NAMESPACE_ID.toString( ) ).isEqualTo( literal( namespace ) )
+                .whereColumn( PROJECT_ID.toString( ) ).isEqualTo( literal( projectId ) )
+                .whereColumn( PROJECT_VERSION.toString( ) ).isEqualTo( literal( projectVersion ) )
+                .allowFiltering();
+            result = session.execute( query.build( ) );
+            List<MetadataFacetModel> facetMetadata = StreamSupport.stream( result.spliterator( ), false )
+                .map( row -> mapMetadataFacet( row ) )
+                .collect( Collectors.toList( ) );
+            return mapArtifactFacetToArtifact( facetMetadata, artifactMetadatas );
 
-        for ( Row<String, String, String> row : result.get() )
-        {
-            ColumnSlice<String, String> columnSlice = row.getColumnSlice();
-            MetadataFacetModel metadataFacetModel = new MetadataFacetModel();
-            metadataFacetModel.setFacetId( getStringValue( columnSlice, FACET_ID.toString() ) );
-            metadataFacetModel.setName( getStringValue( columnSlice, NAME.toString() ) );
-            metadataFacetModel.setValue( getStringValue( columnSlice, VALUE.toString() ) );
-            metadataFacetModel.setKey( getStringValue( columnSlice, KEY.toString() ) );
-            metadataFacetModel.setProjectVersion( getStringValue( columnSlice, PROJECT_VERSION.toString() ) );
-            metadataFacetModels.add( metadataFacetModel );
         }
+    }
 
-        // rebuild MetadataFacet for artifacts
-
+    private List<ArtifactMetadata> mapArtifactFacetToArtifact( List<MetadataFacetModel> metadataFacetModels, List<ArtifactMetadata> artifactMetadatas )
+    {
         for ( final ArtifactMetadata artifactMetadata : artifactMetadatas )
         {
             Iterator<MetadataFacetModel> iterator = metadataFacetModels.stream( ).filter( metadataFacetModel -> {
@@ -2384,33 +2019,33 @@ public class CassandraMetadataRepository
                 return false;
 
             } ).iterator( );
-            Map<String, List<MetadataFacetModel>> metadataFacetValuesPerFacetId = new HashMap<>();
-            while ( iterator.hasNext() )
+            Map<String, List<MetadataFacetModel>> metadataFacetValuesPerFacetId = new HashMap<>( );
+            while ( iterator.hasNext( ) )
             {
-                MetadataFacetModel metadataFacetModel = iterator.next();
-                List<MetadataFacetModel> values = metadataFacetValuesPerFacetId.get( metadataFacetModel.getName() );
+                MetadataFacetModel metadataFacetModel = iterator.next( );
+                List<MetadataFacetModel> values = metadataFacetValuesPerFacetId.get( metadataFacetModel.getName( ) );
                 if ( values == null )
                 {
-                    values = new ArrayList<>();
-                    metadataFacetValuesPerFacetId.put( metadataFacetModel.getFacetId(), values );
+                    values = new ArrayList<>( );
+                    metadataFacetValuesPerFacetId.put( metadataFacetModel.getFacetId( ), values );
                 }
                 values.add( metadataFacetModel );
 
             }
 
-            for ( Map.Entry<String, List<MetadataFacetModel>> entry : metadataFacetValuesPerFacetId.entrySet() )
+            for ( Map.Entry<String, List<MetadataFacetModel>> entry : metadataFacetValuesPerFacetId.entrySet( ) )
             {
-                MetadataFacetFactory<?> metadataFacetFactory = getFacetFactory( entry.getKey() );
+                MetadataFacetFactory<?> metadataFacetFactory = getFacetFactory( entry.getKey( ) );
                 if ( metadataFacetFactory != null )
                 {
-                    List<MetadataFacetModel> facetModels = entry.getValue();
-                    if ( !facetModels.isEmpty() )
+                    List<MetadataFacetModel> facetModels = entry.getValue( );
+                    if ( !facetModels.isEmpty( ) )
                     {
-                        MetadataFacet metadataFacet = metadataFacetFactory.createMetadataFacet();
-                        Map<String, String> props = new HashMap<>( facetModels.size() );
+                        MetadataFacet metadataFacet = metadataFacetFactory.createMetadataFacet( );
+                        Map<String, String> props = new HashMap<>( facetModels.size( ) );
                         for ( MetadataFacetModel metadataFacetModel : facetModels )
                         {
-                            props.put( metadataFacetModel.getKey(), metadataFacetModel.getValue() );
+                            props.put( metadataFacetModel.getKey( ), metadataFacetModel.getValue( ) );
                         }
                         metadataFacet.fromProperties( props );
                         artifactMetadata.addFacet( metadataFacet );
@@ -2422,8 +2057,9 @@ public class CassandraMetadataRepository
         return artifactMetadatas;
     }
 
+
     @Override
-    public void close()
+    public void close( )
         throws MetadataRepositoryException
     {
         logger.trace( "close" );
@@ -2432,10 +2068,10 @@ public class CassandraMetadataRepository
 
     private static class ModelMapperHolder
     {
-        private static ModelMapper MODEL_MAPPER = new ModelMapper();
+        private static ModelMapper MODEL_MAPPER = new ModelMapper( );
     }
 
-    protected ModelMapper getModelMapper()
+    protected ModelMapper getModelMapper( )
     {
         return ModelMapperHolder.MODEL_MAPPER;
     }
@@ -2468,23 +2104,18 @@ public class CassandraMetadataRepository
     }
 
     @Override
-    public Stream<ArtifactMetadata> getArtifactStream( final RepositorySession session, final String repositoryId,
+    public Stream<ArtifactMetadata> getArtifactStream( final RepositorySession repositorySession, final String repositoryId,
                                                        final QueryParameter queryParameter ) throws MetadataResolutionException
     {
-        RangeSlicesQuery<String, String, String> query = HFactory //
-            .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
-            .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName( ) ) //
-            .setColumnNames( ArtifactMetadataModel.COLUMNS ); //
-
-        query = query.addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId );
-
-        QueryResult<OrderedRows<String, String, String>> result = query.execute();
-
-        try
-        {
-            return StreamSupport.stream( createResultSpliterator( result, ( Row<String, String, String> row, ArtifactMetadata last ) ->
-                mapArtifactMetadataStringColumnSlice( row.getKey( ), row.getColumnSlice( ) ) ), false )
-                .skip( queryParameter.getOffset( ) ).limit( queryParameter.getLimit( ) );
+        CqlSession session = cassandraArchivaManager.getSession( );
+        try {
+            String table = cassandraArchivaManager.getArtifactMetadataFamilyName( );
+            Select query = selectFrom( table )
+                .columns( ArtifactMetadataModel.COLUMNS )
+                .whereColumn( REPOSITORY_NAME.toString( ) ).isEqualTo( literal( repositoryId ) );
+            ResultSet result = session.execute( query.build( ) );
+            return StreamSupport.stream( createResultSpliterator( result, ( Row row, ArtifactMetadata last ) ->
+                mapArtifactMetadata( row ) ), false ).skip( queryParameter.getOffset( ) ).limit( queryParameter.getLimit( ) );
         }
         catch ( MetadataRepositoryException e )
         {
index f041a43968622fb232dac3fdd469a9b2317482a0..61017d8dc4081f1e390e149a22d7205cfee4d35d 100644 (file)
@@ -9,8 +9,7 @@ package org.apache.archiva.metadata.repository.cassandra;
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
+ * http://www.apache.org/licenses/LICENSE-2.0
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,18 +18,6 @@ package org.apache.archiva.metadata.repository.cassandra;
  * under the License.
  */
 
-import me.prettyprint.cassandra.serializers.LongSerializer;
-import me.prettyprint.cassandra.serializers.SerializerTypeInferer;
-import me.prettyprint.cassandra.serializers.StringSerializer;
-import me.prettyprint.cassandra.service.template.ColumnFamilyUpdater;
-import me.prettyprint.hector.api.Serializer;
-import me.prettyprint.hector.api.beans.ColumnSlice;
-import me.prettyprint.hector.api.beans.HColumn;
-import me.prettyprint.hector.api.factory.HFactory;
-import me.prettyprint.hector.api.mutation.Mutator;
-import org.apache.archiva.metadata.repository.cassandra.model.ColumnNames;
-import org.apache.commons.lang3.StringUtils;
-
 /**
  * @author Olivier Lamy
  * @since 2.0.0
@@ -69,90 +56,7 @@ public class CassandraUtils
         return builder.toString();
     }
 
-    public static <A, B> HColumn<A, B> column( final A name, final B value )
-    {
-
-        return HFactory.createColumn( name, //
-                                      value, //
-            SerializerTypeInferer.getSerializer( name ), //
-            SerializerTypeInferer.getSerializer( value ) );
-    }
-
-    public static String getStringValue( ColumnSlice<String, String> columnSlice, ColumnNames columnName )
-    {
-        return getStringValue( columnSlice, columnName.toString() );
-    }
-
-    public static String getStringValue( ColumnSlice<String, String> columnSlice, String columnName )
-    {
-        if ( StringUtils.isEmpty( columnName ) )
-        {
-            return null;
-        }
-
-        HColumn<String, String> hColumn = columnSlice.getColumnByName( columnName );
-        return hColumn == null ? null : hColumn.getValue();
-    }
-
-    public static Long getLongValue( ColumnSlice<String, Long> columnSlice, String columnName )
-    {
-        if ( StringUtils.isEmpty( columnName ) )
-        {
-            return null;
-        }
-
-        HColumn<String, Long> hColumn = columnSlice.getColumnByName( columnName );
-        return hColumn == null ? null : hColumn.getValue();
-    }
-
-    public static <T> String getAsStringValue( ColumnSlice<String, T> columnSlice, String columnName )
-    {
-        StringSerializer ss = StringSerializer.get();
-        if ( StringUtils.isEmpty( columnName ) )
-        {
-            return null;
-        }
-
-        HColumn<String, T> hColumn = columnSlice.getColumnByName( columnName );
-        return hColumn == null ? null : ss.fromByteBuffer( hColumn.getValueBytes() );
-    }
-
-    public static Long getAsLongValue( ColumnSlice<String, String> columnSlice, String columnName )
-    {
-        LongSerializer ls = LongSerializer.get();
-        if ( StringUtils.isEmpty( columnName ) )
-        {
-            return null;
-        }
-
-        HColumn<String, String> hColumn = columnSlice.getColumnByName( columnName );
-        return hColumn == null ? null : ls.fromByteBuffer( hColumn.getValueBytes() );
-    }
 
-    public static void addInsertion( Mutator<String> mutator, String key, String columnFamily, String columnName,
-                                     String value )
-    {
-        if ( value != null )
-        {
-            mutator.addInsertion( key, columnFamily, column( columnName, value ) );
-        }
-    }
-
-    /**
-     * null check on the value to prevent {@link java.lang.IllegalArgumentException}
-     * @param updater
-     * @param columnName
-     * @param value
-     */
-    public static void addUpdateStringValue(ColumnFamilyUpdater<String,String> updater, String columnName, String value )
-    {
-        if (value == null)
-        {
-            return;
-        }
-        updater.setString( columnName, value );
-
-    }
 
     private CassandraUtils()
     {
index 65a5901f79e5d5e3c70257e1a5f8def1ecffa40d..fb1180b60672d02e82cc92f67aa386c6207fe946 100644 (file)
@@ -19,18 +19,14 @@ package org.apache.archiva.metadata.repository.cassandra;
  * under the License.
  */
 
-import me.prettyprint.cassandra.model.BasicColumnDefinition;
-import me.prettyprint.cassandra.model.ConfigurableConsistencyLevel;
-import me.prettyprint.cassandra.serializers.StringSerializer;
-import me.prettyprint.cassandra.service.CassandraHostConfigurator;
-import me.prettyprint.cassandra.service.ThriftKsDef;
-import me.prettyprint.hector.api.Cluster;
-import me.prettyprint.hector.api.HConsistencyLevel;
-import me.prettyprint.hector.api.Keyspace;
-import me.prettyprint.hector.api.ddl.ColumnFamilyDefinition;
-import me.prettyprint.hector.api.ddl.ColumnIndexType;
-import me.prettyprint.hector.api.ddl.ComparatorType;
-import me.prettyprint.hector.api.factory.HFactory;
+import com.datastax.oss.driver.api.core.CqlSession;
+import com.datastax.oss.driver.api.core.CqlSessionBuilder;
+import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
+import com.datastax.oss.driver.api.core.config.DriverConfigLoader;
+import com.datastax.oss.driver.api.core.type.DataTypes;
+import com.datastax.oss.driver.api.querybuilder.schema.CreateIndex;
+import com.datastax.oss.driver.api.querybuilder.schema.CreateKeyspace;
+import com.datastax.oss.driver.api.querybuilder.schema.CreateTableWithOptions;
 import org.apache.archiva.metadata.repository.RepositorySessionFactoryBean;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
@@ -43,8 +39,12 @@ import javax.annotation.PostConstruct;
 import javax.annotation.PreDestroy;
 import javax.inject.Inject;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
 import java.util.List;
 
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;
+import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.*;
 import static org.apache.archiva.metadata.repository.cassandra.model.ColumnNames.*;
 
 /**
@@ -53,12 +53,12 @@ import static org.apache.archiva.metadata.repository.cassandra.model.ColumnNames
  * @author Olivier Lamy
  * @since 2.0.0
  */
-@Service("archivaEntityManagerFactory#cassandra")
+@Service( "archivaEntityManagerFactory#cassandra" )
 public class DefaultCassandraArchivaManager
     implements CassandraArchivaManager
 {
 
-    private Logger logger = LoggerFactory.getLogger( getClass() );
+    private Logger logger = LoggerFactory.getLogger( getClass( ) );
 
     @Inject
     private ApplicationContext applicationContext;
@@ -69,16 +69,12 @@ public class DefaultCassandraArchivaManager
 
     private boolean started;
 
-    private Cluster cluster;
-
-    private Keyspace keyspace;
-
     // configurable???
     private String repositoryFamilyName = "repository";
 
     private String namespaceFamilyName = "namespace";
 
-    private String projectFamilyName = PROJECT.toString();
+    private String projectFamilyName = PROJECT.toString( );
 
     private String projectVersionMetadataFamilyName = "projectversionmetadata";
 
@@ -94,513 +90,448 @@ public class DefaultCassandraArchivaManager
 
     private String checksumFamilyName = "checksum";
 
-    @Value("${cassandra.host}")
+
+    private static String[] projectVersionMetadataColumns;
+
+
+    static
+    {
+        projectVersionMetadataColumns = new String[]{
+            DEFAULT_PRIMARY_KEY,
+            NAMESPACE_ID.toString( ),
+            REPOSITORY_NAME.toString( ),
+            PROJECT_VERSION.toString( ),
+            PROJECT_ID.toString( ),
+            DESCRIPTION.toString( ),
+            URL.toString( ),
+            NAME.toString( ),
+            VERSION.toString( ),
+            VERSION_PROPERTIES.toString( ),
+            "incomplete",
+            "ciManagement.system",
+            "ciManagement.url",
+            "issueManagement.system",
+            "issueManagement.url",
+            "organization.name",
+            "organization.url",
+            "scm.url",
+            "scm.connection",
+            "scm.developerConnection"
+        };
+        Arrays.sort( projectVersionMetadataColumns );
+    }
+
+    @Value( "${cassandra.host}" )
     private String cassandraHost;
 
-    @Value("${cassandra.port}")
+    @Value( "${cassandra.port}" )
     private String cassandraPort;
 
-    @Value("${cassandra.maxActive}")
+    @Value( "${cassandra.maxActive}" )
     private int maxActive;
 
-    @Value("${cassandra.readConsistencyLevel}")
+    @Value( "${cassandra.readConsistencyLevel}" )
     private String readConsistencyLevel;
 
-    @Value("${cassandra.writeConsistencyLevel}")
+    @Value( "${cassandra.writeConsistencyLevel}" )
     private String writeConsistencyLevel;
 
-    @Value("${cassandra.replicationFactor}")
+    @Value( "${cassandra.replicationFactor}" )
     private int replicationFactor;
 
-    @Value("${cassandra.keyspace.name}")
+    @Value( "${cassandra.keyspace.name}" )
     private String keyspaceName;
 
-    @Value("${cassandra.cluster.name}")
+    @Value( "${cassandra.cluster.name}" )
     private String clusterName;
 
     @Inject
     private RepositorySessionFactoryBean repositorySessionFactoryBean;
 
-    @PostConstruct
-    public void initialize()
-    {
-        // skip initialisation if not cassandra
-        if ( !StringUtils.equals( repositorySessionFactoryBean.getId(), "cassandra" ) )
-        {
-            return;
-        }
-        final CassandraHostConfigurator configurator =
-            new CassandraHostConfigurator( cassandraHost + ":" + cassandraPort );
-        configurator.setMaxActive( maxActive );
-        //configurator.setCassandraThriftSocketTimeout(  );
-
-        cluster = HFactory.getOrCreateCluster( clusterName, configurator );
-
-        final ConfigurableConsistencyLevel consistencyLevelPolicy = new ConfigurableConsistencyLevel();
-        consistencyLevelPolicy.setDefaultReadConsistencyLevel( HConsistencyLevel.valueOf( readConsistencyLevel ) );
-        consistencyLevelPolicy.setDefaultWriteConsistencyLevel( HConsistencyLevel.valueOf( writeConsistencyLevel ) );
-        keyspace = HFactory.createKeyspace( keyspaceName, cluster, consistencyLevelPolicy );
+    DriverConfigLoader configLoader;
 
-        List<ColumnFamilyDefinition> cfds = new ArrayList<>();
+    CqlSession cqlSession;
 
-        // namespace table
-        {
-
-            final ColumnFamilyDefinition namespace =
-                HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
-                                                       getNamespaceFamilyName(), //
-                                                       ComparatorType.UTF8TYPE );
-            cfds.add( namespace );
-
-            // creating indexes for cql query
-
-            BasicColumnDefinition nameColumn = new BasicColumnDefinition();
-            nameColumn.setName( StringSerializer.get().toByteBuffer( NAME.toString() ) );
-            nameColumn.setIndexName( NAME.toString() );
-            nameColumn.setIndexType( ColumnIndexType.KEYS );
-            nameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            namespace.addColumnDefinition( nameColumn );
-
-            BasicColumnDefinition repositoryIdColumn = new BasicColumnDefinition();
-            repositoryIdColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
-            repositoryIdColumn.setIndexName( REPOSITORY_NAME.toString() );
-            repositoryIdColumn.setIndexType( ColumnIndexType.KEYS );
-            repositoryIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            namespace.addColumnDefinition( repositoryIdColumn );
-        }
+    @Override
+    public CqlSessionBuilder getSessionBuilder( )
+    {
+        return CqlSession.builder( ).withConfigLoader( configLoader ).withKeyspace( keyspaceName ).withLocalDatacenter( "datacenter1" );
+    }
 
-        // repository table
-        {
-            final ColumnFamilyDefinition repository =
-                HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
-                                                       getRepositoryFamilyName(), //
-                                                       ComparatorType.UTF8TYPE );
-
-            cfds.add( repository );
-
-            BasicColumnDefinition nameColumn = new BasicColumnDefinition();
-            nameColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
-            nameColumn.setIndexName( REPOSITORY_NAME.toString() );
-            nameColumn.setIndexType( ColumnIndexType.KEYS );
-            nameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            repository.addColumnDefinition( nameColumn );
+    @Override
+    public CqlSession getSession( )
+    {
+        if (cqlSession==null || cqlSession.isClosed()) {
+            this.cqlSession = getSessionBuilder( ).build( );
         }
+        return this.cqlSession;
+    }
 
-        // project table
+    @PostConstruct
+    public void initialize( )
+    {
+        // skip initialisation if not cassandra
+        if ( !StringUtils.equals( repositorySessionFactoryBean.getId( ), "cassandra" ) )
         {
-
-            final ColumnFamilyDefinition project = HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
-                                                                                          getProjectFamilyName(), //
-                                                                                          ComparatorType.UTF8TYPE );
-            cfds.add( project );
-
-            // creating indexes for cql query
-
-            BasicColumnDefinition projectIdColumn = new BasicColumnDefinition();
-            projectIdColumn.setName( StringSerializer.get().toByteBuffer( PROJECT_ID.toString() ) );
-            projectIdColumn.setIndexName( PROJECT_ID.toString() );
-            projectIdColumn.setIndexType( ColumnIndexType.KEYS );
-            projectIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            project.addColumnDefinition( projectIdColumn );
-
-            BasicColumnDefinition repositoryIdColumn = new BasicColumnDefinition();
-            repositoryIdColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
-            repositoryIdColumn.setIndexName( REPOSITORY_NAME.toString() );
-            repositoryIdColumn.setIndexType( ColumnIndexType.KEYS );
-            repositoryIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            project.addColumnDefinition( repositoryIdColumn );
-
-            BasicColumnDefinition namespaceIdColumn = new BasicColumnDefinition();
-            namespaceIdColumn.setName( StringSerializer.get().toByteBuffer( NAMESPACE_ID.toString() ) );
-            namespaceIdColumn.setIndexName( NAMESPACE_ID.toString() );
-            namespaceIdColumn.setIndexType( ColumnIndexType.KEYS );
-            namespaceIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            project.addColumnDefinition( namespaceIdColumn );
+            return;
         }
 
-        //projectversionmetadatamodel
-        {
-
-            final ColumnFamilyDefinition projectVersionMetadataModel =
-                HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
-                                                       getProjectVersionMetadataFamilyName(), //
-                                                       ComparatorType.UTF8TYPE );
-            cfds.add( projectVersionMetadataModel );
-
-            // creating indexes for cql query
-
-            BasicColumnDefinition namespaceIdColumn = new BasicColumnDefinition();
-            namespaceIdColumn.setName( StringSerializer.get().toByteBuffer( NAMESPACE_ID.toString() ) );
-            namespaceIdColumn.setIndexName( NAMESPACE_ID.toString() );
-            namespaceIdColumn.setIndexType( ColumnIndexType.KEYS );
-            namespaceIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            projectVersionMetadataModel.addColumnDefinition( namespaceIdColumn );
-
-            BasicColumnDefinition repositoryNameColumn = new BasicColumnDefinition();
-            repositoryNameColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
-            repositoryNameColumn.setIndexName( REPOSITORY_NAME.toString() );
-            repositoryNameColumn.setIndexType( ColumnIndexType.KEYS );
-            repositoryNameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            projectVersionMetadataModel.addColumnDefinition( repositoryNameColumn );
-
-            BasicColumnDefinition idColumn = new BasicColumnDefinition();
-            idColumn.setName( StringSerializer.get().toByteBuffer( ID.toString() ) );
-            idColumn.setIndexName( ID.toString() );
-            idColumn.setIndexType( ColumnIndexType.KEYS );
-            idColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            projectVersionMetadataModel.addColumnDefinition( idColumn );
-
-            BasicColumnDefinition projectIdColumn = new BasicColumnDefinition();
-            projectIdColumn.setName( StringSerializer.get().toByteBuffer( PROJECT_ID.toString() ) );
-            projectIdColumn.setIndexName( PROJECT_ID.toString() );
-            projectIdColumn.setIndexType( ColumnIndexType.KEYS );
-            projectIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            projectVersionMetadataModel.addColumnDefinition( projectIdColumn );
+        List<String> hostNames = new ArrayList<>( );
+        hostNames.add( cassandraHost + ":" + cassandraPort );
+        System.out.println( "Contact point: " + cassandraHost + ":" + cassandraPort );
+        configLoader =
+            DriverConfigLoader.programmaticBuilder( )
 
-        }
+                .withStringList( DefaultDriverOption.CONTACT_POINTS, hostNames )
+                .withInt( DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE, maxActive )
+                .withInt( DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE, maxActive )
+                //.withInt( DefaultDriverOption.CONNECTION_MAX_REQUESTS, maxActive )
+                .withString( DefaultDriverOption.REQUEST_CONSISTENCY, readConsistencyLevel )
+                .build( );
 
-        // artifactmetadatamodel table
         {
 
-            final ColumnFamilyDefinition artifactMetadataModel =
-                HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
-                                                       getArtifactMetadataFamilyName(), //
-                                                       ComparatorType.UTF8TYPE );
-            cfds.add( artifactMetadataModel );
-
-            // creating indexes for cql query
-
-            BasicColumnDefinition idColumn = new BasicColumnDefinition();
-            idColumn.setName( StringSerializer.get().toByteBuffer( ID.toString() ) );
-            idColumn.setIndexName( ID.toString() );
-            idColumn.setIndexType( ColumnIndexType.KEYS );
-            idColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            artifactMetadataModel.addColumnDefinition( idColumn );
-
-            BasicColumnDefinition repositoryNameColumn = new BasicColumnDefinition();
-            repositoryNameColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
-            repositoryNameColumn.setIndexName( REPOSITORY_NAME.toString() );
-            repositoryNameColumn.setIndexType( ColumnIndexType.KEYS );
-            repositoryNameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            artifactMetadataModel.addColumnDefinition( repositoryNameColumn );
-
-            BasicColumnDefinition namespaceIdColumn = new BasicColumnDefinition();
-            namespaceIdColumn.setName( StringSerializer.get().toByteBuffer( NAMESPACE_ID.toString() ) );
-            namespaceIdColumn.setIndexName( NAMESPACE_ID.toString() );
-            namespaceIdColumn.setIndexType( ColumnIndexType.KEYS );
-            namespaceIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            artifactMetadataModel.addColumnDefinition( namespaceIdColumn );
-
-            BasicColumnDefinition projectColumn = new BasicColumnDefinition();
-            projectColumn.setName( StringSerializer.get().toByteBuffer( PROJECT.toString() ) );
-            projectColumn.setIndexName( PROJECT.toString() );
-            projectColumn.setIndexType( ColumnIndexType.KEYS );
-            projectColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            artifactMetadataModel.addColumnDefinition( projectColumn );
-
-            BasicColumnDefinition projectVersionColumn = new BasicColumnDefinition();
-            projectVersionColumn.setName( StringSerializer.get().toByteBuffer( PROJECT_VERSION.toString() ) );
-            projectVersionColumn.setIndexName( PROJECT_VERSION.toString() );
-            projectVersionColumn.setIndexType( ColumnIndexType.KEYS );
-            projectVersionColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            artifactMetadataModel.addColumnDefinition( projectVersionColumn );
-
-            BasicColumnDefinition versionColumn = new BasicColumnDefinition();
-            versionColumn.setName( StringSerializer.get().toByteBuffer( VERSION.toString() ) );
-            versionColumn.setIndexName( VERSION.toString() );
-            versionColumn.setIndexType( ColumnIndexType.KEYS );
-            versionColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            artifactMetadataModel.addColumnDefinition( versionColumn );
-
-            BasicColumnDefinition whenGatheredColumn = new BasicColumnDefinition();
-            whenGatheredColumn.setName( StringSerializer.get().toByteBuffer( WHEN_GATHERED.toString() ) );
-            whenGatheredColumn.setIndexName( WHEN_GATHERED.toString() );
-            whenGatheredColumn.setIndexType( ColumnIndexType.KEYS );
-            whenGatheredColumn.setValidationClass( ComparatorType.LONGTYPE.getClassName() );
-            artifactMetadataModel.addColumnDefinition( whenGatheredColumn );
-
-            BasicColumnDefinition sha1Column = new BasicColumnDefinition();
-            sha1Column.setName( StringSerializer.get().toByteBuffer( SHA1.toString() ) );
-            sha1Column.setIndexName( SHA1.toString() );
-            sha1Column.setIndexType( ColumnIndexType.KEYS );
-            sha1Column.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            artifactMetadataModel.addColumnDefinition( sha1Column );
-
-            BasicColumnDefinition md5Column = new BasicColumnDefinition();
-            md5Column.setName( StringSerializer.get().toByteBuffer( MD5.toString() ) );
-            md5Column.setIndexName( MD5.toString() );
-            md5Column.setIndexType( ColumnIndexType.KEYS );
-            md5Column.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            artifactMetadataModel.addColumnDefinition( md5Column );
-
-
+            CreateKeyspace cKeySpace = createKeyspace( keyspaceName ).ifNotExists( ).withSimpleStrategy( replicationFactor );
+            CqlSession.builder( ).withConfigLoader( configLoader ).withLocalDatacenter( "datacenter1" ).build().execute( cKeySpace.build( ) );
         }
 
-        // metadatafacetmodel table
-        {
-            final ColumnFamilyDefinition metadataFacetModel =
-                HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
-                                                       getMetadataFacetFamilyName(), //
-                                                       ComparatorType.UTF8TYPE );
-            cfds.add( metadataFacetModel );
-
-            // creating indexes for cql query
-
-            BasicColumnDefinition facetIdColumn = new BasicColumnDefinition();
-            facetIdColumn.setName( StringSerializer.get().toByteBuffer( FACET_ID.toString() ) );
-            facetIdColumn.setIndexName( FACET_ID.toString() );
-            facetIdColumn.setIndexType( ColumnIndexType.KEYS );
-            facetIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            metadataFacetModel.addColumnDefinition( facetIdColumn );
-
-            BasicColumnDefinition repositoryNameColumn = new BasicColumnDefinition();
-            repositoryNameColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
-            repositoryNameColumn.setIndexName( REPOSITORY_NAME.toString() );
-            repositoryNameColumn.setIndexType( ColumnIndexType.KEYS );
-            repositoryNameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            metadataFacetModel.addColumnDefinition( repositoryNameColumn );
-
-            BasicColumnDefinition nameColumn = new BasicColumnDefinition();
-            nameColumn.setName( StringSerializer.get().toByteBuffer( NAME.toString() ) );
-            nameColumn.setIndexName( NAME.toString() );
-            nameColumn.setIndexType( ColumnIndexType.KEYS );
-            nameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            metadataFacetModel.addColumnDefinition( nameColumn );
-
-            BasicColumnDefinition namespaceColumn = new BasicColumnDefinition();
-            namespaceColumn.setName( StringSerializer.get().toByteBuffer( NAMESPACE_ID.toString() ) );
-            namespaceColumn.setIndexName( NAMESPACE_ID.toString() );
-            namespaceColumn.setIndexType( ColumnIndexType.KEYS );
-            namespaceColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            metadataFacetModel.addColumnDefinition( namespaceColumn );
-
-            BasicColumnDefinition projectIdColumn = new BasicColumnDefinition();
-            projectIdColumn.setName( StringSerializer.get().toByteBuffer( PROJECT_ID.toString() ) );
-            projectIdColumn.setIndexName( PROJECT_ID.toString() );
-            projectIdColumn.setIndexType( ColumnIndexType.KEYS );
-            projectIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            metadataFacetModel.addColumnDefinition( projectIdColumn );
-
-            BasicColumnDefinition projectVersionColumn = new BasicColumnDefinition();
-            projectVersionColumn.setName( StringSerializer.get().toByteBuffer( PROJECT_VERSION.toString() ) );
-            projectVersionColumn.setIndexName( PROJECT_VERSION.toString() );
-            projectVersionColumn.setIndexType( ColumnIndexType.KEYS );
-            projectVersionColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            metadataFacetModel.addColumnDefinition( projectVersionColumn );
-
-        }
+        CqlSession session = getSession( );
 
-        // Checksum table
         {
-            final ColumnFamilyDefinition checksumCf =
-                    HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
-                            getChecksumFamilyName(), //
-                            ComparatorType.UTF8TYPE );
-
-            BasicColumnDefinition artifactMetatadaModel_key = new BasicColumnDefinition();
-            artifactMetatadaModel_key.setName( StringSerializer.get().toByteBuffer( "artifactMetadataModel.key" ) );
-            artifactMetatadaModel_key.setIndexName( "artifactMetadataModel_key" );
-            artifactMetatadaModel_key.setIndexType( ColumnIndexType.KEYS );
-            artifactMetatadaModel_key.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            checksumCf.addColumnDefinition( artifactMetatadaModel_key );
-
-
-            BasicColumnDefinition checksumAlgorithmColumn = new BasicColumnDefinition();
-            checksumAlgorithmColumn.setName( StringSerializer.get().toByteBuffer( CHECKSUM_ALG.toString() ) );
-            checksumAlgorithmColumn.setIndexName( CHECKSUM_ALG.toString() );
-            checksumAlgorithmColumn.setIndexType( ColumnIndexType.KEYS );
-            checksumAlgorithmColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            checksumCf.addColumnDefinition( checksumAlgorithmColumn );
-
-            BasicColumnDefinition checksumValueColumn = new BasicColumnDefinition();
-            checksumValueColumn.setName( StringSerializer.get().toByteBuffer( CHECKSUM_VALUE.toString() ) );
-            checksumValueColumn.setIndexName( CHECKSUM_VALUE.toString() );
-            checksumValueColumn.setIndexType( ColumnIndexType.KEYS );
-            checksumValueColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            checksumCf.addColumnDefinition( checksumValueColumn );
-
-            BasicColumnDefinition repositoryNameColumn = new BasicColumnDefinition();
-            repositoryNameColumn.setName( StringSerializer.get().toByteBuffer( REPOSITORY_NAME.toString() ) );
-            repositoryNameColumn.setIndexName( REPOSITORY_NAME.toString() );
-            repositoryNameColumn.setIndexType( ColumnIndexType.KEYS );
-            repositoryNameColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            checksumCf.addColumnDefinition( repositoryNameColumn );
-
-
-            cfds.add( checksumCf );
-
-            // creating indexes for cql query
-
-        }
 
-        // mailinglist table
-        {
-            final ColumnFamilyDefinition mailingListCf =
-                HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
-                                                       getMailingListFamilyName(), //
-                                                       ComparatorType.UTF8TYPE );
+            // namespace table
+            {
+                String tableName = getNamespaceFamilyName( );
+                CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
+                    .withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
+                    .withColumn( NAME.toString( ), DataTypes.TEXT )
+                    .withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
+                    .withCompactStorage( );
+                session.execute( table.build( ) );
+                CreateIndex index = createIndex( NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAME.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
+                session.execute( index.build( ) );
+            }
 
-            BasicColumnDefinition projectVersionMetadataModel_key = new BasicColumnDefinition();
-            projectVersionMetadataModel_key.setName( StringSerializer.get().toByteBuffer( "projectVersionMetadataModel.key" ) );
-            projectVersionMetadataModel_key.setIndexName( "projectVersionMetadataModel_key" );
-            projectVersionMetadataModel_key.setIndexType( ColumnIndexType.KEYS );
-            projectVersionMetadataModel_key.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            mailingListCf.addColumnDefinition( projectVersionMetadataModel_key );
+            // Repository Table
+            {
+                String tableName = getRepositoryFamilyName( );
+                CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
+                    .withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
+                    .withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
+                    .withCompactStorage( );
+                session.execute( table.build( ) );
+                CreateIndex index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
+                session.execute( index.build( ) );
 
-            cfds.add( mailingListCf );
+            }
 
-            // creating indexes for cql query
+            // Project table
+            {
+                String tableName = getProjectFamilyName( );
+                CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
+                    .withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
+                    .withColumn( PROJECT_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
+                    .withColumn( NAMESPACE_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( PROJECT_PROPERTIES.toString( ), DataTypes.frozenMapOf( DataTypes.TEXT, DataTypes.TEXT ) )
+                    .withCompactStorage( );
+                session.execute( table.build( ) );
+                CreateIndex index = createIndex( PROJECT_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_ID.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( NAMESPACE_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAMESPACE_ID.toString( ) );
+                session.execute( index.build( ) );
 
-        }
+            }
 
-        // license table
-        {
-            final ColumnFamilyDefinition licenseCf =
-                HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
-                                                       getLicenseFamilyName(), //
-                                                       ComparatorType.UTF8TYPE );
+            // Project Version Metadata Model
+            {
+                String tableName = getProjectVersionMetadataFamilyName( );
+                CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
+                    .withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
+                    .withColumn( NAMESPACE_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
+                    .withColumn( PROJECT_VERSION.toString( ), DataTypes.TEXT )
+                    .withColumn( PROJECT_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( DESCRIPTION.toString( ), DataTypes.TEXT )
+                    .withColumn( URL.toString( ), DataTypes.TEXT )
+                    .withColumn( NAME.toString(), DataTypes.TEXT )
+                    .withColumn( VERSION.toString(), DataTypes.TEXT )
+                    .withColumn( VERSION_PROPERTIES.toString(), DataTypes.mapOf( DataTypes.TEXT, DataTypes.TEXT ) )
+                    .withColumn( "incomplete", DataTypes.BOOLEAN )
+                    .withColumn( "\"ciManagement.system\"", DataTypes.TEXT )
+                    .withColumn( "\"ciManagement.url\"", DataTypes.TEXT )
+                    .withColumn( "\"issueManagement.system\"", DataTypes.TEXT )
+                    .withColumn( "\"issueManagement.url\"", DataTypes.TEXT )
+                    .withColumn( "\"organization.name\"", DataTypes.TEXT )
+                    .withColumn( "\"organization.url\"", DataTypes.TEXT )
+                    .withColumn( "\"scm.url\"", DataTypes.TEXT )
+                    .withColumn( "\"scm.connection\"", DataTypes.TEXT )
+                    .withColumn( "\"scm.developerConnection\"", DataTypes.TEXT );
+                session.execute( table.build( ) );
+                CreateIndex index = createIndex( NAMESPACE_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAMESPACE_ID.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( PROJECT_VERSION.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_VERSION.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( PROJECT_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_ID.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( VERSION_PROPERTIES.toString( ) + "_idx" ).ifNotExists( ).onTable( tableName ).andColumnEntries(  VERSION_PROPERTIES.toString( ) );
+                session.execute( index.build( ) );
+            }
 
-            BasicColumnDefinition projectVersionMetadataModel_key = new BasicColumnDefinition();
-            projectVersionMetadataModel_key.setName( StringSerializer.get().toByteBuffer( "projectVersionMetadataModel.key" ) );
-            projectVersionMetadataModel_key.setIndexName( "projectVersionMetadataModel_key" );
-            projectVersionMetadataModel_key.setIndexType( ColumnIndexType.KEYS );
-            projectVersionMetadataModel_key.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            licenseCf.addColumnDefinition( projectVersionMetadataModel_key );
+            // Artifact Metadata Model
+            {
+                String tableName = getArtifactMetadataFamilyName( );
+                CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
+                    .withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
+                    .withColumn( ID.toString( ), DataTypes.TEXT )
+                    .withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
+                    .withColumn( NAMESPACE_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( PROJECT_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( PROJECT_VERSION.toString( ), DataTypes.TEXT )
+                    .withColumn( VERSION.toString( ), DataTypes.TEXT )
+                    .withColumn( WHEN_GATHERED.toString( ), DataTypes.BIGINT )
+                    .withColumn( SHA1.toString( ), DataTypes.TEXT )
+                    .withColumn( MD5.toString( ), DataTypes.TEXT )
+                    .withColumn( FILE_LAST_MODIFIED.toString(), DataTypes.BIGINT)
+                    .withColumn( SIZE.toString(), DataTypes.BIGINT )
+                    .withCompactStorage( );
+                session.execute( table.build( ) );
+
+                CreateIndex index = createIndex( ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( ID.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( NAMESPACE_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAMESPACE_ID.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( PROJECT_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_ID.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( PROJECT_VERSION.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_VERSION.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( VERSION.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( VERSION.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( WHEN_GATHERED.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( WHEN_GATHERED.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( SHA1.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( SHA1.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( MD5.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( MD5.toString( ) );
+                session.execute( index.build( ) );
 
-            cfds.add( licenseCf );
+            }
+            // Metadata Facet Model
+            {
+                String tableName = getMetadataFacetFamilyName( );
+                CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
+                    .withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
+                    .withColumn( FACET_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
+                    .withColumn( NAME.toString( ), DataTypes.TEXT )
+                    .withColumn( NAMESPACE_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( PROJECT_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( PROJECT_VERSION.toString( ), DataTypes.TEXT )
+                    .withColumn( KEY.toString(), DataTypes.TEXT )
+                    .withColumn( VALUE.toString(), DataTypes.TEXT)
+                    .withColumn( WHEN_GATHERED.toString(), DataTypes.BIGINT )
+                    .withCompactStorage( );
+                session.execute( table.build( ) );
+
+                CreateIndex index = createIndex( FACET_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( FACET_ID.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAME.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( NAMESPACE_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( NAMESPACE_ID.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( PROJECT_ID.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_ID.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( PROJECT_VERSION.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( PROJECT_VERSION.toString( ) );
+                session.execute( index.build( ) );
+            }
+            // Checksum Table
+            {
+                String tableName = getChecksumFamilyName( );
+                CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
+                    .withPartitionKey( DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
+                    .withColumn( "\"artifactMetadataModel.key\"", DataTypes.TEXT )
+                    .withColumn( CHECKSUM_ALG.toString( ), DataTypes.TEXT )
+                    .withColumn( CHECKSUM_VALUE.toString( ), DataTypes.TEXT )
+                    .withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
+                    .withCompactStorage( );
+                session.execute( table.build( ) );
+
+                CreateIndex index = createIndex( CHECKSUM_ALG.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( CHECKSUM_ALG.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( CHECKSUM_VALUE.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( CHECKSUM_VALUE.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( REPOSITORY_NAME.toString( ) ).ifNotExists( ).onTable( tableName ).andColumn( REPOSITORY_NAME.toString( ) );
+                session.execute( index.build( ) );
+            }
+            // Mailinglist Table
+            {
+                String tableName = getMailingListFamilyName( );
+                CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
+                    .withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
+                    .withColumn( NAME.toString(), DataTypes.TEXT )
+                    .withColumn( "\"projectVersionMetadataModel.key\"", DataTypes.TEXT )
+                    .withColumn( "mainArchiveUrl", DataTypes.TEXT )
+                    .withColumn( "postAddress", DataTypes.TEXT )
+                    .withColumn( "subscribeAddress", DataTypes.TEXT )
+                    .withColumn( "unsubscribeAddress", DataTypes.TEXT )
+                    .withColumn( "otherArchive", DataTypes.frozenListOf( DataTypes.TEXT ) )
+                    .withCompactStorage( );
+                session.execute( table.build( ) );
+
+                CreateIndex index = createIndex( "\"projectVersionMetadataModel_key\"" ).ifNotExists( ).onTable( tableName ).andColumn( "\"\"projectVersionMetadataModel.key\"\"" );
+                session.execute( index.build( ) );
+            }
 
-            // creating indexes for cql query
+            // License Table
+            {
+                String tableName = getLicenseFamilyName( );
+                CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
+                    .withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
+                    .withColumn( "\"projectVersionMetadataModel.key\"", DataTypes.TEXT )
+                    .withColumn( NAME.toString(), DataTypes.TEXT )
+                    .withColumn( URL.toString(), DataTypes.TEXT )
+                    .withCompactStorage( );
+                session.execute( table.build( ) );
+
+                CreateIndex index = createIndex( "\"projectVersionMetadataModel_key\"" ).ifNotExists( ).onTable( tableName ).andColumn( "\"\"projectVersionMetadataModel.key\"\"" );
+                session.execute( index.build( ) );
+            }
 
-        }
+            // Dependency Table
+            {
+                String tableName = getDependencyFamilyName( );
+                CreateTableWithOptions table = createTable( keyspaceName, tableName ).ifNotExists( )
+                    .withPartitionKey( CassandraArchivaManager.DEFAULT_PRIMARY_KEY, DataTypes.TEXT )
+                    .withColumn( REPOSITORY_NAME.toString( ), DataTypes.TEXT )
+                    .withColumn( GROUP_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( ARTIFACT_ID.toString( ), DataTypes.TEXT )
+                    .withColumn( VERSION.toString( ), DataTypes.TEXT )
+                    .withColumn( "\"projectVersionMetadataModel.key\"", DataTypes.TEXT )
+                    .withColumn( "classifier", DataTypes.TEXT )
+                    .withColumn( "optional", DataTypes.TEXT )
+                    .withColumn( "scope", DataTypes.TEXT )
+                    .withColumn( "systemPath", DataTypes.TEXT )
+                    .withColumn( "type", DataTypes.TEXT )
+                    .withCompactStorage( );
+
+                session.execute( table.build( ) );
+
+                CreateIndex index = createIndex( "groupIdIdx" ).ifNotExists( ).onTable( tableName ).andColumn( GROUP_ID.toString( ) );
+                session.execute( index.build( ) );
+                index = createIndex( "\"projectVersionMetadataModel_key\"" ).ifNotExists( ).onTable( tableName ).andColumn( "\"\"projectVersionMetadataModel.key\"\"" );
+                session.execute( index.build( ) );
 
-        // dependency table
-        {
-            final ColumnFamilyDefinition dependencyCf =
-                HFactory.createColumnFamilyDefinition( keyspace.getKeyspaceName(), //
-                                                       getDependencyFamilyName(), //
-                                                       ComparatorType.UTF8TYPE );
-            cfds.add( dependencyCf );
-
-            // creating indexes for cql query
-
-            BasicColumnDefinition groupIdColumn = new BasicColumnDefinition();
-            groupIdColumn.setName( StringSerializer.get().toByteBuffer( GROUP_ID.toString() ) );
-            groupIdColumn.setIndexName( "groupIdIdx" );
-            groupIdColumn.setIndexType( ColumnIndexType.KEYS );
-            groupIdColumn.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            dependencyCf.addColumnDefinition( groupIdColumn );
-
-            BasicColumnDefinition projectVersionMetadataModel_key = new BasicColumnDefinition();
-            projectVersionMetadataModel_key.setName( StringSerializer.get().toByteBuffer( "projectVersionMetadataModel.key" ) );
-            projectVersionMetadataModel_key.setIndexName( "projectVersionMetadataModel_key" );
-            projectVersionMetadataModel_key.setIndexType( ColumnIndexType.KEYS );
-            projectVersionMetadataModel_key.setValidationClass( ComparatorType.UTF8TYPE.getClassName() );
-            dependencyCf.addColumnDefinition( projectVersionMetadataModel_key );
+            }
 
         }
 
-        // TODO take care of update new table!!
-        { // ensure keyspace exists, here if the keyspace doesn't exist we suppose nothing exist
-            if ( cluster.describeKeyspace( keyspaceName ) == null )
-            {
-                logger.info( "Creating Archiva Cassandra '{}' keyspace.", keyspaceName );
-                cluster.addKeyspace( HFactory.createKeyspaceDefinition( keyspaceName, //
-                                                                        ThriftKsDef.DEF_STRATEGY_CLASS, //
-                                                                        replicationFactor, //
-                                                                        cfds )
-                );
-            }
-        }
 
     }
 
     @Override
-    public void start()
+    public void start( )
     {
     }
 
     @PreDestroy
     @Override
-    public void shutdown()
+    public void shutdown( )
     {
+        if (this.cqlSession!=null) {
+            this.cqlSession.close( );
+        }
     }
 
 
     @Override
-    public boolean started()
+    public boolean started( )
     {
         return started;
     }
 
 
     @Override
-    public Keyspace getKeyspace()
-    {
-        return keyspace;
-    }
-
-    @Override
-    public Cluster getCluster()
-    {
-        return cluster;
-    }
-
-    @Override
-    public String getRepositoryFamilyName()
+    public String getRepositoryFamilyName( )
     {
         return repositoryFamilyName;
     }
 
     @Override
-    public String getNamespaceFamilyName()
+    public String getNamespaceFamilyName( )
     {
         return namespaceFamilyName;
     }
 
     @Override
-    public String getProjectFamilyName()
+    public String getProjectFamilyName( )
     {
         return projectFamilyName;
     }
 
     @Override
-    public String getProjectVersionMetadataFamilyName()
+    public String getProjectVersionMetadataFamilyName( )
     {
         return projectVersionMetadataFamilyName;
     }
 
+    public String[] getProjectVersionMetadataColumns() {
+        return projectVersionMetadataColumns;
+    }
+
     @Override
-    public String getArtifactMetadataFamilyName()
+    public String getArtifactMetadataFamilyName( )
     {
         return artifactMetadataFamilyName;
     }
 
     @Override
-    public String getMetadataFacetFamilyName()
+    public String getMetadataFacetFamilyName( )
     {
         return metadataFacetFamilyName;
     }
 
     @Override
-    public String getMailingListFamilyName()
+    public String getMailingListFamilyName( )
     {
         return mailingListFamilyName;
     }
 
     @Override
-    public String getLicenseFamilyName()
+    public String getLicenseFamilyName( )
     {
         return licenseFamilyName;
     }
 
     @Override
-    public String getDependencyFamilyName()
+    public String getDependencyFamilyName( )
     {
         return dependencyFamilyName;
     }
 
     @Override
-    public String getChecksumFamilyName() {
+    public String getChecksumFamilyName( )
+    {
         return checksumFamilyName;
     }
+
+    @Override
+    public DriverConfigLoader getConfigLoader( )
+    {
+        return configLoader;
+    }
+
+    @Override
+    public String getKeyspaceName( )
+    {
+        return keyspaceName;
+    }
 }
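
The rewritten manager above follows the standard DataStax Java driver 4.x flow: build a programmatic DriverConfigLoader, open a CqlSession lazily, create the keyspace and tables with SchemaBuilder, and issue statements through QueryBuilder instead of Hector column family definitions. Below is a minimal, self-contained sketch of that flow under assumed names; localhost:9042, the QUORUM consistency, the example_ks keyspace and the namespace demo table with key/name/repositoryname columns are illustrative choices for this sketch, not the Archiva configuration or schema.

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
import com.datastax.oss.driver.api.core.config.DriverConfigLoader;
import com.datastax.oss.driver.api.core.cql.Row;
import com.datastax.oss.driver.api.core.type.DataTypes;

import java.util.Collections;

import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.insertInto;
import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;
import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom;
import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.createIndex;
import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.createKeyspace;
import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.createTable;

public class DataStaxDriverSketch
{
    public static void main( String[] args )
    {
        // Programmatic configuration, as done above with DriverConfigLoader.programmaticBuilder()
        DriverConfigLoader configLoader = DriverConfigLoader.programmaticBuilder( )
            .withStringList( DefaultDriverOption.CONTACT_POINTS, Collections.singletonList( "localhost:9042" ) )
            .withInt( DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE, 2 )
            .withString( DefaultDriverOption.REQUEST_CONSISTENCY, "QUORUM" )
            .build( );

        // First connect without a keyspace to create it, then reconnect bound to it
        try ( CqlSession admin = CqlSession.builder( ).withConfigLoader( configLoader )
                                           .withLocalDatacenter( "datacenter1" ).build( ) )
        {
            admin.execute( createKeyspace( "example_ks" ).ifNotExists( ).withSimpleStrategy( 1 ).build( ) );
        }

        try ( CqlSession session = CqlSession.builder( ).withConfigLoader( configLoader )
                                             .withKeyspace( "example_ks" ).withLocalDatacenter( "datacenter1" ).build( ) )
        {
            // Schema DDL through SchemaBuilder instead of Hector ColumnFamilyDefinitions
            session.execute( createTable( "example_ks", "namespace" ).ifNotExists( )
                .withPartitionKey( "key", DataTypes.TEXT )
                .withColumn( "name", DataTypes.TEXT )
                .withColumn( "repositoryname", DataTypes.TEXT )
                .build( ) );
            session.execute( createIndex( "namespace_name_idx" ).ifNotExists( )
                .onTable( "namespace" ).andColumn( "name" ).build( ) );

            // DML through QueryBuilder
            session.execute( insertInto( "namespace" )
                .value( "key", literal( "repo1.org.apache" ) )
                .value( "name", literal( "org.apache" ) )
                .value( "repositoryname", literal( "repo1" ) )
                .build( ) );
            Row row = session.execute( selectFrom( "namespace" ).all( )
                .whereColumn( "key" ).isEqualTo( literal( "repo1.org.apache" ) )
                .build( ) ).one( );
            System.out.println( row.getString( "name" ) );
        }
    }
}
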
index 8ea2f389fd642ad28e5dd69bfc1be4e78b4a1e63..364569b4f8ceb850b61059ceb5b8fba05cd0ff3f 100644 (file)
@@ -32,6 +32,7 @@ public enum ColumnNames
     NAMESPACE_ID( "namespaceId" ),
     PROJECT_ID( "projectId" ),
     PROJECT_VERSION( "projectVersion" ),
+    PROJECT_PROPERTIES("projectProperties"),
     KEY( "facetKey" ),
     VALUE( "value" ),
     ID( "id" ),
@@ -41,6 +42,7 @@ public enum ColumnNames
     PROJECT( "project" ),
     FILE_LAST_MODIFIED( "fileLastModified" ),
     VERSION( "version" ),
+    VERSION_PROPERTIES("versionProperties"),
     GROUP_ID( "groupId" ),
     ARTIFACT_ID( "artifactId" ),
     DESCRIPTION( "description" ),
index ea03801c5e088c19c20c388a20d391e7b131149a..491edc2f03ac2d8315b0e8fb069df558d1eec955 100644 (file)
@@ -19,6 +19,7 @@ package org.apache.archiva.metadata.repository.cassandra;
  * under the License.
  */
 
+import com.datastax.oss.driver.api.core.CqlSession;
 import org.apache.archiva.metadata.model.MetadataFacetFactory;
 import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
 import org.apache.archiva.metadata.repository.MetadataRepository;
@@ -28,22 +29,32 @@ import org.apache.archiva.metadata.repository.RepositorySessionFactory;
 import org.apache.archiva.metadata.repository.cassandra.model.ProjectVersionMetadataModel;
 import org.easymock.EasyMock;
 import org.easymock.IMocksControl;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
 
 import javax.inject.Inject;
 import javax.inject.Named;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.util.Arrays;
 import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
 
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.truncate;
+import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.dropTable;
 import static org.assertj.core.api.Assertions.assertThat;
 
 /**
  * @author Olivier Lamy
  */
+@ExtendWith( SpringExtension.class )
+@TestInstance( TestInstance.Lifecycle.PER_CLASS )
 public class CassandraMetadataRepositoryTest
     extends AbstractMetadataRepositoryTest
 {
@@ -59,6 +70,9 @@ public class CassandraMetadataRepositoryTest
     IMocksControl sessionControl;
     RepositorySession session;
 
+    long cTime;
+    int testNum = 0;
+    AtomicBoolean clearedTables = new AtomicBoolean( false );
 
 
     @Override
@@ -73,13 +87,14 @@ public class CassandraMetadataRepositoryTest
         return cmr;
     }
 
-    @Before
-    @Override
-    public void setUp()
+    @BeforeEach
+    public void setUp( TestInfo testInfo )
         throws Exception
     {
-
+        cTime = System.currentTimeMillis( );
+        System.err.println( "Setting up "+(testNum++) + " - " + testInfo.getDisplayName() );
         super.setUp();
+        System.err.println( "Setting up 2 " + testInfo.getDisplayName( ) + " - " + (System.currentTimeMillis( ) - cTime) );
         assertMaxTries =1;
         assertRetrySleepMs=10;
 
@@ -104,7 +119,12 @@ public class CassandraMetadataRepositoryTest
 
         sessionFactoryControl.replay();
 
-        clearReposAndNamespace( cassandraArchivaManager );
+        if (!clearedTables.get())
+        {
+            clearReposAndNamespace( cassandraArchivaManager );
+            clearedTables.set( true );
+        }
+        System.err.println( "Finished setting up "+testInfo.getDisplayName() + " - " + (System.currentTimeMillis( ) - cTime) );
     }
 
     /**
@@ -139,44 +159,44 @@ public class CassandraMetadataRepositoryTest
     }
 
 
-    @After
-    public void shutdown()
+    @AfterEach
+    public void shutdown(TestInfo testInfo)
         throws Exception
     {
+        System.err.println( "Shutting down " + testInfo.getDisplayName( ) + " - " + ( System.currentTimeMillis( ) - cTime ) );
         clearReposAndNamespace( cassandraArchivaManager );
+        clearedTables.set( true );
         super.tearDown();
+        System.err.println( "Shutting down finished" + testInfo.getDisplayName( ) + " - " + ( System.currentTimeMillis( ) - cTime ) );
     }
 
     static void clearReposAndNamespace( CassandraArchivaManager cassandraArchivaManager )
         throws Exception
     {
-        cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
-                                                       cassandraArchivaManager.getProjectFamilyName() );
-
-        cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
-                                                       cassandraArchivaManager.getNamespaceFamilyName() );
-
-        cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
-                                                       cassandraArchivaManager.getRepositoryFamilyName() );
-
-        cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
-                                                       cassandraArchivaManager.getProjectVersionMetadataFamilyName() );
-
-        cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
-                                                       cassandraArchivaManager.getArtifactMetadataFamilyName() );
-
-        cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
-                                                       cassandraArchivaManager.getMetadataFacetFamilyName() );
-
-        cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
-                                                       cassandraArchivaManager.getMailingListFamilyName() );
-
-        cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
-                                                       cassandraArchivaManager.getLicenseFamilyName() );
-
-        cassandraArchivaManager.getCluster().truncate( cassandraArchivaManager.getKeyspace().getKeyspaceName(),
-                                                       cassandraArchivaManager.getDependencyFamilyName() );
-
+        if (cassandraArchivaManager!=null)
+        {
+            CqlSession session = cassandraArchivaManager.getSession( );
+            {
+                List<String> tables = Arrays.asList(
+                    cassandraArchivaManager.getProjectFamilyName( ),
+                    cassandraArchivaManager.getNamespaceFamilyName( ),
+                    cassandraArchivaManager.getRepositoryFamilyName( ),
+                    cassandraArchivaManager.getProjectVersionMetadataFamilyName( ),
+                    cassandraArchivaManager.getArtifactMetadataFamilyName( ),
+                    cassandraArchivaManager.getMetadataFacetFamilyName( ),
+                    cassandraArchivaManager.getMailingListFamilyName( ),
+                    cassandraArchivaManager.getLicenseFamilyName( ),
+                    cassandraArchivaManager.getDependencyFamilyName( )
+                );
+                for ( String table : tables )
+                {
+                    session.execute( truncate( table ).build( ) );
+                }
+
+            }
+        } else {
+            System.err.println( "cassandraArchivaManager is null" );
+        }
     }
 
 }
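
The test above moves from JUnit 4 to JUnit 5 (Jupiter) with the Spring extension, a per-class test instance and TestInfo injection for the timing output, while clearReposAndNamespace now truncates tables through the query builder. The sketch below summarizes the annotation mapping in runnable form; the empty @Configuration class and the printed timings are assumptions made only to keep it self-contained, the real tests load the Archiva Spring XML contexts instead.

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

// JUnit 4 to Jupiter mapping applied throughout this commit:
//   @RunWith(Spring JUnit 4 runner)   -> @ExtendWith(SpringExtension.class)
//   @Before / @After                  -> @BeforeEach / @AfterEach
//   @BeforeClass / @AfterClass        -> @BeforeAll / @AfterAll (static)
//   @Ignore                           -> @Disabled
@ExtendWith( SpringExtension.class )
@TestInstance( TestInstance.Lifecycle.PER_CLASS )
@ContextConfiguration( classes = JupiterLifecycleSketchTest.EmptyConfig.class )
public class JupiterLifecycleSketchTest
{
    // Empty context, only so this sketch can run without the Archiva spring-context.xml files
    @Configuration
    static class EmptyConfig
    {
    }

    private long startTime;

    @BeforeEach
    void setUp( TestInfo testInfo ) // TestInfo is injected by Jupiter, replacing JUnit 4 TestName rules
    {
        startTime = System.currentTimeMillis( );
        System.err.println( "Setting up " + testInfo.getDisplayName( ) );
    }

    @AfterEach
    void tearDown( TestInfo testInfo )
    {
        System.err.println( "Finished " + testInfo.getDisplayName( ) + " in "
            + ( System.currentTimeMillis( ) - startTime ) + " ms" );
    }

    @Test
    void lifecycleRuns( )
    {
        Assertions.assertTrue( startTime > 0 );
    }
}
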
index 9a631907fd4d4f52f593058a429daae52b1f8d23..c67e4d3484d4ace2c22cb97d12010a8a55640048 100644 (file)
@@ -23,13 +23,14 @@ import org.apache.archiva.metadata.model.ProjectMetadata;
 import org.apache.archiva.metadata.repository.cassandra.model.Namespace;
 import org.apache.archiva.metadata.repository.cassandra.model.Repository;
 import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
 
 import javax.inject.Inject;
 import javax.inject.Named;
@@ -39,7 +40,7 @@ import static org.assertj.core.api.Assertions.assertThat;
 /**
  * @author Olivier Lamy
  */
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ExtendWith( SpringExtension.class )
 @ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath*:/spring-context.xml" } )
 public class RepositoriesNamespaceTest
 {
@@ -53,7 +54,7 @@ public class RepositoriesNamespaceTest
 
     CassandraMetadataRepository cmr;
 
-    @Before
+    @BeforeEach
     public void setup()
         throws Exception
     {
@@ -65,7 +66,7 @@ public class RepositoriesNamespaceTest
         CassandraMetadataRepositoryTest.clearReposAndNamespace( cassandraArchivaManager );
     }
 
-    @After
+    @AfterEach
     public void shutdown()
         throws Exception
     {
index a2aad30342754c66c683c2c5182bf74a9adc6cde..6e871d9672a03d3a43c547a166279dc7c0d3ffbe 100644 (file)
@@ -35,7 +35,7 @@
 
     <logger name="org.apache.archiva.metadata.repository.cassandra" level="debug"/>
 
-    <root level="info" includeLocation="true">
+    <root level="debug" includeLocation="true">
       <appender-ref ref="console"/>
     </root>
   </loggers>
index 053b283536728556950d892069cfa3127374f498..4fd4bab269a74b8be3d8d55f8b582dd56f0fa978 100644 (file)
@@ -27,14 +27,13 @@ import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
 import org.apache.archiva.metadata.repository.MetadataRepository;
 import org.apache.archiva.metadata.repository.MetadataService;
 import org.apache.archiva.metadata.repository.RepositorySessionFactory;
-import org.junit.Before;
-import org.junit.Ignore;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
 
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.List;
-import java.util.Map;
 
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -58,7 +57,7 @@ public class FileMetadataRepositoryTest
         return this.sessionFactory;
     }
 
-    @Before
+    @BeforeEach
     @Override
     public void setUp()
         throws Exception
@@ -81,7 +80,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testGetArtifactsByProjectVersionMetadata()
         throws Exception
     {
@@ -89,7 +88,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testGetArtifactsByProjectVersionMetadataNoRepository()
         throws Exception
     {
@@ -97,7 +96,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testGetArtifactsByProjectVersionMetadataAllRepositories()
         throws Exception
     {
@@ -105,7 +104,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testGetArtifactsByMetadataAllRepositories()
         throws Exception
     {
@@ -113,7 +112,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testGetArtifactsByPropertySingleResult()
         throws Exception
     {
@@ -121,7 +120,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testSearchArtifactsByKey()
         throws Exception
     {
@@ -129,7 +128,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testSearchArtifactsByKeyExact()
         throws Exception
     {
@@ -137,7 +136,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testSearchArtifactsFullText()
         throws Exception
     {
@@ -145,7 +144,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testSearchArtifactsFullTextExact()
         throws Exception
     {
@@ -153,7 +152,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testSearchArtifactsByFacetKeyAllRepos()
         throws Exception
     {
@@ -161,7 +160,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testSearchArtifactsByFacetKey()
         throws Exception
     {
@@ -169,7 +168,7 @@ public class FileMetadataRepositoryTest
     }
 
     @Override
-    @Ignore
+    @Disabled
     public void testSearchArtifactsFullTextByFacet()
         throws Exception
     {
index 9f7bd8ca69ef71e6e4843e92ff5a7e8b0b40aa55..25305d8435d3bc3948f5e8858f7ea31419a811ab 100644 (file)
@@ -23,17 +23,14 @@ import org.apache.archiva.metadata.model.ArtifactMetadata;
 import org.apache.archiva.metadata.model.MetadataFacetFactory;
 import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
 import org.apache.archiva.metadata.repository.DefaultMetadataResolver;
-import org.apache.archiva.metadata.repository.MetadataRepositoryException;
 import org.apache.archiva.metadata.repository.MetadataService;
-import org.apache.archiva.metadata.repository.MetadataSessionException;
 import org.apache.archiva.metadata.repository.RepositorySession;
 import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
-import javax.jcr.RepositoryException;
 import javax.jcr.Session;
 import java.io.IOException;
 import java.nio.file.Files;
@@ -67,7 +64,7 @@ public class JcrMetadataRepositoryTest
         return sessionFactory;
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setupSpec( ) throws IOException, InvalidFileStoreVersionException
     {
         Path directory = Paths.get( "target/test-repositories" );
@@ -89,7 +86,7 @@ public class JcrMetadataRepositoryTest
 
     }
 
-    @Before
+    @BeforeEach
     @Override
     public void setUp() throws Exception
     {
@@ -106,7 +103,7 @@ public class JcrMetadataRepositoryTest
         }
     }
 
-    @AfterClass
+    @AfterAll
     public static void stopSpec( )
         throws Exception
     {
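
In Jupiter the class-level fixtures keep their static modifiers: with the default per-method test lifecycle, @BeforeAll and @AfterAll methods must be static, which is why setupSpec()/stopSpec() stay static after the migration above. A small hedged sketch follows; the temporary directory stands in for the Oak repository directory and is an assumption of this example.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

// @BeforeAll/@AfterAll replace @BeforeClass/@AfterClass and run once per test class.
public class StaticLifecycleSketchTest
{
    private static Path repositoryDirectory;

    @BeforeAll
    public static void setupSpec( ) throws IOException
    {
        // Created once before all tests in the class
        repositoryDirectory = Files.createTempDirectory( "jcr-sketch" );
    }

    @AfterAll
    public static void stopSpec( ) throws IOException
    {
        // Cleaned up once after all tests; the directory is empty in this sketch
        Files.deleteIfExists( repositoryDirectory );
    }

    @Test
    public void repositoryDirectoryIsCreatedOncePerClass( )
    {
        Assertions.assertTrue( Files.isDirectory( repositoryDirectory ) );
    }
}
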
diff --git a/pom.xml b/pom.xml
index 7b38dd19d473ae11c68049982bbc68b67dc73e30..3acfee38313c2ca629be2cd1f872d770fc9a78c0 100644 (file)
--- a/pom.xml
+++ b/pom.xml
         <version>${junit.jupiter.version}</version>
         <scope>test</scope>
       </dependency>
+      <dependency>
+        <groupId>org.junit.jupiter</groupId>
+        <artifactId>junit-jupiter-api</artifactId>
+        <version>${junit.jupiter.version}</version>
+        <scope>test</scope>
+      </dependency>
 
       <dependency>
         <groupId>org.easymock</groupId>