// repositoryId + namespaceId + project + projectVersion + id
@Id
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String artifactMetadataModelId;
@Column( name = "id" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String id;
@Column( name = "repositoryId" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String repositoryId;
@Column( name = "namespace" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String namespace;
@Column( name = "project" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String project;
@Column( name = "projectVersion" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String projectVersion;
@Column( name = "version" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String version;
@Column( name = "fileLastModified" )
private long size;
@Column( name = "md5" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String md5;
@Column( name = "sha1" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String sha1;
@Column( name = "whenGathered" )
--- /dev/null
+package org.apache.archiva.metadata.repository.cassandra.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+import com.netflix.astyanax.serializers.AbstractSerializer;
+import com.netflix.astyanax.serializers.ComparatorType;
+import org.apache.commons.codec.binary.StringUtils;
+import org.apache.commons.io.IOUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.util.zip.Deflater;
+import java.util.zip.DeflaterOutputStream;
+import java.util.zip.InflaterInputStream;
+
+
+/**
+ * For huge String values we use deflate compression.
+ * @author Olivier Lamy
+ * @since 2.0.0
+ */
+public class DeflateStringSerializer
+ extends AbstractSerializer<String>
+{
+
+ private Logger logger = LoggerFactory.getLogger( getClass() );
+
+ private static final String UTF_8 = "UTF-8";
+
+ private static final DeflateStringSerializer instance = new DeflateStringSerializer();
+
+ private static final Charset charset = Charset.forName( UTF_8 );
+
+ public static DeflateStringSerializer get()
+ {
+ return instance;
+ }
+
+ @Override
+ public ByteBuffer toByteBuffer( String obj )
+ {
+ if ( obj == null )
+ {
+ return null;
+ }
+
+ try
+ {
+ byte[] bytes = compressWithDeflate( StringUtils.getBytesUtf8( obj ) );
+ return ByteBuffer.wrap( bytes );
+ }
+ catch ( IOException e )
+ {
+ throw new RuntimeException( "Fail to compress column data", e );
+ }
+ }
+
+ @Override
+ public String fromByteBuffer( ByteBuffer byteBuffer )
+ {
+ if ( byteBuffer == null )
+ {
+ return null;
+ }
+
+ // copy only the remaining bytes; the backing array may be shared or carry an offset
+ ByteBuffer dup = byteBuffer.duplicate();
+ byte[] bytes = new byte[dup.remaining()];
+ dup.get( bytes );
+ try
+ {
+ return getFromDeflateBytes( bytes );
+ }
+ catch ( IOException e )
+ {
+ throw new RuntimeException( "Failed to decompress column data", e );
+ }
+
+ }
+
+ public String getFromDeflateBytes( byte[] bytes )
+ throws IOException
+ {
+ ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream( bytes );
+ InflaterInputStream inflaterInputStream = new InflaterInputStream( byteArrayInputStream );
+ // decode with UTF-8 to match the encoding used when compressing
+ return IOUtils.toString( inflaterInputStream, UTF_8 );
+ }
+
+ public byte[] compressWithDeflate( byte[] unCompress )
+ throws IOException
+ {
+ try
+ {
+ ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ DeflaterOutputStream out = new DeflaterOutputStream( buffer, new Deflater( Deflater.BEST_COMPRESSION ) );
+ out.write( unCompress );
+ out.finish();
+ // the buffer already holds the compressed bytes, no extra copy needed
+ return buffer.toByteArray();
+ }
+ catch ( IOException e )
+ {
+ logger.debug( "IOException in compressWithDeflate", e );
+ throw e;
+ }
+
+ }
+
+ @Override
+ public ComparatorType getComparatorType()
+ {
+ return ComparatorType.BYTESTYPE;
+ }
+
+ @Override
+ public ByteBuffer fromString( String str )
+ {
+ // delegate to the compressing conversion; calling instance.fromString here would recurse forever
+ return toByteBuffer( str );
+ }
+
+ @Override
+ public String getString( ByteBuffer byteBuffer )
+ {
+ return fromByteBuffer( byteBuffer );
+ }
+
+}
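
A minimal round-trip sketch of the new serializer (not part of the patch; the class name and sample value below are illustrative only):

    import org.apache.archiva.metadata.repository.cassandra.model.DeflateStringSerializer;

    import java.nio.ByteBuffer;

    public class DeflateRoundTripSketch
    {
        public static void main( String[] args )
        {
            // hypothetical sample value; any large metadata string works
            String original = "a very long project description stored as a Cassandra column";
            DeflateStringSerializer serializer = DeflateStringSerializer.get();
            ByteBuffer packed = serializer.toByteBuffer( original );   // deflate-compressed bytes
            String restored = serializer.fromByteBuffer( packed );     // inflated back to a String
            System.out.println( original.equals( restored ) );         // expected: true
        }
    }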
+++ /dev/null
-package org.apache.archiva.metadata.repository.cassandra.model;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import com.netflix.astyanax.serializers.AbstractSerializer;
-import com.netflix.astyanax.serializers.ComparatorType;
-import org.apache.cassandra.db.marshal.UTF8Type;
-import org.apache.commons.codec.binary.StringUtils;
-import org.apache.commons.io.IOUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
-import java.util.zip.Deflater;
-import java.util.zip.DeflaterInputStream;
-import java.util.zip.DeflaterOutputStream;
-import java.util.zip.InflaterInputStream;
-
-
-/**
- * For Huge String we use a compression
- * @author Olivier Lamy
- */
-public class HugeStringSerializer
- extends AbstractSerializer<String>
-{
-
- private Logger logger = LoggerFactory.getLogger( getClass() );
-
- private static final String UTF_8 = "UTF-8";
-
- private static final HugeStringSerializer instance = new HugeStringSerializer();
-
- private static final Charset charset = Charset.forName( UTF_8 );
-
- public static HugeStringSerializer get()
- {
- return instance;
- }
-
- @Override
- public ByteBuffer toByteBuffer( String obj )
- {
- if ( obj == null )
- {
- return null;
- }
-
- try
- {
- byte[] bytes = compressWithDeflate( StringUtils.getBytesUtf8( obj ) );
- return ByteBuffer.wrap( bytes );
- }
- catch ( IOException e )
- {
- throw new RuntimeException( "Fail to compress column data", e );
- }
- }
-
- @Override
- public String fromByteBuffer( ByteBuffer byteBuffer )
- {
- if ( byteBuffer == null )
- {
- return null;
- }
-
- ByteBuffer dup = byteBuffer.duplicate();
- try
- {
- String str = getFromDeflateBytes( dup.array() );
- return str;
- }
- catch ( IOException e )
- {
- throw new RuntimeException( "Fail to decompress column data", e );
- }
-
- }
-
- public String getFromDeflateBytes( byte[] bytes )
- throws IOException
- {
- ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream( bytes );
- InflaterInputStream inflaterInputStream = new InflaterInputStream( byteArrayInputStream );
- return IOUtils.toString( inflaterInputStream );
- }
-
- public byte[] compressWithDeflate( byte[] unCompress )
- throws IOException
- {
- try
- {
- ByteArrayOutputStream buffer = new ByteArrayOutputStream();
- DeflaterOutputStream out = new DeflaterOutputStream( buffer, new Deflater( Deflater.BEST_COMPRESSION ) );
- out.write( unCompress );
- out.finish();
- ByteArrayInputStream bais = new ByteArrayInputStream( buffer.toByteArray() );
- byte[] res = IOUtils.toByteArray( bais );
- return res;
- }
- catch ( IOException e )
- {
- logger.debug( "IOException in compressStringWithDeflate", e );
- throw e;
- }
-
- }
-
- @Override
- public ComparatorType getComparatorType()
- {
- return ComparatorType.BYTESTYPE;
- }
-
- @Override
- public ByteBuffer fromString( String str )
- {
- return instance.fromString( str );
- }
-
- @Override
- public String getString( ByteBuffer byteBuffer )
- {
- return instance.getString( byteBuffer );
- }
-
- /*
- private static final String UTF_8 = "UTF-8";
- private static final HugeStringSerializer instance = new HugeStringSerializer();
- private static final Charset charset = Charset.forName(UTF_8);
-
- public static HugeStringSerializer get() {
- return instance;
- }
-
- @Override
- public ByteBuffer toByteBuffer(String obj) {
- if (obj == null) {
- return null;
- }
- return ByteBuffer.wrap(obj.getBytes(charset));
- }
-
- @Override
- public String fromByteBuffer(ByteBuffer byteBuffer) {
- if (byteBuffer == null) {
- return null;
- }
- final ByteBuffer dup = byteBuffer.duplicate();
- return charset.decode(dup).toString();
- }
-
- @Override
- public ComparatorType getComparatorType() {
- return ComparatorType.UTF8TYPE;
- }
-
- @Override
- public ByteBuffer fromString(String str) {
- return UTF8Type.instance.fromString(str);
- }
-
- @Override
- public String getString(ByteBuffer byteBuffer) {
- return UTF8Type.instance.getString(byteBuffer);
- }
- */
-}
*/
import com.netflix.astyanax.entitystore.Serializer;
-import com.netflix.astyanax.serializers.GzipStringSerializer;
import org.apache.archiva.metadata.repository.cassandra.CassandraUtils;
import javax.persistence.Column;
// id is repositoryId + namespaceId + projectId + facetId + name + mapKey
@Id
@Column( name = "id" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String id;
@Column( name = "artifactMetadataModel" )
private ArtifactMetadataModel artifactMetadataModel;
@Column( name = "facetId" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String facetId;
@Column( name = "key" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String key;
@Column( name = "name" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String name;
@Column( name = "value" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String value;
public MetadataFacetModel()
@Id
@Column( name = "id" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String id;
@Column( name = "name" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String name;
@Column( name = "repository" )
{
@Id
@Column( name = "projectKey" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String projectKey;
@Column( name = "projectId" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String projectId;
{
// repositoryId + namespace + projectId + id (version)
@Id
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String rowId;
@Column( name = "namespace" )
* id is the version
*/
@Column( name = "id" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String id;
@Column( name = "projectId" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String projectId;
@Column( name = "url" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String url;
@Column( name = "name" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String name;
@Column( name = "description" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String description;
@Column( name = "organization" )
import javax.persistence.Id;
import javax.persistence.Table;
import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
/**
@Id
@Column( name = "id" )
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String id;
@Column(name = "name")
- @Serializer( HugeStringSerializer.class )
+ @Serializer( DeflateStringSerializer.class )
private String name;
//private transient List<Namespace> namespaces = new ArrayList<Namespace>();