
[MRM-294] and [MRM-275]

- Added the test cases that were left out of the initial commit
- Added DefaultCleanupReleasedSnapshots, an M2 implementation for cleaning up snapshots that have already been released (a wiring sketch follows this list)
- Updated the following files: archiva-core-consumers/pom.xml, RepositoryPurgeConsumer, DaysOldRepositoryPurge, RetentionCountRepositoryPurge and AbstractRepositoryPurge
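
A minimal sketch of how the new cleanup is wired and invoked, assembled from the RepositoryPurgeConsumer changes further down; repository, repositoryLayout, dao, index, path and configuration are assumed to be supplied by the consumer's own initialize() and processFile() context:

    // Illustration assembled from the RepositoryPurgeConsumer diff below.
    RepositoryPurge cleanUp = new DefaultCleanupReleasedSnapshots();
    cleanUp.setRepository( repository );            // ArchivaRepository being scanned
    cleanUp.setLayout( repositoryLayout );          // BidirectionalRepositoryLayout for that repository
    cleanUp.setArtifactDao( dao.getArtifactDAO() );
    cleanUp.setIndex( index );                      // lucene RepositoryContentIndex

    // Released snapshots are cleaned up before the configured purge runs.
    cleanUp.process( path, configuration.getConfiguration() );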



git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@562882 13f79535-47bb-0310-9956-ffa450edef68
Tag: tags/1.0-beta-1
Author: Maria Odea B. Ching (17 years ago)
Parent commit: 1a7b107c06
11 changed files with 841 additions and 36 deletions
  1. +2 -2    archiva-base/archiva-consumers/archiva-core-consumers/pom.xml
  2. +3 -8    archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/AbstractRepositoryPurge.java
  3. +8 -3    archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/DaysOldRepositoryPurge.java
  4. +190 -0  archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/DefaultCleanupReleasedSnapshots.java
  5. +34 -16  archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/RepositoryPurgeConsumer.java
  6. +9 -3    archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/RetentionCountRepositoryPurge.java
  7. +195 -0  archiva-base/archiva-consumers/archiva-core-consumers/src/test/java/org/apache/maven/archiva/consumers/core/repository/AbstractRepositoryPurgeTest.java
  8. +93 -0   archiva-base/archiva-consumers/archiva-core-consumers/src/test/java/org/apache/maven/archiva/consumers/core/repository/DaysOldRepositoryPurgeTest.java
  9. +303 -0  archiva-base/archiva-consumers/archiva-core-consumers/src/test/java/org/apache/maven/archiva/consumers/core/repository/RetentionCountRepositoryPurgeTest.java
  10. +2 -2   archiva-base/archiva-consumers/archiva-core-consumers/src/test/resources/org/apache/maven/archiva/consumers/core/repository/DaysOldRepositoryPurgeTest.xml
  11. +2 -2   archiva-base/archiva-consumers/archiva-core-consumers/src/test/resources/org/apache/maven/archiva/consumers/core/repository/RetentionCountRepositoryPurgeTest.xml

archiva-base/archiva-consumers/archiva-core-consumers/pom.xml (+2 -2)

@@ -55,10 +55,10 @@
</dependency>

<!-- test dependencies -->
<!--dependency>
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<scope>test</scope>
</dependency-->
</dependency>
</dependencies>
</project>

archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/AbstractRepositoryPurge.java (+3 -8)

@@ -66,7 +66,7 @@ public abstract class AbstractRepositoryPurge

if ( !dir.isDirectory() )
{
throw new RepositoryPurgeException( "Parent file " + dir.getPath() + " is not a directory." );
System.out.println( "File is not a directory." );
}

File[] files = dir.listFiles( filter );
@@ -87,7 +87,7 @@ public abstract class AbstractRepositoryPurge
throws RepositoryIndexException
{
List records = new ArrayList();
for ( int i = 0; i < artifactFiles.length; i++ )
{
artifactFiles[i].delete();
@@ -116,7 +116,7 @@ public abstract class AbstractRepositoryPurge
ArchivaArtifact queriedArtifact = artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion(), artifact.getClassifier(),
artifact.getType() );
artifactDao.deleteArtifact( queriedArtifact );
}
catch ( ArchivaDatabaseException ae )
@@ -129,11 +129,6 @@ public abstract class AbstractRepositoryPurge
}
}

protected void updateMetadata()
{

}

/**
* Get the artifactId, version, extension and classifier from the path parameter
*

archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/DaysOldRepositoryPurge.java (+8 -3)

@@ -45,6 +45,13 @@ public class DaysOldRepositoryPurge
{
try
{
File artifactFile = new File( getRepository().getUrl().getPath(), path );

if( !artifactFile.exists() )
{
return;
}

FilenameParts parts = getFilenameParts( path );

if ( VersionUtil.isSnapshot( parts.version ) )
@@ -54,8 +61,6 @@ public class DaysOldRepositoryPurge
Calendar olderThanThisDate = new GregorianCalendar();
olderThanThisDate.add( Calendar.DATE, ( -1 * repoConfig.getDaysOlder() ) );

File artifactFile = new File( getRepository().getUrl().getPath(), path );

if ( artifactFile.lastModified() < olderThanThisDate.getTimeInMillis() )
{
String[] fileParts = artifactFile.getName().split( "." + parts.extension );
@@ -75,5 +80,5 @@ public class DaysOldRepositoryPurge
throw new RepositoryPurgeException( re.getMessage() );
}
}
}

archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/DefaultCleanupReleasedSnapshots.java (+190 -0)

@@ -0,0 +1,190 @@
package org.apache.maven.archiva.consumers.core.repository;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.model.ArchivaRepositoryMetadata;
import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.commons.io.FileUtils;

import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.Date;

/**
* M2 implementation for cleaning up the released snapshots.
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
*/
public class DefaultCleanupReleasedSnapshots
extends AbstractRepositoryPurge
{
public static final String SNAPSHOT = "-SNAPSHOT";

private RepositoryMetadataReader metadataReader;

public DefaultCleanupReleasedSnapshots()
{
metadataReader = new RepositoryMetadataReader();
}

public void process( String path, Configuration configuration )
throws RepositoryPurgeException
{
try
{
File artifactFile = new File( getRepository().getUrl().getPath(), path );

if ( !artifactFile.exists() )
{
return;
}

FilenameParts parts = getFilenameParts( path );

if ( VersionUtil.isSnapshot( parts.version ) )
{
// version
File versionDir = artifactFile.getParentFile();

// artifactID - scan for other versions
File artifactIdDir = versionDir.getParentFile();

boolean updated = false;

List versions = getVersionsInDir( artifactIdDir );
Collections.sort( versions, VersionComparator.getInstance() );
for ( int j = 0; j < versions.size(); j++ )
{
String version = (String) versions.get( j );

if ( VersionComparator.getInstance().compare( version, versionDir.getName() ) > 0 )
{
purge( versionDir.listFiles() );

FileUtils.deleteDirectory( versionDir );

updated = true;
break;
}
}

if ( updated )
{
updateMetadata( artifactIdDir );
}
}
}
catch ( LayoutException le )
{
throw new RepositoryPurgeException( le.getMessage() );
}
catch ( IOException ie )
{
throw new RepositoryPurgeException( ie.getMessage() );
}
catch ( RepositoryIndexException re )
{
throw new RepositoryPurgeException( re.getMessage() );
}
}

private void updateMetadata( File artifactIdDir )
throws RepositoryPurgeException
{

File[] metadataFiles = getFiles( artifactIdDir, "maven-metadata" );
List availableVersions = getVersionsInDir( artifactIdDir );

Collections.sort( availableVersions );

String latestReleased = getLatestReleased( availableVersions );
for ( int i = 0; i < metadataFiles.length; i++ )
{
if ( !( metadataFiles[i].getName().toUpperCase() ).endsWith( "SHA1" ) &&
!( metadataFiles[i].getName().toUpperCase() ).endsWith( "MD5" ) )
{
try
{
Date lastUpdated = new Date();
ArchivaRepositoryMetadata metadata = metadataReader.read( metadataFiles[i] );
metadata.setAvailableVersions( availableVersions );
metadata.setLatestVersion( (String) availableVersions.get( availableVersions.size() - 1 ) );
metadata.setReleasedVersion( latestReleased );
metadata.setLastUpdatedTimestamp( lastUpdated );
metadata.setLastUpdated( Long.toString( lastUpdated.getTime() ) );

RepositoryMetadataWriter.write( metadata, metadataFiles[i] );
}
catch ( RepositoryMetadataException rme )
{
System.out.println( "Error updating metadata " + metadataFiles[i].getAbsoluteFile() );
}
}
}
}

private String getLatestReleased( List availableVersions )
{
List reversedOrder = new ArrayList( availableVersions );
Collections.reverse( reversedOrder );
String latestReleased = "";

for ( Iterator iter = reversedOrder.iterator(); iter.hasNext(); )
{
String version = (String) iter.next();
if ( !VersionUtil.getBaseVersion( version ).endsWith( SNAPSHOT ) )
{
latestReleased = version;
return latestReleased;
}
}

return latestReleased;
}

private List getVersionsInDir( File artifactIdDir )
{
String[] versionsAndMore = artifactIdDir.list();
List versions = new ArrayList();
for ( int j = 0; j < versionsAndMore.length; j++ )
{
if ( VersionUtil.isVersion( versionsAndMore[j] ) )
{
versions.add( versionsAndMore[j] );
}
}

return versions;
}

}

archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/RepositoryPurgeConsumer.java (+34 -16)

@@ -28,6 +28,7 @@ import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayoutFactory;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
@@ -72,18 +73,6 @@ public class RepositoryPurgeConsumer
*/
private BidirectionalRepositoryLayoutFactory layoutFactory;

private ArchivaRepository repository;

private BidirectionalRepositoryLayout repositoryLayout;

private List includes = new ArrayList();

private List propertyNameTriggers = new ArrayList();

private RepositoryPurge repoPurge;

private RepositoryContentIndex index;

/**
* @plexus.requirement role-hint="lucene"
*/
@@ -99,6 +88,20 @@ public class RepositoryPurgeConsumer
*/
private FileTypes filetypes;

private ArchivaRepository repository;

private BidirectionalRepositoryLayout repositoryLayout;

private List includes = new ArrayList();

private List propertyNameTriggers = new ArrayList();

private RepositoryPurge repoPurge;

private RepositoryContentIndex index;

private RepositoryPurge cleanUp;

public String getId()
{
return this.id;
@@ -145,13 +148,27 @@ public class RepositoryPurgeConsumer
"Unable to initialize consumer due to unknown repository layout: " + e.getMessage(), e );
}

// @todo check the repo configuration first which purge was set by the user
// temporarily default to DaysOldRepositoryPurge
repoPurge = new DaysOldRepositoryPurge();
// @todo handle better injection of RepositoryPurge
RepositoryConfiguration repoConfig = configuration.getConfiguration().findRepositoryById( repository.getId() );
if ( repoConfig.getDaysOlder() != 0 )
{
repoPurge = new DaysOldRepositoryPurge();
}
else
{
repoPurge = new RetentionCountRepositoryPurge();
}
repoPurge.setLayout( repositoryLayout );
repoPurge.setRepository( repository );
repoPurge.setIndex( index );
repoPurge.setArtifactDao( dao.getArtifactDAO() );

cleanUp = new DefaultCleanupReleasedSnapshots();
cleanUp.setRepository( repository );
cleanUp.setLayout( repositoryLayout );
cleanUp.setArtifactDao( dao.getArtifactDAO() );
cleanUp.setIndex( index );
}

public void processFile( String path )
@@ -159,8 +176,9 @@ public class RepositoryPurgeConsumer
{
try
{
repoPurge.process( path, configuration.getConfiguration() );
cleanUp.process( path, configuration.getConfiguration() );

repoPurge.process( path, configuration.getConfiguration() );
}
catch ( RepositoryPurgeException rpe )
{

archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/RetentionCountRepositoryPurge.java (+9 -3)

@@ -45,13 +45,19 @@ public class RetentionCountRepositoryPurge
throws RepositoryPurgeException
{
try
{
{
File artifactFile = new File( getRepository().getUrl().getPath(), path );

if( !artifactFile.exists() )
{
return;
}

FilenameParts parts = getFilenameParts( path );

if ( VersionUtil.isSnapshot( parts.version ) )
{
RepositoryConfiguration repoConfig = configuration.findRepositoryById( getRepository().getId() );
File artifactFile = new File( getRepository().getUrl().getPath(), path );
RepositoryConfiguration repoConfig = configuration.findRepositoryById( getRepository().getId() );
File parentDir = artifactFile.getParentFile();

if ( parentDir.isDirectory() )

archiva-base/archiva-consumers/archiva-core-consumers/src/test/java/org/apache/maven/archiva/consumers/core/repository/AbstractRepositoryPurgeTest.java (+195 -0)

@@ -0,0 +1,195 @@
package org.apache.maven.archiva.consumers.core.repository;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.layout.DefaultBidirectionalRepositoryLayout;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.jpox.SchemaTool;

import javax.jdo.PersistenceManagerFactory;
import javax.jdo.PersistenceManager;
import java.util.List;
import java.util.ArrayList;
import java.util.Properties;
import java.util.Iterator;
import java.util.Map;
import java.net.URL;
import java.io.File;

/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
*/
public class AbstractRepositoryPurgeTest
extends PlexusTestCase
{
public static final String TEST_REPO_ID = "test-repo";

public static final String TEST_REPO_NAME = "Test Repository";

public static final String TEST_REPO_URL = "file://" + getBasedir() + "/target/test-classes/test-repo/";

public static final int TEST_RETENTION_COUNT = 2;

public static final int TEST_DAYS_OLDER = 30;

private Configuration config;

private ArchivaRepository repo;

private BidirectionalRepositoryLayout layout;

protected ArtifactDAO dao;

protected RepositoryPurge repoPurge;

protected void setUp()
throws Exception
{
super.setUp();

DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );

jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );

jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:testdb" ) );

jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );

jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );

jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );

jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );

jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );

jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );

jdoFactory.setProperty( "javax.jdo.option.RestoreValues", "true" );

// jdoFactory.setProperty( "org.jpox.autoCreateColumns", "true" );

jdoFactory.setProperty( "org.jpox.validateTables", "true" );

jdoFactory.setProperty( "org.jpox.validateColumns", "true" );

jdoFactory.setProperty( "org.jpox.validateConstraints", "true" );

Properties properties = jdoFactory.getProperties();

for ( Iterator it = properties.entrySet().iterator(); it.hasNext(); )
{
Map.Entry entry = (Map.Entry) it.next();

System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
}

URL jdoFileUrls[] = new URL[]{getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" )};

if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
{
fail( "Unable to process test " + getName() + " - missing package.jdo." );
}

File propsFile = null; // intentional
boolean verbose = true;

SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[]{}, propsFile, verbose );
SchemaTool.createSchemaTables( jdoFileUrls, new URL[]{}, propsFile, verbose, null );

PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();

assertNotNull( pmf );

PersistenceManager pm = pmf.getPersistenceManager();

pm.close();

dao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
}

public void lookupRepositoryPurge( String role )
throws Exception
{
repoPurge = (RepositoryPurge) lookup( RepositoryPurge.class.getName(), role );

repoPurge.setArtifactDao( dao );

repoPurge.setRepository( getRepository() );

repoPurge.setLayout( getLayout() );
}

public Configuration getRepoConfiguration()
{
if ( config == null )
{
config = new Configuration();
}

RepositoryConfiguration repoConfig = new RepositoryConfiguration();
repoConfig.setId( TEST_REPO_ID );
repoConfig.setName( TEST_REPO_NAME );
repoConfig.setDaysOlder( TEST_DAYS_OLDER );
repoConfig.setUrl( TEST_REPO_URL );
repoConfig.setReleases( true );
repoConfig.setSnapshots( true );
repoConfig.setRetentionCount( TEST_RETENTION_COUNT );

List repos = new ArrayList();
repos.add( repoConfig );

config.setRepositories( repos );

return config;
}

public ArchivaRepository getRepository()
{
if ( repo == null )
{
repo = new ArchivaRepository( TEST_REPO_ID, TEST_REPO_NAME, TEST_REPO_URL );
}

return repo;
}

public BidirectionalRepositoryLayout getLayout()
throws LayoutException
{
if ( layout == null )
{
layout = new DefaultBidirectionalRepositoryLayout();
}

return layout;
}
}

archiva-base/archiva-consumers/archiva-core-consumers/src/test/java/org/apache/maven/archiva/consumers/core/repository/DaysOldRepositoryPurgeTest.java (+93 -0)

@@ -0,0 +1,93 @@
package org.apache.maven.archiva.consumers.core.repository;

import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.ArchivaArtifactModel;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.jdo.JdoAccess;

import javax.jdo.JDOFatalUserException;
import javax.jdo.JDOHelper;
import javax.jdo.spi.JDOImplHelper;
import java.util.List;
import java.util.ArrayList;
import java.util.Date;
import java.io.File;


/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
*/
public class DaysOldRepositoryPurgeTest
extends AbstractRepositoryPurgeTest
{
public static final String PATH_TO_BY_DAYS_OLD_ARTIFACT =
"org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.jar";

protected void setUp()
throws Exception
{
super.setUp();

lookupRepositoryPurge( "days-old" );
}

private void setLastModified()
{
File dir =
new File( "target/test-classes/test-repo/org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/" );
File[] contents = dir.listFiles();
for ( int i = 0; i < contents.length; i++ )
{
contents[i].setLastModified( 1179382029 );
}
}

public void testIfAJarIsFound()
throws Exception
{
// Create it
ArchivaArtifact artifact =
dao.createArtifact( "org.apache.maven.plugins", "maven-install-plugin", "2.2-SNAPSHOT", "", "jar" );
assertNotNull( artifact );

artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );

// Save it.
ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

setLastModified();

repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT, getRepoConfiguration() );

assertTrue( true );

assertFalse( new File(
"target/test-classes/test-repo/org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.pom.sha1" ).exists() );
}

protected void tearDown()
throws Exception
{
super.tearDown();
repoPurge = null;
}
}

archiva-base/archiva-consumers/archiva-core-consumers/src/test/java/org/apache/maven/archiva/consumers/core/repository/RetentionCountRepositoryPurgeTest.java (+303 -0)

@@ -0,0 +1,303 @@
package org.apache.maven.archiva.consumers.core.repository;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.model.ArchivaArtifact;

import java.io.File;
import java.util.Date;

/**
* Test RetentionsCountRepositoryPurgeTest
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
*/
public class RetentionCountRepositoryPurgeTest
extends AbstractRepositoryPurgeTest
{

public static final String PATH_TO_BY_RETENTION_COUNT_ARTIFACT =
"org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar";

public static final String PATH_TO_BY_RETENTION_COUNT_POM =
"org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom";

protected void setUp()
throws Exception
{
super.setUp();

lookupRepositoryPurge( "retention-count" );
}

/**
* Test if the artifact to be processed was a jar.
*
* @throws Exception
*/
public void testIfAJarWasFound()
throws Exception
{
// Create it
ArchivaArtifact artifact =
dao.createArtifact( "org.jruby.plugins", "jruby-rake-plugin", "1.0RC1-20070504.153317-1", "", "jar" );
assertNotNull( artifact );

artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );

// Save it.
ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//POM
artifact =
dao.createArtifact( "org.jruby.plugins", "jruby-rake-plugin", "1.0RC1-20070504.153317-1", "", "pom" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//JAR
artifact =
dao.createArtifact( "org.jruby.plugins", "jruby-rake-plugin", "1.0RC1-20070504.160758-2", "", "jar" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//POM
artifact =
dao.createArtifact( "org.jruby.plugins", "jruby-rake-plugin", "1.0RC1-20070504.160758-2", "", "pom" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//JAR
artifact =
dao.createArtifact( "org.jruby.plugins", "jruby-rake-plugin", "1.0RC1-20070505.090015-3", "", "jar" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//POM
artifact =
dao.createArtifact( "org.jruby.plugins", "jruby-rake-plugin", "1.0RC1-20070505.090015-3", "", "pom" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//JAR
artifact =
dao.createArtifact( "org.jruby.plugins", "jruby-rake-plugin", "1.0RC1-20070506.090132-4", "", "jar" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//POM
artifact =
dao.createArtifact( "org.jruby.plugins", "jruby-rake-plugin", "1.0RC1-20070506.090132-4", "", "pom" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT, getRepoConfiguration() );

// assert if removed from repo
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.sha1" ).exists() );

assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.sha1" ).exists() );

// assert if not removed from repo
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.sha1" ).exists() );

assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.sha1" ).exists() );
}

/**
* Test if the artifact to be processed is a pom
*
* @throws Exception
*/
public void testIfAPomWasFound()
throws Exception
{
// Create it
ArchivaArtifact artifact =
dao.createArtifact( "org.codehaus.castor", "castor-anttasks", "1.1.2-20070427.065136-1", "", "jar" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//POM
artifact = dao.createArtifact( "org.codehaus.castor", "castor-anttasks", "1.1.2-20070427.065136-1", "", "pom" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//JAR
artifact = dao.createArtifact( "org.codehaus.castor", "castor-anttasks", "1.1.2-20070615.105019-3", "", "jar" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//POM
artifact = dao.createArtifact( "org.codehaus.castor", "castor-anttasks", "1.1.2-20070615.105019-3", "", "pom" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//JAR
artifact = dao.createArtifact( "org.codehaus.castor", "castor-anttasks", "1.1.2-20070506.163513-2", "", "jar" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );

savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

//POM
artifact = dao.createArtifact( "org.codehaus.castor", "castor-anttasks", "1.1.2-20070506.163513-2", "", "pom" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );

repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM, getRepoConfiguration() );

// assert if removed from repo
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.sha1" ).exists() );

// assert if not removed from repo
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.sha1" ).exists() );

assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.sha1" ).exists() );
}
}

archiva-base/archiva-consumers/archiva-core-consumers/src/test/resources/org/apache/maven/archiva/consumers/core/repository/DaysOldRepositoryPurgeTest.xml (+2 -2)

@@ -26,7 +26,7 @@
<role>org.apache.maven.archiva.consumers.core.repository.RepositoryPurge</role>
<role-hint>days-old</role-hint>
<implementation>org.apache.maven.archiva.consumers.core.repository.DaysOldRepositoryPurge</implementation>
<requirements>
<!--requirements>
<requirement>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
@@ -35,7 +35,7 @@
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
</requirement>
</requirements>
</requirements-->
</component>

<!-- LuceneRepositoryContentIndexFactory -->

archiva-base/archiva-consumers/archiva-core-consumers/src/test/resources/org/apache/maven/archiva/consumers/core/repository/RetentionCountRepositoryPurgeTest.xml (+2 -2)

@@ -26,7 +26,7 @@
<role>org.apache.maven.archiva.consumers.core.repository.RepositoryPurge</role>
<role-hint>retention-count</role-hint>
<implementation>org.apache.maven.archiva.consumers.core.repository.RetentionCountRepositoryPurge</implementation>
<requirements>
<!--requirements>
<requirement>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
@@ -35,7 +35,7 @@
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
</requirement>
</requirements>
</requirements-->
</component>

<!-- LuceneRepositoryContentIndexFactory -->
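
Because the <requirements> blocks are commented out in both test descriptors, the test base class wires the collaborators by hand after the Plexus lookup. A condensed sketch of that pattern, taken from AbstractRepositoryPurgeTest.lookupRepositoryPurge() above, with the "days-old" role hint filled in for illustration:

    // Manual wiring used by the purge tests instead of Plexus requirements.
    RepositoryPurge purge = (RepositoryPurge) lookup( RepositoryPurge.class.getName(), "days-old" );
    purge.setArtifactDao( dao );              // JDO-backed ArtifactDAO created in setUp()
    purge.setRepository( getRepository() );   // test repository under target/test-classes/test-repo/
    purge.setLayout( getLayout() );           // DefaultBidirectionalRepositoryLayout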
