* under the License.
*/
-import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.commons.lang.StringUtils;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
import java.io.FilenameFilter;
-import java.util.ArrayList;
-import java.util.List;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version
*/
public abstract class AbstractRepositoryPurge
implements RepositoryPurge
{
- private ArchivaRepository repository;
+ protected ArchivaRepository repository;
- private BidirectionalRepositoryLayout layout;
+ protected BidirectionalRepositoryLayout layout;
- private RepositoryContentIndex index;
+ protected ArtifactDAO artifactDao;
- private ArtifactDAO artifactDao;
+ public AbstractRepositoryPurge( ArchivaRepository repository,
+ BidirectionalRepositoryLayout layout, ArtifactDAO artifactDao )
+ {
+ this.repository = repository;
+ this.layout = layout;
+ this.artifactDao = artifactDao;
+ }
/**
* Get all files from the directory that matches the specified filename.
* @return
*/
protected File[] getFiles( File dir, String filename )
- throws RepositoryPurgeException
{
FilenameFilter filter = new ArtifactFilenameFilter( filename );
- if ( !dir.isDirectory() )
- {
- System.out.println( "File is not a directory." );
- }
-
File[] files = dir.listFiles( filter );
return files;
}
- public abstract void process( String path, Configuration configuration )
- throws RepositoryPurgeException;
-
/**
* Purge the repo. Update db and index of removed artifacts.
*
* @throws RepositoryIndexException
*/
protected void purge( File[] artifactFiles )
- throws RepositoryIndexException
{
- List records = new ArrayList();
-
for ( int i = 0; i < artifactFiles.length; i++ )
{
artifactFiles[i].delete();
- String[] artifactPathParts = artifactFiles[i].getAbsolutePath().split( getRepository().getUrl().getPath() );
+ String[] artifactPathParts = artifactFiles[i].getAbsolutePath().split( repository.getUrl().getPath() );
String artifactPath = artifactPathParts[artifactPathParts.length - 1];
if ( !artifactPath.toUpperCase().endsWith( "SHA1" ) && !artifactPath.toUpperCase().endsWith( "MD5" ) )
{
- updateDatabase( artifactPath );
+ // intended to be swallowed
+ // continue updating the database for all artifacts
+ try
+ {
+ updateDatabase( artifactPath );
+ }
+ catch ( ArchivaDatabaseException ae )
+ {
+ //@todo determine logging to be used
+ }
+ catch ( LayoutException le )
+ {
+
+ }
}
-
- FileContentRecord record = new FileContentRecord();
- record.setRepositoryId( this.repository.getId() );
- record.setFilename( artifactPath );
- records.add( record );
}
-
- //index.deleteRecords( records );
}
private void updateDatabase( String path )
+ throws ArchivaDatabaseException, LayoutException
{
- try
- {
- ArchivaArtifact artifact = layout.toArtifact( path );
- ArchivaArtifact queriedArtifact = artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(),
- artifact.getVersion(), artifact.getClassifier(),
- artifact.getType() );
-
- artifactDao.deleteArtifact( queriedArtifact );
- }
- catch ( ArchivaDatabaseException ae )
- {
- }
- catch ( LayoutException le )
- {
+ ArchivaArtifact artifact = layout.toArtifact( path );
+ ArchivaArtifact queriedArtifact = artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(),
+ artifact.getVersion(), artifact.getClassifier(),
+ artifact.getType() );
- }
+ artifactDao.deleteArtifact( queriedArtifact );
}
/**
return parts;
}
- public void setRepository( ArchivaRepository repository )
- {
- this.repository = repository;
- }
-
- public void setLayout( BidirectionalRepositoryLayout layout )
- {
- this.layout = layout;
- }
-
- public void setIndex( RepositoryContentIndex index )
- {
- this.index = index;
- }
-
- public void setArtifactDao( ArtifactDAO artifactDao )
- {
- this.artifactDao = artifactDao;
- }
-
- protected ArchivaRepository getRepository()
- {
- return repository;
- }
-
- protected BidirectionalRepositoryLayout getLayout()
- {
- return layout;
- }
-
}
--- /dev/null
+package org.apache.maven.archiva.consumers.core.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
+import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
+import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
+import org.apache.maven.archiva.repository.layout.FilenameParts;
+import org.apache.maven.archiva.repository.layout.LayoutException;
+import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.common.utils.VersionUtil;
+import org.apache.maven.archiva.common.utils.VersionComparator;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.model.ArchivaRepositoryMetadata;
+import org.apache.maven.archiva.model.ArchivaRepository;
+import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.commons.io.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.Collections;
+import java.util.Date;
+import java.util.ArrayList;
+import java.util.Iterator;
+
+/**
+ * M2 implementation for cleaning up the released snapshots.
+ *
+ * When a scanned artifact is a snapshot and its artifactId directory also
+ * contains at least one version that sorts higher (per
+ * {@link VersionComparator}), the whole snapshot version directory is purged
+ * (files deleted, database updated via {@link AbstractRepositoryPurge#purge})
+ * and every maven-metadata file in the artifactId directory is rewritten to
+ * reflect the remaining versions.
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version
+ */
+public class CleanupReleasedSnapshotsRepositoryPurge
+    extends AbstractRepositoryPurge
+{
+    /** Suffix identifying a snapshot base version (e.g. 1.0-SNAPSHOT). */
+    public static final String SNAPSHOT = "-SNAPSHOT";
+
+    /** Reader used to load existing maven-metadata files before rewriting them. */
+    private RepositoryMetadataReader metadataReader;
+
+    /**
+     * @param repository  the managed repository being purged
+     * @param layout      layout used to map paths to artifacts
+     * @param artifactDao DAO used to delete purged artifacts from the database
+     */
+    public CleanupReleasedSnapshotsRepositoryPurge( ArchivaRepository repository, BidirectionalRepositoryLayout layout,
+                                                    ArtifactDAO artifactDao )
+    {
+        super( repository, layout, artifactDao );
+        metadataReader = new RepositoryMetadataReader();
+    }
+
+    /**
+     * Inspect one scanned artifact path and, if it is a snapshot that has a
+     * higher (presumably released) version sibling, purge its version
+     * directory and refresh the artifactId-level metadata.
+     *
+     * @param path repository-relative path of the scanned artifact
+     * @throws RepositoryPurgeException on layout or I/O failure.
+     *         NOTE(review): only {@code getMessage()} is propagated — the
+     *         original cause/stack trace is dropped; confirm whether
+     *         RepositoryPurgeException offers a (String, Throwable) constructor.
+     */
+    public void process( String path )
+        throws RepositoryPurgeException
+    {
+        try
+        {
+            // Resolve the path against the repository root on the local filesystem.
+            File artifactFile = new File( repository.getUrl().getPath(), path );
+
+            // Artifact may already have been removed by an earlier purge pass.
+            if ( !artifactFile.exists() )
+            {
+                return;
+            }
+
+            FilenameParts parts = getFilenameParts( path );
+
+            if ( VersionUtil.isSnapshot( parts.version ) )
+            {
+                // version
+                File versionDir = artifactFile.getParentFile();
+
+                // artifactID - scan for other versions
+                File artifactIdDir = versionDir.getParentFile();
+
+                boolean updated = false;
+
+                List versions = getVersionsInDir( artifactIdDir );
+                Collections.sort( versions, VersionComparator.getInstance() );
+                for ( int j = 0; j < versions.size(); j++ )
+                {
+                    String version = (String) versions.get( j );
+
+                    // One sibling version sorting above this snapshot is enough
+                    // to consider it superseded; stop scanning after the first hit.
+                    if ( VersionComparator.getInstance().compare( version, versionDir.getName() ) > 0 )
+                    {
+                        // Delete files individually first so the database is updated,
+                        // then remove the (now mostly empty) directory tree.
+                        purge( versionDir.listFiles() );
+
+                        FileUtils.deleteDirectory( versionDir );
+
+                        updated = true;
+
+                        break;
+                    }
+                }
+
+                if ( updated )
+                {
+                    updateMetadata( artifactIdDir );
+                }
+            }
+        }
+        catch ( LayoutException le )
+        {
+            throw new RepositoryPurgeException( le.getMessage() );
+        }
+        catch ( IOException ie )
+        {
+            throw new RepositoryPurgeException( ie.getMessage() );
+        }
+    }
+
+    /**
+     * Rewrite every maven-metadata file under the artifactId directory with the
+     * versions that remain after the purge (checksum files are skipped; the
+     * writer presumably regenerates them — TODO confirm).
+     *
+     * NOTE(review): assumes at least one version directory remains, otherwise
+     * {@code availableVersions.get( size - 1 )} would throw
+     * IndexOutOfBoundsException. The caller only invokes this after finding a
+     * higher sibling version, so the list should be non-empty — confirm.
+     *
+     * @param artifactIdDir directory holding the version dirs and metadata files
+     * @throws RepositoryPurgeException declared but not currently thrown here
+     */
+    private void updateMetadata( File artifactIdDir )
+        throws RepositoryPurgeException
+    {
+
+        File[] metadataFiles = getFiles( artifactIdDir, "maven-metadata" );
+        List availableVersions = getVersionsInDir( artifactIdDir );
+
+        Collections.sort( availableVersions );
+
+        String latestReleased = getLatestReleased( availableVersions );
+        for ( int i = 0; i < metadataFiles.length; i++ )
+        {
+            // Skip checksum companions; only the metadata XML itself is rewritten.
+            if ( !( metadataFiles[i].getName().toUpperCase() ).endsWith( "SHA1" ) &&
+                !( metadataFiles[i].getName().toUpperCase() ).endsWith( "MD5" ) )
+            {
+                try
+                {
+                    Date lastUpdated = new Date();
+                    ArchivaRepositoryMetadata metadata = metadataReader.read( metadataFiles[i] );
+                    metadata.setAvailableVersions( availableVersions );
+                    metadata.setLatestVersion( (String) availableVersions.get( availableVersions.size() - 1 ) );
+                    metadata.setReleasedVersion( latestReleased );
+                    metadata.setLastUpdatedTimestamp( lastUpdated );
+                    metadata.setLastUpdated( Long.toString( lastUpdated.getTime() ) );
+
+                    RepositoryMetadataWriter.write( metadata, metadataFiles[i] );
+                }
+                catch ( RepositoryMetadataException rme )
+                {
+                    // continue updating other metadata files even if there is an exception
+                    // @todo log to console
+                }
+            }
+        }
+    }
+
+    /**
+     * Return the highest version in the (ascending-sorted) list whose base
+     * version is not a snapshot, or the empty string when every version is a
+     * snapshot.
+     *
+     * @param availableVersions versions in ascending order
+     * @return latest released version, or {@code ""} if none
+     */
+    private String getLatestReleased( List availableVersions )
+    {
+        // Walk from highest to lowest so the first non-snapshot hit is the latest.
+        List reversedOrder = new ArrayList( availableVersions );
+        Collections.reverse( reversedOrder );
+        String latestReleased = "";
+
+        for ( Iterator iter = reversedOrder.iterator(); iter.hasNext(); )
+        {
+            String version = (String) iter.next();
+            if ( !VersionUtil.getBaseVersion( version ).endsWith( SNAPSHOT ) )
+            {
+                latestReleased = version;
+                return latestReleased;
+            }
+        }
+
+        return latestReleased;
+    }
+
+    /**
+     * List the entries of the artifactId directory that look like versions,
+     * filtering out metadata files and other non-version entries via
+     * {@link VersionUtil#isVersion}.
+     *
+     * @param artifactIdDir directory to scan
+     * @return mutable list of version directory names (unsorted)
+     */
+    private List getVersionsInDir( File artifactIdDir )
+    {
+        String[] versionsAndMore = artifactIdDir.list();
+        List versions = new ArrayList();
+        for ( int j = 0; j < versionsAndMore.length; j++ )
+        {
+            if ( VersionUtil.isVersion( versionsAndMore[j] ) )
+            {
+                versions.add( versionsAndMore[j] );
+            }
+        }
+
+        return versions;
+    }
+}
import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException;
+import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.model.ArchivaRepository;
+import org.apache.maven.archiva.database.ArtifactDAO;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.io.File;
/**
+ * Purge repository for snapshots older than the specified days in the repository configuration.
+ *
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
- * @plexus.component role="org.apache.maven.archiva.consumers.core.repository.RepositoryPurge"
- * role-hint="days-old"
- * instantiation-strategy="per-lookup"
+ * @version
*/
public class DaysOldRepositoryPurge
extends AbstractRepositoryPurge
-{
+{
+ private RepositoryConfiguration repoConfig;
- public void process( String path, Configuration configuration )
+ public DaysOldRepositoryPurge( ArchivaRepository repository,
+ BidirectionalRepositoryLayout layout, ArtifactDAO artifactDao,
+ RepositoryConfiguration repoConfig)
+ {
+ super( repository, layout, artifactDao );
+ this.repoConfig = repoConfig;
+ }
+
+ public void process( String path )
throws RepositoryPurgeException
{
try
{
- File artifactFile = new File( getRepository().getUrl().getPath(), path );
+ File artifactFile = new File( repository.getUrl().getPath(), path );
if( !artifactFile.exists() )
{
if ( VersionUtil.isSnapshot( parts.version ) )
{
- RepositoryConfiguration repoConfig = configuration.findRepositoryById( getRepository().getId() );
-
- Calendar olderThanThisDate = new GregorianCalendar();
+ Calendar olderThanThisDate = Calendar.getInstance();
olderThanThisDate.add( Calendar.DATE, ( -1 * repoConfig.getDaysOlder() ) );
if ( artifactFile.lastModified() < olderThanThisDate.getTimeInMillis() )
{
throw new RepositoryPurgeException( le.getMessage() );
}
- catch ( RepositoryIndexException re )
- {
- throw new RepositoryPurgeException( re.getMessage() );
- }
}
}
+++ /dev/null
-package org.apache.maven.archiva.consumers.core.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.model.ArchivaRepositoryMetadata;
-import org.apache.maven.archiva.repository.layout.FilenameParts;
-import org.apache.maven.archiva.repository.layout.LayoutException;
-import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
-import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
-import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.common.utils.VersionUtil;
-import org.apache.maven.archiva.common.utils.VersionComparator;
-import org.apache.commons.io.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.Date;
-
-/**
- * M2 implementation for cleaning up the released snapshots.
- *
- * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
- */
-public class DefaultCleanupReleasedSnapshots
- extends AbstractRepositoryPurge
-{
- public static final String SNAPSHOT = "-SNAPSHOT";
-
- private RepositoryMetadataReader metadataReader;
-
- public DefaultCleanupReleasedSnapshots()
- {
- metadataReader = new RepositoryMetadataReader();
- }
-
- public void process( String path, Configuration configuration )
- throws RepositoryPurgeException
- {
- try
- {
- File artifactFile = new File( getRepository().getUrl().getPath(), path );
-
- if ( !artifactFile.exists() )
- {
- return;
- }
-
- FilenameParts parts = getFilenameParts( path );
-
- if ( VersionUtil.isSnapshot( parts.version ) )
- {
- // version
- File versionDir = artifactFile.getParentFile();
-
- // artifactID - scan for other versions
- File artifactIdDir = versionDir.getParentFile();
-
- boolean updated = false;
-
- List versions = getVersionsInDir( artifactIdDir );
- Collections.sort( versions, VersionComparator.getInstance() );
- for ( int j = 0; j < versions.size(); j++ )
- {
- String version = (String) versions.get( j );
-
- if ( VersionComparator.getInstance().compare( version, versionDir.getName() ) > 0 )
- {
- purge( versionDir.listFiles() );
-
- FileUtils.deleteDirectory( versionDir );
-
- updated = true;
-
- break;
- }
- }
-
- if ( updated )
- {
- updateMetadata( artifactIdDir );
- }
- }
- }
- catch ( LayoutException le )
- {
- throw new RepositoryPurgeException( le.getMessage() );
- }
- catch ( IOException ie )
- {
- throw new RepositoryPurgeException( ie.getMessage() );
- }
- catch ( RepositoryIndexException re )
- {
- throw new RepositoryPurgeException( re.getMessage() );
- }
- }
-
- private void updateMetadata( File artifactIdDir )
- throws RepositoryPurgeException
- {
-
- File[] metadataFiles = getFiles( artifactIdDir, "maven-metadata" );
- List availableVersions = getVersionsInDir( artifactIdDir );
-
- Collections.sort( availableVersions );
-
- String latestReleased = getLatestReleased( availableVersions );
- for ( int i = 0; i < metadataFiles.length; i++ )
- {
- if ( !( metadataFiles[i].getName().toUpperCase() ).endsWith( "SHA1" ) &&
- !( metadataFiles[i].getName().toUpperCase() ).endsWith( "MD5" ) )
- {
- try
- {
- Date lastUpdated = new Date();
- ArchivaRepositoryMetadata metadata = metadataReader.read( metadataFiles[i] );
- metadata.setAvailableVersions( availableVersions );
- metadata.setLatestVersion( (String) availableVersions.get( availableVersions.size() - 1 ) );
- metadata.setReleasedVersion( latestReleased );
- metadata.setLastUpdatedTimestamp( lastUpdated );
- metadata.setLastUpdated( Long.toString( lastUpdated.getTime() ) );
-
- RepositoryMetadataWriter.write( metadata, metadataFiles[i] );
- }
- catch ( RepositoryMetadataException rme )
- {
- System.out.println( "Error updating metadata " + metadataFiles[i].getAbsoluteFile() );
- }
- }
- }
- }
-
- private String getLatestReleased( List availableVersions )
- {
- List reversedOrder = new ArrayList( availableVersions );
- Collections.reverse( reversedOrder );
- String latestReleased = "";
-
- for ( Iterator iter = reversedOrder.iterator(); iter.hasNext(); )
- {
- String version = (String) iter.next();
- if ( !VersionUtil.getBaseVersion( version ).endsWith( SNAPSHOT ) )
- {
- latestReleased = version;
- return latestReleased;
- }
- }
-
- return latestReleased;
- }
-
- private List getVersionsInDir( File artifactIdDir )
- {
- String[] versionsAndMore = artifactIdDir.list();
- List versions = new ArrayList();
- for ( int j = 0; j < versionsAndMore.length; j++ )
- {
- if ( VersionUtil.isVersion( versionsAndMore[j] ) )
- {
- versions.add( versionsAndMore[j] );
- }
- }
-
- return versions;
- }
-
-}
* Perform checking on artifact for repository purge
*
* @param path path to the scanned artifact
- * @param configuration the configuration for the repository currently being scanned
*/
- public void process( String path, Configuration configuration )
+ public void process( String path )
throws RepositoryPurgeException;
-
- /**
- * Set the repository to be purged
- *
- * @param repository
- */
- public void setRepository( ArchivaRepository repository );
-
- /**
- * Set the layout of the repository to be purged
- *
- * @param layout
- */
- public void setLayout( BidirectionalRepositoryLayout layout );
-
- /**
- * Set the index of the repository
- *
- * @param index
- */
- public void setIndex( RepositoryContentIndex index );
-
- /**
- * Set the artifact dao used for updating the database of the changes in the repo
- *
- * @param artifactDao
- */
- public void setArtifactDao( ArtifactDAO artifactDao );
-
+
}
*/
private BidirectionalRepositoryLayoutFactory layoutFactory;
- /**
- * @plexus.requirement role-hint="lucene"
- */
- private RepositoryContentIndexFactory indexFactory;
-
/**
* @plexus.requirement role-hint="jdo"
*/
- private ArchivaDAO dao;
+ private ArchivaDAO dao;
/**
* @plexus.requirement
private ArchivaRepository repository;
- private BidirectionalRepositoryLayout repositoryLayout;
-
private List includes = new ArrayList();
private List propertyNameTriggers = new ArrayList();
private RepositoryPurge repoPurge;
- private RepositoryContentIndex index;
-
private RepositoryPurge cleanUp;
public String getId()
public void beginScan( ArchivaRepository repository )
throws ConsumerException
{
+ BidirectionalRepositoryLayout repositoryLayout = null;
+
if ( !repository.isManaged() )
{
throw new ConsumerException( "Consumer requires managed repository." );
}
this.repository = repository;
- this.index = indexFactory.createFileContentIndex( repository );
try
{
- this.repositoryLayout = layoutFactory.getLayout( this.repository.getLayoutType() );
+ repositoryLayout = layoutFactory.getLayout( repository.getLayoutType() );
}
catch ( LayoutException e )
{
"Unable to initialize consumer due to unknown repository layout: " + e.getMessage(), e );
}
- // @todo handle better injection of RepositoryPurge
RepositoryConfiguration repoConfig = configuration.getConfiguration().findRepositoryById( repository.getId() );
if ( repoConfig.getDaysOlder() != 0 )
{
- repoPurge = new DaysOldRepositoryPurge();
+ repoPurge = new DaysOldRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO(), repoConfig );
}
else
{
- repoPurge = new RetentionCountRepositoryPurge();
+ repoPurge =
+ new RetentionCountRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO(), repoConfig );
}
-
- repoPurge.setLayout( repositoryLayout );
- repoPurge.setRepository( repository );
- repoPurge.setIndex( index );
- repoPurge.setArtifactDao( dao.getArtifactDAO() );
-
- cleanUp = new DefaultCleanupReleasedSnapshots();
- cleanUp.setRepository( repository );
- cleanUp.setLayout( repositoryLayout );
- cleanUp.setArtifactDao( dao.getArtifactDAO() );
- cleanUp.setIndex( index );
+
+ cleanUp = new CleanupReleasedSnapshotsRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO() );
}
public void processFile( String path )
{
try
{
- RepositoryConfiguration repoConfig = configuration.getConfiguration().findRepositoryById( repository.getId() );
- if( repoConfig.isDeleteReleasedSnapshots() )
+ RepositoryConfiguration repoConfig =
+ configuration.getConfiguration().findRepositoryById( repository.getId() );
+ if ( repoConfig.isDeleteReleasedSnapshots() )
{
- cleanUp.process( path, configuration.getConfiguration() );
+ cleanUp.process( path );
}
- repoPurge.process( path, configuration.getConfiguration() );
+ repoPurge.process( path );
}
catch ( RepositoryPurgeException rpe )
{
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException;
+import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.model.ArchivaRepository;
+import org.apache.maven.archiva.database.ArtifactDAO;
import java.io.File;
import java.util.List;
import java.util.Collections;
/**
+ * Purge the repository by retention count. Retain only the specified number of snapshots.
+ *
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
- * @plexus.component role="org.apache.maven.archiva.consumers.core.repository.RepositoryPurge"
- * role-hint="retention-count"
- * instantiation-strategy="per-lookup"
+ * @version
*/
public class RetentionCountRepositoryPurge
extends AbstractRepositoryPurge
{
- public void process( String path, Configuration configuration )
+ private RepositoryConfiguration repoConfig;
+
+ public RetentionCountRepositoryPurge( ArchivaRepository repository,
+ BidirectionalRepositoryLayout layout, ArtifactDAO artifactDao,
+ RepositoryConfiguration repoConfig )
+ {
+ super( repository, layout, artifactDao );
+ this.repoConfig = repoConfig;
+ }
+
+ public void process( String path )
throws RepositoryPurgeException
{
try
{
- File artifactFile = new File( getRepository().getUrl().getPath(), path );
+ File artifactFile = new File( repository.getUrl().getPath(), path );
if( !artifactFile.exists() )
{
FilenameParts parts = getFilenameParts( path );
if ( VersionUtil.isSnapshot( parts.version ) )
- {
- RepositoryConfiguration repoConfig = configuration.findRepositoryById( getRepository().getId() );
+ {
File parentDir = artifactFile.getParentFile();
if ( parentDir.isDirectory() )
{
throw new RepositoryPurgeException( le.getMessage() );
}
- catch ( RepositoryIndexException re )
- {
- throw new RepositoryPurgeException( re.getMessage() );
- }
}
private List getUniqueVersions( File[] files )
public static final int TEST_DAYS_OLDER = 30;
- private Configuration config;
+ private RepositoryConfiguration config;
private ArchivaRepository repo;
dao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
}
- public void lookupRepositoryPurge( String role )
- throws Exception
- {
- repoPurge = (RepositoryPurge) lookup( RepositoryPurge.class.getName(), role );
-
- repoPurge.setArtifactDao( dao );
-
- repoPurge.setRepository( getRepository() );
-
- repoPurge.setLayout( getLayout() );
- }
-
- public Configuration getRepoConfiguration()
+ public RepositoryConfiguration getRepoConfiguration()
{
if ( config == null )
{
- config = new Configuration();
+ config = new RepositoryConfiguration();
}
- RepositoryConfiguration repoConfig = new RepositoryConfiguration();
- repoConfig.setId( TEST_REPO_ID );
- repoConfig.setName( TEST_REPO_NAME );
- repoConfig.setDaysOlder( TEST_DAYS_OLDER );
- repoConfig.setUrl( TEST_REPO_URL );
- repoConfig.setReleases( true );
- repoConfig.setSnapshots( true );
- repoConfig.setRetentionCount( TEST_RETENTION_COUNT );
-
- List repos = new ArrayList();
- repos.add( repoConfig );
-
- config.setRepositories( repos );
+ config.setId( TEST_REPO_ID );
+ config.setName( TEST_REPO_NAME );
+ config.setDaysOlder( TEST_DAYS_OLDER );
+ config.setUrl( TEST_REPO_URL );
+ config.setReleases( true );
+ config.setSnapshots( true );
+ config.setRetentionCount( TEST_RETENTION_COUNT );
return config;
}
{
super.setUp();
- lookupRepositoryPurge( "days-old" );
+ repoPurge = new DaysOldRepositoryPurge( getRepository(), getLayout(), dao, getRepoConfiguration() );
}
private void setLastModified()
public void testIfAJarIsFound()
throws Exception
{
- // Create it
- ArchivaArtifact artifact =
- dao.createArtifact( "org.apache.maven.plugins", "maven-install-plugin", "2.2-SNAPSHOT", "", "jar" );
- assertNotNull( artifact );
-
- artifact.getModel().setLastModified( new Date() );
- artifact.getModel().setOrigin( "test" );
-
- // Save it.
- ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
- assertNotNull( savedArtifact );
-
+ populateDb();
+
setLastModified();
- repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT, getRepoConfiguration() );
+ repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT );
assertTrue( true );
super.tearDown();
repoPurge = null;
}
+
+ private void populateDb()
+ throws Exception
+ {
+ // Create it
+ ArchivaArtifact artifact =
+ dao.createArtifact( "org.apache.maven.plugins", "maven-install-plugin", "2.2-SNAPSHOT", "", "jar" );
+ assertNotNull( artifact );
+
+ artifact.getModel().setLastModified( new Date() );
+ artifact.getModel().setOrigin( "test" );
+
+ // Save it.
+ ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
+ assertNotNull( savedArtifact );
+
+ //POM
+ artifact =
+ dao.createArtifact( "org.apache.maven.plugins", "maven-install-plugin", "2.2-SNAPSHOT", "", "pom" );
+ assertNotNull( artifact );
+ artifact.getModel().setLastModified( new Date() );
+ artifact.getModel().setOrigin( "test" );
+ savedArtifact = dao.saveArtifact( artifact );
+ assertNotNull( savedArtifact );
+ }
}
{
super.setUp();
- lookupRepositoryPurge( "retention-count" );
+ repoPurge = new RetentionCountRepositoryPurge( getRepository(), getLayout(), dao, getRepoConfiguration() );
}
/**
*/
public void testIfAJarWasFound()
throws Exception
+ {
+ populateIfJarWasFoundDb();
+
+ repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT );
+
+ // assert that the two oldest snapshot builds were removed from the repository
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.md5" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.sha1" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.md5" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.sha1" ).exists() );
+
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.md5" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.sha1" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.md5" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.sha1" ).exists() );
+
+ // assert that the most recent snapshot builds were retained in the repository
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.md5" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.sha1" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.md5" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.sha1" ).exists() );
+
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.md5" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.sha1" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.md5" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.sha1" ).exists() );
+ }
+
+ /**
+ * Test if the artifact to be processed is a pom
+ *
+ * @throws Exception
+ */
+ public void testIfAPomWasFound()
+ throws Exception
+ {
+ populateIfPomWasFoundDb();
+
+ repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM );
+
+ // assert that the oldest snapshot build was removed from the repository
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.md5" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.sha1" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.md5" ).exists() );
+ assertFalse( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.sha1" ).exists() );
+
+ // assert that the newer snapshot builds were retained in the repository
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.md5" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.sha1" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.md5" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.sha1" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.md5" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.sha1" ).exists() );
+
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.md5" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.sha1" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.md5" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.sha1" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.md5" ).exists() );
+ assertTrue( new File(
+ "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.sha1" ).exists() );
+ }
+
+ public void populateIfJarWasFoundDb()
+ throws Exception
{
// Create it
ArchivaArtifact artifact =
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );
-
- repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT, getRepoConfiguration() );
-
- // assert if removed from repo
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.md5" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.sha1" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.md5" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.sha1" ).exists() );
-
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.md5" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.sha1" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.md5" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.sha1" ).exists() );
-
- // assert if not removed from repo
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.md5" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.sha1" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.md5" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.sha1" ).exists() );
-
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.md5" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.sha1" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.md5" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.sha1" ).exists() );
}
- /**
- * Test if the artifact to be processed is a pom
- *
- * @throws Exception
- */
- public void testIfAPomWasFound()
+ public void populateIfPomWasFoundDb()
throws Exception
{
// Create it
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );
-
- repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM, getRepoConfiguration() );
-
- // assert if removed from repo
- assertFalse( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.md5" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.sha1" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.md5" ).exists() );
- assertFalse( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.sha1" ).exists() );
-
- // assert if not removed from repo
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.md5" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.sha1" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.md5" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.sha1" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.md5" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.sha1" ).exists() );
-
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.md5" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.sha1" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.md5" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.sha1" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.md5" ).exists() );
- assertTrue( new File(
- "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.sha1" ).exists() );
}
}
<component-set>
<components>
-
- <!-- DaysOldRepositoryPurge -->
- <component>
- <role>org.apache.maven.archiva.consumers.core.repository.RepositoryPurge</role>
- <role-hint>days-old</role-hint>
- <implementation>org.apache.maven.archiva.consumers.core.repository.DaysOldRepositoryPurge</implementation>
- <!--requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.ArchivaDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene</role-hint>
- </requirement>
- </requirements-->
- </component>
-
- <!-- LuceneRepositoryContentIndexFactory -->
- <component>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene</role-hint>
- <implementation>org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndexFactory</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint></role-hint>
- </requirement>
- </requirements>
- </component>
-
- <!-- ArchivaConfiguration -->
- <component>
- <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint>test-configuration</role-hint>
- <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
- <requirements>
- <requirement>
- <role>org.codehaus.plexus.registry.Registry</role>
- <role-hint>configured</role-hint>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.codehaus.plexus.registry.Registry</role>
- <role-hint>configured</role-hint>
- <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
- <configuration>
- <properties>
- <system/>
- <xml fileName="${basedir}/src/test/conf/repository-manager.xml"
- config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
- </properties>
- </configuration>
- </component>
-
- <!-- ArchivaDAO -->
- <component>
- <role>org.apache.maven.archiva.database.ArchivaDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoArchivaDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ArtifactDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ProjectModelDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.RepositoryDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- </requirements>
- </component>
-
+
<!-- JdoAccess -->
<component>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
</requirements>
</component>
- <!-- ArtifactDAO -->
- <component>
- <role>org.apache.maven.archiva.database.ArtifactDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoArtifactDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
- <!-- ProjectModelDAO -->
- <component>
- <role>org.apache.maven.archiva.database.ProjectModelDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoProjectModelDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
- <!-- RepositoryDAO -->
- <component>
- <role>org.apache.maven.archiva.database.RepositoryDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
- <!-- RepositoryProblemDAO -->
- <component>
- <role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryProblemDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
<!-- JDO Factory -->
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value>
</property>
- <!--property>
- <name>org.jpox.transactionIsolation</name>
- <value>READ_COMMITTED</value>
- </property>
- <property>
- <name>org.jpox.poid.transactionIsolation</name>
- <value>READ_COMMITTED</value>
- </property>
- <property>
- <name>org.jpox.autoCreateSchema</name>
- <value>true</value>
- </property>
- <property>
- <name>javax.jdo.option.RetainValues</name>
- <value>true</value>
- </property>
- <property>
- <name>javax.jdo.option.RestoreValues</name>
- <value>true</value>
- </property>
- <property>
- <name>org.jpox.validateTables</name>
- <value>true</value>
- </property>
- <property>
- <name>org.jpox.validateColumns</name>
- <value>true</value>
- </property>
- <property>
- <name>org.jpox.validateConstraints</name>
- <value>true</value>
- </property-->
</otherProperties>
</configuration>
</component>
-->
<component-set>
- <components>
-
- <!-- RetentionCountRepositoryPurge -->
- <component>
- <role>org.apache.maven.archiva.consumers.core.repository.RepositoryPurge</role>
- <role-hint>retention-count</role-hint>
- <implementation>org.apache.maven.archiva.consumers.core.repository.RetentionCountRepositoryPurge</implementation>
- <!--requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.ArchivaDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene</role-hint>
- </requirement>
- </requirements-->
- </component>
-
- <!-- LuceneRepositoryContentIndexFactory -->
- <component>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene</role-hint>
- <implementation>org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndexFactory</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint></role-hint>
- </requirement>
- </requirements>
- </component>
-
- <!-- ArchivaConfiguration -->
- <component>
- <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint>test-configuration</role-hint>
- <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
- <requirements>
- <requirement>
- <role>org.codehaus.plexus.registry.Registry</role>
- <role-hint>configured</role-hint>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.codehaus.plexus.registry.Registry</role>
- <role-hint>configured</role-hint>
- <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
- <configuration>
- <properties>
- <system/>
- <xml fileName="${basedir}/src/test/conf/repository-manager.xml"
- config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
- </properties>
- </configuration>
- </component>
-
- <!-- ArchivaDAO -->
- <component>
- <role>org.apache.maven.archiva.database.ArchivaDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoArchivaDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ArtifactDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ProjectModelDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.RepositoryDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- </requirements>
- </component>
+ <components>
<!-- JdoAccess -->
<component>
</requirements>
</component>
- <!-- ArtifactDAO -->
- <component>
- <role>org.apache.maven.archiva.database.ArtifactDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoArtifactDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
- <!-- ProjectModelDAO -->
- <component>
- <role>org.apache.maven.archiva.database.ProjectModelDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoProjectModelDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
- <!-- RepositoryDAO -->
- <component>
- <role>org.apache.maven.archiva.database.RepositoryDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
- <!-- RepositoryProblemDAO -->
- <component>
- <role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryProblemDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
<!-- JDO Factory -->
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value>
</property>
- <property>
- <name>org.jpox.transactionIsolation</name>
- <value>READ_COMMITTED</value>
- </property>
- <property>
- <name>org.jpox.poid.transactionIsolation</name>
- <value>READ_COMMITTED</value>
- </property>
- <property>
- <name>org.jpox.autoCreateSchema</name>
- <value>true</value>
- </property>
- <property>
- <name>javax.jdo.option.RetainValues</name>
- <value>true</value>
- </property>
- <property>
- <name>javax.jdo.option.RestoreValues</name>
- <value>true</value>
- </property>
- <property>
- <name>org.jpox.validateTables</name>
- <value>true</value>
- </property>
- <property>
- <name>org.jpox.validateColumns</name>
- <value>true</value>
- </property>
- <property>
- <name>org.jpox.validateConstraints</name>
- <value>true</value>
- </property>
</otherProperties>
</configuration>
</component>