<name>Archiva Consumers :: Core Consumers</name>
<dependencies>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-database</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-indexer</artifactId>
- </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-configuration</artifactId>
* under the License.
*/
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
-import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
-import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
-import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-import org.apache.maven.archiva.model.ArtifactReference;
-import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.apache.maven.archiva.repository.layout.LayoutException;
-
import java.io.File;
import java.io.FilenameFilter;
-import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import java.util.Set;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.model.ArtifactReference;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+
/**
* Base class for all repository purge tasks.
*
public abstract class AbstractRepositoryPurge
implements RepositoryPurge
{
- protected ManagedRepositoryContent repository;
-
- protected ArtifactDAO artifactDao;
-
- private Map<String, RepositoryContentIndex> indices;
+ protected final ManagedRepositoryContent repository;
+
+ protected final List<RepositoryListener> listeners;
- public AbstractRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
- Map<String, RepositoryContentIndex> indices )
+ public AbstractRepositoryPurge( ManagedRepositoryContent repository, List<RepositoryListener> listeners )
{
this.repository = repository;
- this.artifactDao = artifactDao;
- this.indices = indices;
+ this.listeners = listeners;
}
/**
{
if( references != null && !references.isEmpty() )
{
- List<LuceneRepositoryContentRecord> fileContentRecords = new ArrayList<LuceneRepositoryContentRecord>();
- List<LuceneRepositoryContentRecord> hashcodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
- List<LuceneRepositoryContentRecord> bytecodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
-
for ( ArtifactReference reference : references )
{
File artifactFile = repository.toFile( reference );
new ArchivaArtifact( reference.getGroupId(), reference.getArtifactId(), reference.getVersion(),
reference.getClassifier(), reference.getType() );
- FileContentRecord fileContentRecord = new FileContentRecord();
- fileContentRecord.setFilename( repository.toPath( artifact ) );
- fileContentRecords.add( fileContentRecord );
-
- HashcodesRecord hashcodesRecord = new HashcodesRecord();
- hashcodesRecord.setArtifact( artifact );
- hashcodeRecords.add( hashcodesRecord );
-
- BytecodeRecord bytecodeRecord = new BytecodeRecord();
- bytecodeRecord.setArtifact( artifact );
- bytecodeRecords.add( bytecodeRecord );
-
+ for ( RepositoryListener listener : listeners )
+ {
+ listener.deleteArtifact( repository, artifact );
+ }
+
// TODO: this needs to be logged
artifactFile.delete();
purgeSupportFiles( artifactFile );
-
- // intended to be swallowed
- // continue updating the database for all artifacts
- try
- {
- String artifactPath = toRelativePath( artifactFile );
- updateDatabase( artifactPath );
- }
- catch ( ArchivaDatabaseException ae )
- {
- // TODO: determine logging to be used
- }
- catch ( LayoutException le )
- {
- // Ignore
- }
- }
-
- try
- {
- updateIndices( fileContentRecords, hashcodeRecords, bytecodeRecords );
- }
- catch ( RepositoryIndexException e )
- {
- // Ignore
}
}
}
}
}
}
-
- private void updateDatabase( String path )
- throws ArchivaDatabaseException, LayoutException
- {
- ArtifactReference artifact = repository.toArtifactReference( path );
- ArchivaArtifact queriedArtifact =
- artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
- artifact.getClassifier(), artifact.getType() );
-
- artifactDao.deleteArtifact( queriedArtifact );
-
- // TODO [MRM-37]: re-run the database consumers to clean up
- }
-
- private void updateIndices( List<LuceneRepositoryContentRecord> fileContentRecords,
- List<LuceneRepositoryContentRecord> hashcodeRecords,
- List<LuceneRepositoryContentRecord> bytecodeRecords )
- throws RepositoryIndexException
- {
- RepositoryContentIndex index = indices.get( "filecontent" );
- index.deleteRecords( fileContentRecords );
-
- index = indices.get( "hashcodes" );
- index.deleteRecords( hashcodeRecords );
-
- index = indices.get( "bytecode" );
- index.deleteRecords( bytecodeRecords );
- }
}
* under the License.
*/
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
+import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.ProjectReference;
import org.apache.maven.archiva.model.VersionedReference;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.RepositoryNotFoundException;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.metadata.MetadataTools;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
/**
* <p>
* This will look in a single managed repository, and purge any snapshots that are present
private RepositoryContentFactory repoContentFactory;
- public CleanupReleasedSnapshotsRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
- MetadataTools metadataTools, Map<String, RepositoryContentIndex> indices,
- ArchivaConfiguration archivaConfig, RepositoryContentFactory repoContentFactory )
+ public CleanupReleasedSnapshotsRepositoryPurge( ManagedRepositoryContent repository, MetadataTools metadataTools,
+ ArchivaConfiguration archivaConfig,
+ RepositoryContentFactory repoContentFactory,
+ List<RepositoryListener> listeners )
{
- super( repository, artifactDao, indices );
+ super( repository, listeners );
this.metadataTools = metadataTools;
this.archivaConfig = archivaConfig;
this.repoContentFactory = repoContentFactory;
return;
}
- ArtifactReference artifact = repository.toArtifactReference( path );
+ ArtifactReference artifactRef = repository.toArtifactReference( path );
- if ( !VersionUtil.isSnapshot( artifact.getVersion() ) )
+ if ( !VersionUtil.isSnapshot( artifactRef.getVersion() ) )
{
// Nothing to do here, not a snapshot, skip it.
return;
}
ProjectReference reference = new ProjectReference();
- reference.setGroupId( artifact.getGroupId() );
- reference.setArtifactId( artifact.getArtifactId() );
+ reference.setGroupId( artifactRef.getGroupId() );
+ reference.setArtifactId( artifactRef.getArtifactId() );
// Gather up all of the versions.
List<String> allVersions = new ArrayList<String>( repository.getVersions( reference ) );
boolean needsMetadataUpdate = false;
VersionedReference versionRef = new VersionedReference();
- versionRef.setGroupId( artifact.getGroupId() );
- versionRef.setArtifactId( artifact.getArtifactId() );
+ versionRef.setGroupId( artifactRef.getGroupId() );
+ versionRef.setArtifactId( artifactRef.getArtifactId() );
+
+ ArchivaArtifact artifact =
+ new ArchivaArtifact( artifactRef.getGroupId(), artifactRef.getArtifactId(), artifactRef.getVersion(),
+ artifactRef.getClassifier(), artifactRef.getType() );
for ( String version : snapshotVersions )
{
{
versionRef.setVersion( version );
repository.deleteVersion( versionRef );
+
+ for ( RepositoryListener listener : listeners )
+ {
+ listener.deleteArtifact( repository, artifact );
+ }
+
needsMetadataUpdate = true;
}
}
if ( needsMetadataUpdate )
{
- updateMetadata( artifact );
+ updateMetadata( artifactRef );
}
}
catch ( LayoutException e )
* under the License.
*/
-import org.apache.commons.lang.time.DateUtils;
-import org.apache.maven.archiva.common.utils.VersionComparator;
-import org.apache.maven.archiva.common.utils.VersionUtil;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.model.ArtifactReference;
-import org.apache.maven.archiva.model.VersionedReference;
-import org.apache.maven.archiva.repository.ContentNotFoundException;
-import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.apache.maven.archiva.repository.layout.LayoutException;
-
import java.io.File;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
+import org.apache.commons.lang.time.DateUtils;
+import org.apache.maven.archiva.common.utils.VersionComparator;
+import org.apache.maven.archiva.common.utils.VersionUtil;
+import org.apache.maven.archiva.model.ArtifactReference;
+import org.apache.maven.archiva.model.VersionedReference;
+import org.apache.maven.archiva.repository.ContentNotFoundException;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+import org.apache.maven.archiva.repository.layout.LayoutException;
+
/**
* Purge from repository all snapshots older than the specified days in the repository configuration.
*
private int retentionCount;
- public DaysOldRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao, int daysOlder,
- int retentionCount, Map<String, RepositoryContentIndex> indices )
+ public DaysOldRepositoryPurge( ManagedRepositoryContent repository, int daysOlder,
+ int retentionCount, List<RepositoryListener> listeners )
{
- super( repository, artifactDao, indices );
+ super( repository, listeners );
this.daysOlder = daysOlder;
this.retentionCount = retentionCount;
timestampParser = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
* under the License.
*/
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
-import org.apache.maven.archiva.database.ArchivaDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.RepositoryNotFoundException;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.repository.metadata.MetadataTools;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.Collections;
/**
* Consumer for removing old snapshots in the repository based on the criteria
*/
private ArchivaConfiguration configuration;
- /**
- * @plexus.requirement role-hint="jdo"
- */
- private ArchivaDAO dao;
-
/**
* @plexus.requirement
*/
private List<String> includes = new ArrayList<String>();
- private List<String> propertyNameTriggers = new ArrayList<String>();
-
private RepositoryPurge repoPurge;
private RepositoryPurge cleanUp;
private boolean deleteReleasedSnapshots;
-
- /**
- * @plexus.requirement role-hint="lucene"
- */
- private RepositoryContentIndexFactory indexFactory;
+ /** @plexus.requirement role="org.apache.maven.archiva.repository.events.RepositoryListener" */
+ private List<RepositoryListener> listeners = Collections.emptyList();
+
public String getId()
{
return this.id;
{
try
{
- Map<String, RepositoryContentIndex> indices = new HashMap<String, RepositoryContentIndex>();
- indices.put( "bytecode", indexFactory.createBytecodeIndex( repository ) );
- indices.put( "hashcodes", indexFactory.createHashcodeIndex( repository ) );
- indices.put( "filecontent", indexFactory.createFileContentIndex( repository ) );
-
ManagedRepositoryContent repositoryContent = repositoryFactory.getManagedRepositoryContent( repository
.getId() );
if ( repository.getDaysOlder() != 0 )
{
- repoPurge = new DaysOldRepositoryPurge( repositoryContent, dao.getArtifactDAO(), repository
- .getDaysOlder(), repository.getRetentionCount(), indices );
+ repoPurge = new DaysOldRepositoryPurge( repositoryContent, repository.getDaysOlder(),
+ repository.getRetentionCount(), listeners );
}
else
{
- repoPurge = new RetentionCountRepositoryPurge( repositoryContent, dao.getArtifactDAO(), repository
- .getRetentionCount(), indices );
+ repoPurge = new RetentionCountRepositoryPurge( repositoryContent, repository.getRetentionCount(),
+ listeners );
}
- cleanUp = new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, dao.getArtifactDAO(),
- metadataTools, indices, configuration, repositoryFactory );
+ cleanUp =
+ new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, metadataTools, configuration,
+ repositoryFactory, listeners );
deleteReleasedSnapshots = repository.isDeleteReleasedSnapshots();
}
// we need to check all files for deletion, especially if not modified
return true;
}
-
- public void setRepositoryContentIndexFactory( RepositoryContentIndexFactory indexFactory )
- {
- this.indexFactory = indexFactory;
- }
}
* under the License.
*/
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.VersionedReference;
import org.apache.maven.archiva.repository.ContentNotFoundException;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.repository.layout.LayoutException;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
/**
* Purge the repository by retention count. Retain only the specified number of snapshots.
*
{
private int retentionCount;
- public RetentionCountRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
- int retentionCount, Map<String, RepositoryContentIndex> indices )
+ public RetentionCountRepositoryPurge( ManagedRepositoryContent repository,
+ int retentionCount, List<RepositoryListener> listeners )
{
- super( repository, artifactDao, indices );
+ super( repository, listeners );
this.retentionCount = retentionCount;
}
* under the License.
*/
+import java.io.File;
+import java.io.IOException;
+
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
-import org.codehaus.plexus.jdo.JdoFactory;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
-import org.jpox.SchemaTool;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.Properties;
-import java.util.Map.Entry;
-
-import javax.jdo.PersistenceManager;
-import javax.jdo.PersistenceManagerFactory;
+import org.easymock.MockControl;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
private ManagedRepositoryContent repo;
- protected ArtifactDAO dao;
-
protected RepositoryPurge repoPurge;
+ protected MockControl listenerControl;
+
+ protected RepositoryListener listener;
+
+ @Override
protected void setUp()
throws Exception
{
super.setUp();
+
+ listenerControl = MockControl.createControl( RepositoryListener.class );
- DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
- assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );
-
- jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
-
- jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
- jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:testdb" ) );
-
- jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
-
- jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
-
- jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
-
- jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
-
- jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
-
- jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
-
- jdoFactory.setProperty( "javax.jdo.option.RestoreValues", "true" );
-
- // jdoFactory.setProperty( "org.jpox.autoCreateColumns", "true" );
-
- jdoFactory.setProperty( "org.jpox.validateTables", "true" );
-
- jdoFactory.setProperty( "org.jpox.validateColumns", "true" );
-
- jdoFactory.setProperty( "org.jpox.validateConstraints", "true" );
-
- Properties properties = jdoFactory.getProperties();
-
- for ( Entry<Object, Object> entry : properties.entrySet() )
- {
- System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
- }
-
- URL jdoFileUrls[] = new URL[] { getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" ) };
-
- if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
- {
- fail( "Unable to process test " + getName() + " - missing package.jdo." );
- }
-
- File propsFile = null; // intentional
- boolean verbose = true;
-
- SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose );
- SchemaTool.createSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose, null );
-
- PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
-
- assertNotNull( pmf );
-
- PersistenceManager pm = pmf.getPersistenceManager();
-
- pm.close();
-
- dao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
+ listener = (RepositoryListener) listenerControl.getMock();
}
@Override
return repo;
}
- protected void populateDb( String groupId, String artifactId, List<String> versions )
- throws ArchivaDatabaseException
- {
- for ( String version : versions )
- {
- ArchivaArtifact artifact = dao.createArtifact( groupId, artifactId, version, "", "jar" );
- assertNotNull( artifact );
- artifact.getModel().setLastModified( new Date() );
- artifact.getModel().setOrigin( "test" );
- ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
- assertNotNull( savedArtifact );
-
- //POM
- artifact = dao.createArtifact( groupId, artifactId, version, "", "pom" );
- assertNotNull( artifact );
- artifact.getModel().setLastModified( new Date() );
- artifact.getModel().setOrigin( "test" );
- savedArtifact = dao.saveArtifact( artifact );
- assertNotNull( savedArtifact );
- }
- }
-
protected void assertDeleted( String path )
{
assertFalse( "File should have been deleted: " + path, new File( path ).exists() );
return testDir.getAbsolutePath();
}
-
- protected void populateDbForTestOrderOfDeletion()
- throws Exception
+
+ protected ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
{
- List<String> versions = new ArrayList<String>();
- versions.add( "1.1.2-20070427.065136-1" );
- versions.add( "1.1.2-20070506.163513-2" );
- versions.add( "1.1.2-20070615.105019-3" );
-
- populateDb( "org.apache.maven.plugins", "maven-assembly-plugin", versions );
+ return new ArchivaArtifact( groupId, artifactId, version, null, type );
}
}
* under the License.
*/
+import java.io.File;
+import java.util.Collections;
+
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.repository.metadata.MetadataTools;
import org.custommonkey.xmlunit.XMLAssert;
+import org.easymock.MockControl;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
extends AbstractRepositoryPurgeTest
{
private ArchivaConfiguration archivaConfiguration;
+
+ private MockControl listenerControl;
public static final String PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO =
"org/apache/archiva/released-artifact-in-diff-repo/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.jar";
public static final String PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO = "org/apache/maven/plugins/maven-source-plugin/2.0.3-SNAPSHOT/maven-source-plugin-2.0.3-SNAPSHOT.jar";
public static final String PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO = "org/apache/maven/plugins/maven-plugin-plugin/2.3-SNAPSHOT/maven-plugin-plugin-2.3-SNAPSHOT.jar";
+
+ private RepositoryListener listener;
protected void setUp()
throws Exception
{
- super.setUp();
-
- Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
- map.put( "filecontent", new LuceneRepositoryContentIndexStub() );
- map.put( "hashcodes", new LuceneRepositoryContentIndexStub() );
- map.put( "bytecode", new LuceneRepositoryContentIndexStub() );
+ super.setUp();
MetadataTools metadataTools = (MetadataTools) lookup( MetadataTools.class );
RepositoryContentFactory factory = (RepositoryContentFactory) lookup( RepositoryContentFactory.class, "cleanup-released-snapshots");
archivaConfiguration =
(ArchivaConfiguration) lookup( ArchivaConfiguration.class, "cleanup-released-snapshots" );
-
+
+ listenerControl = MockControl.createControl( RepositoryListener.class );
+
+ listener = (RepositoryListener) listenerControl.getMock();
repoPurge =
- new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), dao, metadataTools, map, archivaConfiguration, factory );
+ new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), metadataTools, archivaConfiguration, factory,
+ Collections.singletonList( listener ) );
}
public void testReleasedSnapshotsExistsInSameRepo()
throws Exception
{
-
Configuration config = archivaConfiguration.getConfiguration();
config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
- populateReleasedSnapshotsTest();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-plugin-plugin",
+ "2.3-SNAPSHOT", "maven-plugin" ) );
+ listenerControl.replay();
+
repoPurge.process( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
+
+ listenerControl.verify();
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-plugin-plugin";
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
config.addManagedRepository( getRepoConfiguration( RELEASES_TEST_REPO_ID, RELEASES_TEST_REPO_NAME ) );
- populateReleasedSnapshotsTestInDiffRepo();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.archiva",
+ "released-artifact-in-diff-repo", "1.0-SNAPSHOT",
+ "jar" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO );
+ listenerControl.verify();
+
String projectRoot = repoRoot + "/org/apache/archiva/released-artifact-in-diff-repo";
// check if the snapshot was removed
config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
- populateHigherSnapshotExistsTest();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts - no deletions
+ listenerControl.replay();
+
repoPurge.process( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO );
+
+ listenerControl.verify();
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-source-plugin";
"//metadata/versioning/versions/version", metadataXml );
XMLAssert.assertXpathEvaluatesTo( "20070427033345", "//metadata/versioning/lastUpdated", metadataXml );
}
-
- private void populateReleasedSnapshotsTest()
- throws ArchivaDatabaseException
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "2.3-SNAPSHOT" );
-
- populateDb( "org.apache.maven.plugins", "maven-plugin-plugin", versions );
- }
-
- private void populateHigherSnapshotExistsTest()
- throws Exception
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "2.0.3-SNAPSHOT" );
-
- populateDb( "org.apache.maven.plugins", "maven-source-plugin", versions );
- }
-
- private void populateReleasedSnapshotsTestInDiffRepo()
- throws ArchivaDatabaseException
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "1.0-SNAPSHOT" );
-
- populateDb( "org.apache.archiva", "released-artifact-in-diff-repo", versions );
- }
-
}
* under the License.
*/
-import org.apache.commons.lang.time.DateUtils;
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
-import java.util.HashMap;
+import java.util.Collections;
import java.util.List;
-import java.util.Map;
+
+import org.apache.commons.lang.time.DateUtils;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
public class DaysOldRepositoryPurgeTest
extends AbstractRepositoryPurgeTest
{
-
- private Map<String, RepositoryContentIndex> map;
-
private static final String[] extensions =
new String[] { "-5.jar", "-5.pom", "-6.jar", "-6.pom", "-7.jar", "-7.pom" };
private String sec;
- protected void setUp()
- throws Exception
- {
- super.setUp();
- }
-
private void setLastModified( String dirPath, long lastModified )
{
File dir = new File( dirPath );
public void testByLastModified()
throws Exception
{
- map = new HashMap<String, RepositoryContentIndex>();
- map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
repoPurge =
- new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+ new DaysOldRepositoryPurge( getRepository(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+ Collections.singletonList( listener ) );
String repoRoot = prepareTestRepos();
setLastModified( projectRoot + "/2.2-SNAPSHOT/", 1179382029 );
- populateDbForTestByLastModified();
-
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-install-plugin",
+ "2.2-SNAPSHOT", "maven-plugin" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-install-plugin",
+ "2.2-SNAPSHOT", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT );
+
+ listenerControl.verify();
assertDeleted( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.jar" );
assertDeleted( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.jar.md5" );
public void testOrderOfDeletion()
throws Exception
{
- map = new HashMap<String, RepositoryContentIndex>();
- map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
repoPurge =
- new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+ new DaysOldRepositoryPurge( getRepository(), getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+ Collections.singletonList( listener ) );
String repoRoot = prepareTestRepos();
setLastModified( projectRoot + "/1.1.2-SNAPSHOT/", 1179382029 );
- populateDbForTestOrderOfDeletion();
-
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
+ "1.1.2-20070427.065136-1", "maven-plugin" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
+ "1.1.2-20070427.065136-1", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );
+ listenerControl.verify();
+
assertDeleted( projectRoot + "/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
assertDeleted( projectRoot + "/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070427.065136-1.jar.sha1" );
assertDeleted( projectRoot + "/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070427.065136-1.jar.md5" );
public void testMetadataDrivenSnapshots()
throws Exception
{
- map = new HashMap<String, RepositoryContentIndex>();
- map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
repoPurge =
- new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+ new DaysOldRepositoryPurge( getRepository(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+ Collections.singletonList( listener ) );
String repoRoot = prepareTestRepos();
versions.add( "1.4.3-" + year + mon + day + "." + hr + min + sec + "-7" );
versions.add( "1.4.3-SNAPSHOT" );
- populateDb( "org.codehaus.plexus", "plexus-utils", versions );
-
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.plexus", "plexus-utils",
+ "1.4.3-20070113.163208-4", "jar" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.plexus", "plexus-utils",
+ "1.4.3-20070113.163208-4", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_BY_DAYS_OLD_METADATA_DRIVEN_ARTIFACT );
+ listenerControl.verify();
+
// this should be deleted since the filename version (timestamp) is older than
// 100 days even if the last modified date was <100 days ago
assertDeleted( versionRoot + "/plexus-utils-1.4.3-20070113.163208-4.jar" );
super.tearDown();
repoPurge = null;
}
-
- private void populateDbForTestByLastModified()
- throws Exception
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "2.2-20061118.060401-2" );
- versions.add( "2.2-20070513.034619-5" );
- versions.add( "2.2-SNAPSHOT" );
-
- populateDb( "org.apache.maven.plugins", "maven-install-plugin", versions );
- }
}
* under the License.
*/
+import java.io.File;
+
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.common.utils.BaseFile;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexFactoryStub;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.repository.scanner.functors.ConsumerWantsFilePredicate;
import org.custommonkey.xmlunit.XMLAssert;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
*/
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
"repo-purge-consumer-by-retention-count" );
- LuceneRepositoryContentIndexFactoryStub indexFactory = new LuceneRepositoryContentIndexFactoryStub();
- indexFactory.setExpectedRecordsSize( 2 );
-
- ( (RepositoryPurgeConsumer) repoPurgeConsumer ).setRepositoryContentIndexFactory( indexFactory );
-
- populateDbForRetentionCountTest();
-
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDaysOlder( 0 ); // force days older off to allow retention count purge to execute.
repoConfiguration.setRetentionCount( TEST_RETENTION_COUNT );
public void testConsumerByDaysOld()
throws Exception
{
- populateDbForDaysOldTest();
-
KnownRepositoryContentConsumer repoPurgeConsumer =
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
"repo-purge-consumer-by-days-old" );
- LuceneRepositoryContentIndexFactoryStub indexFactory = new LuceneRepositoryContentIndexFactoryStub();
- indexFactory.setExpectedRecordsSize( 2 );
-
- ( (RepositoryPurgeConsumer) repoPurgeConsumer ).setRepositoryContentIndexFactory( indexFactory );
-
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDaysOlder( TEST_DAYS_OLDER );
addRepoToConfiguration( "days-old", repoConfiguration );
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
"repo-purge-consumer-by-retention-count" );
- populateDbForReleasedSnapshotsTest();
-
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDeleteReleasedSnapshots( false ); // Set to NOT delete released snapshots.
addRepoToConfiguration( "retention-count", repoConfiguration );
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
"repo-purge-consumer-by-days-old" );
- populateDbForReleasedSnapshotsTest();
-
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDeleteReleasedSnapshots( true );
addRepoToConfiguration( "days-old", repoConfiguration );
"//metadata/versioning/versions/version", metadataXml );
XMLAssert.assertXpathEvaluatesTo( "20070315032817", "//metadata/versioning/lastUpdated", metadataXml );
}
-
- public void populateDbForRetentionCountTest()
- throws ArchivaDatabaseException
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "1.0RC1-20070504.153317-1" );
- versions.add( "1.0RC1-20070504.160758-2" );
- versions.add( "1.0RC1-20070505.090015-3" );
- versions.add( "1.0RC1-20070506.090132-4" );
-
- populateDb( "org.jruby.plugins", "jruby-rake-plugin", versions );
- }
-
- private void populateDbForDaysOldTest()
- throws ArchivaDatabaseException
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "2.2-SNAPSHOT" );
-
- populateDb( "org.apache.maven.plugins", "maven-install-plugin", versions );
- }
-
- public void populateDbForReleasedSnapshotsTest()
- throws ArchivaDatabaseException
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "2.3-SNAPSHOT" );
-
- populateDb( "org.apache.maven.plugins", "maven-plugin-plugin", versions );
- }
}
package org.apache.maven.archiva.consumers.core.repository;
+import java.util.Collections;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* under the License.
*/
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-
/**
* Test RetentionsCountRepositoryPurgeTest
*
{
super.setUp();
- Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
- map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
- repoPurge = new RetentionCountRepositoryPurge( getRepository(), dao,
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+ repoPurge =
+ new RetentionCountRepositoryPurge(
+ getRepository(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+ Collections.singletonList( listener ) );
}
/**
public void testIfAJarWasFound()
throws Exception
{
- populateIfJarWasFoundDb();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
+ "1.0RC1-20070504.153317-1", "jar" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
+ "1.0RC1-20070504.153317-1", "pom" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
+ "1.0RC1-20070504.160758-2", "jar" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
+ "1.0RC1-20070504.160758-2", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT );
+ listenerControl.verify();
+
String versionRoot = repoRoot + "/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT";
// assert if removed from repo
public void testIfAPomWasFound()
throws Exception
{
- populateIfPomWasFoundDb();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.castor", "castor-anttasks",
+ "1.1.2-20070427.065136-1", "jar" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.castor", "castor-anttasks",
+ "1.1.2-20070427.065136-1", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM );
+
+ listenerControl.verify();
String versionRoot = repoRoot + "/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT";
public void testOrderOfDeletion()
throws Exception
{
- populateDbForTestOrderOfDeletion();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
+ "1.1.2-20070427.065136-1", "maven-plugin" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
+ "1.1.2-20070427.065136-1", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );
+ listenerControl.verify();
+
String versionRoot = repoRoot +
"/org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT";
assertExists( versionRoot + "/maven-assembly-plugin-1.1.2-20070615.105019-3.pom.sha1" );
assertExists( versionRoot + "/maven-assembly-plugin-1.1.2-20070615.105019-3.pom.md5" );
}
-
- public void populateIfJarWasFoundDb()
- throws Exception
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "1.0RC1-20070504.153317-1" );
- versions.add( "1.0RC1-20070504.160758-2" );
- versions.add( "1.0RC1-20070505.090015-3" );
- versions.add( "1.0RC1-20070506.090132-4" );
-
- populateDb( "org.jruby.plugins", "jruby-rake-plugin", versions );
- }
-
- public void populateIfPomWasFoundDb()
- throws Exception
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "1.1.2-20070427.065136-1" );
- versions.add( "1.1.2-20070615.105019-3" );
- versions.add( "1.1.2-20070506.163513-2" );
-
- populateDb( "org.codehaus.castor", "castor-anttasks", versions );
- }
}
+++ /dev/null
-package org.apache.maven.archiva.consumers.core.repository.stubs;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
-
-/**
- * LuceneRepositoryContenIndexFactoryStub
- *
- * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
- * @version
- */
-public class LuceneRepositoryContentIndexFactoryStub
- implements RepositoryContentIndexFactory
-{
-
- private int expectedRecordsSize = 0;
-
- public RepositoryContentIndex createBytecodeIndex( ManagedRepositoryConfiguration repository )
- {
- // TODO Auto-generated method stub
- return new LuceneRepositoryContentIndexStub( expectedRecordsSize );
- }
-
- public RepositoryContentIndex createFileContentIndex( ManagedRepositoryConfiguration repository )
- {
- // TODO Auto-generated method stub
- return new LuceneRepositoryContentIndexStub( expectedRecordsSize );
- }
-
- public RepositoryContentIndex createHashcodeIndex( ManagedRepositoryConfiguration repository )
- {
- // TODO Auto-generated method stub
- return new LuceneRepositoryContentIndexStub( expectedRecordsSize );
- }
-
- public void setExpectedRecordsSize( int size )
- {
- expectedRecordsSize = size;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.consumers.core.repository.stubs;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-import java.util.Collection;
-
-import junit.framework.Assert;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.search.Searchable;
-import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
-import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
-import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
-
-/**
- * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
- * @version
- */
-public class LuceneRepositoryContentIndexStub
- implements RepositoryContentIndex
-{
- private int expectedRecordsSize;
-
- public LuceneRepositoryContentIndexStub()
- {
-
- }
-
- public LuceneRepositoryContentIndexStub( int size )
- {
- expectedRecordsSize = size;
- }
-
- public void deleteRecords( Collection records )
- throws RepositoryIndexException
- {
- Assert.assertEquals( expectedRecordsSize, records.size() );
- }
-
- public boolean exists()
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
- return false;
- }
-
- public Collection getAllRecordKeys()
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public Analyzer getAnalyzer()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public LuceneEntryConverter getEntryConverter()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public String getId()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public File getIndexDirectory()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public QueryParser getQueryParser()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public ManagedRepositoryConfiguration getRepository()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public Searchable getSearchable()
- throws RepositoryIndexSearchException
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public void indexRecords( Collection records )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
- public void modifyRecord( LuceneRepositoryContentRecord record )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
- public void modifyRecords( Collection records )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
- public void deleteRecord( LuceneRepositoryContentRecord record )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
-}
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>retention-count</role-hint>
</requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ArchivaDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
<requirement>
<role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
<role-hint>retention-count</role-hint>
<role>org.apache.maven.archiva.configuration.FileTypes</role>
<role-hint>retention-count</role-hint>
</requirement>
- <requirement>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene</role-hint>
- <field-name>indexFactory</field-name>
- </requirement>
</requirements>
<configuration>
<id>repository-purge</id>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>days-old</role-hint>
</requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ArchivaDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
<requirement>
<role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
<role-hint>days-old</role-hint>
<role>org.apache.maven.archiva.configuration.FileTypes</role>
<role-hint>days-old</role-hint>
</requirement>
- <requirement>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene</role-hint>
- <field-name>indexFactory</field-name>
- </requirement>
</requirements>
<configuration>
<id>repository-purge</id>
</requirement>
</requirements>
</component>
-
-
- <!-- DAOs -->
- <component>
- <role>org.apache.maven.archiva.database.ArchivaDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoArchivaDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ArtifactDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ProjectModelDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.apache.maven.archiva.database.ArtifactDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoArtifactDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.apache.maven.archiva.database.ProjectModelDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoProjectModelDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryProblemDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
-
- <!-- JdoAccess -->
- <component>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoAccess</implementation>
- <requirements>
- <requirement>
- <role>org.codehaus.plexus.jdo.JdoFactory</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
- <!-- JDO Factory -->
- <component>
- <role>org.codehaus.plexus.jdo.JdoFactory</role>
- <role-hint>archiva</role-hint>
- <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
- <configuration>
- <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
- <driverName>org.hsqldb.jdbcDriver</driverName>
- <userName>sa</userName>
- <password></password>
- <url>jdbc:hsqldb:mem:testdb</url>
- <otherProperties>
- <property>
- <name>javax.jdo.PersistenceManagerFactoryClass</name>
- <value>org.jpox.PersistenceManagerFactoryImpl</value>
- </property>
- </otherProperties>
- </configuration>
- </component>
-
- <!-- LuceneRepositoryIndexFactory -->
- <component>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene</role-hint>
- <implementation>org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexFactoryStub</implementation>
- </component>
-
</components>
</component-set>
--- /dev/null
+package org.apache.maven.archiva.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
+import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
+import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+
+/**
+ * Process repository management events and respond appropriately.
+ *
+ * @todo creating index instances every time is inefficient, the plugin needs to have a repository context to operate in
+ * @plexus.component role="org.apache.maven.archiva.repository.events.RepositoryListener" role-hint="indexer"
+ */
+public class RepositoryContentIndexEventListener
+ implements RepositoryListener
+{
+ /**
+ * @plexus.requirement role-hint="lucene"
+ */
+ private RepositoryContentIndexFactory indexFactory;
+
+    public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
+    {
+        try
+        {
+            RepositoryContentIndex index = indexFactory.createFileContentIndex( repository.getRepository() );
+            FileContentRecord fileContentRecord = new FileContentRecord();
+            fileContentRecord.setRepositoryId( repository.getRepository().getId() );
+            fileContentRecord.setFilename( repository.toPath( artifact ) );
+            index.deleteRecord( fileContentRecord );
+
+            index = indexFactory.createHashcodeIndex( repository.getRepository() );
+            HashcodesRecord hashcodesRecord = new HashcodesRecord();
+            hashcodesRecord.setArtifact( artifact );
+            // NOTE(review): record identity/repository id is derived from the artifact here — TODO confirm HashcodesRecord API
+            index.deleteRecord( hashcodesRecord );
+
+            index = indexFactory.createBytecodeIndex( repository.getRepository() );
+            BytecodeRecord bytecodeRecord = new BytecodeRecord();
+            bytecodeRecord.setArtifact( artifact );
+            // as above: no stray setRepositoryId on an unrelated record (previous code re-set fileContentRecord by mistake)
+            index.deleteRecord( bytecodeRecord );
+        }
+        catch ( RepositoryIndexException e )
+        {
+            // Ignored deliberately: a failed index update must not abort the purge that raised this event.
+        }
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.search.Hits;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Searcher;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
+import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
+import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
+import org.apache.maven.archiva.indexer.search.BytecodeIndexPopulator;
+import org.apache.maven.archiva.indexer.search.FileContentIndexPopulator;
+import org.apache.maven.archiva.indexer.search.HashcodesIndexPopulator;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+import org.codehaus.plexus.spring.PlexusToSpringUtils;
+
+public class RepositoryContentIndexEventListenerTest
+ extends PlexusInSpringTestCase
+{
+ private static final String TEST_DEFAULT_REPOSITORY_NAME = "Test Default Repository";
+
+ private static final String TEST_DEFAULT_REPO_ID = "test-repo";
+
+ private RepositoryListener listener;
+
+ @Override
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ listener = (RepositoryListener) lookup( RepositoryListener.class.getName(), "indexer" );
+ }
+
+ public void testWiring()
+ {
+ List<RepositoryListener> listeners =
+ PlexusToSpringUtils.lookupList( PlexusToSpringUtils.buildSpringId( RepositoryListener.class ),
+ getApplicationContext() );
+
+ assertEquals( 1, listeners.size() );
+ assertEquals( listener, listeners.get( 0 ) );
+ }
+
+ public ArchivaArtifact createArtifact( String artifactId, String version )
+ {
+ ArchivaArtifact artifact =
+ new ArchivaArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
+ artifact.getModel().setRepositoryId( "testable_repo" );
+ return artifact;
+ }
+
+ public void testDeleteArtifact()
+ throws Exception
+ {
+ RepositoryContentIndexFactory indexFactory =
+ (RepositoryContentIndexFactory) lookup( RepositoryContentIndexFactory.class.getName(), "lucene" );
+
+ File repoDir = new File( getBasedir(), "src/test/managed-repository" );
+
+ assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
+
+ ManagedRepositoryConfiguration repository =
+ createRepository( TEST_DEFAULT_REPO_ID, TEST_DEFAULT_REPOSITORY_NAME, repoDir );
+
+ File indexLocation = new File( "target/index-events-" + getName() + "/" );
+
+ MockConfiguration config = (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );
+
+ ManagedRepositoryConfiguration repoConfig = new ManagedRepositoryConfiguration();
+ repoConfig.setId( TEST_DEFAULT_REPO_ID );
+ repoConfig.setName( TEST_DEFAULT_REPOSITORY_NAME );
+ repoConfig.setLocation( repoDir.getAbsolutePath() );
+ repoConfig.setIndexDir( indexLocation.getAbsolutePath() );
+ repoConfig.setScanned( true );
+
+ if ( indexLocation.exists() )
+ {
+ FileUtils.deleteDirectory( indexLocation );
+ }
+
+ config.getConfiguration().addManagedRepository( repoConfig );
+
+ // Create the (empty) indexes.
+ RepositoryContentIndex indexHashcode = indexFactory.createHashcodeIndex( repository );
+ RepositoryContentIndex indexBytecode = indexFactory.createBytecodeIndex( repository );
+ RepositoryContentIndex indexContents = indexFactory.createFileContentIndex( repository );
+
+ // Now populate them.
+ Map<String, HashcodesRecord> hashcodesMap = new HashcodesIndexPopulator().populate( new File( getBasedir() ) );
+ indexHashcode.indexRecords( hashcodesMap.values() );
+ assertEquals( "Hashcode Key Count", hashcodesMap.size(), indexHashcode.getAllRecordKeys().size() );
+ assertRecordCount( indexHashcode, hashcodesMap.size() );
+
+ Map<String, BytecodeRecord> bytecodeMap = new BytecodeIndexPopulator().populate( new File( getBasedir() ) );
+ indexBytecode.indexRecords( bytecodeMap.values() );
+ assertEquals( "Bytecode Key Count", bytecodeMap.size(), indexBytecode.getAllRecordKeys().size() );
+ assertRecordCount( indexBytecode, bytecodeMap.size() );
+
+ Map<String, FileContentRecord> contentMap = new FileContentIndexPopulator().populate( new File( getBasedir() ) );
+ indexContents.indexRecords( contentMap.values() );
+ assertEquals( "File Content Key Count", contentMap.size(), indexContents.getAllRecordKeys().size() );
+ assertRecordCount( indexContents, contentMap.size() );
+
+ ManagedRepositoryContent repositoryContent =
+ (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
+ repositoryContent.setRepository( repository );
+
+ ArchivaArtifact artifact =
+ new ArchivaArtifact( "org.apache.maven.archiva", "archiva-common", "1.0", "", "jar" );
+ listener.deleteArtifact( repositoryContent, artifact );
+
+ artifact =
+ new ArchivaArtifact( "org.apache.maven.archiva.record", "test-pom", "1.0", "", "pom" );
+ listener.deleteArtifact( repositoryContent, artifact );
+
+ assertRecordCount( indexHashcode, hashcodesMap.size() - 1 );
+ assertRecordCount( indexBytecode, bytecodeMap.size() - 1 );
+ assertRecordCount( indexContents, contentMap.size() - 1 );
+ }
+
+ protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
+ {
+ ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
+ repo.setId( id );
+ repo.setName( name );
+ repo.setLocation( location.getAbsolutePath() );
+ return repo;
+ }
+
+ private void assertRecordCount( RepositoryContentIndex index, int expectedCount )
+ throws Exception
+ {
+ Query query = new MatchAllDocsQuery();
+ Searcher searcher = (Searcher) index.getSearchable();
+ Hits hits = searcher.search( query );
+ assertEquals( "Expected Record Count for " + index.getId(), expectedCount, hits.length() );
+ }
+}
--- /dev/null
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
+ <role-hint>mock</role-hint>
+ <implementation>org.apache.maven.archiva.indexer.MockConfiguration</implementation>
+ </component>
+ <component>
+ <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
+ <role-hint>lucene</role-hint>
+ <implementation>org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndexFactory</implementation>
+ <description>Factory for Lucene repository content index instances.</description>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
+ <role-hint>mock</role-hint>
+ <field-name>configuration</field-name>
+ </requirement>
+ </requirements>
+ </component>
+ </components>
+</component-set>
--- /dev/null
+package org.apache.maven.archiva.repository.events;
+
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Listen to events on the repository. This class is a stopgap
+ * refactoring measure until an event bus is in place to handle
+ * generic events such as these.
+ */
+public interface RepositoryListener
+{
+    /**
+     * Event for the deletion of a given artifact.
+     *
+     * @param repository the repository content the artifact was deleted from.
+     * @param artifact the artifact that was deleted.
+     */
+    void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact );
+}
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
- <dependency>
- <groupId>org.apache.derby</groupId>
- <artifactId>derby</artifactId>
- </dependency>
<!-- TEST DEPS -->
<dependency>
<groupId>org.codehaus.plexus.registry</groupId>
--- /dev/null
+package org.apache.maven.archiva.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+
+/**
+ * Process repository management events and respond appropriately by keeping
+ * the artifact database in sync with the repository contents.
+ *
+ * @plexus.component role="org.apache.maven.archiva.repository.events.RepositoryListener" role-hint="database"
+ */
+public class RepositoryDatabaseEventListener
+    implements RepositoryListener
+{
+    /**
+     * DAO used to locate and remove the matching database record (injected by Plexus).
+     *
+     * @plexus.requirement role-hint="jdo"
+     */
+    private ArtifactDAO artifactDAO;
+
+    /**
+     * Remove the database record corresponding to the deleted artifact, if one exists.
+     * Database failures are deliberately ignored: the repository deletion has already
+     * happened and this listener is best-effort cleanup.
+     */
+    public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
+    {
+        try
+        {
+            // Look the artifact up first so the delete operates on the persistent instance.
+            ArchivaArtifact queriedArtifact =
+                artifactDAO.getArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
+                                         artifact.getClassifier(), artifact.getType() );
+            artifactDAO.deleteArtifact( queriedArtifact );
+        }
+        catch ( ArchivaDatabaseException e )
+        {
+            // ignored - NOTE(review): consider logging at debug level; a failure here
+            // silently leaves a stale record in the database.
+        }
+
+        // TODO [MRM-37]: re-run the database consumers to clean up
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.Date;
+import java.util.List;
+
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+import org.codehaus.plexus.spring.PlexusToSpringUtils;
+
+/**
+ * Tests for the "database" RepositoryListener: verifies the component wiring
+ * and that deleting an artifact through the listener removes its database record.
+ */
+public class RepositoryDatabaseEventListenerTest
+    extends AbstractArchivaDatabaseTestCase
+{
+    // Listener under test, looked up from the container by role-hint "database".
+    private RepositoryListener listener;
+
+    @Override
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        listener = (RepositoryListener) lookup( RepositoryListener.class.getName(), "database" );
+    }
+
+    /**
+     * The container should expose exactly one RepositoryListener, and it should be
+     * the same instance obtained in setUp().
+     */
+    public void testWiring()
+    {
+        List<RepositoryListener> listeners =
+            PlexusToSpringUtils.lookupList( PlexusToSpringUtils.buildSpringId( RepositoryListener.class ),
+                                            getApplicationContext() );
+
+        assertEquals( 1, listeners.size() );
+        assertEquals( listener, listeners.get( 0 ) );
+    }
+
+    /**
+     * Create (but do not save) a test artifact in group org.apache.maven.archiva.test
+     * with an empty classifier, type "jar", and a current last-modified timestamp.
+     */
+    public ArchivaArtifact createArtifact( String artifactId, String version, ArtifactDAO artifactDao )
+    {
+        ArchivaArtifact artifact =
+            artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
+        artifact.getModel().setLastModified( new Date() );
+        artifact.getModel().setRepositoryId( "testable_repo" );
+        return artifact;
+    }
+
+    /**
+     * Deleting an artifact via the listener should remove its record from the
+     * database, so a subsequent lookup fails with ObjectNotFoundException.
+     */
+    public void testDeleteArtifact()
+        throws Exception
+    {
+        ArtifactDAO artifactDao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
+
+        // Setup artifacts in fresh DB.
+        ArchivaArtifact artifact = createArtifact( "test-artifact", "1.0", artifactDao );
+        artifactDao.saveArtifact( artifact );
+
+        assertEquals( artifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null,
+                                                         "jar" ) );
+
+        // Use a detached instance (not the saved one) to show deletion works by coordinates.
+        artifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
+        ManagedRepositoryContent repository =
+            (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
+        listener.deleteArtifact( repository, artifact );
+
+        try
+        {
+            artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
+            fail( "Should not find artifact" );
+        }
+        catch ( ObjectNotFoundException e )
+        {
+            // expected: the record was deleted by the listener
+            assertTrue( true );
+        }
+    }
+}
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-model</artifactId>
</dependency>
+ <!-- TODO: replace with metadata processor -->
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-database</artifactId>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
</dependency>
- <dependency>
- <groupId>org.apache.derby</groupId>
- <artifactId>derby</artifactId>
- <scope>provided</scope>
- </dependency>
<dependency>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
--- /dev/null
+Stage 1: remove use of database and index from core consumers (move implementation into respective database and index modules)
+
+Done!
+
+Stage 2: separate model from JPOX annotated classes, centralising JPOX use in database
+
+* archiva-model to be reviewed, possibly split into a basic model with extensible parts. See metadata proposal
+* add consumer to generate Archiva metadata at same time as database model
+
+Stage 3: add a basic repository querying API for base artifact information and retrieval of metadata
+
+* RSS, browse
+* consider repository-api refactorings
+* at this point, should be able to have functional Archiva without a database
+* note that metadata need not be stored with the artifacts themselves, but will be by default
+
+Stage 4: incorporation of event API
+
* used to centralise handling of the arrival, removal, etc. of files/artifacts in the repository
+* errors should be events as well to avoid exceptions in the logs and instead meaningful handling/reporting
+
+Stage 5: isolate scanning code
+
+* Repository should operate without scanning code, it should push events if enabled
+* better assessment of its progress, performance
+* removal of database / repository scanning duality - all operations are driven by the event bus
+* move some database operations to a housekeeping scheduled task (same for index), make scheduled tasks a listable item based on available plugins
+