<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-layer</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-scheduled</artifactId>
+ </dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-spring</artifactId>
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.sonatype.nexus.index.ArtifactContext;
-import org.sonatype.nexus.index.ArtifactContextProducer;
-import org.sonatype.nexus.index.DefaultArtifactContextProducer;
-import org.sonatype.nexus.index.NexusIndexer;
-import org.sonatype.nexus.index.context.DefaultIndexingContext;
-import org.sonatype.nexus.index.context.IndexingContext;
-import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
-import org.sonatype.nexus.index.IndexerEngine;
import java.io.File;
-import java.io.IOException;
import java.util.List;
/**
* LuceneCleanupRemoveIndexedConsumer
*
+ * Clean up the index of artifacts that no longer exist in the file system (managed repositories).
+ *
* @version $Id$
*/
public class LuceneCleanupRemoveIndexedConsumer
private static final Logger log = LoggerFactory.getLogger( LuceneCleanupRemoveIndexedConsumer.class );
private RepositoryContentFactory repoFactory;
-
- private ArtifactContextProducer artifactContextProducer;
-
- private IndexingContext context;
-
- private IndexerEngine indexerEngine;
- //TODO - deng - use indexerEngine to remove documents instead of directly using the IndexingContext!
+ private ArchivaTaskScheduler scheduler;
- public LuceneCleanupRemoveIndexedConsumer( RepositoryContentFactory repoFactory, IndexerEngine indexerEngine )
+ public LuceneCleanupRemoveIndexedConsumer( RepositoryContentFactory repoFactory, ArchivaTaskScheduler scheduler )
{
this.repoFactory = repoFactory;
- this.indexerEngine = indexerEngine;
- this.artifactContextProducer = new DefaultArtifactContextProducer();
+ this.scheduler = scheduler;
}
-
+
public void beginScan()
{
}
public void completeScan()
{
- /*synchronized( indexerEngine )
- {
- try
- {
- //context.getIndexWriter().close();
-
- //indexerEngine.endIndexing( context );
- //indexer.removeIndexingContext( context, false );
- }
- catch ( IOException e )
- {
- log.error( e.getMessage() );
- }
- } */
+
}
public List<String> getIncludedTypes()
- {
- // TODO Auto-generated method stub
+ {
return null;
}
public void processArchivaArtifact( ArchivaArtifact artifact )
throws ConsumerException
{
- //synchronized( context )
- //{
- // TODO - deng - block this if there is the nexus indexer consumer is executing?
- ManagedRepositoryContent repoContent = null;
-
- try
- {
- repoContent =
- repoFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
- }
- catch ( RepositoryException e )
- {
- throw new ConsumerException( "Can't run index cleanup consumer: " + e.getMessage() );
- }
-
- ManagedRepositoryConfiguration repository = repoContent.getRepository();
- String indexDir = repository.getIndexDir();
- File managedRepository = new File( repository.getLocation() );
- File indexDirectory = null;
-
- if ( indexDir != null && !"".equals( indexDir ) )
- {
- indexDirectory = new File( repository.getIndexDir() );
- }
- else
- {
- indexDirectory = new File( managedRepository, ".indexer" );
- }
-
- try
- {
- context =
- new DefaultIndexingContext( repository.getId(), repository.getId(), managedRepository,
- indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );
- //context =
- // indexer.addIndexingContext( repository.getId(), repository.getId(), managedRepository,
- // indexDirectory, null, null, NexusIndexer.FULL_INDEX );
- context.setSearchable( repository.isScanned() );
- }
- catch ( UnsupportedExistingLuceneIndexException e )
- {
- log.warn( "Unsupported index format.", e );
- return;
- }
- catch ( IOException e )
- {
- log.warn( "Unable to open index at " + indexDirectory.getAbsoluteFile(), e );
- return;
- }
+ ManagedRepositoryContent repoContent = null;
+
+ try
+ {
+ repoContent =
+ repoFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
+ }
+ catch ( RepositoryException e )
+ {
+ throw new ConsumerException( "Can't run index cleanup consumer: " + e.getMessage() );
+ }
- try
+ ManagedRepositoryConfiguration repository = repoContent.getRepository();
+
+ try
+ {
+ File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );
+
+ if ( !artifactFile.exists() )
{
- File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );
+ ArtifactIndexingTask task =
+ TaskCreator.createIndexingTask( repository.getId(), artifactFile, ArtifactIndexingTask.DELETE );
- if ( !artifactFile.exists() )
- {
- ArtifactContext artifactContext =
- artifactContextProducer.getArtifactContext( context, artifactFile );
-
- if ( artifactContext != null )
- {
- //indexerEngine.remove( context, artifactContext );
-
- indexerEngine.remove( context, artifactContext );
-
- context.close( false );
- // hack for deleting documents - indexer engine's remove(...) isn't working for me
- //removeDocuments( artifactContext );
- }
- }
- }
- catch ( IOException e )
- {
- log.error( "Unable to open index at " + indexDirectory.getAbsoluteFile(), e );
- }
- // }
- }
-
- /* private void removeDocuments( ArtifactContext ac )
- throws IOException
- {
- synchronized( indexerEngine )
+ log.debug( "Queueing indexing task '" + task.getName() + "' to remove the artifact from the index." );
+ scheduler.queueIndexingTask( task );
+ }
+
+ }
+ catch ( TaskQueueException e )
{
- IndexWriter w = context.getIndexWriter();
-
- ArtifactInfo ai = ac.getArtifactInfo();
- String uinfo = AbstractIndexCreator.getGAV( ai.groupId, ai.artifactId, ai.version, ai.classifier, ai.packaging );
-
- Document doc = new Document();
- doc.add( new Field( ArtifactInfo.DELETED, uinfo, Field.Store.YES, Field.Index.NO ) );
- doc.add( new Field( ArtifactInfo.LAST_MODIFIED, Long.toString( System.currentTimeMillis() ), Field.Store.YES,
- Field.Index.NO ) );
-
- w.addDocument( doc );
-
- w.deleteDocuments( new Term( ArtifactInfo.UINFO, uinfo ) );
-
- w.commit();
-
- context.updateTimestamp();
+ throw new ConsumerException( e.getMessage() );
}
- }*/
+ }
public String getDescription()
{
{
this.repoFactory = repoFactory;
}
-
- public void setArtifactContextProducer( ArtifactContextProducer artifactContextProducer )
- {
- this.artifactContextProducer = artifactContextProducer;
- }
}
*/
import java.io.File;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
-import java.util.zip.ZipException;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.repository.content.ManagedDefaultRepositoryContent;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.sonatype.nexus.index.ArtifactContext;
-import org.sonatype.nexus.index.ArtifactContextProducer;
-import org.sonatype.nexus.index.DefaultArtifactContextProducer;
-import org.sonatype.nexus.index.NexusIndexer;
-import org.sonatype.nexus.index.context.DefaultIndexingContext;
-import org.sonatype.nexus.index.context.IndexingContext;
-import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
-import org.sonatype.nexus.index.IndexerEngine;
-import org.sonatype.nexus.index.packer.IndexPacker;
-import org.sonatype.nexus.index.packer.IndexPackingRequest;
/**
* Consumer for indexing the repository to provide search and IDE integration features.
{
private static final Logger log = LoggerFactory.getLogger( NexusIndexerConsumer.class );
- private ArtifactContextProducer artifactContextProducer;
-
- private IndexPacker indexPacker;
-
private ManagedDefaultRepositoryContent repositoryContent;
- private IndexingContext context;
-
private File managedRepository;
-
- private IndexerEngine indexerEngine;
-
- //private IndexingContextMap indexingContextMap;
-
- public NexusIndexerConsumer( IndexPacker indexPacker, IndexerEngine indexerEngine )
+
+ private ArchivaTaskScheduler scheduler;
+
+ public NexusIndexerConsumer( ArchivaTaskScheduler scheduler )
{
- this.indexPacker = indexPacker;
- this.indexerEngine = indexerEngine;
- this.artifactContextProducer = new DefaultArtifactContextProducer();
+ this.scheduler = scheduler;
}
- /* public NexusIndexerConsumer( IndexPacker indexPacker, IndexerEngine indexerEngine, IndexingContextMap indexingContextMap )
- {
- this.indexPacker = indexPacker;
- this.indexerEngine = indexerEngine;
- this.indexingContextMap = indexingContextMap;
- this.artifactContextProducer = new DefaultArtifactContextProducer();
- }*/
-
public String getDescription()
{
return "Indexes the repository to provide search and IDE integration features";
public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered )
throws ConsumerException
- {
- //synchronized( context )
- //{
- log.debug( "Begin indexing of repository '" + repository.getId() + "'..");
-
- managedRepository = new File( repository.getLocation() );
- String indexDir = repository.getIndexDir();
-
- File indexDirectory = null;
- if( indexDir != null && !"".equals( indexDir ) )
- {
- indexDirectory = new File( repository.getIndexDir() );
- }
- else
- {
- indexDirectory = new File( managedRepository, ".indexer" );
- }
+ {
+ managedRepository = new File( repository.getLocation() );
- repositoryContent = new ManagedDefaultRepositoryContent();
- repositoryContent.setRepository( repository );
-
- try
- {
- context =
- new DefaultIndexingContext( repository.getId(), repository.getId(), managedRepository,
- indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );
-
- //context = indexingContextMap.addIndexingContext( repository.getId(), repository.getId(), managedRepository,
- // indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );
-
- context.setSearchable( repository.isScanned() );
-
- //indexerEngine.beginIndexing( context );
- }
- catch ( UnsupportedExistingLuceneIndexException e )
- {
- throw new ConsumerException( "Could not create index at " + indexDirectory.getAbsoluteFile(), e );
- }
- catch ( IOException e )
- {
- throw new ConsumerException( "Could not create index at " + indexDirectory.getAbsoluteFile(), e );
- }
- //}
+ repositoryContent = new ManagedDefaultRepositoryContent();
+ repositoryContent.setRepository( repository );
}
public void processFile( String path )
throws ConsumerException
{
- synchronized ( indexerEngine )
+ File artifactFile = new File( managedRepository, path );
+
+ ArtifactIndexingTask task =
+ TaskCreator.createIndexingTask( repositoryContent.getId(), artifactFile, ArtifactIndexingTask.ADD );
+ try
{
- if ( context == null )
- {
- // didn't start correctly, so skip
- return;
- }
-
- File artifactFile = new File( managedRepository, path );
- ArtifactContext artifactContext = artifactContextProducer.getArtifactContext( context, artifactFile );
-
- if ( artifactContext != null )
- {
- try
- {
- indexerEngine.index( context, artifactContext );
- }
- catch ( ZipException e )
- {
- // invalid JAR file
- log.info( e.getMessage() );
- }
- catch ( IOException e )
- {
- throw new ConsumerException( e.getMessage(), e );
- }
- }
+ log.debug( "Queueing indexing task + '" + task.getName() + "' to add or update the artifact in the index." );
+ scheduler.queueIndexingTask( task );
}
+ catch ( TaskQueueException e )
+ {
+ throw new ConsumerException( e.getMessage(), e );
+ }
}
public void completeScan()
{
- //synchronized( context )
- //{
- log.debug( "End indexing of repository '" + context.getRepositoryId() + "'..");
-
- final File indexLocation = new File( managedRepository, ".index" );
- try
- {
- //indexerEngine.endIndexing( context );
-
- IndexPackingRequest request = new IndexPackingRequest( context, indexLocation );
- indexPacker.packIndex( request );
-
- //indexingContextMap.removeIndexingContext( context.getId() );
-
- context.close( false );
- }
- catch ( IOException e )
- {
- log.error( "Could not pack index" + indexLocation.getAbsolutePath(), e );
- }
- //}
+
}
public List<String> getExcludes()
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
- <bean id="indexerConsumer" class="org.apache.archiva.consumers.lucene.NexusIndexerConsumer">
- <constructor-arg ref="indexPacker"/>
- <constructor-arg ref="indexerEngine"/>
- <!-- <constructor-arg ref="indexingContextMap"/> -->
+ <bean id="indexerConsumer" class="org.apache.archiva.consumers.lucene.NexusIndexerConsumer" lazy-init="true">
+ <constructor-arg ref="archivaTaskScheduler"/>
</bean>
- <bean id="indexCleanupConsumer" class="org.apache.archiva.consumers.lucene.LuceneCleanupRemoveIndexedConsumer">
+ <bean id="indexCleanupConsumer" class="org.apache.archiva.consumers.lucene.LuceneCleanupRemoveIndexedConsumer" lazy-init="true">
<constructor-arg ref="repositoryContentFactory"/>
- <constructor-arg ref="indexerEngine"/>
- <!-- <constructor-arg ref="nexusIndexer#archiva"/> -->
+ <constructor-arg ref="archivaTaskScheduler"/>
</bean>
</beans>
\ No newline at end of file
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.content.ManagedDefaultRepositoryContent;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.easymock.MockControl;
import org.easymock.classextension.MockClassControl;
-import org.sonatype.nexus.index.ArtifactContext;
-import org.sonatype.nexus.index.ArtifactContextProducer;
-import org.sonatype.nexus.index.IndexerEngine;
-import org.sonatype.nexus.index.context.IndexingContext;
+/**
+ * LuceneCleanupRemoveIndexedConsumerTest
+ */
public class LuceneCleanupRemoveIndexedConsumerTest
extends PlexusInSpringTestCase
{
private ManagedRepositoryConfiguration repositoryConfig;
- private MockControl indexerEngineControl;
-
- private IndexerEngine indexerEngine;
-
- private MockControl contextProducerControl;
-
- private ArtifactContextProducer artifactContextProducer;
-
- private MockControl acControl;
-
- private ArtifactContext ac;
+ private ArchivaTaskScheduler scheduler;
public void setUp()
throws Exception
{
super.setUp();
- indexerEngineControl = MockControl.createControl( IndexerEngine.class );
- indexerEngineControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
- indexerEngine = (IndexerEngine) indexerEngineControl.getMock();
-
repoFactoryControl = MockClassControl.createControl( RepositoryContentFactory.class );
repoFactory = (RepositoryContentFactory) repoFactoryControl.getMock();
- consumer = new LuceneCleanupRemoveIndexedConsumer( repoFactory, indexerEngine );
+ scheduler = ( ArchivaTaskScheduler ) lookup( ArchivaTaskScheduler.class );
+
+ consumer = new LuceneCleanupRemoveIndexedConsumer( repoFactory, scheduler );
repositoryConfig = new ManagedRepositoryConfiguration();
repositoryConfig.setId( "test-repo" );
repositoryConfig.setSnapshots( false );
repositoryConfig.setReleases( true );
repositoryConfig.setIndexDir( getBasedir() + "/target/test-classes/test-repo/.cleanup-index" );
-
- contextProducerControl = MockControl.createControl( ArtifactContextProducer.class );
- contextProducerControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
- artifactContextProducer = (ArtifactContextProducer) contextProducerControl.getMock();
-
- consumer.setArtifactContextProducer( artifactContextProducer );
-
- acControl = MockClassControl.createControl( ArtifactContext.class );
- ac = (ArtifactContext) acControl.getMock();
}
public void tearDown()
{
ArchivaArtifact artifact =
new ArchivaArtifact( "org.apache.archiva", "archiva-lucene-consumers", "1.2", null, "jar", "test-repo" );
+
ManagedRepositoryContent repoContent = new ManagedDefaultRepositoryContent();
repoContent.setRepository( repositoryConfig );
-
- IndexingContext context = null;
-
- File artifactFile =
- new File( repositoryConfig.getLocation(),
- "org/apache/archiva/archiva-lucene-consumers/1.2/archiva-lucene-consumers-1.2.jar" );
+
+ File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );
+ ArtifactIndexingTask task =
+ TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.DELETE );
+
repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( repositoryConfig.getId() ),
repoContent );
- contextProducerControl.expectAndReturn( artifactContextProducer.getArtifactContext( context, artifactFile ), ac );
- indexerEngine.remove( context, ac );
- indexerEngineControl.setDefaultVoidCallable();
-
- repoFactoryControl.replay();
- contextProducerControl.replay();
- indexerEngineControl.replay();
+ repoFactoryControl.replay();
+
consumer.processArchivaArtifact( artifact );
repoFactoryControl.verify();
- contextProducerControl.verify();
- indexerEngineControl.verify();
+
+ assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
}
public void testProcessArtifactArtifactExists()
ManagedRepositoryContent repoContent = new ManagedDefaultRepositoryContent();
repoContent.setRepository( repositoryConfig );
+ File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );
+
+ ArtifactIndexingTask task =
+ TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.DELETE );
+
repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( repositoryConfig.getId() ),
repoContent );
consumer.processArchivaArtifact( artifact );
repoFactoryControl.verify();
+
+ assertFalse( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
+ }
+
+ @Override
+ protected String getPlexusConfigLocation()
+ {
+ return "/org/apache/archiva/consumers/lucene/LuceneConsumersTest.xml";
}
}
import java.io.File;
import java.util.Calendar;
import java.util.Date;
-import java.util.Set;
import org.apache.commons.io.FileUtils;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
-import org.sonatype.nexus.index.ArtifactInfo;
-import org.sonatype.nexus.index.FlatSearchRequest;
-import org.sonatype.nexus.index.FlatSearchResponse;
-import org.sonatype.nexus.index.NexusIndexer;
-import org.sonatype.nexus.index.context.IndexingContext;
-import org.sonatype.nexus.index.IndexerEngine;
-import org.sonatype.nexus.index.packer.IndexPacker;
+/**
+ * NexusIndexerConsumerTest
+ */
public class NexusIndexerConsumerTest
extends PlexusInSpringTestCase
{
private KnownRepositoryContentConsumer nexusIndexerConsumer;
private ManagedRepositoryConfiguration repositoryConfig;
-
- private NexusIndexer nexusIndexer;
-
- private IndexPacker indexPacker;
-
- private IndexerEngine indexerEngine;
+
+ private ArchivaTaskScheduler scheduler;
@Override
protected void setUp()
{
super.setUp();
- nexusIndexer = ( NexusIndexer ) lookup( NexusIndexer.class );
+ scheduler = ( ArchivaTaskScheduler ) lookup( ArchivaTaskScheduler.class );
- indexPacker = ( IndexPacker ) lookup( IndexPacker.class );
-
- indexerEngine = ( IndexerEngine ) lookup( IndexerEngine.class );
-
- //nexusIndexerConsumer = new NexusIndexerConsumer( nexusIndexer, indexPacker, indexerEngine );
-
- nexusIndexerConsumer = new NexusIndexerConsumer( indexPacker, indexerEngine );
+ nexusIndexerConsumer = new NexusIndexerConsumer( scheduler );
repositoryConfig = new ManagedRepositoryConfiguration();
repositoryConfig.setId( "test-repo" );
public void testIndexerIndexArtifact()
throws Exception
{
+ File artifactFile =
+ new File( repositoryConfig.getLocation(),
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+ ArtifactIndexingTask task =
+ TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+
// begin scan
Date now = Calendar.getInstance().getTime();
nexusIndexerConsumer.beginScan( repositoryConfig, now );
nexusIndexerConsumer.processFile( "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
nexusIndexerConsumer.completeScan();
-
- // search!
- BooleanQuery q = new BooleanQuery();
- q.add( nexusIndexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
- q.add( nexusIndexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
-
- IndexingContext context = nexusIndexer.addIndexingContext( repositoryConfig.getId(), repositoryConfig.getId(), new File( repositoryConfig.getLocation() ),
- new File( repositoryConfig.getLocation(), ".indexer" ), null, null, NexusIndexer.FULL_INDEX );
- context.setSearchable( true );
-
- FlatSearchRequest request = new FlatSearchRequest( q );
- FlatSearchResponse response = nexusIndexer.searchFlat( request );
-
- assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
- assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
- assertEquals( 1, response.getTotalHits() );
-
- Set<ArtifactInfo> results = response.getResults();
-
- ArtifactInfo artifactInfo = (ArtifactInfo) results.iterator().next();
- assertEquals( "org.apache.archiva", artifactInfo.groupId );
- assertEquals( "archiva-index-methods-jar-test", artifactInfo.artifactId );
- assertEquals( "test-repo", artifactInfo.repository );
+
+ assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
}
public void testIndexerArtifactAlreadyIndexed()
throws Exception
{
+ File artifactFile =
+ new File( repositoryConfig.getLocation(),
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+ ArtifactIndexingTask task =
+ TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+
// begin scan
Date now = Calendar.getInstance().getTime();
nexusIndexerConsumer.beginScan( repositoryConfig, now );
nexusIndexerConsumer.processFile( "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
nexusIndexerConsumer.completeScan();
+ assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
+
// scan and index again
now = Calendar.getInstance().getTime();
nexusIndexerConsumer.beginScan( repositoryConfig, now );
nexusIndexerConsumer.processFile( "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
nexusIndexerConsumer.completeScan();
- // search!
- BooleanQuery q = new BooleanQuery();
- q.add( nexusIndexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
- q.add( nexusIndexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
-
- IndexSearcher searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
- TopDocs topDocs = searcher.search( q, null, 10 );
-
- assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
- assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
-
- // should return 2 hits - this will be filtered out by the NexusRespositorySearch when it returns the results!
- assertEquals( 2, topDocs.totalHits );
+ assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
}
public void testIndexerIndexArtifactThenPom()
throws Exception
- {
+ {
+ File artifactFile =
+ new File( repositoryConfig.getLocation(),
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+ ArtifactIndexingTask task =
+ TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+
// begin scan
Date now = Calendar.getInstance().getTime();
nexusIndexerConsumer.beginScan( repositoryConfig, now );
nexusIndexerConsumer.processFile( "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
nexusIndexerConsumer.completeScan();
+ assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
+
+ artifactFile =
+ new File( repositoryConfig.getLocation(),
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/pom.xml" );
+
+ task =
+ TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+
// scan and index again
now = Calendar.getInstance().getTime();
nexusIndexerConsumer.beginScan( repositoryConfig, now );
nexusIndexerConsumer.processFile( "org/apache/archiva/archiva-index-methods-jar-test/1.0/pom.xml" );
nexusIndexerConsumer.completeScan();
- // search!
- BooleanQuery q = new BooleanQuery();
- q.add( nexusIndexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
- q.add( nexusIndexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
-
- IndexSearcher searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
- TopDocs topDocs = searcher.search( q, null, 10 );
-
- assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
- assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
-
- // should return only 1 hit
- assertEquals( 1, topDocs.totalHits );
- }
+ assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
+ }
+
+ @Override
+ protected String getPlexusConfigLocation()
+ {
+ return "/org/apache/archiva/consumers/lucene/LuceneConsumersTest.xml";
+ }
}
--- /dev/null
+package org.apache.archiva.consumers.lucene.stubs;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.maven.archiva.database.ArchivaDAO;
+import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.maven.archiva.database.ProjectModelDAO;
+import org.apache.maven.archiva.database.RepositoryContentStatisticsDAO;
+import org.apache.maven.archiva.database.RepositoryProblemDAO;
+import org.apache.maven.archiva.database.SimpleConstraint;
+
+/**
+ * Using a stub for faster tests! Not really used for the unit tests, just for dependency injection.
+ */
+public class ArchivaDAOStub
+ implements ArchivaDAO
+{
+
+ public ArtifactDAO getArtifactDAO()
+ {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public ProjectModelDAO getProjectModelDAO()
+ {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public RepositoryContentStatisticsDAO getRepositoryContentStatisticsDAO()
+ {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public RepositoryProblemDAO getRepositoryProblemDAO()
+ {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public List<?> query( SimpleConstraint constraint )
+ {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public Object save( Serializable obj )
+ {
+ // TODO Auto-generated method stub
+ return null;
+ }
+}
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.database.ArchivaDAO</role>
+ <role-hint>jdo</role-hint>
+ <implementation>org.apache.archiva.consumers.lucene.stubs.ArchivaDAOStub</implementation>
+ </component>
+ </components>
+</component-set>
\ No newline at end of file
throws RepositorySearchException
{
addIndexingContexts( selectedRepos );
-
+
+ // since upgrade to nexus 2.0.0, query has changed from g:[QUERIED TERM]* to g:*[QUERIED TERM]*
+ // resulting to more wildcard searches so we need to increase max clause count
+ BooleanQuery.setMaxClauseCount( Integer.MAX_VALUE );
BooleanQuery q = new BooleanQuery();
+
if( previousSearchTerms == null || previousSearchTerms.isEmpty() )
{
constructQuery( term, q );
BooleanQuery iQuery = new BooleanQuery();
constructQuery( term, iQuery );
q.add( iQuery, Occur.MUST );
- }
+ }
return search( limits, q );
}
<groupId>org.codehaus.plexus.registry</groupId>
<artifactId>plexus-registry-api</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.sonatype.nexus</groupId>
+ <artifactId>nexus-indexer</artifactId>
+ </dependency>
<!-- Test Dependencies -->
<dependency>
<groupId>org.apache.archiva</groupId>
*/
import org.apache.maven.archiva.common.ArchivaException;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.taskqueue.TaskQueueException;
*/
public final static String ROLE = ArchivaTaskScheduler.class.getName();
+ /**
+ * Checks if there is any repository scanning task queued.
+ *
+ * @return
+ * @throws ArchivaException
+ */
public boolean isProcessingAnyRepositoryTask()
throws ArchivaException;
+ /**
+ * Checks if there is any database scanning task queued.
+ *
+ * @return
+ * @throws ArchivaException
+ */
public boolean isProcessingDatabaseTask()
throws ArchivaException;
+ /**
+ * Checks if a repository scanning task for the specified repository is queued.
+ *
+ * @param repositoryId
+ * @return
+ * @throws ArchivaException
+ */
public boolean isProcessingRepositoryTask( String repositoryId )
throws ArchivaException;
+ /**
+ * Checks if a repository scanning task with the specified name is queued.
+ *
+ * @param taskName
+ * @return
+ * @throws ArchivaException
+ */
public boolean isProcessingRepositoryTaskWithName( String taskName )
throws ArchivaException;
+
+ /**
+ * Checks if an indexing task with the specified name is queued.
+ *
+ * @param taskName
+ * @return
+ * @throws ArchivaException
+ */
+ public boolean isProcessingIndexingTaskWithName( String taskName )
+ throws ArchivaException;
+ /**
+ * Adds the database task to the database scanning queue.
+ *
+ * @param task
+ * @throws TaskQueueException
+ */
public void queueDatabaseTask( DatabaseTask task )
throws TaskQueueException;
+ /**
+ * Adds the repository task to the repo scanning queue.
+ *
+ * @param task
+ * @throws TaskQueueException
+ */
public void queueRepositoryTask( RepositoryTask task )
throws TaskQueueException;
+
+ /**
+ * Adds the indexing task to the indexing queue.
+ *
+ * @param task
+ * @throws TaskQueueException
+ */
+ public void queueIndexingTask( ArtifactIndexingTask task )
+ throws TaskQueueException;
+ /**
+ * Schedules the database tasks using the set cron expression.
+ *
+ * @throws TaskExecutionException
+ */
public void scheduleDatabaseTasks()
throws TaskExecutionException;
import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
-import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskNameSelectionPredicate;
+import org.apache.maven.archiva.scheduled.tasks.ArchivaTaskNameSelectionPredicate;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate;
import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
* @plexus.requirement role-hint="repository-scanning"
*/
private TaskQueue repositoryScanningQueue;
+
+ /**
+ * @plexus.requirement role-hint="indexing"
+ */
+ private TaskQueue indexingQueue;
/**
* @plexus.requirement
public static final String REPOSITORY_JOB = "repository-job";
public static final String REPOSITORY_JOB_TRIGGER = "repository-job-trigger";
+
+ public static final String INDEXING_JOB = "indexing-job";
public static final String CRON_HOURLY = "0 0 * * * ?";
}
}
- @SuppressWarnings("unchecked")
- private boolean isPreviouslyScanned( ManagedRepositoryConfiguration repoConfig )
- {
- List<RepositoryScanStatistics> results =
- (List<RepositoryScanStatistics>) dao.query( new MostRecentRepositoryScanStatistics( repoConfig.getId() ) );
-
- if ( results != null && !results.isEmpty() )
- {
- return true;
- }
-
- return false;
- }
-
- // MRM-848: Pre-configured repository initially appear to be empty
- private synchronized void queueInitialRepoScan( ManagedRepositoryConfiguration repoConfig )
- {
- String repoId = repoConfig.getId();
- RepositoryTask task = TaskCreator.createRepositoryTask( repoId, "initial-scan" );
-
- if ( queuedRepos.contains( repoId ) )
- {
- log.error( "Repository [" + repoId + "] is currently being processed or is already queued." );
- }
- else
- {
- try
- {
- queuedRepos.add( repoConfig.getId() );
- this.queueRepositoryTask( task );
- }
- catch ( TaskQueueException e )
- {
- log.error( "Error occurred while queueing repository [" + repoId + "] task : " + e.getMessage() );
- }
- }
- }
-
- private synchronized void scheduleRepositoryJobs( ManagedRepositoryConfiguration repoConfig )
- throws SchedulerException
- {
- if ( repoConfig.getRefreshCronExpression() == null )
- {
- log.warn( "Skipping job, no cron expression for " + repoConfig.getId() );
- return;
- }
-
- if ( !repoConfig.isScanned() )
- {
- log.warn( "Skipping job, repository scannable has been disabled for " + repoConfig.getId() );
- return;
- }
-
- // get the cron string for these database scanning jobs
- String cronString = repoConfig.getRefreshCronExpression();
-
- CronExpressionValidator cronValidator = new CronExpressionValidator();
- if ( !cronValidator.validate( cronString ) )
- {
- log.warn( "Cron expression [" + cronString + "] for repository [" + repoConfig.getId() +
- "] is invalid. Defaulting to hourly." );
- cronString = CRON_HOURLY;
- }
-
- // setup the unprocessed artifact job
- JobDetail repositoryJob =
- new JobDetail( REPOSITORY_JOB + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP, RepositoryTaskJob.class );
-
- JobDataMap dataMap = new JobDataMap();
- dataMap.put( RepositoryTaskJob.TASK_QUEUE, repositoryScanningQueue );
- dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, ArchivaTask.QUEUE_POLICY_WAIT );
- dataMap.put( RepositoryTaskJob.TASK_REPOSITORY, repoConfig.getId() );
- repositoryJob.setJobDataMap( dataMap );
-
- try
- {
- CronTrigger trigger =
- new CronTrigger( REPOSITORY_JOB_TRIGGER + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP, cronString );
-
- jobs.add( REPOSITORY_JOB + ":" + repoConfig.getId() );
- scheduler.scheduleJob( repositoryJob, trigger );
- }
- catch ( ParseException e )
- {
- log.error(
- "ParseException in repository scanning cron expression, disabling repository scanning for '" +
- repoConfig.getId() + "': " + e.getMessage() );
- }
-
- }
-
- private synchronized void scheduleDatabaseJobs()
- throws SchedulerException
- {
- String cronString = archivaConfiguration.getConfiguration().getDatabaseScanning().getCronExpression();
-
- // setup the unprocessed artifact job
- JobDetail databaseJob = new JobDetail( DATABASE_JOB, DATABASE_SCAN_GROUP, DatabaseTaskJob.class );
-
- JobDataMap dataMap = new JobDataMap();
- dataMap.put( DatabaseTaskJob.TASK_QUEUE, databaseUpdateQueue );
- databaseJob.setJobDataMap( dataMap );
-
- CronExpressionValidator cronValidator = new CronExpressionValidator();
- if ( !cronValidator.validate( cronString ) )
- {
- log.warn(
- "Cron expression [" + cronString + "] for database update is invalid. Defaulting to hourly." );
- cronString = CRON_HOURLY;
- }
-
- try
- {
- CronTrigger trigger = new CronTrigger( DATABASE_JOB_TRIGGER, DATABASE_SCAN_GROUP, cronString );
-
- scheduler.scheduleJob( databaseJob, trigger );
- }
- catch ( ParseException e )
- {
- log.error(
- "ParseException in database scanning cron expression, disabling database scanning: " + e.getMessage() );
- }
-
- }
-
public void stop()
throws StoppingException
{
}
}
+ /**
+ * @see ArchivaTaskScheduler#scheduleDatabaseTasks()
+ */
public void scheduleDatabaseTasks()
throws TaskExecutionException
{
}
}
+ /**
+ * @see ArchivaTaskScheduler#isProcessingAnyRepositoryTask()
+ */
@SuppressWarnings("unchecked")
public boolean isProcessingAnyRepositoryTask()
throws ArchivaException
}
}
+ /**
+ * @see ArchivaTaskScheduler#isProcessingRepositoryTask(String)
+ */
@SuppressWarnings("unchecked")
public boolean isProcessingRepositoryTask( String repositoryId )
throws ArchivaException
}
}
+ /**
+ * @see ArchivaTaskScheduler#isProcessingRepositoryTaskWithName(String)
+ */
@SuppressWarnings("unchecked")
public boolean isProcessingRepositoryTaskWithName( String taskName )
throws ArchivaException
throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
}
- return CollectionUtils.exists( queue, new RepositoryTaskNameSelectionPredicate( taskName ) );
+ return CollectionUtils.exists( queue, new ArchivaTaskNameSelectionPredicate( taskName ) );
}
}
+ /**
+ * @see ArchivaTaskScheduler#isProcessingIndexingTaskWithName(String)
+ */
+ @SuppressWarnings("unchecked")
+ public boolean isProcessingIndexingTaskWithName( String taskName )
+ throws ArchivaException
+ {
+ synchronized( indexingQueue )
+ {
+ List<? extends Task> queue = null;
+
+ try
+ {
+ queue = indexingQueue.getQueueSnapshot();
+ }
+ catch ( TaskQueueException e )
+ {
+ throw new ArchivaException( "Unable to get indexing scanning queue:" + e.getMessage(), e );
+ }
+
+ return CollectionUtils.exists( queue, new ArchivaTaskNameSelectionPredicate( taskName ) );
+ }
+ }
+
+ /**
+ * @see ArchivaTaskScheduler#isProcessingDatabaseTask()
+ */
@SuppressWarnings("unchecked")
public boolean isProcessingDatabaseTask()
throws ArchivaException
return !queue.isEmpty();
}
+ /**
+ * @see ArchivaTaskScheduler#queueRepositoryTask(RepositoryTask)
+ */
public void queueRepositoryTask( RepositoryTask task )
throws TaskQueueException
{
}
}
+ /**
+ * @see ArchivaTaskScheduler#queueDatabaseTask(DatabaseTask)
+ */
public void queueDatabaseTask( DatabaseTask task )
throws TaskQueueException
{
databaseUpdateQueue.put( task );
}
+
+ /**
+ * @see ArchivaTaskScheduler#queueIndexingTask(ArtifactIndexingTask)
+ */
+ public void queueIndexingTask( ArtifactIndexingTask task )
+ throws TaskQueueException
+ {
+ indexingQueue.put( task );
+ }
public void configurationEvent( ConfigurationEvent event )
{
}
}
}
+
+ @SuppressWarnings("unchecked")
+ private boolean isPreviouslyScanned( ManagedRepositoryConfiguration repoConfig )
+ {
+ List<RepositoryScanStatistics> results =
+ (List<RepositoryScanStatistics>) dao.query( new MostRecentRepositoryScanStatistics( repoConfig.getId() ) );
+
+ if ( results != null && !results.isEmpty() )
+ {
+ return true;
+ }
+
+ return false;
+ }
+
+ // MRM-848: Pre-configured repository initially appear to be empty
+ private synchronized void queueInitialRepoScan( ManagedRepositoryConfiguration repoConfig )
+ {
+ String repoId = repoConfig.getId();
+ RepositoryTask task = TaskCreator.createRepositoryTask( repoId, "initial-scan" );
+
+ if ( queuedRepos.contains( repoId ) )
+ {
+ log.error( "Repository [" + repoId + "] is currently being processed or is already queued." );
+ }
+ else
+ {
+ try
+ {
+ queuedRepos.add( repoConfig.getId() );
+ this.queueRepositoryTask( task );
+ }
+ catch ( TaskQueueException e )
+ {
+ log.error( "Error occurred while queueing repository [" + repoId + "] task : " + e.getMessage() );
+ }
+ }
+ }
+
+ private synchronized void scheduleRepositoryJobs( ManagedRepositoryConfiguration repoConfig )
+ throws SchedulerException
+ {
+ if ( repoConfig.getRefreshCronExpression() == null )
+ {
+ log.warn( "Skipping job, no cron expression for " + repoConfig.getId() );
+ return;
+ }
+
+ if ( !repoConfig.isScanned() )
+ {
+ log.warn( "Skipping job, repository scannable has been disabled for " + repoConfig.getId() );
+ return;
+ }
+
+ // get the cron string for these database scanning jobs
+ String cronString = repoConfig.getRefreshCronExpression();
+
+ CronExpressionValidator cronValidator = new CronExpressionValidator();
+ if ( !cronValidator.validate( cronString ) )
+ {
+ log.warn( "Cron expression [" + cronString + "] for repository [" + repoConfig.getId() +
+ "] is invalid. Defaulting to hourly." );
+ cronString = CRON_HOURLY;
+ }
+
+ // setup the unprocessed artifact job
+ JobDetail repositoryJob =
+ new JobDetail( REPOSITORY_JOB + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP, RepositoryTaskJob.class );
+
+ JobDataMap dataMap = new JobDataMap();
+ dataMap.put( RepositoryTaskJob.TASK_QUEUE, repositoryScanningQueue );
+ dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, ArchivaTask.QUEUE_POLICY_WAIT );
+ dataMap.put( RepositoryTaskJob.TASK_REPOSITORY, repoConfig.getId() );
+ repositoryJob.setJobDataMap( dataMap );
+
+ try
+ {
+ CronTrigger trigger =
+ new CronTrigger( REPOSITORY_JOB_TRIGGER + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP, cronString );
+
+ jobs.add( REPOSITORY_JOB + ":" + repoConfig.getId() );
+ scheduler.scheduleJob( repositoryJob, trigger );
+ }
+ catch ( ParseException e )
+ {
+ log.error(
+ "ParseException in repository scanning cron expression, disabling repository scanning for '" +
+ repoConfig.getId() + "': " + e.getMessage() );
+ }
+
+ }
+
+ private synchronized void scheduleDatabaseJobs()
+ throws SchedulerException
+ {
+ String cronString = archivaConfiguration.getConfiguration().getDatabaseScanning().getCronExpression();
+
+ // setup the unprocessed artifact job
+ JobDetail databaseJob = new JobDetail( DATABASE_JOB, DATABASE_SCAN_GROUP, DatabaseTaskJob.class );
+
+ JobDataMap dataMap = new JobDataMap();
+ dataMap.put( DatabaseTaskJob.TASK_QUEUE, databaseUpdateQueue );
+ databaseJob.setJobDataMap( dataMap );
+
+ CronExpressionValidator cronValidator = new CronExpressionValidator();
+ if ( !cronValidator.validate( cronString ) )
+ {
+ log.warn(
+ "Cron expression [" + cronString + "] for database update is invalid. Defaulting to hourly." );
+ cronString = CRON_HOURLY;
+ }
+
+ try
+ {
+ CronTrigger trigger = new CronTrigger( DATABASE_JOB_TRIGGER, DATABASE_SCAN_GROUP, cronString );
+
+ scheduler.scheduleJob( databaseJob, trigger );
+ }
+ catch ( ParseException e )
+ {
+ log.error(
+ "ParseException in database scanning cron expression, disabling database scanning: " + e.getMessage() );
+ }
+
+ }
}
--- /dev/null
+package org.apache.maven.archiva.scheduled.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexReader;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
+import org.codehaus.plexus.taskqueue.Task;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.sonatype.nexus.index.ArtifactContext;
+import org.sonatype.nexus.index.ArtifactContextProducer;
+import org.sonatype.nexus.index.ArtifactInfo;
+import org.sonatype.nexus.index.DefaultArtifactContextProducer;
+import org.sonatype.nexus.index.IndexerEngine;
+import org.sonatype.nexus.index.NexusIndexer;
+import org.sonatype.nexus.index.context.DefaultIndexingContext;
+import org.sonatype.nexus.index.context.IndexingContext;
+import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
+import org.sonatype.nexus.index.packer.IndexPacker;
+import org.sonatype.nexus.index.packer.IndexPackingRequest;
+
+/**
+ * ArchivaIndexingTaskExecutor
+ *
+ * Executes all indexing tasks. Adding, updating and removing artifacts from the index are all performed by
+ * this executor. Add and update artifact in index tasks are added in the indexing task queue by the NexusIndexerConsumer while
+ * remove artifact from index tasks are added by the LuceneCleanupRemoveIndexedConsumer.
+ *
+ * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexing"
+ */
+public class ArchivaIndexingTaskExecutor
+ implements TaskExecutor, Initializable
+{
+ private Logger log = LoggerFactory.getLogger( ArchivaIndexingTaskExecutor.class );
+
+ /**
+ * @plexus.requirement
+ */
+ private IndexerEngine indexerEngine;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArchivaConfiguration archivaConfiguration;
+
+ /**
+ * @plexus.requirement
+ */
+ private IndexPacker indexPacker;
+
+ private ArtifactContextProducer artifactContextProducer;
+
+ public void executeTask( Task task )
+ throws TaskExecutionException
+ {
+ synchronized( indexerEngine )
+ {
+ ArtifactIndexingTask indexingTask = ( ArtifactIndexingTask ) task;
+
+ ManagedRepositoryConfiguration repository =
+ archivaConfiguration.getConfiguration().findManagedRepositoryById( indexingTask.getRepositoryId() );
+
+ String indexDir = repository.getIndexDir();
+ File managedRepository = new File( repository.getLocation() );
+
+ File indexDirectory = null;
+ if( indexDir != null && !"".equals( indexDir ) )
+ {
+ indexDirectory = new File( repository.getIndexDir() );
+ }
+ else
+ {
+ indexDirectory = new File( managedRepository, ".indexer" );
+ }
+
+ IndexingContext context = null;
+ try
+ {
+ context =
+ new DefaultIndexingContext( repository.getId(), repository.getId(), managedRepository,
+ indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );
+ context.setSearchable( repository.isScanned() );
+
+ File artifactFile = indexingTask.getResourceFile();
+ ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile );
+
+ if( ac != null )
+ {
+ if( indexingTask.getAction().equals( ArtifactIndexingTask.ADD ) )
+ {
+ boolean add = true;
+ IndexReader r = context.getIndexReader();
+ for ( int i = 0; i < r.numDocs(); i++ )
+ {
+ if ( !r.isDeleted( i ) )
+ {
+ Document d = r.document( i );
+ String uinfo = d.get( ArtifactInfo.UINFO );
+ if( ac.getArtifactInfo().getUinfo().equals( uinfo ) )
+ {
+ add = false;
+ break;
+ }
+ }
+ }
+
+ if( add )
+ {
+ log.debug( "Adding artifact '" + ac.getArtifactInfo() + "' to index.." );
+ indexerEngine.index( context, ac );
+ }
+ else
+ {
+ log.debug( "Updating artifact '" + ac.getArtifactInfo() + "' in index.." );
+ indexerEngine.update( context, ac );
+ }
+ }
+ else
+ {
+ log.debug( "removing artifact '" + ac.getArtifactInfo() + "' from index.." );
+ indexerEngine.remove( context, ac );
+ }
+
+ final File indexLocation = new File( managedRepository, ".index" );
+ IndexPackingRequest request = new IndexPackingRequest( context, indexLocation );
+ indexPacker.packIndex( request );
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new TaskExecutionException( "Error occurred while executing indexing task '" +
+ indexingTask.getName() + "'" );
+ }
+ catch ( UnsupportedExistingLuceneIndexException e )
+ {
+ throw new TaskExecutionException( "Unsupported Lucene index format: " + e.getMessage() );
+ }
+ finally
+ {
+ if( context != null )
+ {
+ try
+ {
+ context.close( false );
+ }
+ catch ( IOException e )
+ {
+ throw new TaskExecutionException( "Error occurred while closing context: " + e.getMessage() );
+ }
+ }
+ }
+ }
+ }
+
+ public void initialize()
+ throws InitializationException
+ {
+ log.info( "Initialized " + this.getClass().getName() );
+
+ artifactContextProducer = new DefaultArtifactContextProducer();
+ }
+
+ public void setIndexerEngine( IndexerEngine indexerEngine )
+ {
+ this.indexerEngine = indexerEngine;
+ }
+
+ public void setIndexPacker( IndexPacker indexPacker )
+ {
+ this.indexPacker = indexPacker;
+ }
+
+ public void setArchivaConfiguration( ArchivaConfiguration archivaConfiguration )
+ {
+ this.archivaConfiguration = archivaConfiguration;
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.scheduled.tasks;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.collections.Predicate;
+import org.apache.commons.lang.StringUtils;
+
+/**
+ * Predicate that matches an {@link ArchivaTask} in a task-queue snapshot by its exact
+ * task name. Used by the scheduler's isProcessing*TaskWithName checks via
+ * CollectionUtils.exists.
+ */
+public class ArchivaTaskNameSelectionPredicate
+ implements Predicate
+{
+ // the task name to compare against ArchivaTask.getName()
+ private String taskName;
+
+ public ArchivaTaskNameSelectionPredicate( String taskName )
+ {
+ this.taskName = taskName;
+ }
+
+ /**
+ * @return true if the object is an {@link ArchivaTask} whose name equals the configured
+ * task name (null-safe via StringUtils.equals); false for any other object
+ */
+ public boolean evaluate( Object object )
+ {
+ boolean satisfies = false;
+
+ if ( object instanceof ArchivaTask )
+ {
+ ArchivaTask task = (ArchivaTask) object;
+ return StringUtils.equals( taskName, task.getName() );
+ }
+
+ return satisfies;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.scheduled.tasks;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+
+import org.sonatype.nexus.index.ArtifactContext;
+
+/**
+ * Task describing a single artifact to be added to, updated in, or removed from the
+ * Lucene index of a managed repository. Queued by consumers and executed by the
+ * indexing task executor.
+ */
+public class ArtifactIndexingTask
+ implements ArchivaTask
+{
+ // action constants checked by the indexing task executor
+ public static final String ADD = "add";
+
+ public static final String DELETE = "delete";
+
+ // NOTE(review): fields below are package-private; consider making them private since
+ // getters/setters are already provided
+
+ // id of the managed repository whose index this task targets
+ String repositoryId;
+
+ // unique task name (used for duplicate detection in the queue)
+ String name;
+
+ String queuePolicy;
+
+ long maxExecutionTime;
+
+ // the artifact file on disk that this task refers to
+ File resourceFile;
+
+ ArtifactContext artifactContext;
+
+ // one of ADD or DELETE
+ String action;
+
+ public String getRepositoryId()
+ {
+ return repositoryId;
+ }
+
+ public void setRepositoryId( String repositoryId )
+ {
+ this.repositoryId = repositoryId;
+ }
+
+ public long getMaxExecutionTime()
+ {
+ return maxExecutionTime;
+ }
+
+ public void setMaxExecutionTime( long maxExecutionTime )
+ {
+ this.maxExecutionTime = maxExecutionTime;
+ }
+
+ public String getName()
+ {
+ return name;
+ }
+
+ public void setName( String name )
+ {
+ this.name = name;
+ }
+
+ public String getQueuePolicy()
+ {
+ return queuePolicy;
+ }
+
+ public void setQueuePolicy( String queuePolicy )
+ {
+ this.queuePolicy = queuePolicy;
+ }
+
+ public File getResourceFile()
+ {
+ return resourceFile;
+ }
+
+ public void setResourceFile( File resourceFile )
+ {
+ this.resourceFile = resourceFile;
+ }
+
+ public ArtifactContext getArtifactContext()
+ {
+ return artifactContext;
+ }
+
+ public void setArtifactContext( ArtifactContext artifactContext )
+ {
+ this.artifactContext = artifactContext;
+ }
+
+ public String getAction()
+ {
+ return action;
+ }
+
+ public void setAction( String action )
+ {
+ this.action = action;
+ }
+
+}
+++ /dev/null
-package org.apache.maven.archiva.scheduled.tasks;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.collections.Predicate;
-import org.apache.commons.lang.StringUtils;
-
-public class RepositoryTaskNameSelectionPredicate
- implements Predicate
-{
- private String taskName;
-
- public RepositoryTaskNameSelectionPredicate( String taskName )
- {
- this.taskName = taskName;
- }
-
- public boolean evaluate( Object object )
- {
- boolean satisfies = false;
-
- if ( object instanceof RepositoryTask )
- {
- RepositoryTask task = (RepositoryTask) object;
- return StringUtils.equals( taskName, task.getName() );
- }
-
- return satisfies;
- }
-}
import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;
/**
- *
+ * TaskCreator
+ *
+ * Convenience class for creating Archiva tasks.
*/
public class TaskCreator
{
return task;
}
+ /**
+ * Creates an {@link ArtifactIndexingTask} for the given repository and artifact file.
+ * The task name is built as "indexing-job:&lt;repositoryId&gt;:&lt;fileName&gt;:&lt;action&gt;"
+ * so duplicate tasks can be detected by name in the indexing queue.
+ *
+ * @param repositoryId id of the managed repository whose index is affected
+ * @param resource the artifact file to index or remove
+ * @param action one of ArtifactIndexingTask.ADD or ArtifactIndexingTask.DELETE
+ * @return the configured task, with queue policy QUEUE_POLICY_WAIT
+ */
+ public static ArtifactIndexingTask createIndexingTask( String repositoryId, File resource,
+ String action )
+ {
+ ArtifactIndexingTask task = new ArtifactIndexingTask();
+ task.setRepositoryId( repositoryId );
+ task.setName( DefaultArchivaTaskScheduler.INDEXING_JOB + ":" + repositoryId + ":" + resource.getName() + ":" +
+ action );
+ task.setAction( action );
+ task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );
+ task.setResourceFile( resource );
+
+ return task;
+ }
+
}
</configuration>
</component>
</components>
+
+ <!--
+ |
+ | Indexing Task Queue / Executor
+ |
+ -->
+ <component>
+ <role>org.codehaus.plexus.taskqueue.TaskQueue</role>
+ <role-hint>indexing</role-hint>
+ <implementation>org.codehaus.plexus.taskqueue.DefaultTaskQueue</implementation>
+ <lifecycle-handler>plexus-configurable</lifecycle-handler>
+ <configuration>
+ <task-entry-evaluators>
+ </task-entry-evaluators>
+ <task-exit-evaluators>
+ </task-exit-evaluators>
+ <task-viability-evaluators>
+ </task-viability-evaluators>
+ </configuration>
+ </component>
+
+ <component>
+ <role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
+ <role-hint>indexing</role-hint>
+ <implementation>org.codehaus.plexus.taskqueue.execution.ThreadedTaskQueueExecutor</implementation>
+ <instantiation-strategy>singleton</instantiation-strategy>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
+ <role-hint>indexing</role-hint>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.taskqueue.TaskQueue</role>
+ <role-hint>indexing</role-hint>
+ </requirement>
+ </requirements>
+ <configuration>
+ <name>indexing</name>
+ </configuration>
+ </component>
</component-set>
--- /dev/null
+package org.apache.maven.archiva.scheduled.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.util.Set;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+import org.easymock.MockControl;
+import org.sonatype.nexus.index.ArtifactInfo;
+import org.sonatype.nexus.index.FlatSearchRequest;
+import org.sonatype.nexus.index.FlatSearchResponse;
+import org.sonatype.nexus.index.IndexerEngine;
+import org.sonatype.nexus.index.NexusIndexer;
+import org.sonatype.nexus.index.context.IndexingContext;
+import org.sonatype.nexus.index.packer.IndexPacker;
+
+/**
+ * ArchivaIndexingTaskExecutorTest
+ */
+public class ArchivaIndexingTaskExecutorTest
+ extends PlexusInSpringTestCase
+{
+ private ArchivaIndexingTaskExecutor indexingExecutor;
+
+ private IndexerEngine indexerEngine;
+
+ private IndexPacker indexPacker;
+
+ private MockControl archivaConfigControl;
+
+ private ArchivaConfiguration archivaConfiguration;
+
+ private ManagedRepositoryConfiguration repositoryConfig;
+
+ private Configuration configuration;
+
+ private NexusIndexer indexer;
+
+ protected void setUp() throws Exception
+ {
+ super.setUp();
+
+ indexingExecutor = new ArchivaIndexingTaskExecutor();
+ indexingExecutor.initialize();
+
+ repositoryConfig = new ManagedRepositoryConfiguration();
+ repositoryConfig.setId( "test-repo" );
+ repositoryConfig.setLocation( getBasedir() + "/target/test-classes/test-repo" );
+ repositoryConfig.setLayout( "default" );
+ repositoryConfig.setName( "Test Repository" );
+ repositoryConfig.setScanned( true );
+ repositoryConfig.setSnapshots( false );
+ repositoryConfig.setReleases( true );
+
+ configuration = new Configuration();
+ configuration.addManagedRepository( repositoryConfig );
+
+ archivaConfigControl = MockControl.createControl( ArchivaConfiguration.class );
+ archivaConfiguration = ( ArchivaConfiguration ) archivaConfigControl.getMock();
+
+ indexer = ( NexusIndexer ) lookup( NexusIndexer.class );
+ indexerEngine = ( IndexerEngine ) lookup ( IndexerEngine.class );
+ indexPacker = ( IndexPacker ) lookup( IndexPacker.class );
+
+ indexingExecutor.setIndexerEngine( indexerEngine );
+ indexingExecutor.setIndexPacker( indexPacker );
+ indexingExecutor.setArchivaConfiguration( archivaConfiguration );
+ }
+
+ protected void tearDown() throws Exception
+ {
+ // delete created index in the repository
+ File indexDir = new File( repositoryConfig.getLocation(), ".indexer" );
+ FileUtils.deleteDirectory( indexDir );
+ assertFalse( indexDir.exists() );
+
+ indexDir = new File( repositoryConfig.getLocation(), ".index" );
+ FileUtils.deleteDirectory( indexDir );
+ assertFalse( indexDir.exists() );
+
+ super.tearDown();
+ }
+
+ /**
+  * Verifies that executing an ADD indexing task puts the artifact into the repository's
+  * nexus index and that the indexed fields (groupId, artifactId, repository) are correct.
+  */
+ public void testAddArtifactToIndex()
+ throws Exception
+ {
+ File artifactFile =
+ new File( repositoryConfig.getLocation(),
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+ ArtifactIndexingTask task =
+ TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+
+ archivaConfigControl.expectAndReturn( archivaConfiguration.getConfiguration(), configuration );
+
+ archivaConfigControl.replay();
+
+ indexingExecutor.executeTask( task );
+
+ archivaConfigControl.verify();
+
+ BooleanQuery q = new BooleanQuery();
+ q.add( indexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
+ q.add( indexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
+
+ IndexingContext context = indexer.addIndexingContext( repositoryConfig.getId(), repositoryConfig.getId(), new File( repositoryConfig.getLocation() ),
+ new File( repositoryConfig.getLocation(), ".indexer" ), null, null, NexusIndexer.FULL_INDEX );
+ context.setSearchable( true );
+
+ try
+ {
+ FlatSearchRequest request = new FlatSearchRequest( q );
+ FlatSearchResponse response = indexer.searchFlat( request );
+
+ assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
+ assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+ assertEquals( 1, response.getTotalHits() );
+
+ Set<ArtifactInfo> results = response.getResults();
+
+ // no cast needed: results is already typed as Set<ArtifactInfo>
+ ArtifactInfo artifactInfo = results.iterator().next();
+ assertEquals( "org.apache.archiva", artifactInfo.groupId );
+ assertEquals( "archiva-index-methods-jar-test", artifactInfo.artifactId );
+ assertEquals( "test-repo", artifactInfo.repository );
+ }
+ finally
+ {
+ // unregister the context (keeping the index files on disk) so it does not leak open
+ // index resources and later tests can re-add a context with the same repository id
+ indexer.removeIndexingContext( context, false );
+ }
+ }
+
+ /**
+  * Verifies that indexing the same artifact twice updates the existing document rather than
+  * adding a duplicate: after two ADD executions the index must contain exactly one hit.
+  */
+ public void testUpdateArtifactInIndex()
+ throws Exception
+ {
+ File artifactFile =
+ new File( repositoryConfig.getLocation(),
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+ ArtifactIndexingTask task =
+ TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+
+ // getConfiguration() is hit once per executeTask invocation
+ archivaConfigControl.expectAndReturn( archivaConfiguration.getConfiguration(), configuration, 2 );
+
+ archivaConfigControl.replay();
+
+ indexingExecutor.executeTask( task );
+ indexingExecutor.executeTask( task );
+
+ archivaConfigControl.verify();
+
+ BooleanQuery q = new BooleanQuery();
+ q.add( indexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
+ q.add( indexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
+
+ IndexSearcher searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
+ try
+ {
+ TopDocs topDocs = searcher.search( q, null, 10 );
+
+ assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
+ assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+
+ // should only return 1 hit!
+ assertEquals( 1, topDocs.totalHits );
+ }
+ finally
+ {
+ // release the index file handles so tearDown() can delete the index directory
+ searcher.close();
+ }
+ }
+
+ /**
+  * Verifies the full add/remove round trip: an ADD task makes the artifact searchable
+  * (1 hit), and a subsequent DELETE task removes it from the index again (0 hits).
+  */
+ public void testRemoveArtifactFromIndex()
+ throws Exception
+ {
+ File artifactFile =
+ new File( repositoryConfig.getLocation(),
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+ ArtifactIndexingTask task =
+ TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+
+ // one getConfiguration() call per executeTask (ADD + DELETE)
+ archivaConfigControl.expectAndReturn( archivaConfiguration.getConfiguration(), configuration, 2 );
+
+ archivaConfigControl.replay();
+
+ // add artifact to index
+ indexingExecutor.executeTask( task );
+
+ BooleanQuery q = new BooleanQuery();
+ q.add( indexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
+ q.add( indexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
+
+ IndexSearcher searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
+ try
+ {
+ TopDocs topDocs = searcher.search( q, null, 10 );
+
+ assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
+ assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+
+ // should return 1 hit
+ assertEquals( 1, topDocs.totalHits );
+ }
+ finally
+ {
+ // close before the DELETE task runs so no stale reader holds the index files open
+ searcher.close();
+ }
+
+ // remove added artifact from index
+ task =
+ TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.DELETE );
+ indexingExecutor.executeTask( task );
+
+ archivaConfigControl.verify();
+
+ q = new BooleanQuery();
+ q.add( indexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
+ q.add( indexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
+
+ searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
+ try
+ {
+ TopDocs topDocs = searcher.search( q, null, 10 );
+
+ assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
+ assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+
+ // artifact should have been removed from the index!
+ assertEquals( 0, topDocs.totalHits );
+ }
+ finally
+ {
+ // release the index file handles so tearDown() can delete the index directory
+ searcher.close();
+ }
+ }
+
+}
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <!-- Minimal POM fixture for the test artifact indexed by the indexing task executor tests. -->
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-index-methods-jar-test</artifactId>
+ <packaging>jar</packaging>
+ <version>1.0</version>
+ <name>archiva-index-methods-jar-test</name>
+ <url>http://maven.apache.org</url>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
ArchivaTaskScheduler taskScheduler = (ArchivaTaskScheduler) wac.getBean(PlexusToSpringUtils.buildSpringId(ArchivaTaskScheduler.class));
wac.getBean(PlexusToSpringUtils.buildSpringId(TaskQueueExecutor.class, "database-update"));
wac.getBean(PlexusToSpringUtils.buildSpringId(TaskQueueExecutor.class, "repository-scanning"));
+ wac.getBean(PlexusToSpringUtils.buildSpringId(TaskQueueExecutor.class, "indexing"));
try
{