source.dussan.org Git - archiva.git/commitdiff
[MRM-1188]
author Maria Odea B. Ching <oching@apache.org>
Sun, 24 May 2009 06:11:19 +0000 (06:11 +0000)
committer Maria Odea B. Ching <oching@apache.org>
Sun, 24 May 2009 06:11:19 +0000 (06:11 +0000)
o put all indexing tasks (add, update, and remove) into one queue; tasks are added to the queue by NexusIndexerConsumer and LuceneCleanupRemoveIndexedConsumer
o all index updates are performed by ArchivaIndexingTaskExecutor (see the sketch below)
o added and updated tests
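
For illustration, here is a minimal, self-contained Java sketch of the single-queue pattern this commit introduces: consumers only enqueue add/delete indexing tasks, and one executor thread (standing in for ArchivaIndexingTaskExecutor) is the only component that touches the index. The IndexingTask class and the queue wiring below are simplified stand-ins, not the actual Archiva/Plexus task-queue API.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

/**
 * Minimal sketch of the single indexing queue introduced by this change.
 * All names below are illustrative stand-ins, not the real Archiva classes.
 */
public class SingleIndexingQueueSketch
{
    // Simplified stand-in for ArtifactIndexingTask: repository id, artifact path and an action.
    static final class IndexingTask
    {
        enum Action { ADD, DELETE }

        final String repositoryId;
        final String artifactPath;
        final Action action;

        IndexingTask( String repositoryId, String artifactPath, Action action )
        {
            this.repositoryId = repositoryId;
            this.artifactPath = artifactPath;
            this.action = action;
        }

        String getName()
        {
            return repositoryId + ":" + artifactPath + ":" + action;
        }
    }

    public static void main( String[] args ) throws InterruptedException
    {
        // One shared queue for all indexing work (add, update and remove).
        final BlockingQueue<IndexingTask> indexingQueue = new LinkedBlockingQueue<IndexingTask>();

        // A single executor thread plays the role of ArchivaIndexingTaskExecutor:
        // it is the only place where the index would actually be modified.
        Thread executor = new Thread( new Runnable()
        {
            public void run()
            {
                try
                {
                    while ( true )
                    {
                        IndexingTask task = indexingQueue.take();
                        // In Archiva, the Lucene/Nexus index update would happen here.
                        System.out.println( "Executing indexing task '" + task.getName() + "'" );
                    }
                }
                catch ( InterruptedException e )
                {
                    Thread.currentThread().interrupt();
                }
            }
        } );
        executor.setDaemon( true );
        executor.start();

        // Consumers only queue tasks, mirroring NexusIndexerConsumer.processFile(...)
        // and LuceneCleanupRemoveIndexedConsumer.processArchivaArtifact(...).
        indexingQueue.put( new IndexingTask( "test-repo",
            "org/example/foo/1.0/foo-1.0.jar", IndexingTask.Action.ADD ) );
        indexingQueue.put( new IndexingTask( "test-repo",
            "org/example/bar/1.0/bar-1.0.jar", IndexingTask.Action.DELETE ) );

        Thread.sleep( 200 ); // give the executor a moment to drain the queue
    }
}

In the actual change, the queue is a Plexus TaskQueue wired with the "indexing" role-hint in DefaultArchivaTaskScheduler, and the executor is the new ArchivaIndexingTaskExecutor component.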

git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@778093 13f79535-47bb-0310-9956-ffa450edef68

22 files changed:
archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/pom.xml
archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/main/java/org/apache/archiva/consumers/lucene/LuceneCleanupRemoveIndexedConsumer.java
archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/main/java/org/apache/archiva/consumers/lucene/NexusIndexerConsumer.java
archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/main/resources/META-INF/spring-context.xml
archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/archiva/consumers/lucene/LuceneCleanupRemoveIndexedConsumerTest.java
archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/archiva/consumers/lucene/NexusIndexerConsumerTest.java
archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/archiva/consumers/lucene/stubs/ArchivaDAOStub.java [new file with mode: 0644]
archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/resources/org/apache/archiva/consumers/lucene/LuceneConsumersTest.xml [new file with mode: 0644]
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/archiva/indexer/search/NexusRepositorySearch.java
archiva-modules/archiva-scheduled/pom.xml
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/ArchivaTaskScheduler.java
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/DefaultArchivaTaskScheduler.java
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/executors/ArchivaIndexingTaskExecutor.java [new file with mode: 0644]
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/ArchivaTaskNameSelectionPredicate.java [new file with mode: 0644]
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/ArtifactIndexingTask.java [new file with mode: 0644]
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/RepositoryTaskNameSelectionPredicate.java [deleted file]
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/TaskCreator.java
archiva-modules/archiva-scheduled/src/main/resources/META-INF/plexus/components.xml
archiva-modules/archiva-scheduled/src/test/java/org/apache/maven/archiva/scheduled/executors/ArchivaIndexingTaskExecutorTest.java [new file with mode: 0644]
archiva-modules/archiva-scheduled/src/test/resources/test-repo/org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar [new file with mode: 0644]
archiva-modules/archiva-scheduled/src/test/resources/test-repo/org/apache/archiva/archiva-index-methods-jar-test/1.0/pom.xml [new file with mode: 0644]
archiva-modules/archiva-web/archiva-webapp/src/main/java/org/apache/maven/archiva/web/startup/ArchivaStartup.java

index 7c3b52ea0ecd438bb8d461e21cda1d0ef26c79cc..4ead5172440064f53bcc94d8d3ae05a46e647e71 100644 (file)
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-repository-layer</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-scheduled</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.codehaus.plexus</groupId>
       <artifactId>plexus-spring</artifactId>
index 8f37ac79984fcf74d5f597eb4dca04a0d9a6420b..a3aab84fe2b3c184a0e88b2addf12ffd2da8b4ea 100644 (file)
@@ -27,24 +27,21 @@ import org.apache.maven.archiva.model.ArchivaArtifact;
 import org.apache.maven.archiva.repository.ManagedRepositoryContent;
 import org.apache.maven.archiva.repository.RepositoryContentFactory;
 import org.apache.maven.archiva.repository.RepositoryException;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.sonatype.nexus.index.ArtifactContext;
-import org.sonatype.nexus.index.ArtifactContextProducer;
-import org.sonatype.nexus.index.DefaultArtifactContextProducer;
-import org.sonatype.nexus.index.NexusIndexer;
-import org.sonatype.nexus.index.context.DefaultIndexingContext;
-import org.sonatype.nexus.index.context.IndexingContext;
-import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
-import org.sonatype.nexus.index.IndexerEngine;
 
 import java.io.File;
-import java.io.IOException;
 import java.util.List;
 
 /**
  * LuceneCleanupRemoveIndexedConsumer
  * 
+ * Cleans up the index by removing artifacts that no longer exist in the file system (managed repositories).
+ * 
  * @version $Id$
  */
 public class LuceneCleanupRemoveIndexedConsumer
@@ -54,155 +51,65 @@ public class LuceneCleanupRemoveIndexedConsumer
     private static final Logger log = LoggerFactory.getLogger( LuceneCleanupRemoveIndexedConsumer.class );
 
     private RepositoryContentFactory repoFactory;
-
-    private ArtifactContextProducer artifactContextProducer;
-
-    private IndexingContext context;
-    
-    private IndexerEngine indexerEngine;
     
-    //TODO - deng - use indexerEngine to remove documents instead of directly using the IndexingContext!
+    private ArchivaTaskScheduler scheduler;
     
-    public LuceneCleanupRemoveIndexedConsumer( RepositoryContentFactory repoFactory, IndexerEngine indexerEngine )
+    public LuceneCleanupRemoveIndexedConsumer( RepositoryContentFactory repoFactory, ArchivaTaskScheduler scheduler )
     {
         this.repoFactory = repoFactory;
-        this.indexerEngine = indexerEngine;
-        this.artifactContextProducer = new DefaultArtifactContextProducer();
+        this.scheduler = scheduler;     
     }
-  
+      
     public void beginScan()
     {
     }
 
     public void completeScan()
     {
-        /*synchronized( indexerEngine )
-        {
-            try
-            {
-                //context.getIndexWriter().close();
-
-                //indexerEngine.endIndexing( context );
-                //indexer.removeIndexingContext( context, false );
-            }
-            catch ( IOException e )
-            {
-                log.error( e.getMessage() );
-            }
-        }        */
+        
     }
 
     public List<String> getIncludedTypes()
-    {
-        // TODO Auto-generated method stub
+    {        
         return null;
     }
 
     public void processArchivaArtifact( ArchivaArtifact artifact )
         throws ConsumerException
     {
-        //synchronized( context )
-        //{
-            // TODO - deng - block this if there is the nexus indexer consumer is executing?
-            ManagedRepositoryContent repoContent = null;
-            
-            try
-            {
-               repoContent =
-                    repoFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
-            }
-            catch ( RepositoryException e )
-            {
-                throw new ConsumerException( "Can't run index cleanup consumer: " + e.getMessage() );
-            }
-    
-            ManagedRepositoryConfiguration repository = repoContent.getRepository();
-            String indexDir = repository.getIndexDir();
-            File managedRepository = new File( repository.getLocation() );
-            File indexDirectory = null;
-
-            if ( indexDir != null && !"".equals( indexDir ) )
-            {
-                indexDirectory = new File( repository.getIndexDir() );
-            }
-            else
-            {
-                indexDirectory = new File( managedRepository, ".indexer" );
-            }    
-           
-            try
-            {
-                context =
-                    new DefaultIndexingContext( repository.getId(), repository.getId(), managedRepository,
-                                                indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );
-                //context =
-                //    indexer.addIndexingContext( repository.getId(), repository.getId(), managedRepository,
-                //                                indexDirectory, null, null, NexusIndexer.FULL_INDEX );
-                context.setSearchable( repository.isScanned() );
-            }
-            catch ( UnsupportedExistingLuceneIndexException e )
-            {
-                log.warn( "Unsupported index format.", e );
-                return;
-            }
-            catch ( IOException e )
-            {   
-                log.warn( "Unable to open index at " + indexDirectory.getAbsoluteFile(), e );
-                return;
-            }
+        ManagedRepositoryContent repoContent = null;
+        
+        try
+        {
+           repoContent =
+                repoFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
+        }
+        catch ( RepositoryException e )
+        {
+            throw new ConsumerException( "Can't run index cleanup consumer: " + e.getMessage() );
+        }
 
-            try
+        ManagedRepositoryConfiguration repository = repoContent.getRepository();
+       
+        try
+        {
+            File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );
+            
+            if ( !artifactFile.exists() )
             {
-                File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );
+                ArtifactIndexingTask task =                
+                        TaskCreator.createIndexingTask( repository.getId(), artifactFile, ArtifactIndexingTask.DELETE );
                 
-                if ( !artifactFile.exists() )
-                {
-                    ArtifactContext artifactContext =
-                        artifactContextProducer.getArtifactContext( context, artifactFile );
-
-                    if ( artifactContext != null )
-                    {
-                        //indexerEngine.remove( context, artifactContext );
-
-                        indexerEngine.remove( context, artifactContext );
-                        
-                        context.close( false );
-                        // hack for deleting documents - indexer engine's remove(...) isn't working for me
-                        //removeDocuments( artifactContext );
-                    }
-                }
-            }                
-            catch ( IOException e )
-            {
-                log.error( "Unable to open index at " + indexDirectory.getAbsoluteFile(), e );
-            }           
-       // }
-    }
-
-   /* private void removeDocuments( ArtifactContext ac )
-        throws IOException
-    {
-        synchronized( indexerEngine )
+                log.debug( "Queueing indexing task '" + task.getName() + "' to remove the artifact from the index." );
+                scheduler.queueIndexingTask( task );
+            }
+                   
+        } 
+        catch ( TaskQueueException e )
         {
-            IndexWriter w = context.getIndexWriter();
-    
-            ArtifactInfo ai = ac.getArtifactInfo();
-            String uinfo = AbstractIndexCreator.getGAV( ai.groupId, ai.artifactId, ai.version, ai.classifier, ai.packaging );
-    
-            Document doc = new Document();
-            doc.add( new Field( ArtifactInfo.DELETED, uinfo, Field.Store.YES, Field.Index.NO ) );
-            doc.add( new Field( ArtifactInfo.LAST_MODIFIED, Long.toString( System.currentTimeMillis() ), Field.Store.YES,
-                                Field.Index.NO ) );
-    
-            w.addDocument( doc );
-    
-            w.deleteDocuments( new Term( ArtifactInfo.UINFO, uinfo ) );
-    
-            w.commit();
-    
-            context.updateTimestamp();
+            throw new ConsumerException( e.getMessage() );
         }
-    }*/
+    }
 
     public String getDescription()
     {
@@ -223,9 +130,4 @@ public class LuceneCleanupRemoveIndexedConsumer
     {
         this.repoFactory = repoFactory;
     }
-
-    public void setArtifactContextProducer( ArtifactContextProducer artifactContextProducer )
-    {
-        this.artifactContextProducer = artifactContextProducer;
-    }
 }
index 78109ed633e2049965841dae94f4c2f9fb272e3d..8e62eee38ad7ca9725e171ac5fe53f280c7e087f 100644 (file)
@@ -20,30 +20,22 @@ package org.apache.archiva.consumers.lucene;
  */
 
 import java.io.File;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
-import java.util.zip.ZipException;
 
 import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
 import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
 import org.apache.maven.archiva.consumers.ConsumerException;
 import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
 import org.apache.maven.archiva.repository.content.ManagedDefaultRepositoryContent;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.sonatype.nexus.index.ArtifactContext;
-import org.sonatype.nexus.index.ArtifactContextProducer;
-import org.sonatype.nexus.index.DefaultArtifactContextProducer;
-import org.sonatype.nexus.index.NexusIndexer;
-import org.sonatype.nexus.index.context.DefaultIndexingContext;
-import org.sonatype.nexus.index.context.IndexingContext;
-import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
-import org.sonatype.nexus.index.IndexerEngine;
-import org.sonatype.nexus.index.packer.IndexPacker;
-import org.sonatype.nexus.index.packer.IndexPackingRequest;
 
 /**
  * Consumer for indexing the repository to provide search and IDE integration features.
@@ -54,35 +46,17 @@ public class NexusIndexerConsumer
 {
     private static final Logger log = LoggerFactory.getLogger( NexusIndexerConsumer.class );
 
-    private ArtifactContextProducer artifactContextProducer;
-
-    private IndexPacker indexPacker;
-
     private ManagedDefaultRepositoryContent repositoryContent;
 
-    private IndexingContext context;
-
     private File managedRepository;
-    
-    private IndexerEngine indexerEngine;
-    
-    //private IndexingContextMap indexingContextMap;
-    
-    public NexusIndexerConsumer( IndexPacker indexPacker, IndexerEngine indexerEngine )
+        
+    private ArchivaTaskScheduler scheduler;
+       
+    public NexusIndexerConsumer( ArchivaTaskScheduler scheduler )
     {
-        this.indexPacker = indexPacker;
-        this.indexerEngine = indexerEngine;        
-        this.artifactContextProducer = new DefaultArtifactContextProducer();
+        this.scheduler = scheduler;
     }
     
-   /* public NexusIndexerConsumer( IndexPacker indexPacker, IndexerEngine indexerEngine, IndexingContextMap indexingContextMap )
-    {
-        this.indexPacker = indexPacker;
-        this.indexerEngine = indexerEngine;
-        this.indexingContextMap = indexingContextMap;
-        this.artifactContextProducer = new DefaultArtifactContextProducer();
-    }*/
-    
     public String getDescription()
     {
         return "Indexes the repository to provide search and IDE integration features";
@@ -100,107 +74,34 @@ public class NexusIndexerConsumer
 
     public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered )
         throws ConsumerException
-    {   
-        //synchronized( context )
-        //{            
-            log.debug( "Begin indexing of repository '" + repository.getId() + "'..");
-            
-            managedRepository = new File( repository.getLocation() );
-            String indexDir = repository.getIndexDir();
-            
-            File indexDirectory = null;
-            if( indexDir != null && !"".equals( indexDir ) )
-            {
-                indexDirectory = new File( repository.getIndexDir() );
-            }
-            else
-            {
-                indexDirectory = new File( managedRepository, ".indexer" );
-            }
+    {       
+        managedRepository = new File( repository.getLocation() );
 
-            repositoryContent = new ManagedDefaultRepositoryContent();
-            repositoryContent.setRepository( repository );
-            
-            try
-            {   
-                context =
-                    new DefaultIndexingContext( repository.getId(), repository.getId(), managedRepository,
-                                                indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );
-                
-                //context = indexingContextMap.addIndexingContext( repository.getId(), repository.getId(), managedRepository,
-                //                                indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );
-                
-                context.setSearchable( repository.isScanned() );
-                
-                //indexerEngine.beginIndexing( context );
-            }
-            catch ( UnsupportedExistingLuceneIndexException e )
-            {
-                throw new ConsumerException( "Could not create index at " + indexDirectory.getAbsoluteFile(), e );
-            }
-            catch ( IOException e )
-            {
-                throw new ConsumerException( "Could not create index at " + indexDirectory.getAbsoluteFile(), e );
-            }
-        //}
+        repositoryContent = new ManagedDefaultRepositoryContent();
+        repositoryContent.setRepository( repository );
     }
     
     public void processFile( String path )
         throws ConsumerException
     {
-        synchronized ( indexerEngine )
+        File artifactFile = new File( managedRepository, path );
+                
+        ArtifactIndexingTask task =
+            TaskCreator.createIndexingTask( repositoryContent.getId(), artifactFile, ArtifactIndexingTask.ADD );
+        try
         {
-            if ( context == null )
-            {
-                // didn't start correctly, so skip
-                return;
-            }
-            
-            File artifactFile = new File( managedRepository, path );        
-            ArtifactContext artifactContext = artifactContextProducer.getArtifactContext( context, artifactFile );
-            
-            if ( artifactContext != null )
-            {
-                try
-                {                                           
-                    indexerEngine.index( context, artifactContext );                        
-                }
-                catch ( ZipException e )
-                {
-                    // invalid JAR file
-                    log.info( e.getMessage() );
-                }
-                catch ( IOException e )
-                {
-                    throw new ConsumerException( e.getMessage(), e );
-                }
-            }
+            log.debug( "Queueing indexing task '" + task.getName() + "' to add or update the artifact in the index." );
+            scheduler.queueIndexingTask( task );
         }
+        catch ( TaskQueueException e )
+        {
+            throw new ConsumerException( e.getMessage(), e );
+        }        
     }
 
     public void completeScan()
     {   
-        //synchronized( context )
-        //{
-            log.debug( "End indexing of repository '" + context.getRepositoryId() + "'..");
-            
-            final File indexLocation = new File( managedRepository, ".index" );
-            try
-            {
-                //indexerEngine.endIndexing( context );
-                
-                IndexPackingRequest request = new IndexPackingRequest( context, indexLocation );
-                indexPacker.packIndex( request );
-
-                //indexingContextMap.removeIndexingContext( context.getId() );
-                
-                context.close( false );
-            }
-            catch ( IOException e )
-            {
-                log.error( "Could not pack index" + indexLocation.getAbsolutePath(), e );
-            }
-        //}
+        
     }
 
     public List<String> getExcludes()
index a982d83c85f0c5c217ba1d313c352d15b4a9f0f9..00902d2371de63b87130b87601b5241a1bc522a5 100644 (file)
@@ -3,15 +3,12 @@
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
 
-    <bean id="indexerConsumer" class="org.apache.archiva.consumers.lucene.NexusIndexerConsumer">      
-        <constructor-arg ref="indexPacker"/>
-        <constructor-arg ref="indexerEngine"/>
-        <!-- <constructor-arg ref="indexingContextMap"/>  -->
+    <bean id="indexerConsumer" class="org.apache.archiva.consumers.lucene.NexusIndexerConsumer" lazy-init="true">      
+        <constructor-arg ref="archivaTaskScheduler"/>
     </bean>
     
-    <bean id="indexCleanupConsumer" class="org.apache.archiva.consumers.lucene.LuceneCleanupRemoveIndexedConsumer">
+    <bean id="indexCleanupConsumer" class="org.apache.archiva.consumers.lucene.LuceneCleanupRemoveIndexedConsumer" lazy-init="true">
         <constructor-arg ref="repositoryContentFactory"/>
-        <constructor-arg ref="indexerEngine"/>
-        <!--  <constructor-arg ref="nexusIndexer#archiva"/>  -->
+        <constructor-arg ref="archivaTaskScheduler"/>
     </bean>
 </beans>
\ No newline at end of file
index ccca2f535e22563402976da705e73515746fbd08..18873598144788fab95cf4127d6ea4619b745d4a 100644 (file)
@@ -27,14 +27,16 @@ import org.apache.maven.archiva.model.ArchivaArtifact;
 import org.apache.maven.archiva.repository.ManagedRepositoryContent;
 import org.apache.maven.archiva.repository.RepositoryContentFactory;
 import org.apache.maven.archiva.repository.content.ManagedDefaultRepositoryContent;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
 import org.codehaus.plexus.spring.PlexusInSpringTestCase;
 import org.easymock.MockControl;
 import org.easymock.classextension.MockClassControl;
-import org.sonatype.nexus.index.ArtifactContext;
-import org.sonatype.nexus.index.ArtifactContextProducer;
-import org.sonatype.nexus.index.IndexerEngine;
-import org.sonatype.nexus.index.context.IndexingContext;
 
+/**
+ * LuceneCleanupRemoveIndexedConsumerTest
+ */
 public class LuceneCleanupRemoveIndexedConsumerTest
     extends PlexusInSpringTestCase
 {
@@ -46,31 +48,19 @@ public class LuceneCleanupRemoveIndexedConsumerTest
 
     private ManagedRepositoryConfiguration repositoryConfig;
     
-    private MockControl indexerEngineControl;
-    
-    private IndexerEngine indexerEngine;
-
-    private MockControl contextProducerControl;
-
-    private ArtifactContextProducer artifactContextProducer;
-
-    private MockControl acControl;
-
-    private ArtifactContext ac;
+    private ArchivaTaskScheduler scheduler;
 
     public void setUp()
         throws Exception
     {
         super.setUp();
 
-        indexerEngineControl = MockControl.createControl( IndexerEngine.class );
-        indexerEngineControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
-        indexerEngine = (IndexerEngine) indexerEngineControl.getMock();
-        
         repoFactoryControl = MockClassControl.createControl( RepositoryContentFactory.class );
         repoFactory = (RepositoryContentFactory) repoFactoryControl.getMock();
 
-        consumer = new LuceneCleanupRemoveIndexedConsumer( repoFactory, indexerEngine );
+        scheduler = ( ArchivaTaskScheduler ) lookup( ArchivaTaskScheduler.class );
+        
+        consumer = new LuceneCleanupRemoveIndexedConsumer( repoFactory, scheduler );
 
         repositoryConfig = new ManagedRepositoryConfiguration();
         repositoryConfig.setId( "test-repo" );
@@ -81,15 +71,6 @@ public class LuceneCleanupRemoveIndexedConsumerTest
         repositoryConfig.setSnapshots( false );
         repositoryConfig.setReleases( true );
         repositoryConfig.setIndexDir( getBasedir() + "/target/test-classes/test-repo/.cleanup-index" );
-
-        contextProducerControl = MockControl.createControl( ArtifactContextProducer.class );
-        contextProducerControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
-        artifactContextProducer = (ArtifactContextProducer) contextProducerControl.getMock();
-
-        consumer.setArtifactContextProducer( artifactContextProducer );
-
-        acControl = MockClassControl.createControl( ArtifactContext.class );
-        ac = (ArtifactContext) acControl.getMock();
     }
 
     public void tearDown()
@@ -105,30 +86,25 @@ public class LuceneCleanupRemoveIndexedConsumerTest
     {
         ArchivaArtifact artifact =
             new ArchivaArtifact( "org.apache.archiva", "archiva-lucene-consumers", "1.2", null, "jar", "test-repo" );
+        
         ManagedRepositoryContent repoContent = new ManagedDefaultRepositoryContent();
         repoContent.setRepository( repositoryConfig );
-
-        IndexingContext context = null; 
-            
-        File artifactFile =
-            new File( repositoryConfig.getLocation(),
-                      "org/apache/archiva/archiva-lucene-consumers/1.2/archiva-lucene-consumers-1.2.jar" );
+        
+        File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );
        
+        ArtifactIndexingTask task =
+            TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.DELETE );
+        
         repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( repositoryConfig.getId() ),
                                             repoContent );
-        contextProducerControl.expectAndReturn( artifactContextProducer.getArtifactContext( context, artifactFile ), ac );        
-        indexerEngine.remove( context, ac );
-        indexerEngineControl.setDefaultVoidCallable();
-        
-        repoFactoryControl.replay();      
-        contextProducerControl.replay();
-        indexerEngineControl.replay();
        
+        repoFactoryControl.replay();      
+
         consumer.processArchivaArtifact( artifact );
 
         repoFactoryControl.verify();       
-        contextProducerControl.verify();
-        indexerEngineControl.verify();       
+        
+        assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );               
     }
 
     public void testProcessArtifactArtifactExists()
@@ -139,6 +115,11 @@ public class LuceneCleanupRemoveIndexedConsumerTest
         ManagedRepositoryContent repoContent = new ManagedDefaultRepositoryContent();
         repoContent.setRepository( repositoryConfig );
 
+        File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );
+        
+        ArtifactIndexingTask task =
+            TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.DELETE );
+        
         repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( repositoryConfig.getId() ),
                                             repoContent );
 
@@ -147,5 +128,13 @@ public class LuceneCleanupRemoveIndexedConsumerTest
         consumer.processArchivaArtifact( artifact );
 
         repoFactoryControl.verify();
+        
+        assertFalse( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
+    }
+    
+    @Override
+    protected String getPlexusConfigLocation()
+    {
+        return "/org/apache/archiva/consumers/lucene/LuceneConsumersTest.xml";
     }
 }
index 02ed2a9ed592f387662c91f435ff269951ade43d..2bb33524d7aef2756ba51886df37755bc77fa460 100644 (file)
@@ -22,36 +22,26 @@ package org.apache.archiva.consumers.lucene;
 import java.io.File;
 import java.util.Calendar;
 import java.util.Date;
-import java.util.Set;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
 import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
 import org.codehaus.plexus.spring.PlexusInSpringTestCase;
-import org.sonatype.nexus.index.ArtifactInfo;
-import org.sonatype.nexus.index.FlatSearchRequest;
-import org.sonatype.nexus.index.FlatSearchResponse;
-import org.sonatype.nexus.index.NexusIndexer;
-import org.sonatype.nexus.index.context.IndexingContext;
-import org.sonatype.nexus.index.IndexerEngine;
-import org.sonatype.nexus.index.packer.IndexPacker;
 
+/**
+ * NexusIndexerConsumerTest
+ */
 public class NexusIndexerConsumerTest
     extends PlexusInSpringTestCase
 {
     private KnownRepositoryContentConsumer nexusIndexerConsumer;
         
     private ManagedRepositoryConfiguration repositoryConfig;
-
-    private NexusIndexer nexusIndexer;
-
-    private IndexPacker indexPacker;
-
-    private IndexerEngine indexerEngine;
+    
+    private ArchivaTaskScheduler scheduler;
     
     @Override
     protected void setUp() 
@@ -59,15 +49,9 @@ public class NexusIndexerConsumerTest
     {
         super.setUp();
         
-        nexusIndexer = ( NexusIndexer ) lookup( NexusIndexer.class );
+        scheduler = ( ArchivaTaskScheduler ) lookup( ArchivaTaskScheduler.class );
         
-        indexPacker = ( IndexPacker ) lookup( IndexPacker.class );
-        
-        indexerEngine = ( IndexerEngine ) lookup( IndexerEngine.class );
-        
-        //nexusIndexerConsumer = new NexusIndexerConsumer( nexusIndexer, indexPacker, indexerEngine );
-        
-        nexusIndexerConsumer = new NexusIndexerConsumer( indexPacker, indexerEngine );
+        nexusIndexerConsumer = new NexusIndexerConsumer( scheduler );
                 
         repositoryConfig = new ManagedRepositoryConfiguration();
         repositoryConfig.setId( "test-repo" );
@@ -98,93 +82,86 @@ public class NexusIndexerConsumerTest
     public void testIndexerIndexArtifact()
         throws Exception
     {        
+        File artifactFile =
+            new File( repositoryConfig.getLocation(),
+                      "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+        ArtifactIndexingTask task =
+            TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+        
         // begin scan
         Date now = Calendar.getInstance().getTime();
         nexusIndexerConsumer.beginScan( repositoryConfig, now );
         nexusIndexerConsumer.processFile( "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
         nexusIndexerConsumer.completeScan();
-        
-        // search!
-        BooleanQuery q = new BooleanQuery();        
-        q.add( nexusIndexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
-        q.add( nexusIndexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
-        
-        IndexingContext context = nexusIndexer.addIndexingContext( repositoryConfig.getId(), repositoryConfig.getId(), new File( repositoryConfig.getLocation() ),
-                                    new File( repositoryConfig.getLocation(), ".indexer" ), null, null, NexusIndexer.FULL_INDEX );
-        context.setSearchable( true );
-        
-        FlatSearchRequest request = new FlatSearchRequest( q );
-        FlatSearchResponse response = nexusIndexer.searchFlat( request );
-        
-        assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
-        assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
-        assertEquals( 1, response.getTotalHits() );
-        
-        Set<ArtifactInfo> results = response.getResults();
-        
-        ArtifactInfo artifactInfo = (ArtifactInfo) results.iterator().next();
-        assertEquals( "org.apache.archiva", artifactInfo.groupId );
-        assertEquals( "archiva-index-methods-jar-test", artifactInfo.artifactId );
-        assertEquals( "test-repo", artifactInfo.repository );  
+                
+        assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );        
     }
     
     public void testIndexerArtifactAlreadyIndexed()
         throws Exception
     {
+        File artifactFile =
+            new File( repositoryConfig.getLocation(),
+                      "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+        ArtifactIndexingTask task =
+            TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+        
         // begin scan
         Date now = Calendar.getInstance().getTime();
         nexusIndexerConsumer.beginScan( repositoryConfig, now );
         nexusIndexerConsumer.processFile( "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
         nexusIndexerConsumer.completeScan();
         
+        assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
+        
         // scan and index again
         now = Calendar.getInstance().getTime();
         nexusIndexerConsumer.beginScan( repositoryConfig, now );
         nexusIndexerConsumer.processFile( "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );        
         nexusIndexerConsumer.completeScan();
         
-        // search!
-        BooleanQuery q = new BooleanQuery();        
-        q.add( nexusIndexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
-        q.add( nexusIndexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
-        
-        IndexSearcher searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
-        TopDocs topDocs = searcher.search( q, null, 10 );
-        
-        assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
-        assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
-        
-        // should return 2 hits - this will be filtered out by the NexusRespositorySearch when it returns the results!
-        assertEquals( 2, topDocs.totalHits );
+        assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );        
     }
     
     public void testIndexerIndexArtifactThenPom()
         throws Exception
-    {        
+    {    
+        File artifactFile =
+            new File( repositoryConfig.getLocation(),
+                      "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+        ArtifactIndexingTask task =
+            TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+        
         // begin scan
         Date now = Calendar.getInstance().getTime();
         nexusIndexerConsumer.beginScan( repositoryConfig, now );
         nexusIndexerConsumer.processFile( "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
         nexusIndexerConsumer.completeScan();
         
+        assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
+        
+        artifactFile =
+            new File( repositoryConfig.getLocation(),
+                      "org/apache/archiva/archiva-index-methods-jar-test/1.0/pom.xml" );
+
+        task =
+            TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+        
         // scan and index again
         now = Calendar.getInstance().getTime();
         nexusIndexerConsumer.beginScan( repositoryConfig, now );
         nexusIndexerConsumer.processFile( "org/apache/archiva/archiva-index-methods-jar-test/1.0/pom.xml" );        
         nexusIndexerConsumer.completeScan();
         
-        // search!
-        BooleanQuery q = new BooleanQuery();        
-        q.add( nexusIndexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
-        q.add( nexusIndexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
-        
-        IndexSearcher searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
-        TopDocs topDocs = searcher.search( q, null, 10 );
-        
-        assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
-        assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
-        
-        // should return only 1 hit 
-        assertEquals( 1, topDocs.totalHits );
-    }    
+        assertTrue( scheduler.isProcessingIndexingTaskWithName( task.getName() ) );
+    }  
+    
+    @Override
+    protected String getPlexusConfigLocation()
+    {
+        return "/org/apache/archiva/consumers/lucene/LuceneConsumersTest.xml";
+    }
 }
diff --git a/archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/archiva/consumers/lucene/stubs/ArchivaDAOStub.java b/archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/archiva/consumers/lucene/stubs/ArchivaDAOStub.java
new file mode 100644 (file)
index 0000000..47ebdce
--- /dev/null
@@ -0,0 +1,74 @@
+package org.apache.archiva.consumers.lucene.stubs;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.maven.archiva.database.ArchivaDAO;
+import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.maven.archiva.database.ProjectModelDAO;
+import org.apache.maven.archiva.database.RepositoryContentStatisticsDAO;
+import org.apache.maven.archiva.database.RepositoryProblemDAO;
+import org.apache.maven.archiva.database.SimpleConstraint;
+
+/**
+ * Stub used for faster tests. Not actually exercised by the unit tests; only needed for dependency injection.
+ */
+public class ArchivaDAOStub
+    implements ArchivaDAO
+{
+
+    public ArtifactDAO getArtifactDAO()
+    {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    public ProjectModelDAO getProjectModelDAO()
+    {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    public RepositoryContentStatisticsDAO getRepositoryContentStatisticsDAO()
+    {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    public RepositoryProblemDAO getRepositoryProblemDAO()
+    {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    public List<?> query( SimpleConstraint constraint )
+    {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    public Object save( Serializable obj )
+    {
+        // TODO Auto-generated method stub
+        return null;
+    }
+}
diff --git a/archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/resources/org/apache/archiva/consumers/lucene/LuceneConsumersTest.xml b/archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/resources/org/apache/archiva/consumers/lucene/LuceneConsumersTest.xml
new file mode 100644 (file)
index 0000000..4b3f8e2
--- /dev/null
@@ -0,0 +1,28 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.apache.maven.archiva.database.ArchivaDAO</role>
+      <role-hint>jdo</role-hint>
+      <implementation>org.apache.archiva.consumers.lucene.stubs.ArchivaDAOStub</implementation>
+    </component>
+  </components>
+</component-set>
\ No newline at end of file
index 9d88d3d77eb7a4aac9976847c44edbf10a96169f..3c8f5a577cbc51b7390ef86dae653c6515970c86 100644 (file)
@@ -66,8 +66,12 @@ public class NexusRepositorySearch
         throws RepositorySearchException
     {   
         addIndexingContexts( selectedRepos );
-                
+        
+        // since upgrade to nexus 2.0.0, query has changed from g:[QUERIED TERM]* to g:*[QUERIED TERM]*
+        //      resulting in more wildcard searches, so we need to increase the max clause count
+        BooleanQuery.setMaxClauseCount( Integer.MAX_VALUE );
         BooleanQuery q = new BooleanQuery();
+        
         if( previousSearchTerms == null || previousSearchTerms.isEmpty() )
         {            
             constructQuery( term, q );
@@ -85,7 +89,7 @@ public class NexusRepositorySearch
             BooleanQuery iQuery = new BooleanQuery();
             constructQuery( term, iQuery );
             q.add( iQuery, Occur.MUST );
-        }        
+        }      
                     
         return search( limits, q );
     }
index 664ff1d2408b9c48b94f2b4cb55283beaf3a8413..445d70962412735bb07a6405555de7c6f7e38949 100644 (file)
       <groupId>org.codehaus.plexus.registry</groupId>
       <artifactId>plexus-registry-api</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.sonatype.nexus</groupId>
+      <artifactId>nexus-indexer</artifactId>
+    </dependency>
     <!-- Test Dependencies -->
     <dependency>
       <groupId>org.apache.archiva</groupId>
index 2edb02150120bd31c1493484b89dac6aac63830b..d3e9592ad0ef1a27c71cfa06e819f6b57b23829f 100644 (file)
@@ -20,6 +20,7 @@ package org.apache.maven.archiva.scheduled;
  */
 
 import org.apache.maven.archiva.common.ArchivaException;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
 import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
 import org.codehaus.plexus.taskqueue.TaskQueueException;
@@ -36,24 +37,86 @@ public interface ArchivaTaskScheduler
      */
     public final static String ROLE = ArchivaTaskScheduler.class.getName();
 
+    /**
+     * Checks if there is any repository scanning task queued.
+     * 
+     * @return
+     * @throws ArchivaException
+     */
     public boolean isProcessingAnyRepositoryTask()
         throws ArchivaException;
 
+    /**
+     * Checks if there is any database scanning task queued.
+     * 
+     * @return
+     * @throws ArchivaException
+     */
     public boolean isProcessingDatabaseTask()
         throws ArchivaException;
 
+    /**
+     * Checks if a repository scanning task for the specified repository is queued.
+     * 
+     * @param repositoryId
+     * @return
+     * @throws ArchivaException
+     */
     public boolean isProcessingRepositoryTask( String repositoryId )
         throws ArchivaException;
     
+    /**
+     * Checks if a repository scanning task with the specified name is queued.
+     * 
+     * @param taskName
+     * @return
+     * @throws ArchivaException
+     */
     public boolean isProcessingRepositoryTaskWithName( String taskName )
         throws ArchivaException;
+    
+    /**
+     * Checks if an indexing task with the specified name is queued.
+     * 
+     * @param taskName
+     * @return
+     * @throws ArchivaException
+     */
+    public boolean isProcessingIndexingTaskWithName( String taskName )
+        throws ArchivaException;
 
+    /**
+     * Adds the database task to the database scanning queue.
+     * 
+     * @param task
+     * @throws TaskQueueException
+     */
     public void queueDatabaseTask( DatabaseTask task )
         throws TaskQueueException;
 
+    /**
+     * Adds the repository task to the repo scanning queue.
+     * 
+     * @param task
+     * @throws TaskQueueException
+     */
     public void queueRepositoryTask( RepositoryTask task )
         throws TaskQueueException;
+    
+    /**
+     * Adds the indexing task to the indexing queue.
+     * 
+     * @param task
+     * @throws TaskQueueException
+     */
+    public void queueIndexingTask( ArtifactIndexingTask task )
+        throws TaskQueueException;
 
+    /**
+     * Schedules the database tasks using the set cron expression.
+     * 
+     * @throws TaskExecutionException
+     */
     public void scheduleDatabaseTasks()
         throws TaskExecutionException;
 
index cde1d29feed0489e19e1ddbceb2baf3e405360fa..de956ef8ccc31b8747d68542d80791111f3328e1 100644 (file)
@@ -29,9 +29,10 @@ import org.apache.maven.archiva.database.ArchivaDAO;
 import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
 import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
 import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
 import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
-import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskNameSelectionPredicate;
+import org.apache.maven.archiva.scheduled.tasks.ArchivaTaskNameSelectionPredicate;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate;
 import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
@@ -80,6 +81,11 @@ public class DefaultArchivaTaskScheduler
      * @plexus.requirement role-hint="repository-scanning"
      */
     private TaskQueue repositoryScanningQueue;
+    
+    /**
+     * @plexus.requirement role-hint="indexing"
+     */
+    private TaskQueue indexingQueue;
 
     /**
      * @plexus.requirement
@@ -102,6 +108,8 @@ public class DefaultArchivaTaskScheduler
     public static final String REPOSITORY_JOB = "repository-job";
 
     public static final String REPOSITORY_JOB_TRIGGER = "repository-job-trigger";
+    
+    public static final String INDEXING_JOB = "indexing-job";
 
     public static final String CRON_HOURLY = "0 0 * * * ?";
 
@@ -153,131 +161,6 @@ public class DefaultArchivaTaskScheduler
         }
     }
 
-    @SuppressWarnings("unchecked")
-    private boolean isPreviouslyScanned( ManagedRepositoryConfiguration repoConfig )
-    {
-        List<RepositoryScanStatistics> results =
-            (List<RepositoryScanStatistics>) dao.query( new MostRecentRepositoryScanStatistics( repoConfig.getId() ) );
-
-        if ( results != null && !results.isEmpty() )
-        {
-            return true;
-        }
-
-        return false;
-    }
-    
-    // MRM-848: Pre-configured repository initially appear to be empty
-    private synchronized void queueInitialRepoScan( ManagedRepositoryConfiguration repoConfig )
-    {
-        String repoId = repoConfig.getId();        
-        RepositoryTask task = TaskCreator.createRepositoryTask( repoId, "initial-scan" );
-
-        if ( queuedRepos.contains( repoId ) )
-        {
-            log.error( "Repository [" + repoId + "] is currently being processed or is already queued." );
-        }
-        else
-        {
-            try
-            {
-                queuedRepos.add( repoConfig.getId() );
-                this.queueRepositoryTask( task );
-            }
-            catch ( TaskQueueException e )
-            {
-                log.error( "Error occurred while queueing repository [" + repoId + "] task : " + e.getMessage() );
-            }
-        }
-    }
-    
-    private synchronized void scheduleRepositoryJobs( ManagedRepositoryConfiguration repoConfig )
-        throws SchedulerException
-    {
-        if ( repoConfig.getRefreshCronExpression() == null )
-        {
-            log.warn( "Skipping job, no cron expression for " + repoConfig.getId() );
-            return;
-        }
-        
-        if ( !repoConfig.isScanned() )
-        {
-            log.warn( "Skipping job, repository scannable has been disabled for " + repoConfig.getId() );
-            return;
-        }
-
-        // get the cron string for these database scanning jobs
-        String cronString = repoConfig.getRefreshCronExpression();
-
-        CronExpressionValidator cronValidator = new CronExpressionValidator();
-        if ( !cronValidator.validate( cronString ) )
-        {
-            log.warn( "Cron expression [" + cronString + "] for repository [" + repoConfig.getId() +
-                "] is invalid.  Defaulting to hourly." );
-            cronString = CRON_HOURLY;
-        }
-
-        // setup the unprocessed artifact job
-        JobDetail repositoryJob =
-            new JobDetail( REPOSITORY_JOB + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP, RepositoryTaskJob.class );
-
-        JobDataMap dataMap = new JobDataMap();
-        dataMap.put( RepositoryTaskJob.TASK_QUEUE, repositoryScanningQueue );
-        dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, ArchivaTask.QUEUE_POLICY_WAIT );
-        dataMap.put( RepositoryTaskJob.TASK_REPOSITORY, repoConfig.getId() );
-        repositoryJob.setJobDataMap( dataMap );
-
-        try
-        {
-            CronTrigger trigger =
-                new CronTrigger( REPOSITORY_JOB_TRIGGER + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP, cronString );
-
-            jobs.add( REPOSITORY_JOB + ":" + repoConfig.getId() );
-            scheduler.scheduleJob( repositoryJob, trigger );
-        }
-        catch ( ParseException e )
-        {
-            log.error(
-                "ParseException in repository scanning cron expression, disabling repository scanning for '" +
-                    repoConfig.getId() + "': " + e.getMessage() );
-        }
-
-    }
-
-    private synchronized void scheduleDatabaseJobs()
-        throws SchedulerException
-    {
-        String cronString = archivaConfiguration.getConfiguration().getDatabaseScanning().getCronExpression();
-
-        // setup the unprocessed artifact job
-        JobDetail databaseJob = new JobDetail( DATABASE_JOB, DATABASE_SCAN_GROUP, DatabaseTaskJob.class );
-
-        JobDataMap dataMap = new JobDataMap();
-        dataMap.put( DatabaseTaskJob.TASK_QUEUE, databaseUpdateQueue );
-        databaseJob.setJobDataMap( dataMap );
-
-        CronExpressionValidator cronValidator = new CronExpressionValidator();
-        if ( !cronValidator.validate( cronString ) )
-        {
-            log.warn(
-                "Cron expression [" + cronString + "] for database update is invalid.  Defaulting to hourly." );
-            cronString = CRON_HOURLY;
-        }
-
-        try
-        {
-            CronTrigger trigger = new CronTrigger( DATABASE_JOB_TRIGGER, DATABASE_SCAN_GROUP, cronString );
-
-            scheduler.scheduleJob( databaseJob, trigger );
-        }
-        catch ( ParseException e )
-        {
-            log.error(
-                "ParseException in database scanning cron expression, disabling database scanning: " + e.getMessage() );
-        }
-
-    }
-
     public void stop()
         throws StoppingException
     {
@@ -298,6 +181,9 @@ public class DefaultArchivaTaskScheduler
         }
     }
 
+    /**
+     * @see ArchivaTaskScheduler#scheduleDatabaseTasks()
+     */
     public void scheduleDatabaseTasks()
         throws TaskExecutionException
     {
@@ -312,6 +198,9 @@ public class DefaultArchivaTaskScheduler
         }
     }
 
+    /**
+     * @see ArchivaTaskScheduler#isProcessingAnyRepositoryTask()
+     */
     @SuppressWarnings("unchecked")
     public boolean isProcessingAnyRepositoryTask()
         throws ArchivaException
@@ -333,6 +222,9 @@ public class DefaultArchivaTaskScheduler
         }
     }
 
+    /**
+     * @see ArchivaTaskScheduler#isProcessingRepositoryTask(String)
+     */
     @SuppressWarnings("unchecked")
     public boolean isProcessingRepositoryTask( String repositoryId )
         throws ArchivaException
@@ -354,6 +246,9 @@ public class DefaultArchivaTaskScheduler
         }
     }
     
+    /**
+     * @see ArchivaTaskScheduler#isProcessingRepositoryTaskWithName(String)
+     */
     @SuppressWarnings("unchecked")
     public boolean isProcessingRepositoryTaskWithName( String taskName )
         throws ArchivaException
@@ -371,10 +266,37 @@ public class DefaultArchivaTaskScheduler
                 throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
             }
     
-            return CollectionUtils.exists( queue, new RepositoryTaskNameSelectionPredicate( taskName ) );
+            return CollectionUtils.exists( queue, new ArchivaTaskNameSelectionPredicate( taskName ) );
         }
     }
 
+    /**
+     * @see ArchivaTaskScheduler#isProcessingIndexingTaskWithName(String)
+     */
+    @SuppressWarnings("unchecked")
+    public boolean isProcessingIndexingTaskWithName( String taskName )
+        throws ArchivaException
+    {
+        synchronized( indexingQueue )
+        {
+            List<? extends Task> queue = null;
+            
+            try
+            {
+                queue = indexingQueue.getQueueSnapshot();
+            }
+            catch ( TaskQueueException e )
+            {
+                throw new ArchivaException( "Unable to get indexing scanning queue:" + e.getMessage(), e );
+            }
+    
+            return CollectionUtils.exists( queue, new ArchivaTaskNameSelectionPredicate( taskName ) );
+        }
+    }
+    
+    /**
+     * @see ArchivaTaskScheduler#isProcessingDatabaseTask()
+     */
     @SuppressWarnings("unchecked")
     public boolean isProcessingDatabaseTask()
         throws ArchivaException
@@ -393,6 +315,9 @@ public class DefaultArchivaTaskScheduler
         return !queue.isEmpty();
     }
 
+    /**
+     * @see ArchivaTaskScheduler#queueRepositoryTask(RepositoryTask)
+     */
     public void queueRepositoryTask( RepositoryTask task )
         throws TaskQueueException
     {
@@ -420,11 +345,23 @@ public class DefaultArchivaTaskScheduler
         }
     }
 
+    /**
+     * @see ArchivaTaskScheduler#queueDatabaseTask(DatabaseTask)
+     */
     public void queueDatabaseTask( DatabaseTask task )
         throws TaskQueueException
     {
         databaseUpdateQueue.put( task );
     }
+    
+    /**
+     * @see ArchivaTaskScheduler#queueIndexingTask(ArtifactIndexingTask)
+     */
+    public void queueIndexingTask( ArtifactIndexingTask task )
+        throws TaskQueueException
+    {
+        indexingQueue.put( task );
+    }
 
     public void configurationEvent( ConfigurationEvent event )
     {
@@ -472,4 +409,129 @@ public class DefaultArchivaTaskScheduler
             }
         }
     }
+    
+    @SuppressWarnings("unchecked")
+    private boolean isPreviouslyScanned( ManagedRepositoryConfiguration repoConfig )
+    {
+        List<RepositoryScanStatistics> results =
+            (List<RepositoryScanStatistics>) dao.query( new MostRecentRepositoryScanStatistics( repoConfig.getId() ) );
+
+        if ( results != null && !results.isEmpty() )
+        {
+            return true;
+        }
+
+        return false;
+    }
+    
+    // MRM-848: Pre-configured repositories initially appear to be empty
+    private synchronized void queueInitialRepoScan( ManagedRepositoryConfiguration repoConfig )
+    {
+        String repoId = repoConfig.getId();        
+        RepositoryTask task = TaskCreator.createRepositoryTask( repoId, "initial-scan" );
+
+        if ( queuedRepos.contains( repoId ) )
+        {
+            log.error( "Repository [" + repoId + "] is currently being processed or is already queued." );
+        }
+        else
+        {
+            try
+            {
+                queuedRepos.add( repoConfig.getId() );
+                this.queueRepositoryTask( task );
+            }
+            catch ( TaskQueueException e )
+            {
+                log.error( "Error occurred while queueing repository [" + repoId + "] task : " + e.getMessage() );
+            }
+        }
+    }
+    
+    private synchronized void scheduleRepositoryJobs( ManagedRepositoryConfiguration repoConfig )
+        throws SchedulerException
+    {
+        if ( repoConfig.getRefreshCronExpression() == null )
+        {
+            log.warn( "Skipping job, no cron expression for " + repoConfig.getId() );
+            return;
+        }
+        
+        if ( !repoConfig.isScanned() )
+        {
+            log.warn( "Skipping job, repository scannable has been disabled for " + repoConfig.getId() );
+            return;
+        }
+
+        // get the cron string for this repository scanning job
+        String cronString = repoConfig.getRefreshCronExpression();
+
+        CronExpressionValidator cronValidator = new CronExpressionValidator();
+        if ( !cronValidator.validate( cronString ) )
+        {
+            log.warn( "Cron expression [" + cronString + "] for repository [" + repoConfig.getId() +
+                "] is invalid.  Defaulting to hourly." );
+            cronString = CRON_HOURLY;
+        }
+
+        // set up the repository scanning job
+        JobDetail repositoryJob =
+            new JobDetail( REPOSITORY_JOB + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP, RepositoryTaskJob.class );
+
+        JobDataMap dataMap = new JobDataMap();
+        dataMap.put( RepositoryTaskJob.TASK_QUEUE, repositoryScanningQueue );
+        dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, ArchivaTask.QUEUE_POLICY_WAIT );
+        dataMap.put( RepositoryTaskJob.TASK_REPOSITORY, repoConfig.getId() );
+        repositoryJob.setJobDataMap( dataMap );
+
+        try
+        {
+            CronTrigger trigger =
+                new CronTrigger( REPOSITORY_JOB_TRIGGER + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP, cronString );
+
+            jobs.add( REPOSITORY_JOB + ":" + repoConfig.getId() );
+            scheduler.scheduleJob( repositoryJob, trigger );
+        }
+        catch ( ParseException e )
+        {
+            log.error(
+                "ParseException in repository scanning cron expression, disabling repository scanning for '" +
+                    repoConfig.getId() + "': " + e.getMessage() );
+        }
+
+    }
+
+    private synchronized void scheduleDatabaseJobs()
+        throws SchedulerException
+    {
+        String cronString = archivaConfiguration.getConfiguration().getDatabaseScanning().getCronExpression();
+
+        // setup the unprocessed artifact job
+        JobDetail databaseJob = new JobDetail( DATABASE_JOB, DATABASE_SCAN_GROUP, DatabaseTaskJob.class );
+
+        JobDataMap dataMap = new JobDataMap();
+        dataMap.put( DatabaseTaskJob.TASK_QUEUE, databaseUpdateQueue );
+        databaseJob.setJobDataMap( dataMap );
+
+        CronExpressionValidator cronValidator = new CronExpressionValidator();
+        if ( !cronValidator.validate( cronString ) )
+        {
+            log.warn(
+                "Cron expression [" + cronString + "] for database update is invalid.  Defaulting to hourly." );
+            cronString = CRON_HOURLY;
+        }
+
+        try
+        {
+            CronTrigger trigger = new CronTrigger( DATABASE_JOB_TRIGGER, DATABASE_SCAN_GROUP, cronString );
+
+            scheduler.scheduleJob( databaseJob, trigger );
+        }
+        catch ( ParseException e )
+        {
+            log.error(
+                "ParseException in database scanning cron expression, disabling database scanning: " + e.getMessage() );
+        }
+
+    }
 }
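
For context, the new queueIndexingTask hook above is what the consumers use to hand work to the single indexing queue. A minimal caller-side sketch, assuming an injected ArchivaTaskScheduler and logger (the repository id and artifact path are made up for illustration; only createIndexingTask and queueIndexingTask come from this change):

    // Hypothetical caller: queue an "add to index" task for one artifact.
    File artifactFile = new File( "/repositories/internal/org/foo/foo/1.0/foo-1.0.jar" );
    ArtifactIndexingTask task =
        TaskCreator.createIndexingTask( "internal", artifactFile, ArtifactIndexingTask.ADD );
    try
    {
        taskScheduler.queueIndexingTask( task );   // assumed injected ArchivaTaskScheduler
    }
    catch ( TaskQueueException e )
    {
        log.error( "Error occurred while queueing indexing task: " + e.getMessage(), e );
    }
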
diff --git a/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/executors/ArchivaIndexingTaskExecutor.java b/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/executors/ArchivaIndexingTaskExecutor.java
new file mode 100644 (file)
index 0000000..31d0a9f
--- /dev/null
@@ -0,0 +1,205 @@
+package org.apache.maven.archiva.scheduled.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexReader;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
+import org.codehaus.plexus.taskqueue.Task;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.sonatype.nexus.index.ArtifactContext;
+import org.sonatype.nexus.index.ArtifactContextProducer;
+import org.sonatype.nexus.index.ArtifactInfo;
+import org.sonatype.nexus.index.DefaultArtifactContextProducer;
+import org.sonatype.nexus.index.IndexerEngine;
+import org.sonatype.nexus.index.NexusIndexer;
+import org.sonatype.nexus.index.context.DefaultIndexingContext;
+import org.sonatype.nexus.index.context.IndexingContext;
+import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
+import org.sonatype.nexus.index.packer.IndexPacker;
+import org.sonatype.nexus.index.packer.IndexPackingRequest;
+
+/**
+ * ArchivaIndexingTaskExecutor
+ * 
+ * Executes all indexing tasks: adding, updating and removing artifacts from the index are all handled by
+ * this executor. Add and update tasks are put on the indexing task queue by the NexusIndexerConsumer, while
+ * remove tasks are queued by the LuceneCleanupRemoveIndexedConsumer.
+ * 
+ * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexing"
+ */
+public class ArchivaIndexingTaskExecutor
+    implements TaskExecutor, Initializable
+{
+    private Logger log = LoggerFactory.getLogger( ArchivaIndexingTaskExecutor.class );
+
+    /**
+     * @plexus.requirement
+     */
+    private IndexerEngine indexerEngine;
+    
+    /**
+     * @plexus.requirement
+     */
+    private ArchivaConfiguration archivaConfiguration;
+    
+    /**
+     * @plexus.requirement
+     */
+    private IndexPacker indexPacker;
+    
+    private ArtifactContextProducer artifactContextProducer;
+        
+    public void executeTask( Task task )
+        throws TaskExecutionException
+    {
+        synchronized( indexerEngine )
+        {
+            ArtifactIndexingTask indexingTask = ( ArtifactIndexingTask ) task;
+            
+            ManagedRepositoryConfiguration repository =
+                archivaConfiguration.getConfiguration().findManagedRepositoryById( indexingTask.getRepositoryId() );
+    
+            String indexDir = repository.getIndexDir();
+            File managedRepository = new File( repository.getLocation() );
+            
+            File indexDirectory = null;
+            if( indexDir != null && !"".equals( indexDir ) )
+            {
+                indexDirectory = new File( repository.getIndexDir() );
+            }
+            else
+            {
+                indexDirectory = new File( managedRepository, ".indexer" );
+            }
+            
+            IndexingContext context = null;
+            try
+            {
+                context =
+                    new DefaultIndexingContext( repository.getId(), repository.getId(), managedRepository,
+                                            indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );
+                context.setSearchable( repository.isScanned() );
+                
+                File artifactFile = indexingTask.getResourceFile();                
+                ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile );
+                
+                if( ac != null )
+                {   
+                    if( indexingTask.getAction().equals( ArtifactIndexingTask.ADD ) )
+                    {
+                        boolean add = true;
+                        IndexReader r = context.getIndexReader();      
+                        for ( int i = 0; i < r.numDocs(); i++ )
+                        {
+                            if ( !r.isDeleted( i ) )
+                            {
+                                Document d = r.document( i );          
+                                String uinfo = d.get( ArtifactInfo.UINFO );                                
+                                if( ac.getArtifactInfo().getUinfo().equals( uinfo ) )
+                                {
+                                    add = false;
+                                    break;
+                                }
+                            }
+                        }
+                        
+                        if( add )
+                        {   
+                            log.debug( "Adding artifact '" + ac.getArtifactInfo() + "' to index.." );
+                            indexerEngine.index( context, ac );
+                        }
+                        else
+                        {
+                            log.debug( "Updating artifact '" + ac.getArtifactInfo() + "' in index.." );
+                            indexerEngine.update( context, ac );
+                        }
+                    }
+                    else
+                    {                           
+                        log.debug( "removing artifact '" + ac.getArtifactInfo() + "' from index.." );
+                        indexerEngine.remove( context, ac );
+                    }
+                    
+                    final File indexLocation = new File( managedRepository, ".index" );
+                    IndexPackingRequest request = new IndexPackingRequest( context, indexLocation );
+                    indexPacker.packIndex( request );
+                }                
+            }
+            catch ( IOException e )
+            {
+                throw new TaskExecutionException( "Error occurred while executing indexing task '" +
+                    indexingTask.getName() + "'" );
+            }
+            catch ( UnsupportedExistingLuceneIndexException e )
+            {
+                throw new TaskExecutionException( "Unsupported Lucene index format: " + e.getMessage() );
+            }
+            finally
+            {
+                if( context != null )
+                {
+                    try
+                    {
+                        context.close( false );
+                    }
+                    catch ( IOException e )
+                    {
+                        throw new TaskExecutionException( "Error occurred while closing context: " + e.getMessage() );
+                    }
+                }
+            }
+        }
+    }
+
+    public void initialize()
+        throws InitializationException
+    {
+        log.info( "Initialized " + this.getClass().getName() );
+        
+        artifactContextProducer = new DefaultArtifactContextProducer();
+    }
+
+    public void setIndexerEngine( IndexerEngine indexerEngine )
+    {
+        this.indexerEngine = indexerEngine;
+    }
+
+    public void setIndexPacker( IndexPacker indexPacker )
+    {
+        this.indexPacker = indexPacker;
+    }
+
+    public void setArchivaConfiguration( ArchivaConfiguration archivaConfiguration )
+    {
+        this.archivaConfiguration = archivaConfiguration;
+    }
+
+}
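
The executor above decides between index() and update() by scanning the live documents in the context's IndexReader for a matching UINFO. A minimal sketch of that check pulled out as a helper (the method name is hypothetical; the calls mirror the loop in executeTask):

    // Returns true when no live document already carries this artifact's UINFO,
    // i.e. the artifact should be added rather than updated.
    private boolean isNewToIndex( IndexingContext context, ArtifactContext ac )
        throws IOException
    {
        IndexReader r = context.getIndexReader();
        for ( int i = 0; i < r.numDocs(); i++ )
        {
            if ( !r.isDeleted( i ) )
            {
                Document d = r.document( i );
                if ( ac.getArtifactInfo().getUinfo().equals( d.get( ArtifactInfo.UINFO ) ) )
                {
                    return false;
                }
            }
        }
        return true;
    }
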
diff --git a/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/ArchivaTaskNameSelectionPredicate.java b/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/ArchivaTaskNameSelectionPredicate.java
new file mode 100644 (file)
index 0000000..a2b47c1
--- /dev/null
@@ -0,0 +1,47 @@
+package org.apache.maven.archiva.scheduled.tasks;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.collections.Predicate;
+import org.apache.commons.lang.StringUtils;
+
+public class ArchivaTaskNameSelectionPredicate
+    implements Predicate
+{
+    private String taskName;
+
+    public ArchivaTaskNameSelectionPredicate( String taskName )
+    {
+        this.taskName = taskName;
+    }
+
+    public boolean evaluate( Object object )
+    {
+        boolean satisfies = false;
+
+        if ( object instanceof ArchivaTask )
+        {
+            ArchivaTask task = (ArchivaTask) object;
+            return StringUtils.equals( taskName, task.getName() );
+        }
+
+        return satisfies;
+    }
+}
diff --git a/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/ArtifactIndexingTask.java b/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/ArtifactIndexingTask.java
new file mode 100644 (file)
index 0000000..5af3c58
--- /dev/null
@@ -0,0 +1,117 @@
+package org.apache.maven.archiva.scheduled.tasks;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+
+import org.sonatype.nexus.index.ArtifactContext;
+
+public class ArtifactIndexingTask
+    implements ArchivaTask
+{
+    public static final String ADD = "add";
+    
+    public static final String DELETE = "delete";
+    
+    String repositoryId;
+    
+    String name;
+    
+    String queuePolicy;
+
+    long maxExecutionTime;
+    
+    File resourceFile;
+    
+    ArtifactContext artifactContext;
+    
+    String action;
+    
+    public String getRepositoryId()
+    {
+        return repositoryId;
+    }
+
+    public void setRepositoryId( String repositoryId )
+    {
+        this.repositoryId = repositoryId;
+    }
+
+    public long getMaxExecutionTime()
+    {
+        return maxExecutionTime;
+    }
+
+    public void setMaxExecutionTime( long maxExecutionTime )
+    {
+        this.maxExecutionTime = maxExecutionTime;
+    }
+
+    public String getName()
+    {
+        return name;
+    }
+
+    public void setName( String name )
+    {
+        this.name = name;
+    }
+
+    public String getQueuePolicy()
+    {
+        return queuePolicy;
+    }
+
+    public void setQueuePolicy( String queuePolicy )
+    {
+        this.queuePolicy = queuePolicy;
+    }
+
+    public File getResourceFile()
+    {
+        return resourceFile;
+    }
+
+    public void setResourceFile( File resourceFile )
+    {
+        this.resourceFile = resourceFile;
+    }
+
+    public ArtifactContext getArtifactContext()
+    {
+        return artifactContext;
+    }
+
+    public void setArtifactContext( ArtifactContext artifactContext )
+    {
+        this.artifactContext = artifactContext;
+    }
+
+    public String getAction()
+    {
+        return action;
+    }
+
+    public void setAction( String action )
+    {
+        this.action = action;
+    }
+
+}
diff --git a/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/RepositoryTaskNameSelectionPredicate.java b/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/RepositoryTaskNameSelectionPredicate.java
deleted file mode 100644 (file)
index 5e2f460..0000000
+++ /dev/null
@@ -1,47 +0,0 @@
-package org.apache.maven.archiva.scheduled.tasks;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.collections.Predicate;
-import org.apache.commons.lang.StringUtils;
-
-public class RepositoryTaskNameSelectionPredicate
-    implements Predicate
-{
-    private String taskName;
-
-    public RepositoryTaskNameSelectionPredicate( String taskName )
-    {
-        this.taskName = taskName;
-    }
-
-    public boolean evaluate( Object object )
-    {
-        boolean satisfies = false;
-
-        if ( object instanceof RepositoryTask )
-        {
-            RepositoryTask task = (RepositoryTask) object;
-            return StringUtils.equals( taskName, task.getName() );
-        }
-
-        return satisfies;
-    }
-}
index 3eca9adff542c15fc8faef042912439c5911550b..1a769276193af7d6ace26290abaf5ffda555ffb8 100644 (file)
@@ -25,7 +25,9 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;
 
 /**
- *
+ * TaskCreator
+ * 
+ * Convenience class for creating Archiva tasks.
  */
 public class TaskCreator
 {
@@ -53,4 +55,18 @@ public class TaskCreator
         return task;
     }
     
+    public static ArtifactIndexingTask createIndexingTask( String repositoryId, File resource,
+                                                           String action )
+    {
+        ArtifactIndexingTask task = new ArtifactIndexingTask();
+        task.setRepositoryId( repositoryId );
+        task.setName( DefaultArchivaTaskScheduler.INDEXING_JOB + ":" + repositoryId + ":" + resource.getName() + ":" +
+            action );
+        task.setAction( action );
+        task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );
+        task.setResourceFile( resource );
+        
+        return task;
+    }
+    
 }
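
The task name built here (INDEXING_JOB + ":" + repositoryId + ":" + fileName + ":" + action) is what ArchivaTaskNameSelectionPredicate matches against in isProcessingIndexingTaskWithName. A rough usage sketch, assuming DefaultArchivaTaskScheduler.INDEXING_JOB is the string "indexing" (its value is not shown in this diff):

    File jar = new File( "/repositories/internal/org/foo/foo/1.0/foo-1.0.jar" );
    ArtifactIndexingTask task =
        TaskCreator.createIndexingTask( "internal", jar, ArtifactIndexingTask.ADD );
    // task.getName() would then be "indexing:internal:foo-1.0.jar:add", which the
    // scheduler can look for in the queue snapshot before queueing duplicate work.
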
index 4d6d6370fe2743f01544c3d3f28c442b0fab4b6c..27e491340086c2a74776f0b56a5f6b5feac45e1f 100644 (file)
       </configuration>
     </component>
   </components>
+  
+  <!--
+   |
+   |  Indexing Task Queue / Executor
+   |
+   -->
+   <component>
+      <role>org.codehaus.plexus.taskqueue.TaskQueue</role>
+      <role-hint>indexing</role-hint>
+      <implementation>org.codehaus.plexus.taskqueue.DefaultTaskQueue</implementation>
+      <lifecycle-handler>plexus-configurable</lifecycle-handler>
+      <configuration>
+        <task-entry-evaluators>
+        </task-entry-evaluators>
+        <task-exit-evaluators>
+        </task-exit-evaluators>
+        <task-viability-evaluators>
+        </task-viability-evaluators>
+      </configuration>
+    </component>
+
+    <component>
+      <role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
+      <role-hint>indexing</role-hint>
+      <implementation>org.codehaus.plexus.taskqueue.execution.ThreadedTaskQueueExecutor</implementation>
+      <instantiation-strategy>singleton</instantiation-strategy>
+      <requirements>
+        <requirement>
+          <role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
+          <role-hint>indexing</role-hint>
+        </requirement>
+        <requirement>
+          <role>org.codehaus.plexus.taskqueue.TaskQueue</role>
+          <role-hint>indexing</role-hint>
+        </requirement>
+      </requirements>
+      <configuration>
+        <name>indexing</name>
+      </configuration>
+    </component>
 
 </component-set>
diff --git a/archiva-modules/archiva-scheduled/src/test/java/org/apache/maven/archiva/scheduled/executors/ArchivaIndexingTaskExecutorTest.java b/archiva-modules/archiva-scheduled/src/test/java/org/apache/maven/archiva/scheduled/executors/ArchivaIndexingTaskExecutorTest.java
new file mode 100644 (file)
index 0000000..1359099
--- /dev/null
@@ -0,0 +1,237 @@
+package org.apache.maven.archiva.scheduled.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.util.Set;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+import org.easymock.MockControl;
+import org.sonatype.nexus.index.ArtifactInfo;
+import org.sonatype.nexus.index.FlatSearchRequest;
+import org.sonatype.nexus.index.FlatSearchResponse;
+import org.sonatype.nexus.index.IndexerEngine;
+import org.sonatype.nexus.index.NexusIndexer;
+import org.sonatype.nexus.index.context.IndexingContext;
+import org.sonatype.nexus.index.packer.IndexPacker;
+
+/**
+ * ArchivaIndexingTaskExecutorTest
+ */
+public class ArchivaIndexingTaskExecutorTest
+    extends PlexusInSpringTestCase
+{
+    private ArchivaIndexingTaskExecutor indexingExecutor;
+    
+    private IndexerEngine indexerEngine;
+    
+    private IndexPacker indexPacker;
+        
+    private MockControl archivaConfigControl;
+    
+    private ArchivaConfiguration archivaConfiguration;
+    
+    private ManagedRepositoryConfiguration repositoryConfig;
+    
+    private Configuration configuration;
+    
+    private NexusIndexer indexer;
+    
+    protected void setUp() throws Exception
+    {
+        super.setUp();
+        
+        indexingExecutor = new ArchivaIndexingTaskExecutor();
+        indexingExecutor.initialize();    
+        
+        repositoryConfig = new ManagedRepositoryConfiguration();
+        repositoryConfig.setId( "test-repo" );
+        repositoryConfig.setLocation( getBasedir() + "/target/test-classes/test-repo" );
+        repositoryConfig.setLayout( "default" );
+        repositoryConfig.setName( "Test Repository" );
+        repositoryConfig.setScanned( true );
+        repositoryConfig.setSnapshots( false );
+        repositoryConfig.setReleases( true );
+        
+        configuration = new Configuration();
+        configuration.addManagedRepository( repositoryConfig );
+        
+        archivaConfigControl = MockControl.createControl( ArchivaConfiguration.class );
+        archivaConfiguration = ( ArchivaConfiguration ) archivaConfigControl.getMock();
+        
+        indexer = ( NexusIndexer ) lookup( NexusIndexer.class );        
+        indexerEngine = ( IndexerEngine ) lookup ( IndexerEngine.class );        
+        indexPacker = ( IndexPacker ) lookup( IndexPacker.class );
+        
+        indexingExecutor.setIndexerEngine( indexerEngine );        
+        indexingExecutor.setIndexPacker( indexPacker );        
+        indexingExecutor.setArchivaConfiguration( archivaConfiguration );
+    }
+    
+    protected void tearDown() throws Exception
+    {
+        // delete created index in the repository
+        File indexDir = new File( repositoryConfig.getLocation(), ".indexer" );
+        FileUtils.deleteDirectory( indexDir );
+        assertFalse( indexDir.exists() );
+        
+        indexDir = new File( repositoryConfig.getLocation(), ".index" );
+        FileUtils.deleteDirectory( indexDir );
+        assertFalse( indexDir.exists() );
+        
+        super.tearDown();
+    }
+    
+    public void testAddArtifactToIndex()
+        throws Exception
+    {
+        File artifactFile =
+            new File( repositoryConfig.getLocation(),
+                      "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+        ArtifactIndexingTask task =
+            TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+        
+        archivaConfigControl.expectAndReturn( archivaConfiguration.getConfiguration(), configuration );
+        
+        archivaConfigControl.replay();
+        
+        indexingExecutor.executeTask( task );
+        
+        archivaConfigControl.verify();
+        
+        BooleanQuery q = new BooleanQuery();        
+        q.add( indexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
+        q.add( indexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
+        
+        IndexingContext context = indexer.addIndexingContext( repositoryConfig.getId(), repositoryConfig.getId(), new File( repositoryConfig.getLocation() ),
+                                    new File( repositoryConfig.getLocation(), ".indexer" ), null, null, NexusIndexer.FULL_INDEX );
+        context.setSearchable( true );
+        
+        FlatSearchRequest request = new FlatSearchRequest( q );
+        FlatSearchResponse response = indexer.searchFlat( request );
+        
+        assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
+        assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+        assertEquals( 1, response.getTotalHits() );
+        
+        Set<ArtifactInfo> results = response.getResults();
+        
+        ArtifactInfo artifactInfo = (ArtifactInfo) results.iterator().next();
+        assertEquals( "org.apache.archiva", artifactInfo.groupId );
+        assertEquals( "archiva-index-methods-jar-test", artifactInfo.artifactId );
+        assertEquals( "test-repo", artifactInfo.repository );
+    }
+    
+    public void testUpdateArtifactInIndex()
+        throws Exception
+    {
+        File artifactFile =
+            new File( repositoryConfig.getLocation(),
+                      "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+        ArtifactIndexingTask task =
+            TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+        
+        archivaConfigControl.expectAndReturn( archivaConfiguration.getConfiguration(), configuration, 2 );
+        
+        archivaConfigControl.replay();
+        
+        indexingExecutor.executeTask( task );
+        indexingExecutor.executeTask( task );
+        
+        archivaConfigControl.verify();
+                        
+        BooleanQuery q = new BooleanQuery();        
+        q.add( indexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
+        q.add( indexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
+        
+        IndexSearcher searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
+        TopDocs topDocs = searcher.search( q, null, 10 );
+        
+        assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
+        assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+        
+        // should only return 1 hit!
+        assertEquals( 1, topDocs.totalHits );
+    }
+    
+    public void testRemoveArtifactFromIndex()
+        throws Exception
+    {
+        File artifactFile =
+            new File( repositoryConfig.getLocation(),
+                      "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+        ArtifactIndexingTask task =
+            TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.ADD );
+        
+        archivaConfigControl.expectAndReturn( archivaConfiguration.getConfiguration(), configuration, 2 );
+        
+        archivaConfigControl.replay();
+        
+        // add artifact to index
+        indexingExecutor.executeTask( task );
+        
+        BooleanQuery q = new BooleanQuery();        
+        q.add( indexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
+        q.add( indexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
+        
+        IndexSearcher searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
+        TopDocs topDocs = searcher.search( q, null, 10 );
+        
+        assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
+        assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+        
+        // should return 1 hit
+        assertEquals( 1, topDocs.totalHits );
+        
+        // remove added artifact from index
+        task =
+            TaskCreator.createIndexingTask( repositoryConfig.getId(), artifactFile, ArtifactIndexingTask.DELETE );
+        indexingExecutor.executeTask( task );
+        
+        archivaConfigControl.verify();
+        
+        q = new BooleanQuery();        
+        q.add( indexer.constructQuery( ArtifactInfo.GROUP_ID, "org.apache.archiva" ), Occur.SHOULD );
+        q.add( indexer.constructQuery( ArtifactInfo.ARTIFACT_ID, "archiva-index-methods-jar-test" ), Occur.SHOULD );
+        
+        searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
+        topDocs = searcher.search( q, null, 10 );
+        
+        assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
+        assertTrue( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+        
+        // artifact should have been removed from the index!
+        assertEquals( 0, topDocs.totalHits );
+    }
+    
+}
diff --git a/archiva-modules/archiva-scheduled/src/test/resources/test-repo/org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar b/archiva-modules/archiva-scheduled/src/test/resources/test-repo/org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar
new file mode 100644 (file)
index 0000000..cc03dac
Binary files /dev/null and b/archiva-modules/archiva-scheduled/src/test/resources/test-repo/org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar differ
diff --git a/archiva-modules/archiva-scheduled/src/test/resources/test-repo/org/apache/archiva/archiva-index-methods-jar-test/1.0/pom.xml b/archiva-modules/archiva-scheduled/src/test/resources/test-repo/org/apache/archiva/archiva-index-methods-jar-test/1.0/pom.xml
new file mode 100644 (file)
index 0000000..e7bf54b
--- /dev/null
@@ -0,0 +1,18 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.archiva</groupId>
+  <artifactId>archiva-index-methods-jar-test</artifactId>
+  <packaging>jar</packaging>
+  <version>1.0</version>
+  <name>archiva-index-methods-jar-test</name>
+  <url>http://maven.apache.org</url>
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>3.8.1</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
index 5694e6ab7b5f19d6f398d2eb8398225ec635fbe8..9adf31458b8d72cae5c58fe176bbb4595251b8cb 100644 (file)
@@ -47,6 +47,7 @@ public class ArchivaStartup
         ArchivaTaskScheduler taskScheduler = (ArchivaTaskScheduler) wac.getBean(PlexusToSpringUtils.buildSpringId(ArchivaTaskScheduler.class));
         wac.getBean(PlexusToSpringUtils.buildSpringId(TaskQueueExecutor.class, "database-update"));
         wac.getBean(PlexusToSpringUtils.buildSpringId(TaskQueueExecutor.class, "repository-scanning"));
+        wac.getBean(PlexusToSpringUtils.buildSpringId(TaskQueueExecutor.class, "indexing"));
 
         try
         {