source.dussan.org Git - archiva.git/commitdiff
PR: MRM-139
author Edwin L. Punzalan <epunzalan@apache.org>
Wed, 25 Oct 2006 03:18:54 +0000 (03:18 +0000)
committer Edwin L. Punzalan <epunzalan@apache.org>
Wed, 25 Oct 2006 03:18:54 +0000 (03:18 +0000)
Enabled use of plexus-taskqueue in addition to the scheduler. The scheduler now queues the tasks instead of executing them directly.

git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@467546 13f79535-47bb-0310-9956-ffa450edef68
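The essence of the change: the Quartz trigger no longer runs the indexer inline. The scheduler stores the "indexer" TaskQueue in the JobDataMap, the triggered job only enqueues an IndexerTask, and the ThreadedTaskQueueExecutor wired up in the new components.xml drains that queue through IndexerTaskExecutor. Below is a minimal sketch of that hand-off, condensed from the RepositoryTaskJob changes further down; the class name QueueingIndexerJob is illustrative only, and it implements Quartz's Job directly rather than extending Plexus's AbstractJob as the real code does.

    import org.apache.maven.archiva.scheduler.task.IndexerTask;
    import org.codehaus.plexus.taskqueue.TaskQueue;
    import org.codehaus.plexus.taskqueue.TaskQueueException;
    import org.quartz.Job;
    import org.quartz.JobDataMap;
    import org.quartz.JobExecutionContext;
    import org.quartz.JobExecutionException;

    // Illustrative sketch (not part of the commit): a Quartz job that hands work
    // off to a plexus-taskqueue instead of indexing inline.
    public class QueueingIndexerJob
        implements Job
    {
        // Key under which the scheduler stores the queue in the job data map
        // (mirrors RepositoryTaskJob.TASK_QUEUE in the diff below).
        static final String TASK_QUEUE = "TASK_QUEUE";

        public void execute( JobExecutionContext context )
            throws JobExecutionException
        {
            JobDataMap dataMap = context.getJobDetail().getJobDataMap();

            // The scheduler placed the "indexer" TaskQueue into the data map when
            // it built the JobDetail (see createJobDetail in the scheduler diff).
            TaskQueue indexerQueue = (TaskQueue) dataMap.get( TASK_QUEUE );

            IndexerTask task = new IndexerTask();
            task.setJobName( context.getJobDetail().getName() );

            try
            {
                // Enqueue only; the ThreadedTaskQueueExecutor configured in
                // components.xml executes the task asynchronously.
                indexerQueue.put( task );
            }
            catch ( TaskQueueException e )
            {
                throw new JobExecutionException( e );
            }
        }
    }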

archiva-core/pom.xml
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java [new file with mode: 0644]
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/RepositoryTask.java
archiva-core/src/main/resources/META-INF/plexus/components.xml [new file with mode: 0644]
archiva-webapp/src/main/resources/META-INF/plexus/application.xml

diff --git a/archiva-core/pom.xml b/archiva-core/pom.xml
index 8862ab6b3d990709db9fe22dd4a49c7dd6ebdfd4..78e4b9384f4e3d447e71c5796c9894d59f8326ae 100644 (file)
       <artifactId>plexus-quartz</artifactId>
       <version>1.0-alpha-2</version>
     </dependency>
+    <dependency>
+      <groupId>org.codehaus.plexus</groupId>
+      <artifactId>plexus-taskqueue</artifactId>
+      <version>1.0-alpha-4-SNAPSHOT</version>
+    </dependency>
   </dependencies>
   <build>
     <plugins>
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java
index bee12f0784a93ab1ec367c7135bbdd4335e0c68d..4724fedc6e1942fab422cb79f10620c5ee48e385 100644 (file)
@@ -22,25 +22,31 @@ import org.apache.maven.archiva.configuration.ConfigurationChangeListener;
 import org.apache.maven.archiva.configuration.ConfigurationStore;
 import org.apache.maven.archiva.configuration.ConfigurationStoreException;
 import org.apache.maven.archiva.configuration.InvalidConfigurationException;
-import org.apache.maven.archiva.scheduler.task.RepositoryTask;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.scheduler.executors.IndexerTaskExecutor;
+import org.apache.maven.archiva.scheduler.task.IndexerTask;
 import org.codehaus.plexus.logging.AbstractLogEnabled;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
-import org.codehaus.plexus.scheduler.AbstractJob;
 import org.codehaus.plexus.scheduler.Scheduler;
+import org.codehaus.plexus.taskqueue.TaskQueue;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
 import org.quartz.CronTrigger;
 import org.quartz.JobDataMap;
 import org.quartz.JobDetail;
 import org.quartz.SchedulerException;
 
+import java.io.File;
 import java.text.ParseException;
 
 /**
  * Default implementation of a scheduling component for the application.
  *
  * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo should we use plexus-taskqueue instead of or in addition to this?
  * @plexus.component role="org.apache.maven.archiva.scheduler.RepositoryTaskScheduler"
  */
 public class DefaultRepositoryTaskScheduler
@@ -52,20 +58,30 @@ public class DefaultRepositoryTaskScheduler
      */
     private Scheduler scheduler;
 
+    /**
+     * @plexus.requirement role-hint="indexer"
+     */
+    private TaskQueue indexerQueue;
+
+    /**
+     * @plexus.requirement role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
+     */
+    private IndexerTaskExecutor indexerTaskExecutor;
+
     /**
      * @plexus.requirement
      */
     private ConfigurationStore configurationStore;
 
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryArtifactIndexFactory indexFactory;
+
     private static final String DISCOVERER_GROUP = "DISCOVERER";
 
     private static final String INDEXER_JOB = "indexerTask";
 
-    /**
-     * @plexus.requirement role-hint="indexer"
-     */
-    private RepositoryTask indexerTask;
-
     public void start()
         throws StartingException
     {
@@ -97,22 +113,20 @@ public class DefaultRepositoryTaskScheduler
     private void scheduleJobs( Configuration configuration )
         throws ParseException, SchedulerException
     {
-        // TODO: would be nice to queue jobs that are triggered so we could avoid two running at the same time (so have a queue for discovery based jobs so they didn't thrash the repo)
         if ( configuration.getIndexPath() != null )
         {
-            JobDetail jobDetail = createJobDetail( INDEXER_JOB, indexerTask );
+            JobDetail jobDetail = createJobDetail( INDEXER_JOB );
 
             getLogger().info( "Scheduling indexer: " + configuration.getIndexerCronExpression() );
             CronTrigger trigger =
                 new CronTrigger( INDEXER_JOB + "Trigger", DISCOVERER_GROUP, configuration.getIndexerCronExpression() );
             scheduler.scheduleJob( jobDetail, trigger );
 
-            // TODO: run as a job so it doesn't block startup/configuration saving
             try
             {
-                indexerTask.executeNowIfNeeded();
+                queueNowIfNeeded();
             }
-            catch ( TaskExecutionException e )
+            catch ( org.codehaus.plexus.taskqueue.execution.TaskExecutionException e )
             {
                 getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e );
             }
@@ -123,13 +137,14 @@ public class DefaultRepositoryTaskScheduler
         }
     }
 
-    private JobDetail createJobDetail( String jobName, RepositoryTask task )
+    private JobDetail createJobDetail( String jobName )
     {
         JobDetail jobDetail = new JobDetail( jobName, DISCOVERER_GROUP, RepositoryTaskJob.class );
+
         JobDataMap dataMap = new JobDataMap();
-        dataMap.put( AbstractJob.LOGGER, getLogger() );
-        dataMap.put( RepositoryTaskJob.TASK_KEY, task );
+        dataMap.put( RepositoryTaskJob.TASK_QUEUE, indexerQueue );
         jobDetail.setJobDataMap( dataMap );
+
         return jobDetail;
     }
 
@@ -170,9 +185,51 @@ public class DefaultRepositoryTaskScheduler
     }
 
     public void runIndexer()
-        throws TaskExecutionException
+        throws org.apache.maven.archiva.scheduler.TaskExecutionException
+    {
+        IndexerTask task = new IndexerTask();
+        task.setJobName( "INDEX_INIT" );
+        try
+        {
+            indexerQueue.put( task );
+        }
+        catch ( TaskQueueException e )
+        {
+            throw new org.apache.maven.archiva.scheduler.TaskExecutionException( e.getMessage(), e );
+        }
+    }
+
+    public void queueNowIfNeeded()
+        throws org.codehaus.plexus.taskqueue.execution.TaskExecutionException
     {
-        indexerTask.execute();
+        Configuration configuration;
+        try
+        {
+            configuration = configurationStore.getConfigurationFromStore();
+        }
+        catch ( ConfigurationStoreException e )
+        {
+            throw new TaskExecutionException( e.getMessage(), e );
+        }
+
+        File indexPath = new File( configuration.getIndexPath() );
+
+        try
+        {
+            RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
+            if ( !artifactIndex.exists() )
+            {
+                runIndexer();
+            }
+        }
+        catch ( RepositoryIndexException e )
+        {
+            throw new TaskExecutionException( e.getMessage(), e );
+        }
+        catch ( org.apache.maven.archiva.scheduler.TaskExecutionException e )
+        {
+            throw new TaskExecutionException( e.getMessage(), e );
+        }
     }
 
 }
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java
index f94ae2130d98c745d9c717769acbccc82427ff10..6b859a5262814919dd765e3a301b2687b7ead6ad 100644 (file)
@@ -16,8 +16,10 @@ package org.apache.maven.archiva.scheduler;
  * limitations under the License.
  */
 
-import org.apache.maven.archiva.scheduler.task.RepositoryTask;
+import org.apache.maven.archiva.scheduler.task.IndexerTask;
 import org.codehaus.plexus.scheduler.AbstractJob;
+import org.codehaus.plexus.taskqueue.TaskQueue;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
 import org.quartz.JobDataMap;
 import org.quartz.JobExecutionContext;
 import org.quartz.JobExecutionException;
@@ -30,6 +32,8 @@ public class RepositoryTaskJob
 {
     static final String TASK_KEY = "EXECUTION";
 
+    static final String TASK_QUEUE = "TASK_QUEUE";
+
     /**
      * Execute the discoverer and the indexer.
      *
@@ -43,12 +47,16 @@ public class RepositoryTaskJob
         JobDataMap dataMap = context.getJobDetail().getJobDataMap();
         setJobDataMap( dataMap );
 
-        RepositoryTask executor = (RepositoryTask) dataMap.get( TASK_KEY );
+        TaskQueue indexerQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
+
+        IndexerTask task = new IndexerTask();
+        task.setJobName( context.getJobDetail().getName() );
+
         try
         {
-            executor.execute();
+            indexerQueue.put( task );
         }
-        catch ( TaskExecutionException e )
+        catch ( TaskQueueException e )
         {
             throw new JobExecutionException( e );
         }
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java
new file mode 100644 (file)
index 0000000..e91949a
--- /dev/null
@@ -0,0 +1,320 @@
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.configuration.ConfigurationStoreException;
+import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
+import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
+import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.record.IndexRecordExistsArtifactFilter;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.archiva.reporting.ReportExecutor;
+import org.apache.maven.archiva.reporting.ReportGroup;
+import org.apache.maven.archiva.reporting.ReportingDatabase;
+import org.apache.maven.archiva.reporting.ReportingMetadataFilter;
+import org.apache.maven.archiva.reporting.ReportingStoreException;
+import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.resolver.filter.AndArtifactFilter;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.taskqueue.Task;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @author Edwin Punzalan
+ *
+ * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
+ */
+public class IndexerTaskExecutor
+    extends AbstractLogEnabled
+    implements TaskExecutor
+{
+    /**
+     * Configuration store.
+     *
+     * @plexus.requirement
+     */
+    private ConfigurationStore configurationStore;
+
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryArtifactIndexFactory indexFactory;
+
+    /**
+     * @plexus.requirement
+     */
+    private ConfiguredRepositoryFactory repoFactory;
+
+    /**
+     * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
+     */
+    private Map artifactDiscoverers;
+
+    /**
+     * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
+     */
+    private Map metadataDiscoverers;
+
+    /**
+     * @plexus.requirement role-hint="standard"
+     */
+    private RepositoryIndexRecordFactory recordFactory;
+
+    /**
+     * @plexus.requirement
+     */
+    private ReportExecutor reportExecutor;
+
+    /**
+     * @plexus.requirement role-hint="health"
+     */
+    private ReportGroup reportGroup;
+
+    private static final int ARTIFACT_BUFFER_SIZE = 1000;
+
+    public void executeTask( Task task )
+        throws TaskExecutionException
+    {
+        IndexerTask indexerTask = (IndexerTask) task;
+
+        getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() );
+
+        execute();
+    }
+
+    public void execute()
+        throws TaskExecutionException
+    {
+        Configuration configuration;
+        try
+        {
+            configuration = configurationStore.getConfigurationFromStore();
+        }
+        catch ( ConfigurationStoreException e )
+        {
+            throw new TaskExecutionException( e.getMessage(), e );
+        }
+
+        File indexPath = new File( configuration.getIndexPath() );
+
+        execute( configuration, indexPath );
+    }
+
+    public void executeNowIfNeeded()
+        throws TaskExecutionException
+    {
+        Configuration configuration;
+        try
+        {
+            configuration = configurationStore.getConfigurationFromStore();
+        }
+        catch ( ConfigurationStoreException e )
+        {
+            throw new TaskExecutionException( e.getMessage(), e );
+        }
+
+        File indexPath = new File( configuration.getIndexPath() );
+
+        try
+        {
+            RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
+            if ( !artifactIndex.exists() )
+            {
+                execute( configuration, indexPath );
+            }
+        }
+        catch ( RepositoryIndexException e )
+        {
+            throw new TaskExecutionException( e.getMessage(), e );
+        }
+    }
+
+    private void execute( Configuration configuration, File indexPath )
+        throws TaskExecutionException
+    {
+        long time = System.currentTimeMillis();
+        getLogger().info( "Starting repository indexing process" );
+
+        RepositoryArtifactIndex index = indexFactory.createStandardIndex( indexPath );
+
+        try
+        {
+            Collection keys;
+            if ( index.exists() )
+            {
+                keys = index.getAllRecordKeys();
+            }
+            else
+            {
+                keys = Collections.EMPTY_LIST;
+            }
+
+            for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
+            {
+                RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
+
+                if ( repositoryConfiguration.isIndexed() )
+                {
+                    List blacklistedPatterns = new ArrayList();
+                    if ( repositoryConfiguration.getBlackListPatterns() != null )
+                    {
+                        blacklistedPatterns.addAll( repositoryConfiguration.getBlackListPatterns() );
+                    }
+                    if ( configuration.getGlobalBlackListPatterns() != null )
+                    {
+                        blacklistedPatterns.addAll( configuration.getGlobalBlackListPatterns() );
+                    }
+                    boolean includeSnapshots = repositoryConfiguration.isIncludeSnapshots();
+
+                    ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
+                    ReportingDatabase reporter = reportExecutor.getReportDatabase( repository, reportGroup );
+
+                    // keep original value in case there is another process under way
+                    long origStartTime = reporter.getStartTime();
+                    reporter.setStartTime( System.currentTimeMillis() );
+
+                    // Discovery process
+                    String layoutProperty = repositoryConfiguration.getLayout();
+                    ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
+                    AndArtifactFilter filter = new AndArtifactFilter();
+                    filter.add( new IndexRecordExistsArtifactFilter( keys ) );
+                    if ( !includeSnapshots )
+                    {
+                        filter.add( new SnapshotArtifactFilter() );
+                    }
+
+                    // Save some memory by not tracking paths we won't use
+                    // TODO: Plexus CDC should be able to inject this configuration
+                    discoverer.setTrackOmittedPaths( false );
+
+                    getLogger().info( "Searching repository " + repositoryConfiguration.getName() );
+                    List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
+
+                    if ( !artifacts.isEmpty() )
+                    {
+                        getLogger().info( "Discovered " + artifacts.size() + " unindexed artifacts" );
+
+                        // Work through these in batches, then flush the project cache.
+                        for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
+                        {
+                            int end = j + ARTIFACT_BUFFER_SIZE;
+                            List currentArtifacts =
+                                artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
+
+                            // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
+
+                            // run the reports. Done intermittently to avoid losing track of what is indexed since
+                            // that is what the filter is based on.
+                            reportExecutor.runArtifactReports( reportGroup, currentArtifacts, repository );
+
+                            index.indexArtifacts( currentArtifacts, recordFactory );
+
+                            // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
+                            // around that. TODO: remove when it is configurable
+                            flushProjectBuilderCacheHack();
+                        }
+                    }
+
+                    MetadataFilter metadataFilter = new ReportingMetadataFilter( reporter );
+
+                    MetadataDiscoverer metadataDiscoverer =
+                        (MetadataDiscoverer) metadataDiscoverers.get( layoutProperty );
+                    List metadata =
+                        metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, metadataFilter );
+
+                    if ( !metadata.isEmpty() )
+                    {
+                        getLogger().info( "Discovered " + metadata.size() + " unprocessed metadata files" );
+
+                        // run the reports
+                        reportExecutor.runMetadataReports( reportGroup, metadata, repository );
+                    }
+
+                    reporter.setStartTime( origStartTime );
+                }
+            }
+        }
+        catch ( RepositoryIndexException e )
+        {
+            throw new TaskExecutionException( e.getMessage(), e );
+        }
+        catch ( DiscovererException e )
+        {
+            throw new TaskExecutionException( e.getMessage(), e );
+        }
+        catch ( ReportingStoreException e )
+        {
+            throw new TaskExecutionException( e.getMessage(), e );
+        }
+
+        time = System.currentTimeMillis() - time;
+        getLogger().info( "Finished repository indexing process in " + time + "ms" );
+    }
+
+    /**
+     * @todo remove when no longer needed (MNG-142)
+     * @plexus.requirement
+     */
+    private MavenProjectBuilder projectBuilder;
+
+    private void flushProjectBuilderCacheHack()
+    {
+        try
+        {
+            if ( projectBuilder != null )
+            {
+                java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
+                f.setAccessible( true );
+                Map cache = (Map) f.get( projectBuilder );
+                cache.clear();
+
+                f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
+                f.setAccessible( true );
+                cache = (Map) f.get( projectBuilder );
+                cache.clear();
+            }
+        }
+        catch ( NoSuchFieldException e )
+        {
+            throw new RuntimeException( e );
+        }
+        catch ( IllegalAccessException e )
+        {
+            throw new RuntimeException( e );
+        }
+    }
+}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java
index d563b7b28772dc4b1965f7577aa7f85a8f1dc31d..ec653e112ed5b5984425f758844f7bef9ba45d44 100644 (file)
@@ -16,294 +16,30 @@ package org.apache.maven.archiva.scheduler.task;
  * limitations under the License.
  */
 
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.configuration.ConfigurationStore;
-import org.apache.maven.archiva.configuration.ConfigurationStoreException;
-import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.archiva.configuration.RepositoryConfiguration;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.record.IndexRecordExistsArtifactFilter;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.archiva.reporting.ReportExecutor;
-import org.apache.maven.archiva.reporting.ReportGroup;
-import org.apache.maven.archiva.reporting.ReportingDatabase;
-import org.apache.maven.archiva.reporting.ReportingMetadataFilter;
-import org.apache.maven.archiva.reporting.ReportingStoreException;
-import org.apache.maven.archiva.scheduler.TaskExecutionException;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.AndArtifactFilter;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import org.codehaus.plexus.taskqueue.Task;
 
 /**
  * Task for discovering changes in the repository and updating the index accordingly.
  *
  * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component role="org.apache.maven.archiva.scheduler.task.RepositoryTask" role-hint="indexer"
  */
 public class IndexerTask
-    extends AbstractLogEnabled
-    implements RepositoryTask
+    implements Task
 {
-    /**
-     * Configuration store.
-     *
-     * @plexus.requirement
-     */
-    private ConfigurationStore configurationStore;
-
-    /**
-     * @plexus.requirement
-     */
-    private RepositoryArtifactIndexFactory indexFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private ConfiguredRepositoryFactory repoFactory;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
-     */
-    private Map artifactDiscoverers;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
-     */
-    private Map metadataDiscoverers;
-
-    /**
-     * @plexus.requirement role-hint="standard"
-     */
-    private RepositoryIndexRecordFactory recordFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private ReportExecutor reportExecutor;
-
-    /**
-     * @plexus.requirement role-hint="health"
-     */
-    private ReportGroup reportGroup;
-
-    private static final int ARTIFACT_BUFFER_SIZE = 1000;
-
-    public void execute()
-        throws TaskExecutionException
-    {
-        Configuration configuration;
-        try
-        {
-            configuration = configurationStore.getConfigurationFromStore();
-        }
-        catch ( ConfigurationStoreException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-
-        File indexPath = new File( configuration.getIndexPath() );
+    private String jobName;
 
-        execute( configuration, indexPath );
-    }
-
-    private void execute( Configuration configuration, File indexPath )
-        throws TaskExecutionException
+    public long getMaxExecutionTime()
     {
-        long time = System.currentTimeMillis();
-        getLogger().info( "Starting repository indexing process" );
-
-        RepositoryArtifactIndex index = indexFactory.createStandardIndex( indexPath );
-
-        try
-        {
-            Collection keys;
-            if ( index.exists() )
-            {
-                keys = index.getAllRecordKeys();
-            }
-            else
-            {
-                keys = Collections.EMPTY_LIST;
-            }
-
-            for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
-            {
-                RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
-
-                if ( repositoryConfiguration.isIndexed() )
-                {
-                    List blacklistedPatterns = new ArrayList();
-                    if ( repositoryConfiguration.getBlackListPatterns() != null )
-                    {
-                        blacklistedPatterns.addAll( repositoryConfiguration.getBlackListPatterns() );
-                    }
-                    if ( configuration.getGlobalBlackListPatterns() != null )
-                    {
-                        blacklistedPatterns.addAll( configuration.getGlobalBlackListPatterns() );
-                    }
-                    boolean includeSnapshots = repositoryConfiguration.isIncludeSnapshots();
-
-                    ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
-                    ReportingDatabase reporter = reportExecutor.getReportDatabase( repository, reportGroup );
-
-                    // keep original value in case there is another process under way
-                    long origStartTime = reporter.getStartTime();
-                    reporter.setStartTime( System.currentTimeMillis() );
-
-                    // Discovery process
-                    String layoutProperty = repositoryConfiguration.getLayout();
-                    ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
-                    AndArtifactFilter filter = new AndArtifactFilter();
-                    filter.add( new IndexRecordExistsArtifactFilter( keys ) );
-                    if ( !includeSnapshots )
-                    {
-                        filter.add( new SnapshotArtifactFilter() );
-                    }
-
-                    // Save some memory by not tracking paths we won't use
-                    // TODO: Plexus CDC should be able to inject this configuration
-                    discoverer.setTrackOmittedPaths( false );
-
-                    getLogger().info( "Searching repository " + repositoryConfiguration.getName() );
-                    List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
-
-                    if ( !artifacts.isEmpty() )
-                    {
-                        getLogger().info( "Discovered " + artifacts.size() + " unindexed artifacts" );
-
-                        // Work through these in batches, then flush the project cache.
-                        for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
-                        {
-                            int end = j + ARTIFACT_BUFFER_SIZE;
-                            List currentArtifacts =
-                                artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
-
-                            // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
-
-                            // run the reports. Done intermittently to avoid losing track of what is indexed since
-                            // that is what the filter is based on.
-                            reportExecutor.runArtifactReports( reportGroup, currentArtifacts, repository );
-
-                            index.indexArtifacts( currentArtifacts, recordFactory );
-
-                            // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
-                            // around that. TODO: remove when it is configurable
-                            flushProjectBuilderCacheHack();
-                        }
-                    }
-
-                    MetadataFilter metadataFilter = new ReportingMetadataFilter( reporter );
-
-                    MetadataDiscoverer metadataDiscoverer =
-                        (MetadataDiscoverer) metadataDiscoverers.get( layoutProperty );
-                    List metadata =
-                        metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, metadataFilter );
-
-                    if ( !metadata.isEmpty() )
-                    {
-                        getLogger().info( "Discovered " + metadata.size() + " unprocessed metadata files" );
-
-                        // run the reports
-                        reportExecutor.runMetadataReports( reportGroup, metadata, repository );
-                    }
-
-                    reporter.setStartTime( origStartTime );
-                }
-            }
-        }
-        catch ( RepositoryIndexException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-        catch ( DiscovererException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-        catch ( ReportingStoreException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-
-        time = System.currentTimeMillis() - time;
-        getLogger().info( "Finished repository indexing process in " + time + "ms" );
+        return 0;
     }
 
-    public void executeNowIfNeeded()
-        throws TaskExecutionException
+    public String getJobName()
     {
-        Configuration configuration;
-        try
-        {
-            configuration = configurationStore.getConfigurationFromStore();
-        }
-        catch ( ConfigurationStoreException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-
-        File indexPath = new File( configuration.getIndexPath() );
-
-        try
-        {
-            RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
-            if ( !artifactIndex.exists() )
-            {
-                execute( configuration, indexPath );
-            }
-        }
-        catch ( RepositoryIndexException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
+        return jobName;
     }
 
-    /**
-     * @todo remove when no longer needed (MNG-142)
-     * @plexus.requirement
-     */
-    private MavenProjectBuilder projectBuilder;
-
-    private void flushProjectBuilderCacheHack()
+    public void setJobName( String jobName )
     {
-        try
-        {
-            if ( projectBuilder != null )
-            {
-                java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
-                f.setAccessible( true );
-                Map cache = (Map) f.get( projectBuilder );
-                cache.clear();
-
-                f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
-                f.setAccessible( true );
-                cache = (Map) f.get( projectBuilder );
-                cache.clear();
-            }
-        }
-        catch ( NoSuchFieldException e )
-        {
-            throw new RuntimeException( e );
-        }
-        catch ( IllegalAccessException e )
-        {
-            throw new RuntimeException( e );
-        }
+        this.jobName = jobName;
     }
-
 }
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/RepositoryTask.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/RepositoryTask.java
index ae4162c293706074b29789387c3860c4ab54e65c..fc92f690baaebe0dbc1ecbbebb2f3c6f6a7511a4 100644 (file)
@@ -17,6 +17,7 @@ package org.apache.maven.archiva.scheduler.task;
  */
 
 import org.apache.maven.archiva.scheduler.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.Task;
 
 /**
  * A repository task.
@@ -24,6 +25,7 @@ import org.apache.maven.archiva.scheduler.TaskExecutionException;
  * @author <a href="mailto:brett@apache.org">Brett Porter</a>
  */
 public interface RepositoryTask
+    extends Task
 {
     /**
      * Execute the task.
diff --git a/archiva-core/src/main/resources/META-INF/plexus/components.xml b/archiva-core/src/main/resources/META-INF/plexus/components.xml
new file mode 100644 (file)
index 0000000..38934f2
--- /dev/null
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<component-set>
+  <components>
+
+    <component>
+      <role>org.codehaus.plexus.taskqueue.TaskQueue</role>
+      <role-hint>indexer</role-hint>
+      <implementation>org.codehaus.plexus.taskqueue.DefaultTaskQueue</implementation>
+      <lifecycle-handler>plexus-configurable</lifecycle-handler>
+      <configuration>
+        <task-entry-evaluators>
+        </task-entry-evaluators>
+        <task-exit-evaluators>
+        </task-exit-evaluators>
+        <task-viability-evaluators>
+        </task-viability-evaluators>
+      </configuration>
+    </component>
+
+    <component>
+      <role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
+      <role-hint>indexer</role-hint>
+      <implementation>org.codehaus.plexus.taskqueue.execution.ThreadedTaskQueueExecutor</implementation>
+      <requirements>
+        <requirement>
+          <role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
+          <role-hint>indexer</role-hint>
+        </requirement>
+        <requirement>
+          <role>org.codehaus.plexus.taskqueue.TaskQueue</role>
+          <role-hint>indexer</role-hint>
+        </requirement>
+      </requirements>
+      <configuration>
+        <name>indexer</name>
+      </configuration>
+    </component>
+
+  </components>
+</component-set>
diff --git a/archiva-webapp/src/main/resources/META-INF/plexus/application.xml b/archiva-webapp/src/main/resources/META-INF/plexus/application.xml
index 2c39f447ea06c3a92fdcf89adfc2607f35a89722..ddf294dd9584ea40253ac9525ccc622b28509bb7 100644 (file)
     <component>
       <role>org.apache.maven.archiva.scheduler.RepositoryTaskScheduler</role>
     </component>
+    <component>
+      <role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
+      <role-hint>indexer</role-hint>
+    </component>
   </load-on-start>
 </plexus>