this.count = 0;
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void processFile( String path )
throws ConsumerException
{
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ processFile( path );
+ }
+
public void completeScan()
{
System.out.println( "Final Count of Artifacts processed by " + getId() + ": " + count );
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
}
*/
public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered ) throws ConsumerException;
+ /**
+ * <p>
+ * Event that triggers at the beginning of a scan, where you can also indicate whether the consumer will be
+ * executed on an entire repository or only on a specific resource.
+ * </p>
+ *
+ * @see RepositoryContentConsumer#beginScan(org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration, java.util.Date)
+ *
+ * @param repository the repository that this consumer is being used for.
+ * @param whenGathered the start of the repository scan
+ * @param executeOnEntireRepo flags whether the consumer will be executed on an entire repository or just on a specific resource
+ * @throws ConsumerException if there was a problem with using the provided repository with the consumer.
+ */
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException;
+
/**
* <p>
* Event indicating a file is to be processed by this consumer.
* @throws ConsumerException if there was a problem processing this file.
*/
public void processFile( String path ) throws ConsumerException;
+
+ /**
+ * <p>
+ * Event indicating a file is to be processed by this consumer, where you can also indicate whether the
+ * consumer is being executed on an entire repository or only on a specific resource.
+ * </p>
+ *
+ * @param path the relative path of the file to process
+ * @param executeOnEntireRepo flags whether the consumer is executed on an entire repository or just on a specific resource
+ * @throws Exception if there was a problem processing this file.
+ */
+ public void processFile( String path, boolean executeOnEntireRepo ) throws Exception;
/**
* <p>
*/
public void completeScan();
+ /**
+ * <p>
+ * Event that triggers on the completion of a scan, where you can also indicate whether the consumer was
+ * executed on an entire repository or only on a specific resource.
+ * </p>
+ *
+ * @param executeOnEntireRepo flags whether the consumer was executed on an entire repository or just on a specific resource
+ */
+ public void completeScan( boolean executeOnEntireRepo );
+
/**
* Whether the consumer should process files that have not been modified since the time passed in to the scan
* method.
this.repositoryDir = new File( repo.getLocation() );
}
+ public void beginScan( ManagedRepositoryConfiguration repo, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repo, whenGathered );
+ }
+
public void completeScan()
{
/* do nothing */
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public List<String> getExcludes()
{
return getDefaultArtifactExclusions();
createFixChecksum( path, new ChecksumAlgorithm[] { ChecksumAlgorithm.SHA1 } );
createFixChecksum( path, new ChecksumAlgorithm[] { ChecksumAlgorithm.MD5 } );
}
+
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ processFile( path );
+ }
private void createFixChecksum( String path, ChecksumAlgorithm checksumAlgorithm[] )
{
this.repositoryDir = new File( repository.getLocation() );
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void completeScan()
{
/* do nothing */
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public List<String> getExcludes()
{
return null;
}
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ processFile( path );
+ }
+
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
this.repositoryDir = new File( repository.getLocation() );
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void completeScan()
{
/* do nothing */
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public List<String> getExcludes()
{
return null;
file.delete();
}
}
+
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ processFile( path );
+ }
}
}
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void completeScan()
{
/* do nothing here */
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public List<String> getExcludes()
{
return getDefaultArtifactExclusions();
}
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws Exception
+ {
+ processFile( path );
+ }
+
private void updateProjectMetadata( ArtifactReference artifact, String path )
{
ProjectReference projectRef = new ProjectReference();
this.repositoryDir = new File( repository.getLocation() );
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void completeScan()
{
/* nothing to do */
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public List<String> getExcludes()
{
return null;
}
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws Exception
+ {
+ processFile( path );
+ }
+
public void initialize()
throws InitializationException
{
}
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void processFile( String path )
throws ConsumerException
{
}
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws Exception
+ {
+ processFile( path );
+ }
+
public void completeScan()
{
/* do nothing */
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
}
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void processFile( String path )
throws ConsumerException
{
}
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws Exception
+ {
+ processFile( path );
+ }
+
/**
* Get a Live Artifact from a Path.
* <p/>
/* do nothing */
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
}
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void completeScan()
{
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public List<String> getExcludes()
{
return null;
}
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ processFile( path );
+ }
+
private void flushProjectCache( MavenProjectBuilder projectBuilder )
{
try
<artifactId>bcel</artifactId>
<version>5.2</version>
</dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
private List<String> includes = new ArrayList<String>();
+ private ManagedRepositoryConfiguration repository;
+
public NexusIndexerConsumer( ArchivaTaskScheduler scheduler, ArchivaConfiguration configuration, FileTypes filetypes )
{
this.configuration = configuration;
try
{
+ log.info( "Creating indexing context for repo : " + repository.getId() );
context = TaskCreator.createContext( repository );
}
catch ( IOException e )
}
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ if ( executeOnEntireRepo )
+ {
+ beginScan( repository, whenGathered );
+ }
+ else
+ {
+ this.repository = repository;
+ managedRepository = new File( repository.getLocation() );
+ }
+ }
+
public void processFile( String path )
throws ConsumerException
{
}
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws Exception
+ {
+ if ( executeOnEntireRepo )
+ {
+ processFile( path );
+ }
+ else
+ {
+ File artifactFile = new File( managedRepository, path );
+
+ // specify in indexing task that this is not a repo scan request!
+ ArtifactIndexingTask task =
+ new ArtifactIndexingTask( repository, artifactFile, ArtifactIndexingTask.Action.ADD, context, false );
+ try
+ {
+ log.debug( "Queueing indexing task + '" + task + "' to add or update the artifact in the index." );
+ scheduler.queueIndexingTask( task );
+ }
+ catch ( TaskQueueException e )
+ {
+ throw new ConsumerException( e.getMessage(), e );
+ }
+ }
+ }
+
public void completeScan()
{
ArtifactIndexingTask task =
context = null;
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ if ( executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
+ // else do nothing, as the context will be closed when the indexing task is executed for a non-repo scan request
+ }
+
public List<String> getExcludes()
{
return Collections.emptyList();
<constructor-arg ref="repositoryContentFactory"/>
<constructor-arg ref="archivaTaskScheduler"/>
</bean>
+
+ <bean id="logger" class="org.apache.maven.archiva.common.utils.Slf4JPlexusLogger">
+ <constructor-arg type="java.lang.Class"><value>org.sonatype.nexus.index.DefaultNexusIndexer</value></constructor-arg>
+ </bean>
</beans>
\ No newline at end of file
this.managedRepository.setRepository( repository );
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void completeScan()
{
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public List<String> getExcludes()
{
return excludes;
}
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws Exception
+ {
+ processFile( path );
+ }
+
public String getDescription()
{
return "Legacy Artifact to Default Artifact Converter";
import org.sonatype.nexus.index.FlatSearchRequest;
import org.sonatype.nexus.index.FlatSearchResponse;
import org.sonatype.nexus.index.NexusIndexer;
+import org.sonatype.nexus.index.context.IndexCreator;
import org.sonatype.nexus.index.context.IndexingContext;
import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
<constructor-arg ref="nexusIndexer"/>
<constructor-arg ref="archivaConfiguration"/>
</bean>
-
+
+ <bean id="logger" class="org.apache.maven.archiva.common.utils.Slf4JPlexusLogger">
+ <constructor-arg type="java.lang.Class"><value>org.sonatype.nexus.index.DefaultNexusIndexer</value></constructor-arg>
+ </bean>
+
<!-- <bean id="indexingContextMap" class="org.apache.archiva.indexer.IndexingContextMap"/> -->
</beans>
\ No newline at end of file
private final ManagedRepositoryConfiguration repository;
+ private boolean executeOnEntireRepo = true;
+
public TriggerScanCompletedClosure( ManagedRepositoryConfiguration repository )
{
this.repository = repository;
}
+ public TriggerScanCompletedClosure( ManagedRepositoryConfiguration repository, boolean executeOnEntireRepo )
+ {
+ this( repository );
+ this.executeOnEntireRepo = executeOnEntireRepo;
+ }
+
public void execute( Object input )
{
if ( input instanceof RepositoryContentConsumer )
{
RepositoryContentConsumer consumer = (RepositoryContentConsumer) input;
- consumer.completeScan();
+ consumer.completeScan( executeOnEntireRepo );
log.debug( "Consumer [" + consumer.getId() + "] completed for repository [" + repository.getId() + "]" );
}
}
// Run the repository consumers
try
{
- Closure triggerBeginScan = new TriggerBeginScanClosure( repository, getStartTime() );
+ Closure triggerBeginScan = new TriggerBeginScanClosure( repository, getStartTime(), false );
List<KnownRepositoryContentConsumer> selectedKnownConsumers = getSelectedKnownConsumers();
BaseFile baseFile = new BaseFile( repository.getLocation(), localFile );
ConsumerWantsFilePredicate predicate = new ConsumerWantsFilePredicate();
predicate.setBasefile( baseFile );
+ predicate.setCaseSensitive( false );
+
ConsumerProcessFileClosure closure = new ConsumerProcessFileClosure();
closure.setBasefile( baseFile );
- predicate.setCaseSensitive( false );
+ closure.setExecuteOnEntireRepo( false );
+
Closure processIfWanted = IfClosure.getInstance( predicate, closure );
CollectionUtils.forAllDo( selectedKnownConsumers, processIfWanted );
CollectionUtils.forAllDo( selectedInvalidConsumers, closure );
}
- TriggerScanCompletedClosure scanCompletedClosure = new TriggerScanCompletedClosure( repository );
+ TriggerScanCompletedClosure scanCompletedClosure = new TriggerScanCompletedClosure( repository, false );
CollectionUtils.forAllDo( selectedKnownConsumers, scanCompletedClosure );
}
stats = new RepositoryScanStatistics();
stats.setRepositoryId( repository.getId() );
- Closure triggerBeginScan = new TriggerBeginScanClosure( repository, new Date( System.currentTimeMillis() ) );
+ Closure triggerBeginScan = new TriggerBeginScanClosure( repository, new Date( System.currentTimeMillis() ), true );
CollectionUtils.forAllDo( knownConsumerList, triggerBeginScan );
CollectionUtils.forAllDo( invalidConsumerList, triggerBeginScan );
}
consumerProcessFile.setBasefile( basefile );
+ consumerProcessFile.setExecuteOnEntireRepo( true );
consumerWantsFile.setBasefile( basefile );
Closure processIfWanted = IfClosure.getInstance( consumerWantsFile, consumerProcessFile );
public void directoryWalkFinished()
{
- TriggerScanCompletedClosure scanCompletedClosure = new TriggerScanCompletedClosure(repository);
+ TriggerScanCompletedClosure scanCompletedClosure = new TriggerScanCompletedClosure( repository, true );
CollectionUtils.forAllDo( knownConsumers, scanCompletedClosure );
CollectionUtils.forAllDo( invalidConsumers, scanCompletedClosure );
private BaseFile basefile;
+ private boolean executeOnEntireRepo;
+
public void execute( Object input )
{
if ( input instanceof RepositoryContentConsumer )
{
log.debug( "Sending to consumer: " + consumer.getId() );
- consumer.processFile( basefile.getRelativePath() );
+ consumer.processFile( basefile.getRelativePath(), executeOnEntireRepo );
}
catch ( Exception e )
{
this.basefile = basefile;
}
+ public boolean isExecuteOnEntireRepo()
+ {
+ return executeOnEntireRepo;
+ }
+
+ public void setExecuteOnEntireRepo( boolean executeOnEntireRepo )
+ {
+ this.executeOnEntireRepo = executeOnEntireRepo;
+ }
+
public Logger getLogger()
{
return log;
private Date whenGathered;
+ private boolean executeOnEntireRepo = true;
+
public TriggerBeginScanClosure( ManagedRepositoryConfiguration repository )
{
this.repository = repository;
this.whenGathered = whenGathered;
}
+ public TriggerBeginScanClosure( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ {
+ this( repository, whenGathered );
+ this.executeOnEntireRepo = executeOnEntireRepo;
+ }
+
public void execute( Object input )
{
if ( input instanceof RepositoryContentConsumer )
try
{
- consumer.beginScan( repository, whenGathered );
+ consumer.beginScan( repository, whenGathered, executeOnEntireRepo );
}
catch ( ConsumerException e )
{
/* do nothing */
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void completeScan()
{
/* do nothing */
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public List<String> getExcludes()
{
return null;
processCount++;
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ processFile( path );
+ }
+
public String getDescription()
{
return "Bad Content Scan Consumer (for testing)";
/* do nothing */
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void processFile( String path )
throws ConsumerException
{
this.processCount++;
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws Exception
+ {
+ processFile( path );
+ }
+
public void completeScan()
{
/* do nothing */
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public int getProcessCount()
{
return processCount;
Date startTime = new Date( System.currentTimeMillis() );
startTime.setTime( 12345678 );
- selectedKnownConsumer.beginScan( repo, startTime );
+ selectedKnownConsumer.beginScan( repo, startTime, false );
selectedKnownConsumer.getExcludes();
knownControl.setReturnValue( Collections.EMPTY_LIST );
selectedKnownConsumer.getIncludes();
knownControl.setReturnValue( Collections.singletonList( "**/*.txt" ) );
- selectedKnownConsumer.processFile( _OS( "path/to/test-file.txt" ) );
+ selectedKnownConsumer.processFile( _OS( "path/to/test-file.txt" ), false );
// knownConsumer.completeScan();
knownControl.replay();
- selectedInvalidConsumer.beginScan( repo, startTime );
+ selectedInvalidConsumer.beginScan( repo, startTime, false );
// invalidConsumer.completeScan();
invalidControl.replay();
File notIncludedTestFile = getTestFile( "target/test-repo/path/to/test-file.xml" );
- selectedKnownConsumer.beginScan( repo, startTime );
+ selectedKnownConsumer.beginScan( repo, startTime, false );
selectedKnownConsumer.getExcludes();
knownControl.setReturnValue( Collections.EMPTY_LIST );
selectedKnownConsumer.getIncludes();
// knownConsumer.completeScan();
knownControl.replay();
- selectedInvalidConsumer.beginScan( repo, startTime );
- selectedInvalidConsumer.processFile( _OS( "path/to/test-file.xml" ) );
+ selectedInvalidConsumer.beginScan( repo, startTime, false );
+ selectedInvalidConsumer.processFile( _OS( "path/to/test-file.xml" ), false );
selectedInvalidConsumer.getId();
invalidControl.setReturnValue( "invalid" );
// invalidConsumer.completeScan();
File excludedTestFile = getTestFile( "target/test-repo/path/to/test-file.txt" );
- selectedKnownConsumer.beginScan( repo, startTime );
+ selectedKnownConsumer.beginScan( repo, startTime, false );
selectedKnownConsumer.getExcludes();
knownControl.setReturnValue( Collections.singletonList( "**/test-file.txt" ) );
// knownConsumer.completeScan();
knownControl.replay();
- selectedInvalidConsumer.beginScan( repo, startTime );
- selectedInvalidConsumer.processFile( _OS( "path/to/test-file.txt" ) );
+ selectedInvalidConsumer.beginScan( repo, startTime, false );
+ selectedInvalidConsumer.processFile( _OS( "path/to/test-file.txt" ), false );
selectedInvalidConsumer.getId();
invalidControl.setReturnValue( "invalid" );
// invalidConsumer.completeScan();
/* nothing to do */
}
+ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
+ throws ConsumerException
+ {
+ beginScan( repository, whenGathered );
+ }
+
public void completeScan()
{
/* nothing to do */
}
+ public void completeScan( boolean executeOnEntireRepo )
+ {
+ completeScan();
+ }
+
public List<String> getExcludes()
{
return null;
/* nothing to do */
}
+ public void processFile( String path, boolean executeOnEntireRepo )
+ throws Exception
+ {
+ processFile( path );
+ }
+
public String getDescription()
{
return "Sample Known Consumer";
import java.io.File;
import java.io.IOException;
+import java.util.List;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.scheduled.tasks.ArtifactIndexingTask;
+import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.taskqueue.Task;
import org.sonatype.nexus.index.ArtifactInfo;
import org.sonatype.nexus.index.DefaultArtifactContextProducer;
import org.sonatype.nexus.index.IndexerEngine;
+import org.sonatype.nexus.index.context.IndexCreator;
import org.sonatype.nexus.index.context.IndexingContext;
+import org.sonatype.nexus.index.context.UnsupportedExistingLuceneIndexException;
import org.sonatype.nexus.index.packer.IndexPacker;
import org.sonatype.nexus.index.packer.IndexPackingRequest;
ManagedRepositoryConfiguration repository = indexingTask.getRepository();
IndexingContext context = indexingTask.getContext();
- if ( ArtifactIndexingTask.Action.FINISH.equals( indexingTask.getAction() ) )
+ if ( ArtifactIndexingTask.Action.FINISH.equals( indexingTask.getAction() )
+ && indexingTask.isExecuteOnEntireRepo() )
{
- try
- {
- context.optimize();
-
- File managedRepository = new File( repository.getLocation() );
- final File indexLocation = new File( managedRepository, ".index" );
- IndexPackingRequest request = new IndexPackingRequest( context, indexLocation );
- indexPacker.packIndex( request );
-
- log.debug( "Index file packaged at '" + indexLocation.getPath() + "'." );
- }
- catch ( IOException e )
- {
- log.error( "Error occurred while executing indexing task '" + indexingTask + "': " + e.getMessage() );
- throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask
- + "'", e );
- }
- finally
+ log.debug( "Finishing indexing task on repo: " + repository.getId() );
+ finishIndexingTask( indexingTask, repository, context );
+ }
+ else
+ {
+ // create context if not a repo scan request
+ if ( !indexingTask.isExecuteOnEntireRepo() )
{
- if ( context != null )
+ try
{
- try
- {
- context.close( false );
- }
- catch ( IOException e )
- {
- log.error( "Error occurred while closing context: " + e.getMessage() );
- throw new TaskExecutionException( "Error occurred while closing context: " + e.getMessage() );
- }
+ log.debug( "Creating indexing context on resource: " + indexingTask.getResourceFile().getPath() );
+ context = TaskCreator.createContext( repository );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Error occurred while creating context: " + e.getMessage() );
+ throw new TaskExecutionException( "Error occurred while creating context: " + e.getMessage() );
+ }
+ catch ( UnsupportedExistingLuceneIndexException e )
+ {
+ log.error( "Error occurred while creating context: " + e.getMessage() );
+ throw new TaskExecutionException( "Error occurred while creating context: " + e.getMessage() );
}
}
- }
- else
- {
- if ( context.getIndexDirectory() == null )
+
+ if ( context == null || context.getIndexDirectory() == null )
{
throw new TaskExecutionException( "Trying to index an artifact but the context is already closed" );
}
indexerEngine.update( context, ac );
context.getIndexWriter().commit();
}
+
+ // close the context if not a repo scan request
+ if ( !indexingTask.isExecuteOnEntireRepo() )
+ {
+ log.debug( "Finishing indexing task on resource file : " + indexingTask.getResourceFile().getPath() );
+ finishIndexingTask( indexingTask, repository, context );
+ }
}
else
{
}
}
+ private void finishIndexingTask( ArtifactIndexingTask indexingTask, ManagedRepositoryConfiguration repository,
+ IndexingContext context )
+ throws TaskExecutionException
+ {
+ try
+ {
+ context.optimize();
+
+ File managedRepository = new File( repository.getLocation() );
+ final File indexLocation = new File( managedRepository, ".index" );
+ IndexPackingRequest request = new IndexPackingRequest( context, indexLocation );
+ indexPacker.packIndex( request );
+
+ log.debug( "Index file packaged at '" + indexLocation.getPath() + "'." );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Error occurred while executing indexing task '" + indexingTask + "': " + e.getMessage() );
+ throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask
+ + "'", e );
+ }
+ finally
+ {
+ if ( context != null )
+ {
+ try
+ {
+ context.close( false );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Error occurred while closing context: " + e.getMessage() );
+ throw new TaskExecutionException( "Error occurred while closing context: " + e.getMessage() );
+ }
+ }
+ }
+ }
+
public void initialize()
throws InitializationException
{
private final IndexingContext context;
+ private boolean executeOnEntireRepo = true;
+
public ArtifactIndexingTask( ManagedRepositoryConfiguration repository, File resourceFile, Action action,
IndexingContext context )
{
this.context = context;
}
+ public ArtifactIndexingTask( ManagedRepositoryConfiguration repository, File resourceFile, Action action,
+ IndexingContext context, boolean executeOnEntireRepo )
+ {
+ this( repository, resourceFile, action, context );
+ this.executeOnEntireRepo = executeOnEntireRepo;
+ }
+
+ public boolean isExecuteOnEntireRepo()
+ {
+ return executeOnEntireRepo;
+ }
+
+ public void setExecuteOnEntireRepo( boolean executeOnEntireRepo )
+ {
+ this.executeOnEntireRepo = executeOnEntireRepo;
+ }
+
public long getMaxExecutionTime()
{
return 0;
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
+
+ <bean id="logger" class="org.apache.maven.archiva.common.utils.Slf4JPlexusLogger">
+ <constructor-arg type="java.lang.Class"><value>org.sonatype.nexus.index.DefaultNexusIndexer</value></constructor-arg>
+ </bean>
+</beans>
\ No newline at end of file
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.sonatype.nexus.index.FlatSearchResponse;
import org.sonatype.nexus.index.IndexerEngine;
import org.sonatype.nexus.index.NexusIndexer;
+import org.sonatype.nexus.index.context.IndexCreator;
import org.sonatype.nexus.index.context.IndexingContext;
import org.sonatype.nexus.index.packer.IndexPacker;
<lifecycle-handler>basic</lifecycle-handler>
</component>
+ <component>
+ <role>org.codehaus.plexus.logging.Logger</role>
+ <implementation>org.apache.maven.archiva.common.utils.Slf4JPlexusLogger</implementation>
+ <role-hint>logger</role-hint>
+ </component>
+
<!--
PLXREDBACK-81 bad role hint, redefining here until redback alpha-2 is released.
-->
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
- <version>2.4.0</version>
+ <version>2.4.1</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queries</artifactId>
- <version>2.4.0</version>
+ <version>2.4.1</version>
</dependency>
<dependency>
<groupId>javax.mail</groupId>