
Adapting the scheduler to the new repository API

pull/46/head
Martin Stockhammer, 6 years ago
commit 32ff2ca265
11 changed files with 135 additions and 103 deletions
  1. +63 −56  archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/main/java/org/apache/archiva/scheduler/indexing/ArchivaIndexingTaskExecutor.java
  2. +1 −1    archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/main/java/org/apache/archiva/scheduler/indexing/ArtifactIndexingTask.java
  3. +32 −26  archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/test/java/org/apache/archiva/scheduler/indexing/ArchivaIndexingTaskExecutorTest.java
  4. +2 −1    archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/test/java/org/apache/archiva/scheduler/indexing/DownloadRemoteIndexTaskTest.java
  5. +7 −1    archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/test/resources/spring-context.xml
  6. +0 −1    archiva-modules/archiva-scheduler/archiva-scheduler-repository/pom.xml
  7. +6 −2    archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/main/java/org/apache/archiva/scheduler/repository/ArchivaRepositoryScanningTaskExecutor.java
  8. +9 −2    archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/MockManagedRepositoryAdmin.java
  9. +9 −2    archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/scheduler/repository/AbstractArchivaRepositoryScanningTaskExecutorTest.java
  10. +2 −10  archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/scheduler/repository/TestConsumer.java
  11. +4 −1   archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/resources/spring-context.xml
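
The core of this change is visible throughout the hunks below: the scheduler modules no longer work with the admin bean org.apache.archiva.admin.model.beans.ManagedRepository but with org.apache.archiva.repository.ManagedRepository, and per-repository index settings are read through the feature API instead of bean properties. A minimal sketch of that lookup pattern, using only the calls that appear in the diff (the wrapping class is illustrative, not part of this commit):

import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.features.IndexCreationFeature;

public class IndexFeatureLookupSketch
{
    // Decides whether a packed index should be produced for the given repository.
    // With the new API the flag is no longer a bean property but lives on
    // IndexCreationFeature, which a repository may or may not support.
    public boolean shouldCreatePackedIndex( ManagedRepository repository )
    {
        if ( repository.supportsFeature( IndexCreationFeature.class ) )
        {
            IndexCreationFeature icf = repository.getFeature( IndexCreationFeature.class ).get( );
            return !icf.isSkipPackedIndexCreation( );
        }
        // Repositories without the feature get no packed index in this sketch.
        return false;
    }
}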

+63 −56  archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/main/java/org/apache/archiva/scheduler/indexing/ArchivaIndexingTaskExecutor.java

@@ -21,11 +21,12 @@ package org.apache.archiva.scheduler.indexing;
*/

import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.admin.model.managed.ManagedRepositoryAdmin;
import org.apache.archiva.redback.components.taskqueue.Task;
import org.apache.archiva.redback.components.taskqueue.execution.TaskExecutionException;
import org.apache.archiva.redback.components.taskqueue.execution.TaskExecutor;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.maven.index.ArtifactContext;
import org.apache.maven.index.ArtifactContextProducer;
import org.apache.maven.index.FlatSearchRequest;
@@ -84,138 +85,138 @@ public class ArchivaIndexingTaskExecutor
{
ArtifactIndexingTask indexingTask = (ArtifactIndexingTask) task;

ManagedRepository repository = indexingTask.getRepository();
IndexingContext context = indexingTask.getContext();
ManagedRepository repository = indexingTask.getRepository( );
IndexingContext context = indexingTask.getContext( );

if ( ArtifactIndexingTask.Action.FINISH.equals( indexingTask.getAction() )
&& indexingTask.isExecuteOnEntireRepo() )
if ( ArtifactIndexingTask.Action.FINISH.equals( indexingTask.getAction( ) )
&& indexingTask.isExecuteOnEntireRepo( ) )
{
try
{
long start = System.currentTimeMillis();
nexusIndexer.scan( context, null, indexingTask.isOnlyUpdate() );
long end = System.currentTimeMillis();
log.info( "indexed maven repository: {}, onlyUpdate: {}, time {} ms", repository.getId(),
indexingTask.isOnlyUpdate(), ( end - start ) );
long start = System.currentTimeMillis( );
nexusIndexer.scan( context, null, indexingTask.isOnlyUpdate( ) );
long end = System.currentTimeMillis( );
log.info( "indexed maven repository: {}, onlyUpdate: {}, time {} ms", repository.getId( ),
indexingTask.isOnlyUpdate( ), ( end - start ) );
}
catch ( IOException e )
{
throw new TaskExecutionException( "Error scan repository " + repository, e );
}
log.debug( "Finishing indexing task on repo: {}", repository.getId() );
log.debug( "Finishing indexing task on repo: {}", repository.getId( ) );
finishIndexingTask( indexingTask, repository, context );
}
else
{
// create context if not a repo scan request
if ( !indexingTask.isExecuteOnEntireRepo() )
if ( !indexingTask.isExecuteOnEntireRepo( ) )
{
try
{
log.debug( "Creating indexing context on resource: {}", //
( indexingTask.getResourceFile() == null
? "none"
: indexingTask.getResourceFile() ) );
( indexingTask.getResourceFile( ) == null
? "none"
: indexingTask.getResourceFile( ) ) );
context = managedRepositoryAdmin.createIndexContext( repository );
}
catch ( RepositoryAdminException e )
{
log.error( "Error occurred while creating context: {}", e.getMessage() );
throw new TaskExecutionException( "Error occurred while creating context: " + e.getMessage(), e );
log.error( "Error occurred while creating context: {}", e.getMessage( ) );
throw new TaskExecutionException( "Error occurred while creating context: " + e.getMessage( ), e );
}
}

if ( context == null || context.getIndexDirectory() == null )
if ( context == null || context.getIndexDirectory( ) == null )
{
throw new TaskExecutionException( "Trying to index an artifact but the context is already closed" );
}

try
{
Path artifactFile = indexingTask.getResourceFile();
Path artifactFile = indexingTask.getResourceFile( );
if ( artifactFile == null )
{
log.debug( "no artifact pass in indexing task so skip it" );
}
else
{
ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile.toFile() );
ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile.toFile( ) );

if ( ac != null )
{
// MRM-1779 pom must be indexed too
// TODO make that configurable?
if ( artifactFile.getFileName().toString().endsWith( ".pom" ) )
if ( artifactFile.getFileName( ).toString( ).endsWith( ".pom" ) )
{
ac.getArtifactInfo().setFileExtension( "pom" );
ac.getArtifactInfo().setPackaging( "pom" );
ac.getArtifactInfo().setClassifier( "pom" );
ac.getArtifactInfo( ).setFileExtension( "pom" );
ac.getArtifactInfo( ).setPackaging( "pom" );
ac.getArtifactInfo( ).setClassifier( "pom" );
}
if ( indexingTask.getAction().equals( ArtifactIndexingTask.Action.ADD ) )
if ( indexingTask.getAction( ).equals( ArtifactIndexingTask.Action.ADD ) )
{
//IndexSearcher s = context.getIndexSearcher();
//String uinfo = ac.getArtifactInfo().getUinfo();
//TopDocs d = s.search( new TermQuery( new Term( ArtifactInfo.UINFO, uinfo ) ), 1 );

BooleanQuery q = new BooleanQuery();
BooleanQuery q = new BooleanQuery( );
q.add( nexusIndexer.constructQuery( MAVEN.GROUP_ID, new SourcedSearchExpression(
ac.getArtifactInfo().getGroupId() ) ), BooleanClause.Occur.MUST );
ac.getArtifactInfo( ).getGroupId( ) ) ), BooleanClause.Occur.MUST );
q.add( nexusIndexer.constructQuery( MAVEN.ARTIFACT_ID, new SourcedSearchExpression(
ac.getArtifactInfo().getArtifactId() ) ), BooleanClause.Occur.MUST );
ac.getArtifactInfo( ).getArtifactId( ) ) ), BooleanClause.Occur.MUST );
q.add( nexusIndexer.constructQuery( MAVEN.VERSION, new SourcedSearchExpression(
ac.getArtifactInfo().getVersion() ) ), BooleanClause.Occur.MUST );
if ( ac.getArtifactInfo().getClassifier() != null )
ac.getArtifactInfo( ).getVersion( ) ) ), BooleanClause.Occur.MUST );
if ( ac.getArtifactInfo( ).getClassifier( ) != null )
{
q.add( nexusIndexer.constructQuery( MAVEN.CLASSIFIER, new SourcedSearchExpression(
ac.getArtifactInfo().getClassifier() ) ), BooleanClause.Occur.MUST );
ac.getArtifactInfo( ).getClassifier( ) ) ), BooleanClause.Occur.MUST );
}
if ( ac.getArtifactInfo().getPackaging() != null )
if ( ac.getArtifactInfo( ).getPackaging( ) != null )
{
q.add( nexusIndexer.constructQuery( MAVEN.PACKAGING, new SourcedSearchExpression(
ac.getArtifactInfo().getPackaging() ) ), BooleanClause.Occur.MUST );
ac.getArtifactInfo( ).getPackaging( ) ) ), BooleanClause.Occur.MUST );
}
FlatSearchRequest flatSearchRequest = new FlatSearchRequest( q, context );
FlatSearchResponse flatSearchResponse = nexusIndexer.searchFlat( flatSearchRequest );
if ( flatSearchResponse.getResults().isEmpty() )
if ( flatSearchResponse.getResults( ).isEmpty( ) )
{
log.debug( "Adding artifact '{}' to index..", ac.getArtifactInfo() );
log.debug( "Adding artifact '{}' to index..", ac.getArtifactInfo( ) );
nexusIndexer.addArtifactToIndex( ac, context );
}
else
{
log.debug( "Updating artifact '{}' in index..", ac.getArtifactInfo() );
log.debug( "Updating artifact '{}' in index..", ac.getArtifactInfo( ) );
// TODO check if update exists !!
nexusIndexer.deleteArtifactFromIndex( ac, context );
nexusIndexer.addArtifactToIndex( ac, context );
}

context.updateTimestamp();
context.commit();
context.updateTimestamp( );
context.commit( );


}
else
{
log.debug( "Removing artifact '{}' from index..", ac.getArtifactInfo() );
log.debug( "Removing artifact '{}' from index..", ac.getArtifactInfo( ) );
nexusIndexer.deleteArtifactFromIndex( ac, context );
}
}
}
// close the context if not a repo scan request
if ( !indexingTask.isExecuteOnEntireRepo() )
if ( !indexingTask.isExecuteOnEntireRepo( ) )
{
log.debug( "Finishing indexing task on resource file : {}", indexingTask.getResourceFile() != null
? indexingTask.getResourceFile()
log.debug( "Finishing indexing task on resource file : {}", indexingTask.getResourceFile( ) != null
? indexingTask.getResourceFile( )
: " none " );
finishIndexingTask( indexingTask, repository, context );
}
}
catch ( IOException e )
{
log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage(),
e );
log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage( ),
e );
throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask + "'",
e );
e );
}
}

@@ -228,20 +229,26 @@ public class ArchivaIndexingTaskExecutor
try
{

context.optimize();
context.optimize( );

if ( !repository.isSkipPackedIndexCreation() )
if ( repository.supportsFeature( IndexCreationFeature.class ) )
{
IndexCreationFeature icf = repository.getFeature( IndexCreationFeature.class ).get( );
if ( !icf.isSkipPackedIndexCreation( ) )
{

IndexPackingRequest request = new IndexPackingRequest( context, //
context.acquireIndexSearcher().getIndexReader(),
//
context.getIndexDirectoryFile() );
IndexPackingRequest request = new IndexPackingRequest( context, //
context.acquireIndexSearcher( ).getIndexReader( ),
//
context.getIndexDirectoryFile( ) );

indexPacker.packIndex( request );
context.updateTimestamp( true );
indexPacker.packIndex( request );
context.updateTimestamp( true );

log.debug( "Index file packaged at '{}'.", context.getIndexDirectoryFile() );
log.debug( "Index file packaged at '{}'.", context.getIndexDirectoryFile( ) );
} else {
log.debug( "skip packed index creation" );
}
}
else
{
@@ -250,9 +257,9 @@ public class ArchivaIndexingTaskExecutor
}
catch ( IOException e )
{
log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage() );
log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage( ) );
throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask + "'",
e );
e );
}
}


+1 −1  archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/main/java/org/apache/archiva/scheduler/indexing/ArtifactIndexingTask.java

@@ -19,8 +19,8 @@ package org.apache.archiva.scheduler.indexing;
* under the License.
*/

import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.redback.components.taskqueue.Task;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.maven.index.context.IndexingContext;

import java.nio.file.Path;

+32 −26  archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/test/java/org/apache/archiva/scheduler/indexing/ArchivaIndexingTaskExecutorTest.java

@@ -20,8 +20,12 @@ package org.apache.archiva.scheduler.indexing;
*/

import junit.framework.TestCase;
import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.admin.model.managed.ManagedRepositoryAdmin;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.repository.BasicManagedRepository;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ReleaseScheme;
import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.maven.index.ArtifactInfo;
import org.apache.maven.index.FlatSearchRequest;
@@ -52,6 +56,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Locale;
import java.util.Set;

/**
@@ -65,7 +70,7 @@ public class ArchivaIndexingTaskExecutorTest
@Inject
private ArchivaIndexingTaskExecutor indexingExecutor;

private ManagedRepository repositoryConfig;
private BasicManagedRepository repositoryConfig;

@Inject
private NexusIndexer indexer;
@@ -86,16 +91,13 @@ public class ArchivaIndexingTaskExecutorTest
{
super.setUp();

repositoryConfig = new ManagedRepository();
repositoryConfig.setId( "test-repo" );
repositoryConfig.setLocation(
Paths.get( System.getProperty( "basedir" ), "target/test-classes/test-repo" ).toAbsolutePath().toString() );
repositoryConfig = new BasicManagedRepository( "test-repo", "Test Repository");
Path repoLocation = Paths.get( System.getProperty( "basedir" ), "target/test-classes/test-repo" ).toAbsolutePath();
repositoryConfig.setLocation(repoLocation.toUri() );
repositoryConfig.setLayout( "default" );
repositoryConfig.setName( "Test Repository" );
repositoryConfig.setScanned( true );
repositoryConfig.setSnapshots( false );
repositoryConfig.setReleases( true );

repositoryConfig.addActiveReleaseScheme( ReleaseScheme.RELEASE );
repositoryConfig.removeActiveReleaseScheme( ReleaseScheme.SNAPSHOT );
managedRepositoryAdmin.createIndexContext( repositoryConfig );
}

@@ -132,7 +134,8 @@ public class ArchivaIndexingTaskExecutorTest
public void testAddArtifactToIndex()
throws Exception
{
Path artifactFile = Paths.get( repositoryConfig.getLocation(),
Path basePath = PathUtil.getPathFromUri( repositoryConfig.getLocation() );
Path artifactFile = basePath.resolve(
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );

ArtifactIndexingTask task =
@@ -152,8 +155,8 @@ public class ArchivaIndexingTaskExecutorTest
{
IndexingContext context = indexer.addIndexingContext( repositoryConfig.getId(), //
repositoryConfig.getId(), //
Paths.get( repositoryConfig.getLocation() ).toFile(), //
Paths.get( repositoryConfig.getLocation(), ".indexer" ).toFile()
basePath.toFile(), //
basePath.resolve(".indexer" ).toFile()
//
, null, null, indexCreators );
context.setSearchable( true );
@@ -162,8 +165,8 @@ public class ArchivaIndexingTaskExecutorTest
FlatSearchRequest request = new FlatSearchRequest( q );
FlatSearchResponse response = indexer.searchFlat( request );

assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
assertTrue( Files.exists(basePath.resolve( ".indexer" )) );
assertFalse( Files.exists(basePath.resolve(".index" )) );
assertEquals( 1, response.getTotalHits() );

Set<ArtifactInfo> results = response.getResults();
@@ -179,7 +182,8 @@ public class ArchivaIndexingTaskExecutorTest
public void testUpdateArtifactInIndex()
throws Exception
{
Path artifactFile = Paths.get( repositoryConfig.getLocation(),
Path basePath = PathUtil.getPathFromUri( repositoryConfig.getLocation( ) );
Path artifactFile = basePath.resolve(
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );

ArtifactIndexingTask task =
@@ -204,8 +208,8 @@ public class ArchivaIndexingTaskExecutorTest
//searcher.close();
ctx.releaseIndexSearcher( searcher );

assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
assertTrue( Files.exists(basePath.resolve(".indexer" )) );
assertFalse( Files.exists(basePath.resolve(".index" )) );

// should only return 1 hit!
assertEquals( 1, topDocs.totalHits );
@@ -215,7 +219,8 @@ public class ArchivaIndexingTaskExecutorTest
public void testRemoveArtifactFromIndex()
throws Exception
{
Path artifactFile = Paths.get( repositoryConfig.getLocation(),
Path basePath = PathUtil.getPathFromUri( repositoryConfig.getLocation( ) );
Path artifactFile = basePath.resolve(
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );

ArtifactIndexingTask task =
@@ -237,8 +242,8 @@ public class ArchivaIndexingTaskExecutorTest

FlatSearchResponse response = indexer.searchFlat( flatSearchRequest );

assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
assertTrue( Files.exists(basePath.resolve(".indexer" )) );
assertFalse( Files.exists(basePath.resolve( ".index" )) );

// should return 1 hit
assertEquals( 1, response.getTotalHitsCount() );
@@ -259,8 +264,8 @@ public class ArchivaIndexingTaskExecutorTest
new SourcedSearchExpression( "archiva-index-methods-jar-test" ) ),
BooleanClause.Occur.SHOULD );

assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
assertTrue( Files.exists(basePath.resolve( ".indexer" )) );
assertFalse( Files.exists(basePath.resolve(".index" )) );

flatSearchRequest = new FlatSearchRequest( q, getIndexingContext() );

@@ -276,7 +281,8 @@ public class ArchivaIndexingTaskExecutorTest
throws Exception
{

Path indexerDirectory =Paths.get( repositoryConfig.getLocation(), ".indexer" );
Path basePath = PathUtil.getPathFromUri( repositoryConfig.getLocation());
Path indexerDirectory =basePath.resolve( ".indexer" );

Files.list(indexerDirectory).filter( path -> path.getFileName().toString().startsWith("nexus-maven-repository-index") )
.forEach( path ->
@@ -292,7 +298,7 @@ public class ArchivaIndexingTaskExecutorTest
} );


Path artifactFile = Paths.get( repositoryConfig.getLocation(),
Path artifactFile = basePath.resolve(
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );

ArtifactIndexingTask task =
@@ -318,7 +324,7 @@ public class ArchivaIndexingTaskExecutorTest
Assertions.assertThat( Files.exists(indexerDirectory.resolve("nexus-maven-repository-index.gz" ) ));

// unpack .zip index
Path destDir = Paths.get( repositoryConfig.getLocation(), ".indexer/tmp" );
Path destDir = basePath.resolve( ".indexer/tmp" );
//unzipIndex( indexerDirectory.getPath(), destDir.getPath() );

DefaultIndexUpdater.FileFetcher fetcher = new DefaultIndexUpdater.FileFetcher( indexerDirectory.toFile() );
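
A note on the test changes above: on the new BasicManagedRepository the location is a URI, so the tests resolve artifact paths through PathUtil.getPathFromUri instead of Paths.get on a string location. A minimal sketch of that setup, assuming only the constructors and helpers visible in these hunks:

import java.nio.file.Path;
import java.nio.file.Paths;

import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.repository.BasicManagedRepository;
import org.apache.archiva.repository.ReleaseScheme;

public class UriLocationSketch
{
    public Path resolveTestArtifact( )
    {
        // The new repository type stores its location as a URI.
        BasicManagedRepository repo = new BasicManagedRepository( "test-repo", "Test Repository" );
        Path repoLocation = Paths.get( System.getProperty( "basedir" ), "target/test-classes/test-repo" ).toAbsolutePath( );
        repo.setLocation( repoLocation.toUri( ) );
        repo.addActiveReleaseScheme( ReleaseScheme.RELEASE );

        // Convert the URI back into a Path before resolving files beneath it.
        Path basePath = PathUtil.getPathFromUri( repo.getLocation( ) );
        return basePath.resolve( "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
    }
}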

+2 −1  archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/test/java/org/apache/archiva/scheduler/indexing/DownloadRemoteIndexTaskTest.java

@@ -50,6 +50,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Locale;
import java.util.concurrent.TimeUnit;

import static org.assertj.core.api.Assertions.assertThat;
@@ -145,7 +146,7 @@ public class DownloadRemoteIndexTaskTest

protected RemoteRepository getRemoteRepository() throws IOException
{
RemoteRepository remoteRepository = new RemoteRepository();
RemoteRepository remoteRepository = new RemoteRepository( Locale.getDefault());
Path indexDirectory =
Paths.get( FileUtils.getBasedir(), "target/index/test-" + Long.toString( System.currentTimeMillis() ) );
Files.createDirectories( indexDirectory );

+7 −1  archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/test/resources/spring-context.xml

@@ -20,10 +20,16 @@
-->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:tx="http://www.springframework.org/schema/tx"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd"
default-lazy-init="true">

<context:component-scan base-package="org.apache.archiva.repository.content.maven2" />

<alias name="managedRepositoryContent#maven" alias="managedRepositoryContent#default" />
<alias name="remoteRepositoryContent#maven" alias="remoteRepositoryContent#default" />

<bean name="scheduler" class="org.apache.archiva.redback.components.scheduler.DefaultScheduler">
<property name="properties">
<props>

+0 −1  archiva-modules/archiva-scheduler/archiva-scheduler-repository/pom.xml

@@ -89,7 +89,6 @@
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-layer</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>

+6 −2  archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/main/java/org/apache/archiva/scheduler/repository/ArchivaRepositoryScanningTaskExecutor.java

@@ -19,8 +19,8 @@ package org.apache.archiva.scheduler.repository;
* under the License.
*/

import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.admin.model.managed.ManagedRepositoryAdmin;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
@@ -28,6 +28,7 @@ import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.stats.model.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.model.RepositoryStatisticsManager;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.scanner.RepositoryContentConsumers;
import org.apache.archiva.repository.scanner.RepositoryScanStatistics;
import org.apache.archiva.repository.scanner.RepositoryScanner;
@@ -57,6 +58,9 @@ public class ArchivaRepositoryScanningTaskExecutor
{
private Logger log = LoggerFactory.getLogger( ArchivaRepositoryScanningTaskExecutor.class );

@Inject
RepositoryRegistry repositoryRegistry;

@Inject
private ManagedRepositoryAdmin managedRepositoryAdmin;

@@ -107,7 +111,7 @@ public class ArchivaRepositoryScanningTaskExecutor
throw new TaskExecutionException( "Unable to execute RepositoryTask with blank repository Id." );
}

ManagedRepository arepo = managedRepositoryAdmin.getManagedRepository( repoId );
ManagedRepository arepo = repositoryRegistry.getManagedRepository( repoId );

// execute consumers on resource file if set
if ( task.getResourceFile() != null )
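
The executor above now resolves repositories through the injected RepositoryRegistry instead of ManagedRepositoryAdmin. A minimal sketch of that lookup, limited to the registry methods shown in these hunks (the null check and exception are illustrative):

import javax.inject.Inject;

import org.apache.archiva.redback.components.taskqueue.execution.TaskExecutionException;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.RepositoryRegistry;

public class RegistryLookupSketch
{
    @Inject
    RepositoryRegistry repositoryRegistry;

    ManagedRepository findRepository( String repoId ) throws TaskExecutionException
    {
        // The registry is the single entry point to configured repositories in the new API.
        ManagedRepository repo = repositoryRegistry.getManagedRepository( repoId );
        if ( repo == null )
        {
            throw new TaskExecutionException( "Unable to find repository with id " + repoId );
        }
        return repo;
    }
}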

+9 −2  archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/MockManagedRepositoryAdmin.java

@@ -29,6 +29,7 @@ import org.apache.maven.index.context.IndexingContext;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;

/**
@@ -52,10 +53,10 @@ public class MockManagedRepositoryAdmin
{
// TODO add staging repo information back too
ManagedRepository repo =
new ManagedRepository( repoConfig.getId(), repoConfig.getName(), repoConfig.getLocation(),
new ManagedRepository( Locale.getDefault( ), repoConfig.getId(), repoConfig.getName(), repoConfig.getLocation(),
repoConfig.getLayout(), repoConfig.isSnapshots(), repoConfig.isReleases(),
repoConfig.isBlockRedeployments(), repoConfig.getRefreshCronExpression(),
repoConfig.getIndexDir(), repoConfig.isScanned(), repoConfig.getRetentionTime(),
repoConfig.getIndexDir(), repoConfig.isScanned(), repoConfig.getRetentionPeriod(),
repoConfig.getRetentionCount(), repoConfig.isDeleteReleasedSnapshots(), true );

managedRepos.add( repo );
@@ -126,4 +127,10 @@ public class MockManagedRepositoryAdmin
{
return null;
}

@Override
public IndexingContext createIndexContext( org.apache.archiva.repository.ManagedRepository repository ) throws RepositoryAdminException
{
return null;
}
}

+9 −2  archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/scheduler/repository/AbstractArchivaRepositoryScanningTaskExecutorTest.java

@@ -26,6 +26,8 @@ import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.stats.model.RepositoryStatisticsManager;
import org.apache.archiva.mock.MockRepositorySessionFactory;
import org.apache.archiva.redback.components.taskqueue.execution.TaskExecutor;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.codehaus.plexus.util.FileUtils;
import org.junit.After;
@@ -59,6 +61,9 @@ import static org.mockito.Mockito.mock;
public abstract class AbstractArchivaRepositoryScanningTaskExecutorTest
extends TestCase
{
@Inject
RepositoryRegistry repositoryRegistry;

@Inject
@Named( value = "taskExecutor#test-repository-scanning" )
protected TaskExecutor taskExecutor;
@@ -134,8 +139,10 @@ public abstract class AbstractArchivaRepositoryScanningTaskExecutorTest
repositoryConfiguration.setId( TEST_REPO_ID );
repositoryConfiguration.setName( "Test Repository" );
repositoryConfiguration.setLocation( repoDir.toAbsolutePath().toString() );
archivaConfig.getConfiguration().getManagedRepositories().clear();
archivaConfig.getConfiguration().addManagedRepository( repositoryConfiguration );
for ( ManagedRepository repo : repositoryRegistry.getManagedRepositories()) {
repositoryRegistry.removeRepository( repo );
}
repositoryRegistry.putRepository( repositoryConfiguration );

metadataRepository = mock( MetadataRepository.class );


+2 −10  archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/scheduler/repository/TestConsumer.java

@@ -19,14 +19,13 @@ package org.apache.archiva.scheduler.repository;
* under the License.
*/

import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.archiva.consumers.ConsumerException;
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryContentFactory;
import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.layout.LayoutException;
import org.springframework.stereotype.Service;

@@ -80,14 +79,7 @@ public class TestConsumer
{
consumed.clear();

try
{
this.repository = factory.getManagedRepositoryContent( repository.getId() );
}
catch ( RepositoryException e )
{
throw new ConsumerException( e.getMessage(), e );
}
this.repository = repository.getContent();
}

@Override
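
The consumer above no longer needs RepositoryContentFactory: the content layer is reachable directly from the repository. A minimal sketch of that simplification, assuming only the getContent() accessor shown in the hunk:

import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent;

public class ContentAccessSketch
{
    private ManagedRepositoryContent content;

    void beginScan( ManagedRepository repository )
    {
        // The new ManagedRepository exposes its content object directly,
        // so the factory lookup and its RepositoryException handling disappear.
        this.content = repository.getContent( );
    }
}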

+4 −1  archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/resources/spring-context.xml

@@ -28,7 +28,10 @@
default-lazy-init="false">

<context:annotation-config/>
<context:component-scan base-package="org.apache.archiva.metadata.repository"/>
<context:component-scan base-package="org.apache.archiva.metadata.repository,org.apache.archiva.repository.content.maven2"/>

<alias name="managedRepositoryContent#maven" alias="managedRepositoryContent#default" />
<alias name="remoteRepositoryContent#maven" alias="remoteRepositoryContent#default" />

<bean name="managedRepoAdmin#test" class="org.apache.archiva.mock.MockManagedRepositoryAdmin">
<property name="archivaConfiguration" ref="archivaConfiguration#test-repository-scanning"/>
