--- /dev/null
+package org.apache.archiva.maven.scheduler.indexing;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.indexer.UnsupportedBaseContextException;
+import org.apache.archiva.components.taskqueue.Task;
+import org.apache.archiva.components.taskqueue.execution.TaskExecutionException;
+import org.apache.archiva.components.taskqueue.execution.TaskExecutor;
+import org.apache.archiva.repository.ManagedRepository;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;
+import org.apache.maven.index.ArtifactContext;
+import org.apache.maven.index.ArtifactContextProducer;
+import org.apache.maven.index.DefaultScannerListener;
+import org.apache.maven.index.FlatSearchRequest;
+import org.apache.maven.index.FlatSearchResponse;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.IndexerEngine;
+import org.apache.maven.index.MAVEN;
+import org.apache.maven.index.Scanner;
+import org.apache.maven.index.ScanningRequest;
+import org.apache.maven.index.ScanningResult;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.expr.SourcedSearchExpression;
+import org.apache.maven.index.packer.IndexPacker;
+import org.apache.maven.index.packer.IndexPackingRequest;
+import org.apache.maven.index_shaded.lucene.search.BooleanClause;
+import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.nio.file.Path;
+
+/**
+ * ArchivaIndexingTaskExecutor executes all indexing tasks. Adding, updating and removing artifacts from the index are
+ * all performed by this executor. Add and update tasks are put on the indexing task queue by the
+ * NexusIndexerConsumer, while remove tasks are added by the LuceneCleanupRemoveIndexedConsumer.
+ */
+@Service( "taskExecutor#indexing" )
+public class ArchivaIndexingTaskExecutor
+ implements TaskExecutor
+{
+ private Logger log = LoggerFactory.getLogger( ArchivaIndexingTaskExecutor.class );
+
+ @Inject
+ private IndexPacker indexPacker;
+
+ @Inject
+ private ArtifactContextProducer artifactContextProducer;
+
+ @Inject
+ private Indexer indexer;
+
+ @Inject
+ private Scanner scanner;
+
+ @Inject
+ IndexerEngine indexerEngine;
+
+ /**
+     * Executes the given {@link Task}.
+     * If the action is {@link org.apache.archiva.scheduler.indexing.ArtifactIndexingTask.Action#FINISH} and
+     * isExecuteOnEntireRepo is set, the whole repository will be scanned.
+     *
+     * @param task the indexing task to execute; must be an {@link ArtifactIndexingTask}
+     * @throws TaskExecutionException if the indexing context cannot be obtained or indexing fails
+ */
+ @Override
+ public void executeTask( Task task )
+ throws TaskExecutionException
+ {
+ ArtifactIndexingTask indexingTask = (ArtifactIndexingTask) task;
+
+ ManagedRepository repository = indexingTask.getRepository( );
+ ArchivaIndexingContext archivaContext = indexingTask.getContext( );
+ IndexingContext context = null;
+ try
+ {
+ context = archivaContext.getBaseContext( IndexingContext.class );
+ }
+ catch ( UnsupportedBaseContextException e )
+ {
+ throw new TaskExecutionException( "Bad repository type.", e );
+ }
+
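+        // A FINISH action combined with executeOnEntireRepo triggers a full repository scan;
+        // any other task operates on the single artifact referenced by the task below.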
+ if ( ArtifactIndexingTask.Action.FINISH.equals( indexingTask.getAction( ) )
+ && indexingTask.isExecuteOnEntireRepo( ) )
+ {
+ long start = System.currentTimeMillis( );
+ try
+ {
+ context.updateTimestamp( );
+ DefaultScannerListener listener = new DefaultScannerListener( context, indexerEngine, true, null );
+ ScanningRequest request = new ScanningRequest( context, listener );
+ ScanningResult result = scanner.scan( request );
+ if ( result.hasExceptions( ) )
+ {
+ log.error( "Exceptions occured during index scan of " + context.getId( ) );
+ result.getExceptions( ).stream( ).map( e -> e.getMessage( ) ).distinct( ).limit( 5 ).forEach(
+ s -> log.error( "Message: " + s )
+ );
+ }
+ }
+ catch ( IOException e )
+ {
+ log.error( "Error during context scan {}: {}", context.getId( ), context.getIndexDirectory( ) );
+ }
+ long end = System.currentTimeMillis( );
+ log.info( "indexed maven repository: {}, onlyUpdate: {}, time {} ms", repository.getId( ),
+ indexingTask.isOnlyUpdate( ), ( end - start ) );
+ log.debug( "Finishing indexing task on repo: {}", repository.getId( ) );
+ finishIndexingTask( indexingTask, repository, context );
+ }
+ else
+ {
+ // create context if not a repo scan request
+ if ( !indexingTask.isExecuteOnEntireRepo( ) )
+ {
+ try
+ {
+ log.debug( "Creating indexing context on resource: {}", //
+ ( indexingTask.getResourceFile( ) == null
+ ? "none"
+ : indexingTask.getResourceFile( ) ) );
+ archivaContext = repository.getIndexingContext( );
+ context = archivaContext.getBaseContext( IndexingContext.class );
+ }
+ catch ( UnsupportedBaseContextException e )
+ {
+ log.error( "Error occurred while creating context: {}", e.getMessage( ) );
+ throw new TaskExecutionException( "Error occurred while creating context: " + e.getMessage( ), e );
+ }
+ }
+
+ if ( context == null || context.getIndexDirectory( ) == null )
+ {
+ throw new TaskExecutionException( "Trying to index an artifact but the context is already closed" );
+ }
+
+ try
+ {
+ Path artifactFile = indexingTask.getResourceFile( );
+ if ( artifactFile == null )
+ {
+ log.debug( "no artifact pass in indexing task so skip it" );
+ }
+ else
+ {
+ ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile.toFile( ) );
+
+ if ( ac != null )
+ {
+ // MRM-1779 pom must be indexed too
+ // TODO make that configurable?
+ if ( artifactFile.getFileName( ).toString( ).endsWith( ".pom" ) )
+ {
+ ac.getArtifactInfo( ).setFileExtension( "pom" );
+ ac.getArtifactInfo( ).setPackaging( "pom" );
+ ac.getArtifactInfo( ).setClassifier( "pom" );
+ }
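+                        // For ADD tasks, search the index by groupId/artifactId/version (plus classifier and
+                        // packaging when present) to decide between adding a new document and updating an existing one.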
+ if ( indexingTask.getAction( ).equals( ArtifactIndexingTask.Action.ADD ) )
+ {
+ //IndexSearcher s = context.getIndexSearcher();
+ //String uinfo = ac.getArtifactInfo().getUinfo();
+ //TopDocs d = s.search( new TermQuery( new Term( ArtifactInfo.UINFO, uinfo ) ), 1 );
+
+ BooleanQuery.Builder qb = new BooleanQuery.Builder();
+ qb.add( indexer.constructQuery( MAVEN.GROUP_ID, new SourcedSearchExpression(
+ ac.getArtifactInfo( ).getGroupId( ) ) ), BooleanClause.Occur.MUST );
+ qb.add( indexer.constructQuery( MAVEN.ARTIFACT_ID, new SourcedSearchExpression(
+ ac.getArtifactInfo( ).getArtifactId( ) ) ), BooleanClause.Occur.MUST );
+ qb.add( indexer.constructQuery( MAVEN.VERSION, new SourcedSearchExpression(
+ ac.getArtifactInfo( ).getVersion( ) ) ), BooleanClause.Occur.MUST );
+ if ( ac.getArtifactInfo( ).getClassifier( ) != null )
+ {
+ qb.add( indexer.constructQuery( MAVEN.CLASSIFIER, new SourcedSearchExpression(
+ ac.getArtifactInfo( ).getClassifier( ) ) ), BooleanClause.Occur.MUST );
+ }
+ if ( ac.getArtifactInfo( ).getPackaging( ) != null )
+ {
+ qb.add( indexer.constructQuery( MAVEN.PACKAGING, new SourcedSearchExpression(
+ ac.getArtifactInfo( ).getPackaging( ) ) ), BooleanClause.Occur.MUST );
+ }
+ FlatSearchRequest flatSearchRequest = new FlatSearchRequest( qb.build(), context );
+ FlatSearchResponse flatSearchResponse = indexer.searchFlat( flatSearchRequest );
+ if ( flatSearchResponse.getResults( ).isEmpty( ) )
+ {
+ log.debug( "Adding artifact '{}' to index..", ac.getArtifactInfo( ) );
+ indexerEngine.index( context, ac );
+ }
+ else
+ {
+ log.debug( "Updating artifact '{}' in index..", ac.getArtifactInfo( ) );
+ // TODO check if update exists !!
+ indexerEngine.update( context, ac );
+ }
+
+ context.updateTimestamp( );
+ context.commit( );
+
+
+ }
+ else
+ {
+ log.debug( "Removing artifact '{}' from index..", ac.getArtifactInfo( ) );
+ indexerEngine.remove( context, ac );
+ }
+ }
+ }
+ // close the context if not a repo scan request
+ if ( !indexingTask.isExecuteOnEntireRepo( ) )
+ {
+ log.debug( "Finishing indexing task on resource file : {}", indexingTask.getResourceFile( ) != null
+ ? indexingTask.getResourceFile( )
+ : " none " );
+ finishIndexingTask( indexingTask, repository, context );
+ }
+ }
+ catch ( IOException e )
+ {
+ log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage( ),
+ e );
+ throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask + "'",
+ e );
+ }
+ }
+
+ }
+
+ private void finishIndexingTask( ArtifactIndexingTask indexingTask, ManagedRepository repository,
+ IndexingContext context )
+ throws TaskExecutionException
+ {
+ try
+ {
+
+ log.debug( "Finishing indexing" );
+ context.optimize( );
+
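+            // If the repository supports it, pack the optimized index to the configured local packed index path
+            // so it can be served to remote consumers, unless packed index creation is skipped.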
+ if ( repository.supportsFeature( IndexCreationFeature.class ) )
+ {
+ IndexCreationFeature icf = repository.getFeature( IndexCreationFeature.class ).get( );
+ if ( !icf.isSkipPackedIndexCreation( ) && icf.getLocalPackedIndexPath( ) != null && icf.getLocalIndexPath().getFilePath()!=null )
+ {
+
+ log.debug( "Creating packed index from {} on {}", context.getIndexDirectoryFile( ), icf.getLocalPackedIndexPath( ) );
+ IndexPackingRequest request = new IndexPackingRequest( context, //
+ context.acquireIndexSearcher( ).getIndexReader( ),
+ //
+ icf.getLocalPackedIndexPath( ).getFilePath().toFile( ) );
+
+ indexPacker.packIndex( request );
+ context.updateTimestamp( true );
+
+ log.debug( "Index file packed at '{}'.", icf.getLocalPackedIndexPath( ) );
+ }
+ else
+ {
+ log.debug( "skip packed index creation" );
+ }
+ }
+ else
+ {
+ log.debug( "skip packed index creation" );
+ }
+ }
+ catch ( IOException e )
+ {
+ log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage( ) );
+ throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask + "'",
+ e );
+ }
+ }
+
+ public void setIndexPacker( IndexPacker indexPacker )
+ {
+ this.indexPacker = indexPacker;
+ }
+
+}
--- /dev/null
+package org.apache.archiva.maven.scheduler.indexing;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.proxy.ProxyRegistry;
+import org.apache.archiva.proxy.model.NetworkProxy;
+import org.apache.archiva.scheduler.indexing.DownloadRemoteIndexException;
+import org.apache.archiva.scheduler.indexing.DownloadRemoteIndexScheduler;
+import org.apache.archiva.configuration.ArchivaConfiguration;
+import org.apache.archiva.configuration.ConfigurationEvent;
+import org.apache.archiva.configuration.ConfigurationListener;
+import org.apache.archiva.indexer.UnsupportedBaseContextException;
+import org.apache.archiva.maven.common.proxy.WagonFactory;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.packer.IndexPacker;
+import org.apache.maven.index.updater.IndexUpdater;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.scheduling.TaskScheduler;
+import org.springframework.scheduling.support.CronTrigger;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+import javax.inject.Named;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M1
+ */
+@Service( "downloadRemoteIndexScheduler#default" )
+public class DefaultDownloadRemoteIndexScheduler
+ implements ConfigurationListener, DownloadRemoteIndexScheduler
+{
+
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+ @Inject
+ @Named( value = "taskScheduler#indexDownloadRemote" )
+ private TaskScheduler taskScheduler;
+
+ @Inject
+ RepositoryRegistry repositoryRegistry;
+
+ @Inject
+ private ArchivaConfiguration archivaConfiguration;
+
+ @Inject
+ private WagonFactory wagonFactory;
+
+ @Inject
+ private IndexUpdater indexUpdater;
+
+ @Inject
+ private IndexPacker indexPacker;
+
+ @Inject
+ private ProxyRegistry proxyRegistry;
+
+    // store ids of currently running remote downloads; updated in DownloadRemoteIndexTask
+ private List<String> runningRemoteDownloadIds = new CopyOnWriteArrayList<String>();
+
+ @PostConstruct
+ public void startup()
+ throws
+ DownloadRemoteIndexException, UnsupportedBaseContextException {
+ archivaConfiguration.addListener( this );
+ // TODO add indexContexts even if null
+
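+        // Schedule an index download for every registered remote repository that has remote index
+        // download enabled and a cron scheduling definition.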
+ for ( org.apache.archiva.repository.RemoteRepository remoteRepository : repositoryRegistry.getRemoteRepositories() )
+ {
+ String contextKey = "remote-" + remoteRepository.getId();
+ IndexingContext context = remoteRepository.getIndexingContext().getBaseContext(IndexingContext.class);
+ if ( context == null )
+ {
+ continue;
+ }
+ RemoteIndexFeature rif = remoteRepository.getFeature(RemoteIndexFeature.class).get();
+
+
+ // TODO record jobs from configuration
+ if ( rif.isDownloadRemoteIndex() && StringUtils.isNotEmpty(
+ remoteRepository.getSchedulingDefinition() ) )
+ {
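+                // An empty local index directory means no index has been downloaded yet, so request a full download.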
+ boolean fullDownload = context.getIndexDirectoryFile().list().length == 0;
+ scheduleDownloadRemote( remoteRepository.getId(), false, fullDownload );
+ }
+ }
+
+
+ }
+
+ @Override
+ public void configurationEvent( ConfigurationEvent event )
+ {
+ // TODO remove jobs and add again
+ }
+
+
+ @Override
+ public void scheduleDownloadRemote( String repositoryId, boolean now, boolean fullDownload )
+ throws DownloadRemoteIndexException
+ {
+ org.apache.archiva.repository.RemoteRepository remoteRepo = repositoryRegistry.getRemoteRepository(repositoryId);
+
+ if ( remoteRepo == null )
+ {
+ log.warn( "ignore scheduleDownloadRemote for repo with id {} as not exists", repositoryId );
+ return;
+ }
+ if (!remoteRepo.supportsFeature(RemoteIndexFeature.class)) {
+ log.warn("ignore scheduleDownloadRemote for repo with id {}. Does not support remote index.", repositoryId);
+ return;
+ }
+ RemoteIndexFeature rif = remoteRepo.getFeature(RemoteIndexFeature.class).get();
+ NetworkProxy networkProxy = null;
+ if ( StringUtils.isNotBlank( rif.getProxyId() ) )
+ {
+ networkProxy = proxyRegistry.getNetworkProxy( rif.getProxyId() );
+ if ( networkProxy == null )
+ {
+ log.warn(
+ "your remote repository is configured to download remote index trought a proxy we cannot find id:{}",
+ rif.getProxyId() );
+ }
+ }
+
+ DownloadRemoteIndexTaskRequest downloadRemoteIndexTaskRequest = new DownloadRemoteIndexTaskRequest() //
+ .setRemoteRepository( remoteRepo ) //
+ .setNetworkProxy( networkProxy ) //
+ .setFullDownload( fullDownload ) //
+ .setWagonFactory( wagonFactory ) //
+ .setIndexUpdater( indexUpdater ) //
+ .setIndexPacker( this.indexPacker );
+
+ if ( now )
+ {
+ log.info( "schedule download remote index for repository {}", remoteRepo.getId() );
+ // do it now
+ taskScheduler.schedule(
+ new DownloadRemoteIndexTask( downloadRemoteIndexTaskRequest, this.runningRemoteDownloadIds ),
+ new Date() );
+ }
+ else
+ {
+ log.info( "schedule download remote index for repository {} with cron expression {}",
+ remoteRepo.getId(), remoteRepo.getSchedulingDefinition());
+ try
+ {
+ CronTrigger cronTrigger = new CronTrigger( remoteRepo.getSchedulingDefinition());
+ taskScheduler.schedule(
+ new DownloadRemoteIndexTask( downloadRemoteIndexTaskRequest, this.runningRemoteDownloadIds ),
+ cronTrigger );
+ }
+ catch ( IllegalArgumentException e )
+ {
+ log.warn( "Unable to schedule remote index download: {}", e.getLocalizedMessage() );
+ }
+
+ if ( rif.isDownloadRemoteIndexOnStartup() )
+ {
+ log.info(
+ "remote repository {} configured with downloadRemoteIndexOnStartup schedule now a download",
+ remoteRepo.getId() );
+ taskScheduler.schedule(
+ new DownloadRemoteIndexTask( downloadRemoteIndexTaskRequest, this.runningRemoteDownloadIds ),
+ new Date() );
+ }
+ }
+
+ }
+
+ public TaskScheduler getTaskScheduler()
+ {
+ return taskScheduler;
+ }
+
+ public void setTaskScheduler( TaskScheduler taskScheduler )
+ {
+ this.taskScheduler = taskScheduler;
+ }
+
+ @Override
+ public List<String> getRunningRemoteDownloadIds()
+ {
+ return runningRemoteDownloadIds;
+ }
+}
--- /dev/null
+package org.apache.archiva.maven.scheduler.indexing;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.updater.IndexUpdateSideEffect;
+import org.apache.maven.index_shaded.lucene.store.Directory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+/**
+ * Not doing much, but at least one implementation is required.
+ *
+ * @since 3.0.0
+ */
+@Service
+public class DefaultIndexUpdateSideEffect
+ implements IndexUpdateSideEffect
+{
+ private static final Logger LOGGER = LoggerFactory.getLogger( DefaultIndexUpdateSideEffect.class );
+
+ @Override
+ public void updateIndex( Directory directory, IndexingContext indexingContext, boolean b )
+ {
+ LOGGER.info( "updating index: {} with directory: {}", //
+ indexingContext.getId(), //
+ directory.toString() );
+ }
+}
--- /dev/null
+package org.apache.archiva.maven.scheduler.indexing;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.maven.common.proxy.WagonFactory;
+import org.apache.archiva.maven.common.proxy.WagonFactoryRequest;
+import org.apache.archiva.proxy.model.NetworkProxy;
+import org.apache.archiva.repository.base.PasswordCredentials;
+import org.apache.archiva.repository.RemoteRepository;
+import org.apache.archiva.repository.RepositoryException;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.commons.lang3.time.StopWatch;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.updater.IndexUpdateRequest;
+import org.apache.maven.index.updater.IndexUpdateResult;
+import org.apache.maven.index.updater.IndexUpdater;
+import org.apache.maven.index.updater.ResourceFetcher;
+import org.apache.maven.index_shaded.lucene.index.IndexNotFoundException;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.StreamWagon;
+import org.apache.maven.wagon.TransferFailedException;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.authentication.AuthenticationInfo;
+import org.apache.maven.wagon.authorization.AuthorizationException;
+import org.apache.maven.wagon.events.TransferEvent;
+import org.apache.maven.wagon.events.TransferListener;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+import org.apache.maven.wagon.repository.Repository;
+import org.apache.maven.wagon.shared.http.AbstractHttpClientWagon;
+import org.apache.maven.wagon.shared.http.HttpConfiguration;
+import org.apache.maven.wagon.shared.http.HttpMethodConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M1
+ */
+public class DownloadRemoteIndexTask
+ implements Runnable
+{
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+ private RemoteRepository remoteRepository;
+
+ private WagonFactory wagonFactory;
+
+ private NetworkProxy networkProxy;
+
+ private boolean fullDownload;
+
+ private List<String> runningRemoteDownloadIds;
+
+ private IndexUpdater indexUpdater;
+
+
+ public DownloadRemoteIndexTask( DownloadRemoteIndexTaskRequest downloadRemoteIndexTaskRequest,
+ List<String> runningRemoteDownloadIds )
+ {
+ this.remoteRepository = downloadRemoteIndexTaskRequest.getRemoteRepository();
+ this.wagonFactory = downloadRemoteIndexTaskRequest.getWagonFactory();
+ this.networkProxy = downloadRemoteIndexTaskRequest.getNetworkProxy();
+ this.fullDownload = downloadRemoteIndexTaskRequest.isFullDownload();
+ this.runningRemoteDownloadIds = runningRemoteDownloadIds;
+ this.indexUpdater = downloadRemoteIndexTaskRequest.getIndexUpdater();
+ }
+
+ @Override
+ public void run()
+ {
+
+ // so short lock : not sure we need it
+ synchronized ( this.runningRemoteDownloadIds )
+ {
+ if ( this.runningRemoteDownloadIds.contains( this.remoteRepository.getId() ) )
+ {
+ // skip it as it's running
+ log.info( "skip download index remote for repo {} it's already running",
+ this.remoteRepository.getId() );
+ return;
+ }
+ this.runningRemoteDownloadIds.add( this.remoteRepository.getId() );
+ }
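+        // Overall flow: validate the repository and its index context, prepare temp and cache directories,
+        // configure and connect a Wagon, then let the IndexUpdater fetch and apply the remote index.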
+ Path tempIndexDirectory = null;
+ StopWatch stopWatch = new StopWatch();
+ stopWatch.start();
+ try
+ {
+ log.info( "start download remote index for remote repository {}", this.remoteRepository.getId() );
+ if (this.remoteRepository.getIndexingContext()==null) {
+ throw new IndexNotFoundException("No index context set for repository "+remoteRepository.getId());
+ }
+ if (this.remoteRepository.getType()!= RepositoryType.MAVEN) {
+ throw new RepositoryException("Bad repository type");
+ }
+ if (!this.remoteRepository.supportsFeature(RemoteIndexFeature.class)) {
+ throw new RepositoryException("Repository does not support RemotIndexFeature "+remoteRepository.getId());
+ }
+ RemoteIndexFeature rif = this.remoteRepository.getFeature(RemoteIndexFeature.class).get();
+ IndexingContext indexingContext = this.remoteRepository.getIndexingContext().getBaseContext(IndexingContext.class);
+ // create a temp directory to download files
+ tempIndexDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex" );
+ Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile().getParent(), ".indexCache" );
+ Files.createDirectories( indexCacheDirectory );
+ if ( Files.exists(tempIndexDirectory) )
+ {
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( tempIndexDirectory );
+ }
+ Files.createDirectories( tempIndexDirectory );
+ tempIndexDirectory.toFile().deleteOnExit();
+ String baseIndexUrl = indexingContext.getIndexUpdateUrl();
+
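+            // Select the wagon implementation from the repository URL scheme (http, https, ...) and apply the configured timeouts.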
+ String wagonProtocol = this.remoteRepository.getLocation().getScheme();
+
+ final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(
+ new WagonFactoryRequest( wagonProtocol, this.remoteRepository.getExtraHeaders() ).networkProxy(
+ this.networkProxy )
+ );
+ // FIXME olamy having 2 config values
+ wagon.setReadTimeout( (int)rif.getDownloadTimeout().toMillis());
+ wagon.setTimeout( (int)remoteRepository.getTimeout().toMillis());
+
+ if ( wagon instanceof AbstractHttpClientWagon )
+ {
+ HttpConfiguration httpConfiguration = new HttpConfiguration();
+ HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration();
+ httpMethodConfiguration.setUsePreemptive( true );
+ httpMethodConfiguration.setReadTimeout( (int)rif.getDownloadTimeout().toMillis() );
+ httpConfiguration.setGet( httpMethodConfiguration );
+ AbstractHttpClientWagon.class.cast( wagon ).setHttpConfiguration( httpConfiguration );
+ }
+
+ wagon.addTransferListener( new DownloadListener() );
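+            // Translate the Archiva network proxy and repository credentials into Wagon ProxyInfo and AuthenticationInfo before connecting.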
+ ProxyInfo proxyInfo = null;
+ if ( this.networkProxy != null )
+ {
+ proxyInfo = new ProxyInfo();
+ proxyInfo.setType( this.networkProxy.getProtocol() );
+ proxyInfo.setHost( this.networkProxy.getHost() );
+ proxyInfo.setPort( this.networkProxy.getPort() );
+ proxyInfo.setUserName( this.networkProxy.getUsername() );
+ proxyInfo.setPassword( new String(this.networkProxy.getPassword()) );
+ }
+ AuthenticationInfo authenticationInfo = null;
+ if ( this.remoteRepository.getLoginCredentials()!=null && this.remoteRepository.getLoginCredentials() instanceof PasswordCredentials )
+ {
+ PasswordCredentials creds = (PasswordCredentials) this.remoteRepository.getLoginCredentials();
+ authenticationInfo = new AuthenticationInfo();
+ authenticationInfo.setUserName( creds.getUsername());
+ authenticationInfo.setPassword( new String(creds.getPassword()) );
+ }
+ log.debug("Connection to {}, authInfo={}", this.remoteRepository.getId(), authenticationInfo);
+ wagon.connect( new Repository( this.remoteRepository.getId(), baseIndexUrl ), authenticationInfo,
+ proxyInfo );
+
+ Path indexDirectory = indexingContext.getIndexDirectoryFile().toPath();
+ if ( !Files.exists(indexDirectory) )
+ {
+ Files.createDirectories( indexDirectory );
+ }
+ log.debug("Downloading index file to {}", indexDirectory);
+ log.debug("Index cache dir {}", indexCacheDirectory);
+
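+            // The IndexUpdater pulls the remote index chunks through this Wagon-backed ResourceFetcher,
+            // using the cache directory for incremental updates.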
+ ResourceFetcher resourceFetcher =
+ new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
+ IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
+ request.setForceFullUpdate( this.fullDownload );
+ request.setLocalIndexCacheDir( indexCacheDirectory.toFile() );
+
+ IndexUpdateResult result = this.indexUpdater.fetchAndUpdateIndex(request);
+ log.debug("Update result success: {}", result.isSuccessful());
+ stopWatch.stop();
+ log.info( "time update index from remote for repository {}: {}ms", this.remoteRepository.getId(),
+ ( stopWatch.getTime() ) );
+
+            // index packing optional ??
+ //IndexPackingRequest indexPackingRequest =
+ // new IndexPackingRequest( indexingContext, indexingContext.getIndexDirectoryFile() );
+ //indexPacker.packIndex( indexPackingRequest );
+ indexingContext.updateTimestamp( true );
+
+ }
+ catch ( Exception e )
+ {
+ log.error( e.getMessage(), e );
+ throw new RuntimeException( e.getMessage(), e );
+ }
+ finally
+ {
+ deleteDirectoryQuiet( tempIndexDirectory );
+ this.runningRemoteDownloadIds.remove( this.remoteRepository.getId() );
+ }
+ log.info( "end download remote index for remote repository {}", this.remoteRepository.getId() );
+ }
+
+ private void deleteDirectoryQuiet( Path f )
+ {
+ try
+ {
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( f );
+ }
+ catch ( IOException e )
+ {
+ log.warn( "skip error delete {} : {}", f, e.getMessage() );
+ }
+ }
+
+
+ private static final class DownloadListener
+ implements TransferListener
+ {
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+ private String resourceName;
+
+ private long startTime;
+
+ private int totalLength = 0;
+
+ @Override
+ public void transferInitiated( TransferEvent transferEvent )
+ {
+ startTime = System.currentTimeMillis();
+ resourceName = transferEvent.getResource().getName();
+ log.debug( "initiate transfer of {}", resourceName );
+ }
+
+ @Override
+ public void transferStarted( TransferEvent transferEvent )
+ {
+ this.totalLength = 0;
+ resourceName = transferEvent.getResource().getName();
+ log.info("Transferring: {}, {}", transferEvent.getResource().getContentLength(), transferEvent.getLocalFile().toString());
+ log.info( "start transfer of {}", transferEvent.getResource().getName() );
+ }
+
+ @Override
+ public void transferProgress( TransferEvent transferEvent, byte[] buffer, int length )
+ {
+ log.debug( "transfer of {} : {}/{}", transferEvent.getResource().getName(), buffer.length, length );
+ this.totalLength += length;
+ }
+
+ @Override
+ public void transferCompleted( TransferEvent transferEvent )
+ {
+ resourceName = transferEvent.getResource().getName();
+ long endTime = System.currentTimeMillis();
+ log.info( "end of transfer file {}: {}b, {}ms", transferEvent.getResource().getName(),
+ this.totalLength, ( endTime - startTime ) );
+ }
+
+ @Override
+ public void transferError( TransferEvent transferEvent )
+ {
+ log.info( "error of transfer file {}: {}", transferEvent.getResource().getName(),
+ transferEvent.getException().getMessage(), transferEvent.getException() );
+ }
+
+ @Override
+ public void debug( String message )
+ {
+ log.debug( "transfer debug {}", message );
+ }
+ }
+
+ private static class WagonResourceFetcher
+ implements ResourceFetcher
+ {
+
+ Logger log;
+
+ Path tempIndexDirectory;
+
+ Wagon wagon;
+
+ RemoteRepository remoteRepository;
+
+ private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
+ RemoteRepository remoteRepository )
+ {
+ this.log = log;
+ this.tempIndexDirectory = tempIndexDirectory;
+ this.wagon = wagon;
+ this.remoteRepository = remoteRepository;
+ }
+
+ @Override
+ public void connect( String id, String url )
+ throws IOException
+ {
+ //no op
+ }
+
+ @Override
+ public void disconnect()
+ throws IOException
+ {
+ // no op
+ }
+
+ @Override
+ public InputStream retrieve( String name )
+ throws IOException, FileNotFoundException
+ {
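+            // Download the named index resource into the temporary directory via Wagon and return a stream over the local copy.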
+ try
+ {
+ log.info( "index update retrieve file, name:{}", name );
+ Path file = tempIndexDirectory.resolve( name );
+ Files.deleteIfExists( file );
+ file.toFile().deleteOnExit();
+ wagon.get( addParameters( name, this.remoteRepository ), file.toFile() );
+ return Files.newInputStream( file );
+ }
+ catch ( AuthorizationException | TransferFailedException e )
+ {
+ throw new IOException( e.getMessage(), e );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ FileNotFoundException fnfe = new FileNotFoundException( e.getMessage() );
+ fnfe.initCause( e );
+ throw fnfe;
+ }
+ }
+
+ // FIXME remove crappy copy/paste
+ protected String addParameters( String path, RemoteRepository remoteRepository )
+ {
+ if ( remoteRepository.getExtraParameters().isEmpty() )
+ {
+ return path;
+ }
+
+ boolean question = false;
+
+ StringBuilder res = new StringBuilder( path == null ? "" : path );
+
+ for ( Map.Entry<String, String> entry : remoteRepository.getExtraParameters().entrySet() )
+ {
+                // first parameter is prefixed with '?', subsequent ones with '&'
+                res.append( question ? '&' : '?' ).append( entry.getKey() ).append( '=' ).append( entry.getValue() );
+                question = true;
+ }
+
+ return res.toString();
+ }
+
+ }
+
+
+}
+
--- /dev/null
+package org.apache.archiva.maven.scheduler.indexing;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.maven.common.proxy.WagonFactory;
+import org.apache.archiva.proxy.model.NetworkProxy;
+import org.apache.archiva.repository.RemoteRepository;
+import org.apache.maven.index.packer.IndexPacker;
+import org.apache.maven.index.updater.IndexUpdater;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M1
+ */
+public class DownloadRemoteIndexTaskRequest
+{
+ private RemoteRepository remoteRepository;
+
+ private WagonFactory wagonFactory;
+
+ private NetworkProxy networkProxy;
+
+ private boolean fullDownload;
+
+ private IndexUpdater indexUpdater;
+
+ private IndexPacker indexPacker;
+
+ public DownloadRemoteIndexTaskRequest()
+ {
+ // no op
+ }
+
+ public RemoteRepository getRemoteRepository()
+ {
+ return remoteRepository;
+ }
+
+ public DownloadRemoteIndexTaskRequest setRemoteRepository( RemoteRepository remoteRepository )
+ {
+ this.remoteRepository = remoteRepository;
+ return this;
+ }
+
+
+ public WagonFactory getWagonFactory()
+ {
+ return wagonFactory;
+ }
+
+ public DownloadRemoteIndexTaskRequest setWagonFactory( WagonFactory wagonFactory )
+ {
+ this.wagonFactory = wagonFactory;
+ return this;
+ }
+
+ public NetworkProxy getNetworkProxy()
+ {
+ return networkProxy;
+ }
+
+ public DownloadRemoteIndexTaskRequest setNetworkProxy( NetworkProxy networkProxy )
+ {
+ this.networkProxy = networkProxy;
+ return this;
+ }
+
+ public boolean isFullDownload()
+ {
+ return fullDownload;
+ }
+
+ public DownloadRemoteIndexTaskRequest setFullDownload( boolean fullDownload )
+ {
+ this.fullDownload = fullDownload;
+ return this;
+ }
+
+ public IndexUpdater getIndexUpdater()
+ {
+ return indexUpdater;
+ }
+
+ public DownloadRemoteIndexTaskRequest setIndexUpdater( IndexUpdater indexUpdater )
+ {
+ this.indexUpdater = indexUpdater;
+ return this;
+ }
+
+ public IndexPacker getIndexPacker()
+ {
+ return indexPacker;
+ }
+
+ public DownloadRemoteIndexTaskRequest setIndexPacker( IndexPacker indexPacker )
+ {
+ this.indexPacker = indexPacker;
+ return this;
+ }
+}
+++ /dev/null
-package org.apache.archiva.scheduler.indexing.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.components.taskqueue.Task;
-import org.apache.archiva.components.taskqueue.execution.TaskExecutionException;
-import org.apache.archiva.components.taskqueue.execution.TaskExecutor;
-import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;
-import org.apache.maven.index.ArtifactContext;
-import org.apache.maven.index.ArtifactContextProducer;
-import org.apache.maven.index.DefaultScannerListener;
-import org.apache.maven.index.FlatSearchRequest;
-import org.apache.maven.index.FlatSearchResponse;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.IndexerEngine;
-import org.apache.maven.index.MAVEN;
-import org.apache.maven.index.Scanner;
-import org.apache.maven.index.ScanningRequest;
-import org.apache.maven.index.ScanningResult;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.expr.SourcedSearchExpression;
-import org.apache.maven.index.packer.IndexPacker;
-import org.apache.maven.index.packer.IndexPackingRequest;
-import org.apache.maven.index_shaded.lucene.search.BooleanClause;
-import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.nio.file.Path;
-
-/**
- * ArchivaIndexingTaskExecutor Executes all indexing tasks. Adding, updating and removing artifacts from the index are
- * all performed by this executor. Add and update artifact in index tasks are added in the indexing task queue by the
- * NexusIndexerConsumer while remove artifact from index tasks are added by the LuceneCleanupRemoveIndexedConsumer.
- */
-@Service( "taskExecutor#indexing" )
-public class ArchivaIndexingTaskExecutor
- implements TaskExecutor
-{
- private Logger log = LoggerFactory.getLogger( ArchivaIndexingTaskExecutor.class );
-
- @Inject
- private IndexPacker indexPacker;
-
- @Inject
- private ArtifactContextProducer artifactContextProducer;
-
- @Inject
- private Indexer indexer;
-
- @Inject
- private Scanner scanner;
-
- @Inject
- IndexerEngine indexerEngine;
-
- /**
- * depending on current {@link Task} you have.
- * If {@link org.apache.archiva.scheduler.indexing.ArtifactIndexingTask.Action#FINISH} && isExecuteOnEntireRepo:
- * repository will be scanned.
- *
- * @param task
- * @throws TaskExecutionException
- */
- @Override
- public void executeTask( Task task )
- throws TaskExecutionException
- {
- ArtifactIndexingTask indexingTask = (ArtifactIndexingTask) task;
-
- ManagedRepository repository = indexingTask.getRepository( );
- ArchivaIndexingContext archivaContext = indexingTask.getContext( );
- IndexingContext context = null;
- try
- {
- context = archivaContext.getBaseContext( IndexingContext.class );
- }
- catch ( UnsupportedBaseContextException e )
- {
- throw new TaskExecutionException( "Bad repository type.", e );
- }
-
- if ( ArtifactIndexingTask.Action.FINISH.equals( indexingTask.getAction( ) )
- && indexingTask.isExecuteOnEntireRepo( ) )
- {
- long start = System.currentTimeMillis( );
- try
- {
- context.updateTimestamp( );
- DefaultScannerListener listener = new DefaultScannerListener( context, indexerEngine, true, null );
- ScanningRequest request = new ScanningRequest( context, listener );
- ScanningResult result = scanner.scan( request );
- if ( result.hasExceptions( ) )
- {
- log.error( "Exceptions occured during index scan of " + context.getId( ) );
- result.getExceptions( ).stream( ).map( e -> e.getMessage( ) ).distinct( ).limit( 5 ).forEach(
- s -> log.error( "Message: " + s )
- );
- }
- }
- catch ( IOException e )
- {
- log.error( "Error during context scan {}: {}", context.getId( ), context.getIndexDirectory( ) );
- }
- long end = System.currentTimeMillis( );
- log.info( "indexed maven repository: {}, onlyUpdate: {}, time {} ms", repository.getId( ),
- indexingTask.isOnlyUpdate( ), ( end - start ) );
- log.debug( "Finishing indexing task on repo: {}", repository.getId( ) );
- finishIndexingTask( indexingTask, repository, context );
- }
- else
- {
- // create context if not a repo scan request
- if ( !indexingTask.isExecuteOnEntireRepo( ) )
- {
- try
- {
- log.debug( "Creating indexing context on resource: {}", //
- ( indexingTask.getResourceFile( ) == null
- ? "none"
- : indexingTask.getResourceFile( ) ) );
- archivaContext = repository.getIndexingContext( );
- context = archivaContext.getBaseContext( IndexingContext.class );
- }
- catch ( UnsupportedBaseContextException e )
- {
- log.error( "Error occurred while creating context: {}", e.getMessage( ) );
- throw new TaskExecutionException( "Error occurred while creating context: " + e.getMessage( ), e );
- }
- }
-
- if ( context == null || context.getIndexDirectory( ) == null )
- {
- throw new TaskExecutionException( "Trying to index an artifact but the context is already closed" );
- }
-
- try
- {
- Path artifactFile = indexingTask.getResourceFile( );
- if ( artifactFile == null )
- {
- log.debug( "no artifact pass in indexing task so skip it" );
- }
- else
- {
- ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile.toFile( ) );
-
- if ( ac != null )
- {
- // MRM-1779 pom must be indexed too
- // TODO make that configurable?
- if ( artifactFile.getFileName( ).toString( ).endsWith( ".pom" ) )
- {
- ac.getArtifactInfo( ).setFileExtension( "pom" );
- ac.getArtifactInfo( ).setPackaging( "pom" );
- ac.getArtifactInfo( ).setClassifier( "pom" );
- }
- if ( indexingTask.getAction( ).equals( ArtifactIndexingTask.Action.ADD ) )
- {
- //IndexSearcher s = context.getIndexSearcher();
- //String uinfo = ac.getArtifactInfo().getUinfo();
- //TopDocs d = s.search( new TermQuery( new Term( ArtifactInfo.UINFO, uinfo ) ), 1 );
-
- BooleanQuery.Builder qb = new BooleanQuery.Builder();
- qb.add( indexer.constructQuery( MAVEN.GROUP_ID, new SourcedSearchExpression(
- ac.getArtifactInfo( ).getGroupId( ) ) ), BooleanClause.Occur.MUST );
- qb.add( indexer.constructQuery( MAVEN.ARTIFACT_ID, new SourcedSearchExpression(
- ac.getArtifactInfo( ).getArtifactId( ) ) ), BooleanClause.Occur.MUST );
- qb.add( indexer.constructQuery( MAVEN.VERSION, new SourcedSearchExpression(
- ac.getArtifactInfo( ).getVersion( ) ) ), BooleanClause.Occur.MUST );
- if ( ac.getArtifactInfo( ).getClassifier( ) != null )
- {
- qb.add( indexer.constructQuery( MAVEN.CLASSIFIER, new SourcedSearchExpression(
- ac.getArtifactInfo( ).getClassifier( ) ) ), BooleanClause.Occur.MUST );
- }
- if ( ac.getArtifactInfo( ).getPackaging( ) != null )
- {
- qb.add( indexer.constructQuery( MAVEN.PACKAGING, new SourcedSearchExpression(
- ac.getArtifactInfo( ).getPackaging( ) ) ), BooleanClause.Occur.MUST );
- }
- FlatSearchRequest flatSearchRequest = new FlatSearchRequest( qb.build(), context );
- FlatSearchResponse flatSearchResponse = indexer.searchFlat( flatSearchRequest );
- if ( flatSearchResponse.getResults( ).isEmpty( ) )
- {
- log.debug( "Adding artifact '{}' to index..", ac.getArtifactInfo( ) );
- indexerEngine.index( context, ac );
- }
- else
- {
- log.debug( "Updating artifact '{}' in index..", ac.getArtifactInfo( ) );
- // TODO check if update exists !!
- indexerEngine.update( context, ac );
- }
-
- context.updateTimestamp( );
- context.commit( );
-
-
- }
- else
- {
- log.debug( "Removing artifact '{}' from index..", ac.getArtifactInfo( ) );
- indexerEngine.remove( context, ac );
- }
- }
- }
- // close the context if not a repo scan request
- if ( !indexingTask.isExecuteOnEntireRepo( ) )
- {
- log.debug( "Finishing indexing task on resource file : {}", indexingTask.getResourceFile( ) != null
- ? indexingTask.getResourceFile( )
- : " none " );
- finishIndexingTask( indexingTask, repository, context );
- }
- }
- catch ( IOException e )
- {
- log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage( ),
- e );
- throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask + "'",
- e );
- }
- }
-
- }
-
- private void finishIndexingTask( ArtifactIndexingTask indexingTask, ManagedRepository repository,
- IndexingContext context )
- throws TaskExecutionException
- {
- try
- {
-
- log.debug( "Finishing indexing" );
- context.optimize( );
-
- if ( repository.supportsFeature( IndexCreationFeature.class ) )
- {
- IndexCreationFeature icf = repository.getFeature( IndexCreationFeature.class ).get( );
- if ( !icf.isSkipPackedIndexCreation( ) && icf.getLocalPackedIndexPath( ) != null && icf.getLocalIndexPath().getFilePath()!=null )
- {
-
- log.debug( "Creating packed index from {} on {}", context.getIndexDirectoryFile( ), icf.getLocalPackedIndexPath( ) );
- IndexPackingRequest request = new IndexPackingRequest( context, //
- context.acquireIndexSearcher( ).getIndexReader( ),
- //
- icf.getLocalPackedIndexPath( ).getFilePath().toFile( ) );
-
- indexPacker.packIndex( request );
- context.updateTimestamp( true );
-
- log.debug( "Index file packed at '{}'.", icf.getLocalPackedIndexPath( ) );
- }
- else
- {
- log.debug( "skip packed index creation" );
- }
- }
- else
- {
- log.debug( "skip packed index creation" );
- }
- }
- catch ( IOException e )
- {
- log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage( ) );
- throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask + "'",
- e );
- }
- }
-
- public void setIndexPacker( IndexPacker indexPacker )
- {
- this.indexPacker = indexPacker;
- }
-
-}
+++ /dev/null
-package org.apache.archiva.scheduler.indexing.maven;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.proxy.ProxyRegistry;
-import org.apache.archiva.proxy.model.NetworkProxy;
-import org.apache.archiva.scheduler.indexing.DownloadRemoteIndexException;
-import org.apache.archiva.scheduler.indexing.DownloadRemoteIndexScheduler;
-import org.apache.archiva.configuration.ArchivaConfiguration;
-import org.apache.archiva.configuration.ConfigurationEvent;
-import org.apache.archiva.configuration.ConfigurationListener;
-import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.maven.common.proxy.WagonFactory;
-import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.features.RemoteIndexFeature;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.packer.IndexPacker;
-import org.apache.maven.index.updater.IndexUpdater;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.scheduling.TaskScheduler;
-import org.springframework.scheduling.support.CronTrigger;
-import org.springframework.stereotype.Service;
-
-import javax.annotation.PostConstruct;
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.util.Date;
-import java.util.List;
-import java.util.concurrent.CopyOnWriteArrayList;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M1
- */
-@Service( "downloadRemoteIndexScheduler#default" )
-public class DefaultDownloadRemoteIndexScheduler
- implements ConfigurationListener, DownloadRemoteIndexScheduler
-{
-
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- @Inject
- @Named( value = "taskScheduler#indexDownloadRemote" )
- private TaskScheduler taskScheduler;
-
- @Inject
- RepositoryRegistry repositoryRegistry;
-
- @Inject
- private ArchivaConfiguration archivaConfiguration;
-
- @Inject
- private WagonFactory wagonFactory;
-
- @Inject
- private IndexUpdater indexUpdater;
-
- @Inject
- private IndexPacker indexPacker;
-
- @Inject
- private ProxyRegistry proxyRegistry;
-
- // store ids about currently running remote download : updated in DownloadRemoteIndexTask
- private List<String> runningRemoteDownloadIds = new CopyOnWriteArrayList<String>();
-
- @PostConstruct
- public void startup()
- throws
- DownloadRemoteIndexException, UnsupportedBaseContextException {
- archivaConfiguration.addListener( this );
- // TODO add indexContexts even if null
-
- for ( org.apache.archiva.repository.RemoteRepository remoteRepository : repositoryRegistry.getRemoteRepositories() )
- {
- String contextKey = "remote-" + remoteRepository.getId();
- IndexingContext context = remoteRepository.getIndexingContext().getBaseContext(IndexingContext.class);
- if ( context == null )
- {
- continue;
- }
- RemoteIndexFeature rif = remoteRepository.getFeature(RemoteIndexFeature.class).get();
-
-
- // TODO record jobs from configuration
- if ( rif.isDownloadRemoteIndex() && StringUtils.isNotEmpty(
- remoteRepository.getSchedulingDefinition() ) )
- {
- boolean fullDownload = context.getIndexDirectoryFile().list().length == 0;
- scheduleDownloadRemote( remoteRepository.getId(), false, fullDownload );
- }
- }
-
-
- }
-
- @Override
- public void configurationEvent( ConfigurationEvent event )
- {
- // TODO remove jobs and add again
- }
-
-
- @Override
- public void scheduleDownloadRemote( String repositoryId, boolean now, boolean fullDownload )
- throws DownloadRemoteIndexException
- {
- org.apache.archiva.repository.RemoteRepository remoteRepo = repositoryRegistry.getRemoteRepository(repositoryId);
-
- if ( remoteRepo == null )
- {
- log.warn( "ignore scheduleDownloadRemote for repo with id {} as not exists", repositoryId );
- return;
- }
- if (!remoteRepo.supportsFeature(RemoteIndexFeature.class)) {
- log.warn("ignore scheduleDownloadRemote for repo with id {}. Does not support remote index.", repositoryId);
- return;
- }
- RemoteIndexFeature rif = remoteRepo.getFeature(RemoteIndexFeature.class).get();
- NetworkProxy networkProxy = null;
- if ( StringUtils.isNotBlank( rif.getProxyId() ) )
- {
- networkProxy = proxyRegistry.getNetworkProxy( rif.getProxyId() );
- if ( networkProxy == null )
- {
- log.warn(
- "your remote repository is configured to download remote index trought a proxy we cannot find id:{}",
- rif.getProxyId() );
- }
- }
-
- DownloadRemoteIndexTaskRequest downloadRemoteIndexTaskRequest = new DownloadRemoteIndexTaskRequest() //
- .setRemoteRepository( remoteRepo ) //
- .setNetworkProxy( networkProxy ) //
- .setFullDownload( fullDownload ) //
- .setWagonFactory( wagonFactory ) //
- .setIndexUpdater( indexUpdater ) //
- .setIndexPacker( this.indexPacker );
-
- if ( now )
- {
- log.info( "schedule download remote index for repository {}", remoteRepo.getId() );
- // do it now
- taskScheduler.schedule(
- new DownloadRemoteIndexTask( downloadRemoteIndexTaskRequest, this.runningRemoteDownloadIds ),
- new Date() );
- }
- else
- {
- log.info( "schedule download remote index for repository {} with cron expression {}",
- remoteRepo.getId(), remoteRepo.getSchedulingDefinition());
- try
- {
- CronTrigger cronTrigger = new CronTrigger( remoteRepo.getSchedulingDefinition());
- taskScheduler.schedule(
- new DownloadRemoteIndexTask( downloadRemoteIndexTaskRequest, this.runningRemoteDownloadIds ),
- cronTrigger );
- }
- catch ( IllegalArgumentException e )
- {
- log.warn( "Unable to schedule remote index download: {}", e.getLocalizedMessage() );
- }
-
- if ( rif.isDownloadRemoteIndexOnStartup() )
- {
- log.info(
- "remote repository {} configured with downloadRemoteIndexOnStartup schedule now a download",
- remoteRepo.getId() );
- taskScheduler.schedule(
- new DownloadRemoteIndexTask( downloadRemoteIndexTaskRequest, this.runningRemoteDownloadIds ),
- new Date() );
- }
- }
-
- }
-
- public TaskScheduler getTaskScheduler()
- {
- return taskScheduler;
- }
-
- public void setTaskScheduler( TaskScheduler taskScheduler )
- {
- this.taskScheduler = taskScheduler;
- }
-
- @Override
- public List<String> getRunningRemoteDownloadIds()
- {
- return runningRemoteDownloadIds;
- }
-}
+++ /dev/null
-package org.apache.archiva.scheduler.indexing.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.updater.IndexUpdateSideEffect;
-import org.apache.maven.index_shaded.lucene.store.Directory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-/**
- * Not doing much but required at least one implementation
- *
- * @since 3.0.0
- */
-@Service
-public class DefaultIndexUpdateSideEffect
- implements IndexUpdateSideEffect
-{
- private static final Logger LOGGER = LoggerFactory.getLogger( DefaultIndexUpdateSideEffect.class );
-
- @Override
- public void updateIndex( Directory directory, IndexingContext indexingContext, boolean b )
- {
- LOGGER.info( "updating index: {} with directory: {}", //
- indexingContext.getId(), //
- directory.toString() );
- }
-}
+++ /dev/null
-package org.apache.archiva.scheduler.indexing.maven;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.maven.common.proxy.WagonFactory;
-import org.apache.archiva.maven.common.proxy.WagonFactoryRequest;
-import org.apache.archiva.proxy.model.NetworkProxy;
-import org.apache.archiva.repository.base.PasswordCredentials;
-import org.apache.archiva.repository.RemoteRepository;
-import org.apache.archiva.repository.RepositoryException;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.repository.features.RemoteIndexFeature;
-import org.apache.commons.lang3.time.StopWatch;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.updater.IndexUpdateRequest;
-import org.apache.maven.index.updater.IndexUpdateResult;
-import org.apache.maven.index.updater.IndexUpdater;
-import org.apache.maven.index.updater.ResourceFetcher;
-import org.apache.maven.index_shaded.lucene.index.IndexNotFoundException;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.StreamWagon;
-import org.apache.maven.wagon.TransferFailedException;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.authentication.AuthenticationInfo;
-import org.apache.maven.wagon.authorization.AuthorizationException;
-import org.apache.maven.wagon.events.TransferEvent;
-import org.apache.maven.wagon.events.TransferListener;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-import org.apache.maven.wagon.repository.Repository;
-import org.apache.maven.wagon.shared.http.AbstractHttpClientWagon;
-import org.apache.maven.wagon.shared.http.HttpConfiguration;
-import org.apache.maven.wagon.shared.http.HttpMethodConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M1
- */
-public class DownloadRemoteIndexTask
- implements Runnable
-{
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- private RemoteRepository remoteRepository;
-
- private WagonFactory wagonFactory;
-
- private NetworkProxy networkProxy;
-
- private boolean fullDownload;
-
- private List<String> runningRemoteDownloadIds;
-
- private IndexUpdater indexUpdater;
-
-
- public DownloadRemoteIndexTask( DownloadRemoteIndexTaskRequest downloadRemoteIndexTaskRequest,
- List<String> runningRemoteDownloadIds )
- {
- this.remoteRepository = downloadRemoteIndexTaskRequest.getRemoteRepository();
- this.wagonFactory = downloadRemoteIndexTaskRequest.getWagonFactory();
- this.networkProxy = downloadRemoteIndexTaskRequest.getNetworkProxy();
- this.fullDownload = downloadRemoteIndexTaskRequest.isFullDownload();
- this.runningRemoteDownloadIds = runningRemoteDownloadIds;
- this.indexUpdater = downloadRemoteIndexTaskRequest.getIndexUpdater();
- }
-
- @Override
- public void run()
- {
-
- // so short lock : not sure we need it
- synchronized ( this.runningRemoteDownloadIds )
- {
- if ( this.runningRemoteDownloadIds.contains( this.remoteRepository.getId() ) )
- {
- // skip it as it's running
- log.info( "skip download index remote for repo {} it's already running",
- this.remoteRepository.getId() );
- return;
- }
- this.runningRemoteDownloadIds.add( this.remoteRepository.getId() );
- }
- Path tempIndexDirectory = null;
- StopWatch stopWatch = new StopWatch();
- stopWatch.start();
- try
- {
- log.info( "start download remote index for remote repository {}", this.remoteRepository.getId() );
- if (this.remoteRepository.getIndexingContext()==null) {
- throw new IndexNotFoundException("No index context set for repository "+remoteRepository.getId());
- }
- if (this.remoteRepository.getType()!= RepositoryType.MAVEN) {
- throw new RepositoryException("Bad repository type");
- }
- if (!this.remoteRepository.supportsFeature(RemoteIndexFeature.class)) {
- throw new RepositoryException("Repository does not support RemotIndexFeature "+remoteRepository.getId());
- }
- RemoteIndexFeature rif = this.remoteRepository.getFeature(RemoteIndexFeature.class).get();
- IndexingContext indexingContext = this.remoteRepository.getIndexingContext().getBaseContext(IndexingContext.class);
- // create a temp directory to download files
- tempIndexDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex" );
- Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile().getParent(), ".indexCache" );
- Files.createDirectories( indexCacheDirectory );
- if ( Files.exists(tempIndexDirectory) )
- {
- org.apache.archiva.common.utils.FileUtils.deleteDirectory( tempIndexDirectory );
- }
- Files.createDirectories( tempIndexDirectory );
- tempIndexDirectory.toFile().deleteOnExit();
- String baseIndexUrl = indexingContext.getIndexUpdateUrl();
-
- String wagonProtocol = this.remoteRepository.getLocation().getScheme();
-
- final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(
- new WagonFactoryRequest( wagonProtocol, this.remoteRepository.getExtraHeaders() ).networkProxy(
- this.networkProxy )
- );
- // FIXME olamy having 2 config values
- wagon.setReadTimeout( (int)rif.getDownloadTimeout().toMillis());
- wagon.setTimeout( (int)remoteRepository.getTimeout().toMillis());
-
- if ( wagon instanceof AbstractHttpClientWagon )
- {
- HttpConfiguration httpConfiguration = new HttpConfiguration();
- HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration();
- httpMethodConfiguration.setUsePreemptive( true );
- httpMethodConfiguration.setReadTimeout( (int)rif.getDownloadTimeout().toMillis() );
- httpConfiguration.setGet( httpMethodConfiguration );
- AbstractHttpClientWagon.class.cast( wagon ).setHttpConfiguration( httpConfiguration );
- }
-
- wagon.addTransferListener( new DownloadListener() );
- ProxyInfo proxyInfo = null;
- if ( this.networkProxy != null )
- {
- proxyInfo = new ProxyInfo();
- proxyInfo.setType( this.networkProxy.getProtocol() );
- proxyInfo.setHost( this.networkProxy.getHost() );
- proxyInfo.setPort( this.networkProxy.getPort() );
- proxyInfo.setUserName( this.networkProxy.getUsername() );
- proxyInfo.setPassword( new String(this.networkProxy.getPassword()) );
- }
- AuthenticationInfo authenticationInfo = null;
- if ( this.remoteRepository.getLoginCredentials()!=null && this.remoteRepository.getLoginCredentials() instanceof PasswordCredentials )
- {
- PasswordCredentials creds = (PasswordCredentials) this.remoteRepository.getLoginCredentials();
- authenticationInfo = new AuthenticationInfo();
- authenticationInfo.setUserName( creds.getUsername());
- authenticationInfo.setPassword( new String(creds.getPassword()) );
- }
- log.debug("Connection to {}, authInfo={}", this.remoteRepository.getId(), authenticationInfo);
- wagon.connect( new Repository( this.remoteRepository.getId(), baseIndexUrl ), authenticationInfo,
- proxyInfo );
-
- Path indexDirectory = indexingContext.getIndexDirectoryFile().toPath();
- if ( !Files.exists(indexDirectory) )
- {
- Files.createDirectories( indexDirectory );
- }
- log.debug("Downloading index file to {}", indexDirectory);
- log.debug("Index cache dir {}", indexCacheDirectory);
-
- ResourceFetcher resourceFetcher =
- new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
- IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
- request.setForceFullUpdate( this.fullDownload );
- request.setLocalIndexCacheDir( indexCacheDirectory.toFile() );
-
- IndexUpdateResult result = this.indexUpdater.fetchAndUpdateIndex(request);
- log.debug("Update result success: {}", result.isSuccessful());
- stopWatch.stop();
- log.info( "time update index from remote for repository {}: {}ms", this.remoteRepository.getId(),
- ( stopWatch.getTime() ) );
-
- // index packing optionnal ??
- //IndexPackingRequest indexPackingRequest =
- // new IndexPackingRequest( indexingContext, indexingContext.getIndexDirectoryFile() );
- //indexPacker.packIndex( indexPackingRequest );
- indexingContext.updateTimestamp( true );
-
- }
- catch ( Exception e )
- {
- log.error( e.getMessage(), e );
- throw new RuntimeException( e.getMessage(), e );
- }
- finally
- {
- deleteDirectoryQuiet( tempIndexDirectory );
- this.runningRemoteDownloadIds.remove( this.remoteRepository.getId() );
- }
- log.info( "end download remote index for remote repository {}", this.remoteRepository.getId() );
- }
-
- private void deleteDirectoryQuiet( Path f )
- {
- try
- {
- org.apache.archiva.common.utils.FileUtils.deleteDirectory( f );
- }
- catch ( IOException e )
- {
- log.warn( "skip error delete {} : {}", f, e.getMessage() );
- }
- }
-
-
- private static final class DownloadListener
- implements TransferListener
- {
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- private String resourceName;
-
- private long startTime;
-
- private int totalLength = 0;
-
- @Override
- public void transferInitiated( TransferEvent transferEvent )
- {
- startTime = System.currentTimeMillis();
- resourceName = transferEvent.getResource().getName();
- log.debug( "initiate transfer of {}", resourceName );
- }
-
- @Override
- public void transferStarted( TransferEvent transferEvent )
- {
- this.totalLength = 0;
- resourceName = transferEvent.getResource().getName();
- log.info("Transferring: {}, {}", transferEvent.getResource().getContentLength(), transferEvent.getLocalFile().toString());
- log.info( "start transfer of {}", transferEvent.getResource().getName() );
- }
-
- @Override
- public void transferProgress( TransferEvent transferEvent, byte[] buffer, int length )
- {
- log.debug( "transfer of {} : {}/{}", transferEvent.getResource().getName(), buffer.length, length );
- this.totalLength += length;
- }
-
- @Override
- public void transferCompleted( TransferEvent transferEvent )
- {
- resourceName = transferEvent.getResource().getName();
- long endTime = System.currentTimeMillis();
- log.info( "end of transfer file {}: {}b, {}ms", transferEvent.getResource().getName(),
- this.totalLength, ( endTime - startTime ) );
- }
-
- @Override
- public void transferError( TransferEvent transferEvent )
- {
- log.info( "error of transfer file {}: {}", transferEvent.getResource().getName(),
- transferEvent.getException().getMessage(), transferEvent.getException() );
- }
-
- @Override
- public void debug( String message )
- {
- log.debug( "transfer debug {}", message );
- }
- }
-
- private static class WagonResourceFetcher
- implements ResourceFetcher
- {
-
- Logger log;
-
- Path tempIndexDirectory;
-
- Wagon wagon;
-
- RemoteRepository remoteRepository;
-
- private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
- RemoteRepository remoteRepository )
- {
- this.log = log;
- this.tempIndexDirectory = tempIndexDirectory;
- this.wagon = wagon;
- this.remoteRepository = remoteRepository;
- }
-
- @Override
- public void connect( String id, String url )
- throws IOException
- {
- //no op
- }
-
- @Override
- public void disconnect()
- throws IOException
- {
- // no op
- }
-
- @Override
- public InputStream retrieve( String name )
- throws IOException, FileNotFoundException
- {
- try
- {
- log.info( "index update retrieve file, name:{}", name );
- Path file = tempIndexDirectory.resolve( name );
- Files.deleteIfExists( file );
- file.toFile().deleteOnExit();
- wagon.get( addParameters( name, this.remoteRepository ), file.toFile() );
- return Files.newInputStream( file );
- }
- catch ( AuthorizationException | TransferFailedException e )
- {
- throw new IOException( e.getMessage(), e );
- }
- catch ( ResourceDoesNotExistException e )
- {
- FileNotFoundException fnfe = new FileNotFoundException( e.getMessage() );
- fnfe.initCause( e );
- throw fnfe;
- }
- }
-
- // FIXME remove crappy copy/paste
- protected String addParameters( String path, RemoteRepository remoteRepository )
- {
- if ( remoteRepository.getExtraParameters().isEmpty() )
- {
- return path;
- }
-
- boolean question = false;
-
- StringBuilder res = new StringBuilder( path == null ? "" : path );
-
- for ( Map.Entry<String, String> entry : remoteRepository.getExtraParameters().entrySet() )
- {
- if ( !question )
- {
- res.append( '?' ).append( entry.getKey() ).append( '=' ).append( entry.getValue() );
- }
- }
-
- return res.toString();
- }
-
- }
-
-
-}
-
+++ /dev/null
-package org.apache.archiva.scheduler.indexing.maven;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.maven.common.proxy.WagonFactory;
-import org.apache.archiva.proxy.model.NetworkProxy;
-import org.apache.archiva.repository.RemoteRepository;
-import org.apache.maven.index.packer.IndexPacker;
-import org.apache.maven.index.updater.IndexUpdater;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M1
- */
-public class DownloadRemoteIndexTaskRequest
-{
- private RemoteRepository remoteRepository;
-
- private WagonFactory wagonFactory;
-
- private NetworkProxy networkProxy;
-
- private boolean fullDownload;
-
- private IndexUpdater indexUpdater;
-
- private IndexPacker indexPacker;
-
- public DownloadRemoteIndexTaskRequest()
- {
- // no op
- }
-
- public RemoteRepository getRemoteRepository()
- {
- return remoteRepository;
- }
-
- public DownloadRemoteIndexTaskRequest setRemoteRepository( RemoteRepository remoteRepository )
- {
- this.remoteRepository = remoteRepository;
- return this;
- }
-
-
- public WagonFactory getWagonFactory()
- {
- return wagonFactory;
- }
-
- public DownloadRemoteIndexTaskRequest setWagonFactory( WagonFactory wagonFactory )
- {
- this.wagonFactory = wagonFactory;
- return this;
- }
-
- public NetworkProxy getNetworkProxy()
- {
- return networkProxy;
- }
-
- public DownloadRemoteIndexTaskRequest setNetworkProxy( NetworkProxy networkProxy )
- {
- this.networkProxy = networkProxy;
- return this;
- }
-
- public boolean isFullDownload()
- {
- return fullDownload;
- }
-
- public DownloadRemoteIndexTaskRequest setFullDownload( boolean fullDownload )
- {
- this.fullDownload = fullDownload;
- return this;
- }
-
- public IndexUpdater getIndexUpdater()
- {
- return indexUpdater;
- }
-
- public DownloadRemoteIndexTaskRequest setIndexUpdater( IndexUpdater indexUpdater )
- {
- this.indexUpdater = indexUpdater;
- return this;
- }
-
- public IndexPacker getIndexPacker()
- {
- return indexPacker;
- }
-
- public DownloadRemoteIndexTaskRequest setIndexPacker( IndexPacker indexPacker )
- {
- this.indexPacker = indexPacker;
- return this;
- }
-}
default-lazy-init="false">
<context:annotation-config/>
- <context:component-scan base-package="org.apache.archiva.scheduler.indexing,org.apache.maven.index"/>
+ <context:component-scan base-package="org.apache.archiva.maven.scheduler.indexing,org.apache.maven.index"/>
--- /dev/null
+package org.apache.archiva.maven.scheduler.indexing;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.TestCase;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.indexer.UnsupportedBaseContextException;
+import org.apache.archiva.repository.ManagedRepository;
+import org.apache.archiva.repository.ReleaseScheme;
+import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
+import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
+import org.apache.archiva.repository.base.managed.BasicManagedRepository;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;
+import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
+import org.apache.maven.index.ArtifactInfo;
+import org.apache.maven.index.FlatSearchRequest;
+import org.apache.maven.index.FlatSearchResponse;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.MAVEN;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.expr.SourcedSearchExpression;
+import org.apache.maven.index.expr.StringSearchExpression;
+import org.apache.maven.index.updater.DefaultIndexUpdater;
+import org.apache.maven.index.updater.IndexUpdateRequest;
+import org.apache.maven.index.updater.IndexUpdater;
+import org.apache.maven.index_shaded.lucene.search.BooleanClause;
+import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
+import org.apache.maven.index_shaded.lucene.search.IndexSearcher;
+import org.apache.maven.index_shaded.lucene.search.TopDocs;
+import org.assertj.core.api.Assertions;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.test.context.ContextConfiguration;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Set;
+
+/**
+ * ArchivaIndexingTaskExecutorTest: verifies adding, updating and removing artifacts in the index and creation of the packed index.
+ */
+@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath*:/spring-context.xml" } )
+public class ArchivaIndexingTaskExecutorTest
+ extends TestCase
+{
+ @Inject
+ private ArchivaIndexingTaskExecutor indexingExecutor;
+
+ @Inject
+ ArchivaRepositoryRegistry repositoryRegistry;
+
+ @SuppressWarnings( "unused" )
+ @Inject
+ RepositoryHandlerDependencies repositoryHandlerDependencies;
+
+ @Inject
+ private IndexUpdater indexUpdater;
+
+ private ManagedRepository repo;
+
+ @Inject
+ private Indexer indexer;
+
+ @Before
+ @Override
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
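+ // Register a plain file-system repository under target/test-classes/test-repo so the executor has a managed repository with an indexing context to work on.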
+ Path baseDir = Paths.get(System.getProperty("basedir"), "target/test-classes").toAbsolutePath();
+ BasicManagedRepository repositoryConfig = BasicManagedRepository.newFilesystemInstance("test-repo", "Test Repository", baseDir.resolve("test-repo"));
+ Path repoLocation = baseDir.resolve("test-repo" );
+ repositoryConfig.setLocation(repoLocation.toUri() );
+ repositoryConfig.setLayout( "default" );
+ repositoryConfig.setScanned( true );
+ repositoryConfig.addActiveReleaseScheme( ReleaseScheme.RELEASE );
+ repositoryConfig.removeActiveReleaseScheme( ReleaseScheme.SNAPSHOT );
+ repositoryRegistry.putRepository(repositoryConfig);
+ repo = repositoryRegistry.getManagedRepository( repositoryConfig.getId() );
+ }
+
+ @After
+ @Override
+ public void tearDown()
+ throws Exception
+ {
+
+ repositoryRegistry.destroy();
+ /*
+ removeIndexingContext with true cleanup files.
+ // delete created index in the repository
+ File indexDir = new File( repositoryConfig.getLocation(), ".indexer" );
+ FileUtils.deleteDirectory( indexDir );
+ assertFalse( indexDir.exists() );
+
+ indexDir = new File( repositoryConfig.getLocation(), ".index" );
+ FileUtils.deleteDirectory( indexDir );
+ assertFalse( indexDir.exists() );
+ */
+ super.tearDown();
+ }
+
+ protected IndexingContext getIndexingContext() throws UnsupportedBaseContextException {
+ assert repo != null;
+ ArchivaIndexingContext ctx = repo.getIndexingContext();
+ assert ctx != null;
+ return ctx.getBaseContext(IndexingContext.class);
+ }
+
+ @Test
+ public void testAddArtifactToIndex()
+ throws Exception
+ {
+ Path basePath = repo.getRoot().getFilePath();
+ Path artifactFile = basePath.resolve(
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+ ArtifactIndexingTask task =
+ new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.ADD,
+ repo.getIndexingContext());
+
+ indexingExecutor.executeTask( task );
+
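+ // The FINISH task finalizes the indexing context and packs the index, making the added artifact searchable.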
+ task = new ArtifactIndexingTask( repo, null, ArtifactIndexingTask.Action.FINISH,
+ repo.getIndexingContext() );
+ indexingExecutor.executeTask( task );
+
+ BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder( );
+ queryBuilder.add( indexer.constructQuery( MAVEN.GROUP_ID, new StringSearchExpression( "org.apache.archiva" ) ),
+ BooleanClause.Occur.SHOULD );
+ queryBuilder.add(
+ indexer.constructQuery( MAVEN.ARTIFACT_ID, new StringSearchExpression( "archiva-index-methods-jar-test" ) ),
+ BooleanClause.Occur.SHOULD );
+ BooleanQuery q = queryBuilder.build();
+
+ FlatSearchRequest request = new FlatSearchRequest( q , getIndexingContext());
+ FlatSearchResponse response = indexer.searchFlat( request );
+
+ assertTrue( Files.exists(basePath.resolve( ".indexer" )) );
+ assertTrue( Files.exists(basePath.resolve(".index" )) );
+ assertEquals( 1, response.getTotalHitsCount());
+
+ Set<ArtifactInfo> results = response.getResults();
+
+ ArtifactInfo artifactInfo = results.iterator().next();
+ assertEquals( "org.apache.archiva", artifactInfo.getGroupId() );
+ assertEquals( "archiva-index-methods-jar-test", artifactInfo.getArtifactId() );
+ assertEquals( "test-repo", artifactInfo.getRepository() );
+
+ }
+
+ @Test
+ public void testUpdateArtifactInIndex()
+ throws Exception
+ {
+ Path basePath = repo.getRoot().getFilePath();
+ Path artifactFile = basePath.resolve(
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+ ArtifactIndexingTask task =
+ new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.ADD,
+ repo.getIndexingContext() );
+
+ indexingExecutor.executeTask( task );
+ indexingExecutor.executeTask( task );
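+ // Executing the same ADD task twice must not create a duplicate entry; the search below should still return a single hit.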
+
+ BooleanQuery.Builder qb = new BooleanQuery.Builder();
+ qb.add( indexer.constructQuery( MAVEN.GROUP_ID, new StringSearchExpression( "org.apache.archiva" ) ),
+ BooleanClause.Occur.SHOULD );
+ qb.add(
+ indexer.constructQuery( MAVEN.ARTIFACT_ID, new StringSearchExpression( "archiva-index-methods-jar-test" ) ),
+ BooleanClause.Occur.SHOULD );
+
+ IndexingContext ctx = getIndexingContext();
+
+ IndexSearcher searcher = ctx.acquireIndexSearcher();
+ TopDocs topDocs = searcher.search( qb.build(), 10 );
+
+ //searcher.close();
+ ctx.releaseIndexSearcher( searcher );
+
+ assertTrue( Files.exists(basePath.resolve(".indexer" )) );
+ assertTrue( Files.exists(basePath.resolve(".index" )) );
+
+ // should only return 1 hit!
+ assertEquals( 1, topDocs.totalHits );
+ }
+
+ @Test
+ public void testRemoveArtifactFromIndex()
+ throws Exception
+ {
+ Path basePath = repo.getRoot().getFilePath();
+ Path artifactFile = basePath.resolve(
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+
+ ArtifactIndexingTask task =
+ new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.ADD,
+ repo.getIndexingContext() );
+
+ // add artifact to index
+ indexingExecutor.executeTask( task );
+
+ BooleanQuery.Builder qb = new BooleanQuery.Builder();
+ qb.add( indexer.constructQuery( MAVEN.GROUP_ID, new SourcedSearchExpression( "org.apache.archiva" ) ),
+ BooleanClause.Occur.SHOULD );
+ //q.add(
+ // indexer.constructQuery( MAVEN.ARTIFACT_ID, new SourcedSearchExpression( "archiva-index-methods-jar-test" ) ),
+ // Occur.SHOULD );
+
+ IndexingContext ctx = repo.getIndexingContext( ).getBaseContext( IndexingContext.class );
+ FlatSearchRequest flatSearchRequest =
+ new FlatSearchRequest( qb.build(), ctx );
+
+ FlatSearchResponse response = indexer.searchFlat( flatSearchRequest );
+
+ assertTrue( Files.exists(basePath.resolve(".indexer" )) );
+ assertTrue( Files.exists(basePath.resolve( ".index" )) );
+
+ // should return 1 hit
+ assertEquals( 1, response.getTotalHitsCount() );
+
+ // remove added artifact from index
+ task = new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.DELETE,
+ repo.getIndexingContext());
+ indexingExecutor.executeTask( task );
+
+ task = new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.FINISH,
+ repo.getIndexingContext() );
+ indexingExecutor.executeTask( task );
+
+ qb = new BooleanQuery.Builder();
+ qb.add( indexer.constructQuery( MAVEN.GROUP_ID, new SourcedSearchExpression( "org.apache.archiva" ) ),
+ BooleanClause.Occur.SHOULD );
+ qb.add( indexer.constructQuery( MAVEN.ARTIFACT_ID,
+ new SourcedSearchExpression( "archiva-index-methods-jar-test" ) ),
+ BooleanClause.Occur.SHOULD );
+
+ assertTrue( Files.exists(basePath.resolve( ".indexer" )) );
+ assertTrue( Files.exists(basePath.resolve(".index" )) );
+
+ flatSearchRequest = new FlatSearchRequest( qb.build(), getIndexingContext() );
+
+ response = indexer.searchFlat( flatSearchRequest );
+ // artifact should have been removed from the index!
+ assertEquals( 0, response.getTotalHitsCount() );
+
+ // TODO: test it was removed from the packaged index also
+ }
+
+ @Test
+ public void testPackagedIndex()
+ throws Exception
+ {
+
+ Path basePath = repo.getRoot().getFilePath();
+ IndexCreationFeature icf = repo.getFeature( IndexCreationFeature.class ).get();
+ StorageAsset packedIndexDirectory = icf.getLocalPackedIndexPath();
+ StorageAsset indexerDirectory = icf.getLocalIndexPath();
+
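+ // Delete any previously packed index files so the assertions below only see files created by this test run.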
+ for (StorageAsset dir : new StorageAsset[] { packedIndexDirectory, indexerDirectory }) {
+ if (dir.getFilePath()!=null)
+ {
+ Path localDirPath = dir.getFilePath();
+ Files.list( localDirPath ).filter( path -> path.getFileName( ).toString( ).startsWith( "nexus-maven-repository-index" ) )
+ .forEach( path ->
+ {
+ try
+ {
+ System.err.println( "Deleting " + path );
+ Files.delete( path );
+ }
+ catch ( IOException e )
+ {
+ e.printStackTrace( );
+ }
+ } );
+ }
+ }
+
+ Path artifactFile = basePath.resolve(
+ "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
+ ArtifactIndexingTask task =
+ new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.ADD,
+ repo.getIndexingContext() );
+ task.setExecuteOnEntireRepo( false );
+
+ indexingExecutor.executeTask( task );
+
+ task = new ArtifactIndexingTask( repo, null, ArtifactIndexingTask.Action.FINISH,
+ repo.getIndexingContext() );
+
+ task.setExecuteOnEntireRepo( false );
+
+ indexingExecutor.executeTask( task );
+
+ assertTrue( Files.exists(packedIndexDirectory.getFilePath()) );
+ assertTrue( Files.exists(indexerDirectory.getFilePath()) );
+
+ // test packed index file creation
+ //no more zip
+ //Assertions.assertThat(new File( indexerDirectory, "nexus-maven-repository-index.zip" )).exists();
+ Assertions.assertThat( Files.exists(packedIndexDirectory.getFilePath().resolve("nexus-maven-repository-index.properties" ) ) ).isTrue();
+ Assertions.assertThat( Files.exists(packedIndexDirectory.getFilePath().resolve("nexus-maven-repository-index.gz" ) ) ).isTrue();
+ assertFalse( Files.exists(packedIndexDirectory.getFilePath().resolve("nexus-maven-repository-index.1.gz" ) ));
+
+ // unpack .zip index
+ //unzipIndex( indexerDirectory.getPath(), destDir.getPath() );
+
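+ // Read the packed index back with the IndexUpdater to verify that the packed files form a usable index.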
+ DefaultIndexUpdater.FileFetcher fetcher = new DefaultIndexUpdater.FileFetcher( packedIndexDirectory.getFilePath().toFile() );
+ IndexUpdateRequest updateRequest = new IndexUpdateRequest( getIndexingContext(), fetcher );
+ //updateRequest.setLocalIndexCacheDir( indexerDirectory );
+ indexUpdater.fetchAndUpdateIndex( updateRequest );
+
+ BooleanQuery.Builder qb = new BooleanQuery.Builder();
+ qb.add( indexer.constructQuery( MAVEN.GROUP_ID, new StringSearchExpression( "org.apache.archiva" ) ),
+ BooleanClause.Occur.SHOULD );
+ qb.add(
+ indexer.constructQuery( MAVEN.ARTIFACT_ID, new StringSearchExpression( "archiva-index-methods-jar-test" ) ),
+ BooleanClause.Occur.SHOULD );
+
+ FlatSearchRequest request = new FlatSearchRequest( qb.build(), getIndexingContext() );
+ FlatSearchResponse response = indexer.searchFlat( request );
+
+ assertEquals( 1, response.getTotalHitsCount() );
+ Set<ArtifactInfo> results = response.getResults();
+
+ ArtifactInfo artifactInfo = results.iterator().next();
+ assertEquals( "org.apache.archiva", artifactInfo.getGroupId() );
+ assertEquals( "archiva-index-methods-jar-test", artifactInfo.getArtifactId() );
+ assertEquals( "test-repo", artifactInfo.getRepository() );
+
+
+ }
+
+}
--- /dev/null
+package org.apache.archiva.maven.scheduler.indexing;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.indexer.ArchivaIndexManager;
+import org.apache.archiva.indexer.IndexCreationFailedException;
+import org.apache.archiva.repository.EditableRemoteRepository;
+import org.apache.archiva.repository.EditableRepository;
+import org.apache.archiva.repository.RemoteRepository;
+import org.apache.archiva.repository.RepositoryException;
+import org.apache.archiva.repository.RepositoryProvider;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.archiva.repository.UnsupportedURIException;
+import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
+import org.apache.maven.index.FlatSearchRequest;
+import org.apache.maven.index.FlatSearchResponse;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.MAVEN;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.expr.StringSearchExpression;
+import org.apache.maven.index_shaded.lucene.search.BooleanClause;
+import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
+import org.assertj.core.api.Assertions;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
+import org.springframework.test.context.ContextConfiguration;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * @author Olivier Lamy
+ */
+@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath*:/spring-context.xml" } )
+public class DownloadRemoteIndexTaskTest
+{
+
+ private Server server;
+ private ServerConnector serverConnector;
+
+ private int port;
+
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+ @Inject
+ DefaultDownloadRemoteIndexScheduler downloadRemoteIndexScheduler;
+
+ @Inject
+ Indexer indexer;
+
+ @Inject
+ RepositoryRegistry repositoryRegistry;
+
+ @SuppressWarnings( "unused" )
+ @Inject
+ RepositoryHandlerDependencies repositoryHandlerDependencies;
+
+ @Inject
+ RepositoryProvider repositoryProvider;
+
+ @Before
+ public void initialize()
+ throws Exception
+ {
+ Path cfgFile = Paths.get("target/appserver-base/conf/archiva.xml");
+ if (Files.exists(cfgFile)) {
+ Files.delete(cfgFile);
+ }
+ try {
+ repositoryRegistry.removeRepository( "test-repo-re" );
+ } catch (Exception e) {
+ // Ignore
+ }
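+ // Start an embedded Jetty server serving src/test over HTTP; it plays the role of the remote repository whose index is downloaded.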
+ server = new Server( );
+ serverConnector = new ServerConnector( server, new HttpConnectionFactory());
+ server.addConnector( serverConnector );
+ createContext( server, Paths.get( "src/test" ) );
+ this.server.start();
+ this.port = serverConnector.getLocalPort();
+ log.info( "start server on port {}", this.port );
+ }
+
+ protected void createContext( Server server, Path repositoryDirectory )
+ throws IOException
+ {
+ ServletContextHandler context = new ServletContextHandler();
+ context.setResourceBase( repositoryDirectory.toAbsolutePath().toString() );
+ context.setContextPath( "/" );
+ ServletHolder sh = new ServletHolder( DefaultServlet.class );
+ context.addServlet( sh, "/" );
+ server.setHandler( context );
+
+ }
+
+ @After
+ public void tearDown()
+ throws Exception
+ {
+ if (server!=null) {
+ server.stop();
+ }
+ Path cfgFile = Paths.get("target/appserver-base/conf/archiva.xml");
+ if (Files.exists(cfgFile)) {
+ Files.delete(cfgFile);
+ }
+ }
+
+ @Test
+ public void downloadAndMergeRemoteIndexInEmptyIndex()
+ throws Exception
+ {
+ Path repoDirectory = Paths.get( FileUtils.getBasedir( ), "target/repo-" + Long.toString( System.currentTimeMillis( ) ) );
+
+ RemoteRepository remoteRepository = getRemoteRepository(repoDirectory);
+
+ repositoryRegistry.putRepository( remoteRepository);
+ repositoryRegistry.reload();
+
+ downloadRemoteIndexScheduler.startup();
+
+ downloadRemoteIndexScheduler.scheduleDownloadRemote( "test-repo-re", true, true );
+
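+ // Wait up to 10 seconds for the scheduled download task to finish before querying the index.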
+ ( (ThreadPoolTaskScheduler) downloadRemoteIndexScheduler.getTaskScheduler() ).getScheduledExecutor().awaitTermination(
+ 10, TimeUnit.SECONDS );
+
+ repositoryRegistry.removeRepository( "test-repo-re" );
+
+ // search
+ BooleanQuery.Builder iQuery = new BooleanQuery.Builder();
+ iQuery.add( indexer.constructQuery( MAVEN.GROUP_ID, new StringSearchExpression( "commons-logging" ) ),
+ BooleanClause.Occur.SHOULD );
+
+ remoteRepository = getRemoteRepository( repoDirectory );
+ FlatSearchRequest rq = new FlatSearchRequest( iQuery.build() );
+ rq.setContexts(
+ Arrays.asList( remoteRepository.getIndexingContext().getBaseContext(IndexingContext.class) ) );
+
+ FlatSearchResponse response = indexer.searchFlat(rq);
+
+ log.info( "returned hit count:{}", response.getReturnedHitsCount() );
+ Assertions.assertThat( response.getReturnedHitsCount() ).isEqualTo( 8 );
+ }
+
+
+ protected RemoteRepository getRemoteRepository(Path repoDirectory) throws IOException, URISyntaxException, UnsupportedURIException, RepositoryException
+ {
+
+ EditableRemoteRepository remoteRepository = repositoryProvider.createRemoteInstance( "test-repo-re", "foo" );
+ Path indexDirectory = repoDirectory.resolve( "index" );
+ Files.createDirectories( indexDirectory );
+ remoteRepository.setLocation( new URI( "http://localhost:" + port ) );
+ repoDirectory.toFile().deleteOnExit();
+ RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get();
+ rif.setDownloadRemoteIndex( true );
+ rif.setIndexUri( new URI("http://localhost:" + port + "/index-updates/" ) );
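+ // The index URI points at the embedded Jetty server started in initialize(), which serves src/test, so the test index is expected under src/test/index-updates.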
+ createIndexingContext( remoteRepository );
+
+ // IndexCreationFeature icf = remoteRepository.getFeature( IndexCreationFeature.class ).get( );
+ // icf.setLocalIndexPath( remoteRepository.getAsset( "index" ) );
+ return remoteRepository;
+ }
+
+ private void createIndexingContext( EditableRepository editableRepo) throws RepositoryException
+ {
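+ // Create an indexing context through the index manager registered for the repository type and attach it to the repository before the download is scheduled.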
+ if (editableRepo.supportsFeature(IndexCreationFeature.class)) {
+ ArchivaIndexManager idxManager = getIndexManager(editableRepo.getType());
+ try {
+ editableRepo.setIndexingContext(idxManager.createContext(editableRepo));
+ idxManager.updateLocalIndexPath(editableRepo);
+ } catch ( IndexCreationFailedException e) {
+ throw new RepositoryException("Could not create index for repository " + editableRepo.getId() + ": " + e.getMessage(), e);
+ }
+ }
+ }
+
+ public ArchivaIndexManager getIndexManager( RepositoryType type ) {
+ return repositoryRegistry.getIndexManager( type );
+ }
+}
+++ /dev/null
-package org.apache.archiva.scheduler.indexing.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import junit.framework.TestCase;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.ReleaseScheme;
-import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
-import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
-import org.apache.archiva.repository.base.managed.BasicManagedRepository;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.apache.maven.index.ArtifactInfo;
-import org.apache.maven.index.FlatSearchRequest;
-import org.apache.maven.index.FlatSearchResponse;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.MAVEN;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.expr.SourcedSearchExpression;
-import org.apache.maven.index.expr.StringSearchExpression;
-import org.apache.maven.index.updater.DefaultIndexUpdater;
-import org.apache.maven.index.updater.IndexUpdateRequest;
-import org.apache.maven.index.updater.IndexUpdater;
-import org.apache.maven.index_shaded.lucene.search.BooleanClause;
-import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
-import org.apache.maven.index_shaded.lucene.search.IndexSearcher;
-import org.apache.maven.index_shaded.lucene.search.TopDocs;
-import org.assertj.core.api.Assertions;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.test.context.ContextConfiguration;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Set;
-
-/**
- * ArchivaIndexingTaskExecutorTest
- */
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
-@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath*:/spring-context.xml" } )
-public class ArchivaIndexingTaskExecutorTest
- extends TestCase
-{
- @Inject
- private ArchivaIndexingTaskExecutor indexingExecutor;
-
- @Inject
- ArchivaRepositoryRegistry repositoryRegistry;
-
- @SuppressWarnings( "unused" )
- @Inject
- RepositoryHandlerDependencies repositoryHandlerDependencies;
-
- @Inject
- private IndexUpdater indexUpdater;
-
- private ManagedRepository repo;
-
- @Inject
- private Indexer indexer;
-
- @Before
- @Override
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- Path baseDir = Paths.get(System.getProperty("basedir"), "target/test-classes").toAbsolutePath();
- BasicManagedRepository repositoryConfig = BasicManagedRepository.newFilesystemInstance("test-repo", "Test Repository", baseDir.resolve("test-repo"));
- Path repoLocation = baseDir.resolve("test-repo" );
- repositoryConfig.setLocation(repoLocation.toUri() );
- repositoryConfig.setLayout( "default" );
- repositoryConfig.setScanned( true );
- repositoryConfig.addActiveReleaseScheme( ReleaseScheme.RELEASE );
- repositoryConfig.removeActiveReleaseScheme( ReleaseScheme.SNAPSHOT );
- repositoryRegistry.putRepository(repositoryConfig);
- repo = repositoryRegistry.getManagedRepository( repositoryConfig.getId() );
- }
-
- @After
- @Override
- public void tearDown()
- throws Exception
- {
-
- repositoryRegistry.destroy();
- /*
- removeIndexingContext with true cleanup files.
- // delete created index in the repository
- File indexDir = new File( repositoryConfig.getLocation(), ".indexer" );
- FileUtils.deleteDirectory( indexDir );
- assertFalse( indexDir.exists() );
-
- indexDir = new File( repositoryConfig.getLocation(), ".index" );
- FileUtils.deleteDirectory( indexDir );
- assertFalse( indexDir.exists() );
- */
- super.tearDown();
- }
-
- protected IndexingContext getIndexingContext() throws UnsupportedBaseContextException {
- assert repo != null;
- ArchivaIndexingContext ctx = repo.getIndexingContext();
- assert ctx != null;
- return ctx.getBaseContext(IndexingContext.class);
- }
-
- @Test
- public void testAddArtifactToIndex()
- throws Exception
- {
- Path basePath = repo.getRoot().getFilePath();
- Path artifactFile = basePath.resolve(
- "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
-
- ArtifactIndexingTask task =
- new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.ADD,
- repo.getIndexingContext());
-
- indexingExecutor.executeTask( task );
-
- task = new ArtifactIndexingTask( repo, null, ArtifactIndexingTask.Action.FINISH,
- repo.getIndexingContext() );
- indexingExecutor.executeTask( task );
-
- BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder( );
- queryBuilder.add( indexer.constructQuery( MAVEN.GROUP_ID, new StringSearchExpression( "org.apache.archiva" ) ),
- BooleanClause.Occur.SHOULD );
- queryBuilder.add(
- indexer.constructQuery( MAVEN.ARTIFACT_ID, new StringSearchExpression( "archiva-index-methods-jar-test" ) ),
- BooleanClause.Occur.SHOULD );
- BooleanQuery q = queryBuilder.build();
-
- FlatSearchRequest request = new FlatSearchRequest( q , getIndexingContext());
- FlatSearchResponse response = indexer.searchFlat( request );
-
- assertTrue( Files.exists(basePath.resolve( ".indexer" )) );
- assertTrue( Files.exists(basePath.resolve(".index" )) );
- assertEquals( 1, response.getTotalHitsCount());
-
- Set<ArtifactInfo> results = response.getResults();
-
- ArtifactInfo artifactInfo = results.iterator().next();
- assertEquals( "org.apache.archiva", artifactInfo.getGroupId() );
- assertEquals( "archiva-index-methods-jar-test", artifactInfo.getArtifactId() );
- assertEquals( "test-repo", artifactInfo.getRepository() );
-
- }
-
- @Test
- public void testUpdateArtifactInIndex()
- throws Exception
- {
- Path basePath = repo.getRoot().getFilePath();
- Path artifactFile = basePath.resolve(
- "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
-
- ArtifactIndexingTask task =
- new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.ADD,
- repo.getIndexingContext() );
-
- indexingExecutor.executeTask( task );
- indexingExecutor.executeTask( task );
-
- BooleanQuery.Builder qb = new BooleanQuery.Builder();
- qb.add( indexer.constructQuery( MAVEN.GROUP_ID, new StringSearchExpression( "org.apache.archiva" ) ),
- BooleanClause.Occur.SHOULD );
- qb.add(
- indexer.constructQuery( MAVEN.ARTIFACT_ID, new StringSearchExpression( "archiva-index-methods-jar-test" ) ),
- BooleanClause.Occur.SHOULD );
-
- IndexingContext ctx = getIndexingContext();
-
- IndexSearcher searcher = ctx.acquireIndexSearcher();
- TopDocs topDocs = searcher.search( qb.build(), 10 );
-
- //searcher.close();
- ctx.releaseIndexSearcher( searcher );
-
- assertTrue( Files.exists(basePath.resolve(".indexer" )) );
- assertTrue( Files.exists(basePath.resolve(".index" )) );
-
- // should only return 1 hit!
- assertEquals( 1, topDocs.totalHits );
- }
-
- @Test
- public void testRemoveArtifactFromIndex()
- throws Exception
- {
- Path basePath = repo.getRoot().getFilePath();
- Path artifactFile = basePath.resolve(
- "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
-
- ArtifactIndexingTask task =
- new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.ADD,
- repo.getIndexingContext() );
-
- // add artifact to index
- indexingExecutor.executeTask( task );
-
- BooleanQuery.Builder qb = new BooleanQuery.Builder();
- qb.add( indexer.constructQuery( MAVEN.GROUP_ID, new SourcedSearchExpression( "org.apache.archiva" ) ),
- BooleanClause.Occur.SHOULD );
- //q.add(
- // indexer.constructQuery( MAVEN.ARTIFACT_ID, new SourcedSearchExpression( "archiva-index-methods-jar-test" ) ),
- // Occur.SHOULD );
-
- IndexingContext ctx = repo.getIndexingContext( ).getBaseContext( IndexingContext.class );
- FlatSearchRequest flatSearchRequest =
- new FlatSearchRequest( qb.build(), ctx );
-
- FlatSearchResponse response = indexer.searchFlat( flatSearchRequest );
-
- assertTrue( Files.exists(basePath.resolve(".indexer" )) );
- assertTrue( Files.exists(basePath.resolve( ".index" )) );
-
- // should return 1 hit
- assertEquals( 1, response.getTotalHitsCount() );
-
- // remove added artifact from index
- task = new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.DELETE,
- repo.getIndexingContext());
- indexingExecutor.executeTask( task );
-
- task = new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.FINISH,
- repo.getIndexingContext() );
- indexingExecutor.executeTask( task );
-
- qb = new BooleanQuery.Builder();
- qb.add( indexer.constructQuery( MAVEN.GROUP_ID, new SourcedSearchExpression( "org.apache.archiva" ) ),
- BooleanClause.Occur.SHOULD );
- qb.add( indexer.constructQuery( MAVEN.ARTIFACT_ID,
- new SourcedSearchExpression( "archiva-index-methods-jar-test" ) ),
- BooleanClause.Occur.SHOULD );
-
- assertTrue( Files.exists(basePath.resolve( ".indexer" )) );
- assertTrue( Files.exists(basePath.resolve(".index" )) );
-
- flatSearchRequest = new FlatSearchRequest( qb.build(), getIndexingContext() );
-
- response = indexer.searchFlat( flatSearchRequest );
- // artifact should have been removed from the index!
- assertEquals( 0, response.getTotalHitsCount() );//.totalHits );
-
- // TODO: test it was removed from the packaged index also
- }
-
- @Test
- public void testPackagedIndex()
- throws Exception
- {
-
- Path basePath = repo.getRoot().getFilePath();
- IndexCreationFeature icf = repo.getFeature( IndexCreationFeature.class ).get();
- StorageAsset packedIndexDirectory = icf.getLocalPackedIndexPath();
- StorageAsset indexerDirectory = icf.getLocalIndexPath();
-
- for (StorageAsset dir : new StorageAsset[] { packedIndexDirectory, indexerDirectory }) {
- if (dir.getFilePath()!=null)
- {
- Path localDirPath = dir.getFilePath();
- Files.list( localDirPath ).filter( path -> path.getFileName( ).toString( ).startsWith( "nexus-maven-repository-index" ) )
- .forEach( path ->
- {
- try
- {
- System.err.println( "Deleting " + path );
- Files.delete( path );
- }
- catch ( IOException e )
- {
- e.printStackTrace( );
- }
- } );
- }
- }
-
-
-
-
- Path artifactFile = basePath.resolve(
- "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
- ArtifactIndexingTask task =
- new ArtifactIndexingTask( repo, artifactFile, ArtifactIndexingTask.Action.ADD,
- repo.getIndexingContext() );
- task.setExecuteOnEntireRepo( false );
-
- indexingExecutor.executeTask( task );
-
- task = new ArtifactIndexingTask( repo, null, ArtifactIndexingTask.Action.FINISH,
- repo.getIndexingContext() );
-
- task.setExecuteOnEntireRepo( false );
-
- indexingExecutor.executeTask( task );
-
- assertTrue( Files.exists(packedIndexDirectory.getFilePath()) );
- assertTrue( Files.exists(indexerDirectory.getFilePath()) );
-
- // test packed index file creation
- //no more zip
- //Assertions.assertThat(new File( indexerDirectory, "nexus-maven-repository-index.zip" )).exists();
- Assertions.assertThat( Files.exists(packedIndexDirectory.getFilePath().resolve("nexus-maven-repository-index.properties" ) ));
- Assertions.assertThat( Files.exists(packedIndexDirectory.getFilePath().resolve("nexus-maven-repository-index.gz" ) ));
- assertFalse( Files.exists(packedIndexDirectory.getFilePath().resolve("nexus-maven-repository-index.1.gz" ) ));
-
- // unpack .zip index
- //unzipIndex( indexerDirectory.getPath(), destDir.getPath() );
-
- DefaultIndexUpdater.FileFetcher fetcher = new DefaultIndexUpdater.FileFetcher( packedIndexDirectory.getFilePath().toFile() );
- IndexUpdateRequest updateRequest = new IndexUpdateRequest( getIndexingContext(), fetcher );
- //updateRequest.setLocalIndexCacheDir( indexerDirectory );
- indexUpdater.fetchAndUpdateIndex( updateRequest );
-
- BooleanQuery.Builder qb = new BooleanQuery.Builder();
- qb.add( indexer.constructQuery( MAVEN.GROUP_ID, new StringSearchExpression( "org.apache.archiva" ) ),
- BooleanClause.Occur.SHOULD );
- qb.add(
- indexer.constructQuery( MAVEN.ARTIFACT_ID, new StringSearchExpression( "archiva-index-methods-jar-test" ) ),
- BooleanClause.Occur.SHOULD );
-
- FlatSearchRequest request = new FlatSearchRequest( qb.build(), getIndexingContext() );
- FlatSearchResponse response = indexer.searchFlat( request );
-
- assertEquals( 1, response.getTotalHitsCount() );
- Set<ArtifactInfo> results = response.getResults();
-
- ArtifactInfo artifactInfo = results.iterator().next();
- assertEquals( "org.apache.archiva", artifactInfo.getGroupId() );
- assertEquals( "archiva-index-methods-jar-test", artifactInfo.getArtifactId() );
- assertEquals( "test-repo", artifactInfo.getRepository() );
-
-
- }
-
-}
+++ /dev/null
-package org.apache.archiva.scheduler.indexing.maven;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.indexer.ArchivaIndexManager;
-import org.apache.archiva.indexer.IndexCreationFailedException;
-import org.apache.archiva.repository.EditableRemoteRepository;
-import org.apache.archiva.repository.EditableRepository;
-import org.apache.archiva.repository.RemoteRepository;
-import org.apache.archiva.repository.RepositoryException;
-import org.apache.archiva.repository.RepositoryProvider;
-import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.repository.UnsupportedURIException;
-import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.features.RemoteIndexFeature;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.apache.maven.index.FlatSearchRequest;
-import org.apache.maven.index.FlatSearchResponse;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.MAVEN;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.expr.StringSearchExpression;
-import org.apache.maven.index_shaded.lucene.search.BooleanClause;
-import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
-import org.assertj.core.api.Assertions;
-import org.eclipse.jetty.server.HttpConnectionFactory;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.servlet.DefaultServlet;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
-import org.springframework.test.context.ContextConfiguration;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Arrays;
-import java.util.concurrent.TimeUnit;
-
-/**
- * @author Olivier Lamy
- */
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
-@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath*:/spring-context.xml" } )
-public class DownloadRemoteIndexTaskTest
-{
-
- private Server server;
- private ServerConnector serverConnector;
-
- private int port;
-
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- @Inject
- DefaultDownloadRemoteIndexScheduler downloadRemoteIndexScheduler;
-
- @Inject
- Indexer indexer;
-
- @Inject
- RepositoryRegistry repositoryRegistry;
-
- @SuppressWarnings( "unused" )
- @Inject
- RepositoryHandlerDependencies repositoryHandlerDependencies;
-
- @Inject
- RepositoryProvider repositoryProvider;
-
- @Before
- public void initialize()
- throws Exception
- {
- Path cfgFile = Paths.get("target/appserver-base/conf/archiva.xml");
- if (Files.exists(cfgFile)) {
- Files.delete(cfgFile);
- }
- try {
- repositoryRegistry.removeRepository( "test-repo-re" );
- } catch (Exception e) {
- // Ignore
- }
- server = new Server( );
- serverConnector = new ServerConnector( server, new HttpConnectionFactory());
- server.addConnector( serverConnector );
- createContext( server, Paths.get( "src/test" ) );
- this.server.start();
- this.port = serverConnector.getLocalPort();
- log.info( "start server on port {}", this.port );
- }
-
- protected void createContext( Server server, Path repositoryDirectory )
- throws IOException
- {
- ServletContextHandler context = new ServletContextHandler();
- context.setResourceBase( repositoryDirectory.toAbsolutePath().toString() );
- context.setContextPath( "/" );
- ServletHolder sh = new ServletHolder( DefaultServlet.class );
- context.addServlet( sh, "/" );
- server.setHandler( context );
-
- }
-
- @After
- public void tearDown()
- throws Exception
- {
- if (server!=null) {
- server.stop();
- }
- Path cfgFile = Paths.get("target/appserver-base/conf/archiva.xml");
- if (Files.exists(cfgFile)) {
- Files.delete(cfgFile);
- }
- }
-
- @Test
- public void downloadAndMergeRemoteIndexInEmptyIndex()
- throws Exception
- {
- Path repoDirectory = Paths.get( FileUtils.getBasedir( ), "target/repo-" + Long.toString( System.currentTimeMillis( ) ) );
-
- RemoteRepository remoteRepository = getRemoteRepository(repoDirectory);
-
- repositoryRegistry.putRepository( remoteRepository);
- repositoryRegistry.reload();
-
- downloadRemoteIndexScheduler.startup();
-
- downloadRemoteIndexScheduler.scheduleDownloadRemote( "test-repo-re", true, true );
-
- ( (ThreadPoolTaskScheduler) downloadRemoteIndexScheduler.getTaskScheduler() ).getScheduledExecutor().awaitTermination(
- 10, TimeUnit.SECONDS );
-
- repositoryRegistry.removeRepository( "test-repo-re" );
-
- // search
- BooleanQuery.Builder iQuery = new BooleanQuery.Builder();
- iQuery.add( indexer.constructQuery( MAVEN.GROUP_ID, new StringSearchExpression( "commons-logging" ) ),
- BooleanClause.Occur.SHOULD );
-
- remoteRepository = getRemoteRepository( repoDirectory );
- FlatSearchRequest rq = new FlatSearchRequest( iQuery.build() );
- rq.setContexts(
- Arrays.asList( remoteRepository.getIndexingContext().getBaseContext(IndexingContext.class) ) );
-
- FlatSearchResponse response = indexer.searchFlat(rq);
-
- log.info( "returned hit count:{}", response.getReturnedHitsCount() );
- Assertions.assertThat( response.getReturnedHitsCount() ).isEqualTo( 8 );
- }
-
-
- protected RemoteRepository getRemoteRepository(Path repoDirectory) throws IOException, URISyntaxException, UnsupportedURIException, RepositoryException
- {
-
- EditableRemoteRepository remoteRepository = repositoryProvider.createRemoteInstance( "test-repo-re", "foo" );
- Path indexDirectory = repoDirectory.resolve( "index" );
- Files.createDirectories( indexDirectory );
- remoteRepository.setLocation( new URI( "http://localhost:" + port ) );
- repoDirectory.toFile().deleteOnExit();
- RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get();
- rif.setDownloadRemoteIndex( true );
- rif.setIndexUri( new URI("http://localhost:" + port + "/index-updates/" ) );
- createIndexingContext( remoteRepository );
-
- // IndexCreationFeature icf = remoteRepository.getFeature( IndexCreationFeature.class ).get( );
- // icf.setLocalIndexPath( remoteRepository.getAsset( "index" ) );
- return remoteRepository;
- }
-
- private void createIndexingContext( EditableRepository editableRepo) throws RepositoryException
- {
- if (editableRepo.supportsFeature(IndexCreationFeature.class)) {
- ArchivaIndexManager idxManager = getIndexManager(editableRepo.getType());
- try {
- editableRepo.setIndexingContext(idxManager.createContext(editableRepo));
- idxManager.updateLocalIndexPath(editableRepo);
- } catch ( IndexCreationFailedException e) {
- throw new RepositoryException("Could not create index for repository " + editableRepo.getId() + ": " + e.getMessage(), e);
- }
- }
- }
-
- public ArchivaIndexManager getIndexManager( RepositoryType type ) {
- return repositoryRegistry.getIndexManager( type );
- }
-}
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd"
default-lazy-init="true">
- <context:component-scan base-package="org.apache.archiva.indexer.maven" />
+ <context:component-scan base-package="org.apache.archiva.maven.scheduler.indexing" />
<bean name="scheduler" class="org.apache.archiva.components.scheduler.DefaultScheduler">
<property name="properties">
import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;
import org.apache.archiva.scheduler.indexing.DownloadRemoteIndexException;
import org.apache.archiva.scheduler.indexing.DownloadRemoteIndexScheduler;
-import org.apache.archiva.scheduler.indexing.maven.ArchivaIndexingTaskExecutor;
+import org.apache.archiva.maven.scheduler.indexing.ArchivaIndexingTaskExecutor;
import org.apache.archiva.scheduler.repository.model.RepositoryTask;
import org.apache.archiva.security.ArchivaSecurityException;
import org.apache.archiva.security.common.ArchivaRoleConstants;