+++ /dev/null
-package org.apache.archiva.indexer.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.updater.IndexUpdateSideEffect;
-import org.apache.maven.index_shaded.lucene.store.Directory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-/**
- * Does not do much, but at least one implementation is required.
- *
- * @since 3.0.0
- */
-@Service("archivaIndexUpdater")
-public class DefaultIndexUpdateSideEffect
- implements IndexUpdateSideEffect
-{
- private static final Logger LOGGER = LoggerFactory.getLogger( DefaultIndexUpdateSideEffect.class );
-
- @Override
- public void updateIndex( Directory directory, IndexingContext indexingContext, boolean b )
- {
- LOGGER.info( "updating index: {} with directory: {}", //
- indexingContext.getId(), //
- directory.toString() );
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.filelock.DefaultFileLockManager;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.storage.fs.FilesystemStorage;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.maven.index.context.IndexingContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.NoSuchFileException;
-import java.nio.file.Path;
-import java.sql.Date;
-import java.time.ZonedDateTime;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-/**
- * Maven implementation of index context
- */
-public class MavenIndexContext implements ArchivaIndexingContext {
-
-    private static final Logger log = LoggerFactory.getLogger(MavenIndexContext.class);
-
-
- private AtomicBoolean openStatus = new AtomicBoolean(false);
- private IndexingContext delegate;
- private Repository repository;
- private StorageAsset dir = null;
-
- protected MavenIndexContext(Repository repository, IndexingContext delegate) {
- this.delegate = delegate;
- this.repository = repository;
- this.openStatus.set(true);
-
- }
-
- @Override
- public String getId() {
- return delegate.getId();
- }
-
- @Override
- public Repository getRepository() {
- return repository;
- }
-
- @Override
- public StorageAsset getPath() {
- if (dir==null) {
- StorageAsset repositoryDirAsset = repository.getRoot();
- Path repositoryDir = repositoryDirAsset.getFilePath().toAbsolutePath();
- Path indexDir = delegate.getIndexDirectoryFile().toPath();
- if (indexDir.startsWith(repositoryDir)) {
- dir = repository.getAsset(repositoryDir.relativize(indexDir).toString());
- } else {
- try {
- FilesystemStorage storage = new FilesystemStorage(indexDir, new DefaultFileLockManager());
- dir = storage.getRoot();
- } catch (IOException e) {
-                    log.error("Error occurred while creating storage for index dir", e);
- }
- }
- }
- return dir;
- }
-
- @Override
- public boolean isEmpty() throws IOException {
-        try ( java.util.stream.Stream<Path> files = Files.list(delegate.getIndexDirectoryFile().toPath()) ) {
-            return files.count() == 0;
-        }
- }
-
- @Override
- public void commit() throws IOException {
- delegate.commit();
- }
-
- @Override
- public void rollback() throws IOException {
- delegate.rollback();
- }
-
- @Override
- public void optimize() throws IOException {
- delegate.optimize();
- }
-
- @Override
- public void close(boolean deleteFiles) throws IOException {
- if (openStatus.compareAndSet(true,false)) {
- try {
- delegate.close(deleteFiles);
- } catch (NoSuchFileException e) {
- // Ignore missing directory
- }
- }
- }
-
- @Override
- public void close() throws IOException {
- if (openStatus.compareAndSet(true,false)) {
- try {
- delegate.close(false);
- } catch (NoSuchFileException e) {
- // Ignore missing directory
- }
- }
- }
-
- @Override
- public boolean isOpen() {
- return openStatus.get();
- }
-
- @Override
- public void purge() throws IOException {
- delegate.purge();
- }
-
- @Override
- public boolean supports(Class<?> clazz) {
- return IndexingContext.class.equals(clazz);
- }
-
- @SuppressWarnings( "unchecked" )
- @Override
- public <T> T getBaseContext(Class<T> clazz) throws UnsupportedOperationException {
- if (IndexingContext.class.equals(clazz)) {
- return (T) delegate;
- } else {
- throw new UnsupportedOperationException("The class "+clazz+" is not supported by the maven indexer");
- }
- }
-
- @Override
- public Set<String> getGroups() throws IOException {
- return delegate.getAllGroups();
- }
-
- @Override
- public void updateTimestamp(boolean save) throws IOException {
- delegate.updateTimestamp(save);
- }
-
- @Override
- public void updateTimestamp(boolean save, ZonedDateTime time) throws IOException {
- delegate.updateTimestamp(save, Date.from(time.toInstant()));
- }
-
-
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.common.utils.PathUtil;
-import org.apache.archiva.configuration.ArchivaConfiguration;
-import org.apache.archiva.indexer.ArchivaIndexManager;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.indexer.IndexCreationFailedException;
-import org.apache.archiva.indexer.IndexUpdateFailedException;
-import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.proxy.ProxyRegistry;
-import org.apache.archiva.proxy.maven.WagonFactory;
-import org.apache.archiva.proxy.maven.WagonFactoryException;
-import org.apache.archiva.proxy.maven.WagonFactoryRequest;
-import org.apache.archiva.proxy.model.NetworkProxy;
-import org.apache.archiva.repository.EditableRepository;
-import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.base.PasswordCredentials;
-import org.apache.archiva.repository.RemoteRepository;
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
-import org.apache.archiva.repository.storage.AssetType;
-import org.apache.archiva.repository.storage.fs.FilesystemStorage;
-import org.apache.archiva.repository.storage.RepositoryStorage;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.features.RemoteIndexFeature;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.maven.index.ArtifactContext;
-import org.apache.maven.index.ArtifactContextProducer;
-import org.apache.maven.index.DefaultScannerListener;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.IndexerEngine;
-import org.apache.maven.index.Scanner;
-import org.apache.maven.index.ScanningRequest;
-import org.apache.maven.index.ScanningResult;
-import org.apache.maven.index.context.ContextMemberProvider;
-import org.apache.maven.index.context.IndexCreator;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.context.StaticContextMemberProvider;
-import org.apache.maven.index.packer.IndexPacker;
-import org.apache.maven.index.packer.IndexPackingRequest;
-import org.apache.maven.index.updater.IndexUpdateRequest;
-import org.apache.maven.index.updater.IndexUpdater;
-import org.apache.maven.index.updater.ResourceFetcher;
-import org.apache.maven.index_shaded.lucene.index.IndexFormatTooOldException;
-import org.apache.maven.wagon.ConnectionException;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.StreamWagon;
-import org.apache.maven.wagon.TransferFailedException;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.authentication.AuthenticationException;
-import org.apache.maven.wagon.authentication.AuthenticationInfo;
-import org.apache.maven.wagon.authorization.AuthorizationException;
-import org.apache.maven.wagon.events.TransferEvent;
-import org.apache.maven.wagon.events.TransferListener;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-import org.apache.maven.wagon.shared.http.AbstractHttpClientWagon;
-import org.apache.maven.wagon.shared.http.HttpConfiguration;
-import org.apache.maven.wagon.shared.http.HttpMethodConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.concurrent.ConcurrentSkipListSet;
-import java.util.stream.Collectors;
-
-/**
- * Maven implementation of the index manager.
- * The index manager is a singleton, so we try to make sure that index operations do not run in
- * parallel by synchronizing on the index path.
- * An update operation waits for parallel running methods to finish before it starts, but after a certain
- * number of retries an IndexUpdateFailedException is thrown.
- */
-@Service( "archivaIndexManager#maven" )
-public class MavenIndexManager implements ArchivaIndexManager {
-
- private static final Logger log = LoggerFactory.getLogger( MavenIndexManager.class );
-
- @Inject
- private Indexer indexer;
-
- @Inject
- private IndexerEngine indexerEngine;
-
- @Inject
- private List<? extends IndexCreator> indexCreators;
-
- @Inject
- private IndexPacker indexPacker;
-
- @Inject
- private Scanner scanner;
-
- @Inject
- private ArchivaConfiguration archivaConfiguration;
-
- @Inject
- private WagonFactory wagonFactory;
-
- @Inject
- private IndexUpdater indexUpdater;
-
- @Inject
- private ArtifactContextProducer artifactContextProducer;
-
- @Inject
- private ProxyRegistry proxyRegistry;
-
-
- private ConcurrentSkipListSet<StorageAsset> activeContexts = new ConcurrentSkipListSet<>( );
-
- private static final int WAIT_TIME = 100;
- private static final int MAX_WAIT = 10;
-
-
- public static IndexingContext getMvnContext( ArchivaIndexingContext context ) throws UnsupportedBaseContextException
- {
- if (context!=null)
- {
- if ( !context.supports( IndexingContext.class ) )
- {
- log.error( "The provided archiva index context does not support the maven IndexingContext" );
- throw new UnsupportedBaseContextException( "The context does not support the Maven IndexingContext" );
- }
- return context.getBaseContext( IndexingContext.class );
- } else {
- return null;
- }
- }
-
- private StorageAsset getIndexPath( ArchivaIndexingContext ctx )
- {
- return ctx.getPath( );
- }
-
- @FunctionalInterface
- interface IndexUpdateConsumer
- {
-
- void accept( IndexingContext indexingContext ) throws IndexUpdateFailedException;
- }
-
-    /*
-     * Runs the given update function and makes sure that no other method is running
-     * on the same index at the same time.
-     */
- private void executeUpdateFunction( ArchivaIndexingContext context, IndexUpdateConsumer function ) throws IndexUpdateFailedException
- {
- if (context==null) {
- throw new IndexUpdateFailedException( "Given context is null" );
- }
- IndexingContext indexingContext = null;
- try
- {
- indexingContext = getMvnContext( context );
- }
- catch ( UnsupportedBaseContextException e )
- {
- throw new IndexUpdateFailedException( "Maven index is not supported by this context", e );
- }
- final StorageAsset ctxPath = getIndexPath( context );
- int loop = MAX_WAIT;
- boolean active = false;
- while ( loop-- > 0 && !active )
- {
- active = activeContexts.add( ctxPath );
- try
- {
-                Thread.sleep( WAIT_TIME );
- }
- catch ( InterruptedException e )
- {
- // Ignore this
- }
- }
- if ( active )
- {
- try
- {
- function.accept( indexingContext );
- }
- finally
- {
- activeContexts.remove( ctxPath );
- }
- }
- else
- {
- throw new IndexUpdateFailedException( "Timeout while waiting for index release on context " + context.getId( ) );
- }
- }
-
- @Override
- public void pack( final ArchivaIndexingContext context ) throws IndexUpdateFailedException
- {
- executeUpdateFunction( context, indexingContext -> {
- try
- {
- IndexPackingRequest request = new IndexPackingRequest( indexingContext,
- indexingContext.acquireIndexSearcher( ).getIndexReader( ),
- indexingContext.getIndexDirectoryFile( ) );
- indexPacker.packIndex( request );
- indexingContext.updateTimestamp( true );
- }
- catch ( IOException e )
- {
- log.error( "IOException while packing index of context " + context.getId( ) + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ) );
- throw new IndexUpdateFailedException( "IOException during update of " + context.getId( ), e );
- }
- }
- );
-
- }
-
- @Override
- public void scan(final ArchivaIndexingContext context) throws IndexUpdateFailedException
- {
- executeUpdateFunction( context, indexingContext -> {
- DefaultScannerListener listener = new DefaultScannerListener( indexingContext, indexerEngine, true, null );
- ScanningRequest request = new ScanningRequest( indexingContext, listener );
- ScanningResult result = scanner.scan( request );
- if ( result.hasExceptions( ) )
- {
-                log.error( "Exceptions occurred during index scan of " + context.getId( ) );
- result.getExceptions( ).stream( ).map( e -> e.getMessage( ) ).distinct( ).limit( 5 ).forEach(
- s -> log.error( "Message: " + s )
- );
- }
-
- } );
- }
-
- @Override
- public void update(final ArchivaIndexingContext context, final boolean fullUpdate) throws IndexUpdateFailedException
- {
-        log.info( "start downloading the remote index for remote repository {}", context.getRepository( ).getId( ) );
- URI remoteUpdateUri;
- if ( !( context.getRepository( ) instanceof RemoteRepository ) || !(context.getRepository().supportsFeature(RemoteIndexFeature.class)) )
- {
- throw new IndexUpdateFailedException( "The context is not associated to a remote repository with remote index " + context.getId( ) );
- } else {
- RemoteIndexFeature rif = context.getRepository().getFeature(RemoteIndexFeature.class).get();
- remoteUpdateUri = context.getRepository().getLocation().resolve(rif.getIndexUri());
- }
- final RemoteRepository remoteRepository = (RemoteRepository) context.getRepository( );
-
- executeUpdateFunction( context,
- indexingContext -> {
- try
- {
- // create a temp directory to download files
- Path tempIndexDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".tmpIndex" );
- Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".indexCache" );
- Files.createDirectories( indexCacheDirectory );
- if ( Files.exists( tempIndexDirectory ) )
- {
- org.apache.archiva.common.utils.FileUtils.deleteDirectory( tempIndexDirectory );
- }
- Files.createDirectories( tempIndexDirectory );
- tempIndexDirectory.toFile( ).deleteOnExit( );
- String baseIndexUrl = indexingContext.getIndexUpdateUrl( );
-
- String wagonProtocol = remoteUpdateUri.toURL( ).getProtocol( );
-
- NetworkProxy networkProxy = null;
- if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
- {
- RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
- if ( StringUtils.isNotBlank( rif.getProxyId( ) ) )
- {
- networkProxy = proxyRegistry.getNetworkProxy( rif.getProxyId( ) );
- if ( networkProxy == null )
- {
- log.warn(
-                                "Your remote repository is configured to download the remote index through a proxy, but the proxy with id {} cannot be found",
- rif.getProxyId( ) );
- }
- }
-
- final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(
- new WagonFactoryRequest( wagonProtocol, remoteRepository.getExtraHeaders( ) ).networkProxy(
- networkProxy )
- );
-                    int readTimeout = (int) rif.getDownloadTimeout( ).toMillis( );
-                    wagon.setReadTimeout( readTimeout );
-                    wagon.setTimeout( (int) remoteRepository.getTimeout( ).toMillis( ) );
-
- if ( wagon instanceof AbstractHttpClientWagon )
- {
- HttpConfiguration httpConfiguration = new HttpConfiguration( );
- HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration( );
- httpMethodConfiguration.setUsePreemptive( true );
- httpMethodConfiguration.setReadTimeout( readTimeout );
- httpConfiguration.setGet( httpMethodConfiguration );
- AbstractHttpClientWagon.class.cast( wagon ).setHttpConfiguration( httpConfiguration );
- }
-
- wagon.addTransferListener( new DownloadListener( ) );
- ProxyInfo proxyInfo = null;
- if ( networkProxy != null )
- {
- proxyInfo = new ProxyInfo( );
- proxyInfo.setType( networkProxy.getProtocol( ) );
- proxyInfo.setHost( networkProxy.getHost( ) );
- proxyInfo.setPort( networkProxy.getPort( ) );
- proxyInfo.setUserName( networkProxy.getUsername( ) );
- proxyInfo.setPassword( new String(networkProxy.getPassword( )) );
- }
- AuthenticationInfo authenticationInfo = null;
- if ( remoteRepository.getLoginCredentials( ) != null && ( remoteRepository.getLoginCredentials( ) instanceof PasswordCredentials ) )
- {
- PasswordCredentials creds = (PasswordCredentials) remoteRepository.getLoginCredentials( );
- authenticationInfo = new AuthenticationInfo( );
- authenticationInfo.setUserName( creds.getUsername( ) );
- authenticationInfo.setPassword( new String( creds.getPassword( ) ) );
- }
- wagon.connect( new org.apache.maven.wagon.repository.Repository( remoteRepository.getId( ), baseIndexUrl ), authenticationInfo,
- proxyInfo );
-
- Path indexDirectory = indexingContext.getIndexDirectoryFile( ).toPath( );
- if ( !Files.exists( indexDirectory ) )
- {
- Files.createDirectories( indexDirectory );
- }
-
- ResourceFetcher resourceFetcher =
- new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
- IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
- request.setForceFullUpdate( fullUpdate );
- request.setLocalIndexCacheDir( indexCacheDirectory.toFile( ) );
-
- indexUpdater.fetchAndUpdateIndex( request );
-
- indexingContext.updateTimestamp( true );
- }
-
- }
- catch ( AuthenticationException e )
- {
- log.error( "Could not login to the remote proxy for updating index of {}", remoteRepository.getId( ), e );
- throw new IndexUpdateFailedException( "Login in to proxy failed while updating remote repository " + remoteRepository.getId( ), e );
- }
- catch ( ConnectionException e )
- {
- log.error( "Connection error during index update for remote repository {}", remoteRepository.getId( ), e );
- throw new IndexUpdateFailedException( "Connection error during index update for remote repository " + remoteRepository.getId( ), e );
- }
- catch ( MalformedURLException e )
- {
- log.error( "URL for remote index update of remote repository {} is not correct {}", remoteRepository.getId( ), remoteUpdateUri, e );
- throw new IndexUpdateFailedException( "URL for remote index update of repository is not correct " + remoteUpdateUri, e );
- }
- catch ( IOException e )
- {
- log.error( "IOException during index update of remote repository {}: {}", remoteRepository.getId( ), e.getMessage( ), e );
- throw new IndexUpdateFailedException( "IOException during index update of remote repository " + remoteRepository.getId( )
- + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
- }
- catch ( WagonFactoryException e )
- {
- log.error( "Wagon for remote index download of {} could not be created: {}", remoteRepository.getId( ), e.getMessage( ), e );
- throw new IndexUpdateFailedException( "Error while updating the remote index of " + remoteRepository.getId( ), e );
- }
- } );
-
- }
-
- @Override
- public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
- {
- final StorageAsset ctxUri = context.getPath();
- executeUpdateFunction(context, indexingContext -> {
- Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
- try {
- indexer.addArtifactsToIndex(artifacts, indexingContext);
- } catch (IOException e) {
- log.error("IOException while adding artifact {}", e.getMessage(), e);
-                throw new IndexUpdateFailedException("Error occurred while adding artifact to index of "+context.getId()
-                        + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""), e);
- }
- });
- }
-
- @Override
- public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
- {
- final StorageAsset ctxUri = context.getPath();
- executeUpdateFunction(context, indexingContext -> {
- Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
- try {
- indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
- } catch (IOException e) {
- log.error("IOException while removing artifact {}", e.getMessage(), e);
-                throw new IndexUpdateFailedException("Error occurred while removing artifact from index of "+context.getId()
-                        + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""), e);
- }
- });
-
- }
-
- @Override
- public boolean supportsRepository( RepositoryType type )
- {
- return type == RepositoryType.MAVEN;
- }
-
- @Override
- public ArchivaIndexingContext createContext( Repository repository ) throws IndexCreationFailedException
- {
- log.debug("Creating context for repo {}, type: {}", repository.getId(), repository.getType());
- if ( repository.getType( ) != RepositoryType.MAVEN )
- {
- throw new UnsupportedRepositoryTypeException( repository.getType( ) );
- }
- IndexingContext mvnCtx = null;
- try
- {
- if ( repository instanceof RemoteRepository )
- {
- mvnCtx = createRemoteContext( (RemoteRepository) repository );
- }
- else if ( repository instanceof ManagedRepository )
- {
- mvnCtx = createManagedContext( (ManagedRepository) repository );
- }
- }
- catch ( IOException e )
- {
- log.error( "IOException during context creation " + e.getMessage( ), e );
- throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
- + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
- }
-
- return new MavenIndexContext( repository, mvnCtx );
- }
-
- @Override
- public ArchivaIndexingContext reset(ArchivaIndexingContext context) throws IndexUpdateFailedException {
- ArchivaIndexingContext ctx;
- executeUpdateFunction(context, indexingContext -> {
- try {
- indexingContext.close(true);
- } catch (IOException e) {
-                log.warn("Index close failed", e);
- }
- org.apache.archiva.repository.storage.util.StorageUtil.deleteRecursively(context.getPath());
- });
- try {
- Repository repo = context.getRepository();
- ctx = createContext(context.getRepository());
- if (repo instanceof EditableRepository) {
- ((EditableRepository)repo).setIndexingContext(ctx);
- }
- } catch (IndexCreationFailedException e) {
-            throw new IndexUpdateFailedException("Could not create index", e);
- }
- return ctx;
- }
-
- @Override
- public ArchivaIndexingContext move(ArchivaIndexingContext context, Repository repo) throws IndexCreationFailedException {
- if (context==null) {
- return null;
- }
- if (context.supports(IndexingContext.class)) {
- try {
- StorageAsset newPath = getIndexPath(repo);
- IndexingContext ctx = context.getBaseContext(IndexingContext.class);
- Path oldPath = ctx.getIndexDirectoryFile().toPath();
- Path newFilePath = newPath.getFilePath( );
- if (oldPath.equals(newFilePath)) {
-                    // Nothing to do if the path does not change
- return context;
- }
- if (!Files.exists(oldPath)) {
- return createContext(repo);
- } else if (context.isEmpty()) {
- context.close();
- return createContext(repo);
- } else {
- context.close(false);
- if (Files.exists( newFilePath )) {
- FileUtils.copyContent( oldPath, newFilePath );
- FileUtils.deleteDirectory( oldPath );
- } else
- {
- Files.move( oldPath, newFilePath );
- }
- return createContext(repo);
- }
- } catch (IOException e) {
- log.error("IOException while moving index directory {}", e.getMessage(), e);
-            throw new IndexCreationFailedException("Could not recreate the index.", e);
- } catch (UnsupportedBaseContextException e) {
-                throw new IndexCreationFailedException("The given context is not a maven context.", e);
- }
- } else {
- throw new IndexCreationFailedException("Bad context type. This is not a maven context.");
- }
- }
-
- @Override
- public void updateLocalIndexPath(Repository repo) {
- if (repo.supportsFeature(IndexCreationFeature.class)) {
- IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
- try {
- icf.setLocalIndexPath(getIndexPath(repo));
- icf.setLocalPackedIndexPath(getPackedIndexPath(repo));
- } catch (IOException e) {
-                log.error("Could not set local index path for {}. New URI: {}", repo.getId(), icf.getIndexPath(), e);
- }
- }
- }
-
- @Override
- public ArchivaIndexingContext mergeContexts(Repository destinationRepo, List<ArchivaIndexingContext> contexts,
- boolean packIndex) throws UnsupportedOperationException,
- IndexCreationFailedException, IllegalArgumentException {
- if (!destinationRepo.supportsFeature(IndexCreationFeature.class)) {
- throw new IllegalArgumentException("The given repository does not support the indexcreation feature");
- }
- Path mergedIndexDirectory = null;
- try {
- mergedIndexDirectory = Files.createTempDirectory("archivaMergedIndex");
- } catch (IOException e) {
- log.error("Could not create temporary directory for merged index: {}", e.getMessage(), e);
- throw new IndexCreationFailedException("IO error while creating temporary directory for merged index: "+e.getMessage(), e);
- }
- IndexCreationFeature indexCreationFeature = destinationRepo.getFeature(IndexCreationFeature.class).get();
- if (indexCreationFeature.getLocalIndexPath()== null) {
- throw new IllegalArgumentException("The given repository does not have a local index path");
- }
- StorageAsset destinationPath = indexCreationFeature.getLocalIndexPath();
-
- String tempRepoId = mergedIndexDirectory.getFileName().toString();
-
- try
- {
- Path indexLocation = destinationPath.getFilePath();
-
- List<IndexingContext> members = contexts.stream( ).filter(ctx -> ctx.supports(IndexingContext.class)).map( ctx ->
- {
- try {
- return ctx.getBaseContext(IndexingContext.class);
- } catch (UnsupportedBaseContextException e) {
- // does not happen here
- return null;
- }
- }).filter( Objects::nonNull ).collect( Collectors.toList() );
- ContextMemberProvider memberProvider = new StaticContextMemberProvider(members);
- IndexingContext mergedCtx = indexer.createMergedIndexingContext( tempRepoId, tempRepoId, mergedIndexDirectory.toFile(),
- indexLocation.toFile(), true, memberProvider);
- mergedCtx.optimize();
-
- if ( packIndex )
- {
- IndexPackingRequest request = new IndexPackingRequest( mergedCtx, //
- mergedCtx.acquireIndexSearcher().getIndexReader(), //
- indexLocation.toFile() );
- indexPacker.packIndex( request );
- }
-
- return new MavenIndexContext(destinationRepo, mergedCtx);
- }
- catch ( IOException e)
- {
- throw new IndexCreationFailedException( "IO Error during index merge: "+ e.getMessage(), e );
- }
- }
-
- private StorageAsset getIndexPath(URI indexDirUri, RepositoryStorage repoStorage, String defaultDir) throws IOException
- {
- StorageAsset rootAsset = repoStorage.getRoot();
- RepositoryStorage storage = rootAsset.getStorage();
- Path indexDirectory;
- Path repositoryPath = rootAsset.getFilePath().toAbsolutePath();
- StorageAsset indexDir;
- if ( ! StringUtils.isEmpty(indexDirUri.toString( ) ) )
- {
-
- indexDirectory = PathUtil.getPathFromUri( indexDirUri );
-            // absolute path outside the repository directory: use a separate filesystem storage for the index
- if ( indexDirectory.isAbsolute( ) && !indexDirectory.startsWith(repositoryPath))
- {
- if (storage instanceof FilesystemStorage) {
- FilesystemStorage fsStorage = (FilesystemStorage) storage;
- FilesystemStorage indexStorage = new FilesystemStorage(indexDirectory.getParent(), fsStorage.getFileLockManager());
- indexDir = indexStorage.getAsset(indexDirectory.getFileName().toString());
- } else {
- throw new IOException("The given storage is not file based.");
- }
- } else if (indexDirectory.isAbsolute()) {
- indexDir = storage.getAsset(repositoryPath.relativize(indexDirectory).toString());
- }
- else
- {
- indexDir = storage.getAsset(indexDirectory.toString());
- }
- }
- else
- {
- indexDir = storage.getAsset( defaultDir );
- }
-
- if ( !indexDir.exists() )
- {
- indexDir.create( AssetType.CONTAINER );
- }
- return indexDir;
- }
-
- private StorageAsset getIndexPath( Repository repo) throws IOException {
- IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
- return getIndexPath( icf.getIndexPath(), repo, DEFAULT_INDEX_PATH);
- }
-
- private StorageAsset getPackedIndexPath(Repository repo) throws IOException {
- IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
- return getIndexPath(icf.getPackedIndexPath(), repo, DEFAULT_PACKED_INDEX_PATH);
- }
-
- private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
- {
- String contextKey = "remote-" + remoteRepository.getId( );
-
-
- // create remote repository path
- Path repoDir = remoteRepository.getRoot().getFilePath();
- if ( !Files.exists( repoDir ) )
- {
- Files.createDirectories( repoDir );
- }
-
- StorageAsset indexDirectory;
-
- // is there configured indexDirectory ?
- if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
- {
- RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
- indexDirectory = getIndexPath(remoteRepository);
- String remoteIndexUrl = calculateIndexRemoteUrl( remoteRepository.getLocation( ), rif );
- try
- {
-
- return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
- }
- catch ( IndexFormatTooOldException e )
- {
-                // the existing index uses an old lucene format, so we have to delete it first and then recreate it
-                log.warn( "The index of repository {} is too old, we have to delete and recreate it", //
- remoteRepository.getId( ) );
- org.apache.archiva.common.utils.FileUtils.deleteDirectory( indexDirectory.getFilePath() );
- return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
-
- }
- }
- else
- {
- throw new IOException( "No remote index defined" );
- }
- }
-
- private IndexingContext getIndexingContext( Repository repository, String contextKey, Path repoDir, StorageAsset indexDirectory, String indexUrl ) throws IOException
- {
- try
- {
- if (!Files.exists(indexDirectory.getFilePath())) {
- Files.createDirectories(indexDirectory.getFilePath());
- }
- return indexer.createIndexingContext( contextKey, repository.getId( ), repoDir.toFile( ), indexDirectory.getFilePath( ).toFile( ),
- repository.getLocation( ) == null ? null : repository.getLocation( ).toString( ),
- indexUrl,
- true, false,
- indexCreators );
- } catch (Exception e) {
-            log.error("Could not create index for asset {}", indexDirectory, e);
- throw new IOException(e);
- }
- }
-
- private IndexingContext createManagedContext( ManagedRepository repository ) throws IOException
- {
-
- IndexingContext context;
- // take care first about repository location as can be relative
- Path repositoryDirectory = repository.getRoot().getFilePath();
-
- if ( !Files.exists( repositoryDirectory ) )
- {
- try
- {
- Files.createDirectories( repositoryDirectory );
- }
- catch ( IOException e )
- {
- log.error( "Could not create directory {}", repositoryDirectory );
- }
- }
-
- StorageAsset indexDirectory;
-
- if ( repository.supportsFeature( IndexCreationFeature.class ) )
- {
- indexDirectory = getIndexPath(repository);
- log.debug( "Preparing index at {}", indexDirectory );
-
- String indexUrl = repositoryDirectory.toUri( ).toURL( ).toExternalForm( );
- try
- {
- context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
- context.setSearchable( repository.isScanned( ) );
- }
- catch ( IndexFormatTooOldException e )
- {
-                // the existing index uses an old lucene format, so we have to delete it first and then recreate it
-                log.warn( "The index of repository {} is too old, we have to delete and recreate it", //
- repository.getId( ) );
- org.apache.archiva.common.utils.FileUtils.deleteDirectory( indexDirectory.getFilePath() );
- context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
- context.setSearchable( repository.isScanned( ) );
- }
- return context;
- }
- else
- {
- throw new IOException( "No repository index defined" );
- }
- }
-
- private String calculateIndexRemoteUrl( URI baseUri, RemoteIndexFeature rif )
- {
- if ( rif.getIndexUri( ) == null )
- {
- return baseUri.resolve( "/"+DEFAULT_INDEX_PATH ).toString( );
- }
- else
- {
- URI rifUri = rif.getIndexUri( );
- if (rifUri.isAbsolute()) {
- return rifUri.toString( );
- } else
- {
- if (baseUri.getScheme().toLowerCase().equals( "file" )) {
- return Paths.get( baseUri ).resolve( rifUri.getPath() ).toUri( ).toString( );
- } else
- {
- String pathString = rifUri.getPath( ).startsWith( "/" ) ? rifUri.getPath( ) : "/" + rifUri.getPath( );
- return baseUri.resolve( pathString ).toString( );
- }
- }
- }
- }
-
- private static final class DownloadListener
- implements TransferListener
- {
- private Logger log = LoggerFactory.getLogger( getClass( ) );
-
- private String resourceName;
-
- private long startTime;
-
- private int totalLength = 0;
-
- @Override
- public void transferInitiated( TransferEvent transferEvent )
- {
- startTime = System.currentTimeMillis( );
- resourceName = transferEvent.getResource( ).getName( );
- log.debug( "initiate transfer of {}", resourceName );
- }
-
- @Override
- public void transferStarted( TransferEvent transferEvent )
- {
- this.totalLength = 0;
- resourceName = transferEvent.getResource( ).getName( );
- log.info( "start transfer of {}", transferEvent.getResource( ).getName( ) );
- }
-
- @Override
- public void transferProgress( TransferEvent transferEvent, byte[] buffer, int length )
- {
- log.debug( "transfer of {} : {}/{}", transferEvent.getResource( ).getName( ), buffer.length, length );
- this.totalLength += length;
- }
-
- @Override
- public void transferCompleted( TransferEvent transferEvent )
- {
- resourceName = transferEvent.getResource( ).getName( );
- long endTime = System.currentTimeMillis( );
- log.info( "end of transfer file {} {} kb: {}s", transferEvent.getResource( ).getName( ),
- this.totalLength / 1024, ( endTime - startTime ) / 1000 );
- }
-
- @Override
- public void transferError( TransferEvent transferEvent )
- {
- log.info( "error of transfer file {}: {}", transferEvent.getResource( ).getName( ),
- transferEvent.getException( ).getMessage( ), transferEvent.getException( ) );
- }
-
- @Override
- public void debug( String message )
- {
- log.debug( "transfer debug {}", message );
- }
- }
-
- private static class WagonResourceFetcher
- implements ResourceFetcher
- {
-
- Logger log;
-
- Path tempIndexDirectory;
-
- Wagon wagon;
-
- RemoteRepository remoteRepository;
-
- private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
- RemoteRepository remoteRepository )
- {
- this.log = log;
- this.tempIndexDirectory = tempIndexDirectory;
- this.wagon = wagon;
- this.remoteRepository = remoteRepository;
- }
-
- @Override
- public void connect( String id, String url ) {
- //no op
- }
-
- @Override
- public void disconnect( ) {
- // no op
- }
-
- @Override
- public InputStream retrieve( String name )
- throws IOException {
- try
- {
- log.info( "index update retrieve file, name:{}", name );
- Path file = tempIndexDirectory.resolve( name );
- Files.deleteIfExists( file );
- file.toFile( ).deleteOnExit( );
- wagon.get( addParameters( name, remoteRepository ), file.toFile( ) );
- return Files.newInputStream( file );
- }
- catch ( AuthorizationException | TransferFailedException e )
- {
- throw new IOException( e.getMessage( ), e );
- }
- catch ( ResourceDoesNotExistException e )
- {
- FileNotFoundException fnfe = new FileNotFoundException( e.getMessage( ) );
- fnfe.initCause( e );
- throw fnfe;
- }
- }
-
- // FIXME remove crappy copy/paste
- protected String addParameters( String path, RemoteRepository remoteRepository )
- {
- if ( remoteRepository.getExtraParameters( ).isEmpty( ) )
- {
- return path;
- }
-
- boolean question = false;
-
- StringBuilder res = new StringBuilder( path == null ? "" : path );
-
-            for ( Map.Entry<String, String> entry : remoteRepository.getExtraParameters( ).entrySet( ) )
-            {
-                if ( !question )
-                {
-                    res.append( '?' ).append( entry.getKey( ) ).append( '=' ).append( entry.getValue( ) );
-                    question = true;
-                }
-                else
-                {
-                    res.append( '&' ).append( entry.getKey( ) ).append( '=' ).append( entry.getValue( ) );
-                }
-            }
-
- return res.toString( );
- }
-
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.indexer.search.ArtifactInfoFilter;
-import org.apache.archiva.indexer.search.NoClassifierArtifactInfoFilter;
-import org.apache.archiva.indexer.search.RepositorySearch;
-import org.apache.archiva.indexer.search.RepositorySearchException;
-import org.apache.archiva.indexer.search.SearchFields;
-import org.apache.archiva.indexer.search.SearchResultHit;
-import org.apache.archiva.indexer.search.SearchResultLimits;
-import org.apache.archiva.indexer.search.SearchResults;
-import org.apache.archiva.indexer.util.SearchUtil;
-import org.apache.archiva.model.ArchivaArtifactModel;
-import org.apache.archiva.proxy.ProxyRegistry;
-import org.apache.archiva.proxy.model.ProxyConnector;
-import org.apache.archiva.repository.RemoteRepository;
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.maven.index.ArtifactInfo;
-import org.apache.maven.index.FlatSearchRequest;
-import org.apache.maven.index.FlatSearchResponse;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.MAVEN;
-import org.apache.maven.index.OSGI;
-import org.apache.maven.index.QueryCreator;
-import org.apache.maven.index.SearchType;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.expr.SearchExpression;
-import org.apache.maven.index.expr.SearchTyped;
-import org.apache.maven.index.expr.SourcedSearchExpression;
-import org.apache.maven.index.expr.UserInputSearchExpression;
-import org.apache.maven.index_shaded.lucene.search.BooleanClause;
-import org.apache.maven.index_shaded.lucene.search.BooleanClause.Occur;
-import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * RepositorySearch implementation which uses the Maven Indexer for searching.
- */
-@Service( "repositorySearch#maven" )
-public class MavenRepositorySearch
- implements RepositorySearch
-{
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- private Indexer indexer;
-
- private QueryCreator queryCreator;
-
-
- private RepositoryRegistry repositoryRegistry;
-
- private ProxyRegistry proxyRegistry;
-
- protected MavenRepositorySearch()
- {
- // for test purpose
- }
-
- @Inject
- public MavenRepositorySearch( Indexer nexusIndexer, RepositoryRegistry repositoryRegistry,
- ProxyRegistry proxyRegistry, QueryCreator queryCreator )
- {
- this.indexer = nexusIndexer;
- this.queryCreator = queryCreator;
- this.repositoryRegistry = repositoryRegistry;
- this.proxyRegistry = proxyRegistry;
- }
-
- /**
- * @see RepositorySearch#search(String, List, String, SearchResultLimits, List)
- */
- @Override
- public SearchResults search(String principal, List<String> selectedRepos, String term, SearchResultLimits limits,
- List<String> previousSearchTerms )
- throws RepositorySearchException
- {
- List<String> indexingContextIds = addIndexingContexts( selectedRepos );
-
- // since upgrade to nexus 2.0.0, query has changed from g:[QUERIED TERM]* to g:*[QUERIED TERM]*
-        // resulting in more wildcard searches, so we need to increase the max clause count
- BooleanQuery.setMaxClauseCount( Integer.MAX_VALUE );
- BooleanQuery.Builder qb = new BooleanQuery.Builder();
-
- if ( previousSearchTerms == null || previousSearchTerms.isEmpty() )
- {
- constructQuery( term, qb );
- }
- else
- {
- for ( String previousTerm : previousSearchTerms )
- {
- BooleanQuery.Builder iQuery = new BooleanQuery.Builder();
- constructQuery( previousTerm, iQuery );
-
- qb.add( iQuery.build(), BooleanClause.Occur.MUST );
- }
-
- BooleanQuery.Builder iQuery = new BooleanQuery.Builder();
- constructQuery( term, iQuery );
- qb.add( iQuery.build(), BooleanClause.Occur.MUST );
- }
-
-        // we return only artifacts without a classifier in quick search; olamy cannot find a way to require this field to be empty
- // FIXME cannot find a way currently to setup this in constructQuery !!!
- return search( limits, qb.build(), indexingContextIds, NoClassifierArtifactInfoFilter.LIST, selectedRepos, true );
-
- }
-
- /**
- * @see RepositorySearch#search(String, SearchFields, SearchResultLimits)
- */
- @SuppressWarnings( "deprecation" )
- @Override
- public SearchResults search( String principal, SearchFields searchFields, SearchResultLimits limits )
- throws RepositorySearchException
- {
- if ( searchFields.getRepositories() == null )
- {
- throw new RepositorySearchException( "Repositories cannot be null." );
- }
-
- List<String> indexingContextIds = addIndexingContexts( searchFields.getRepositories() );
-
-        // if no index is found among the specified repositories, return an empty search result instead of searching all indexes
- // olamy: IMHO doesn't make sense
- if ( !searchFields.getRepositories().isEmpty() && ( indexingContextIds == null
- || indexingContextIds.isEmpty() ) )
- {
- return new SearchResults();
- }
-
- BooleanQuery.Builder qb = new BooleanQuery.Builder();
- if ( StringUtils.isNotBlank( searchFields.getGroupId() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.GROUP_ID, searchFields.isExactSearch() ? new SourcedSearchExpression(
- searchFields.getGroupId() ) : new UserInputSearchExpression( searchFields.getGroupId() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getArtifactId() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.ARTIFACT_ID,
- searchFields.isExactSearch()
- ? new SourcedSearchExpression( searchFields.getArtifactId() )
- : new UserInputSearchExpression( searchFields.getArtifactId() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getVersion() ) )
- {
-            qb.add( indexer.constructQuery( MAVEN.VERSION,
-                                            new SourcedSearchExpression( searchFields.getVersion() ) ),
-                    BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getPackaging() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.PACKAGING, searchFields.isExactSearch() ? new SourcedSearchExpression(
- searchFields.getPackaging() ) : new UserInputSearchExpression( searchFields.getPackaging() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getClassName() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.CLASSNAMES,
- new UserInputSearchExpression( searchFields.getClassName() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleSymbolicName() ) )
- {
- qb.add( indexer.constructQuery( OSGI.SYMBOLIC_NAME,
- new UserInputSearchExpression( searchFields.getBundleSymbolicName() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleVersion() ) )
- {
- qb.add( indexer.constructQuery( OSGI.VERSION,
- new UserInputSearchExpression( searchFields.getBundleVersion() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleExportPackage() ) )
- {
- qb.add( indexer.constructQuery( OSGI.EXPORT_PACKAGE,
- new UserInputSearchExpression( searchFields.getBundleExportPackage() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleExportService() ) )
- {
- qb.add( indexer.constructQuery( OSGI.EXPORT_SERVICE,
- new UserInputSearchExpression( searchFields.getBundleExportService() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleImportPackage() ) )
- {
- qb.add( indexer.constructQuery( OSGI.IMPORT_PACKAGE,
- new UserInputSearchExpression( searchFields.getBundleImportPackage() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleName() ) )
- {
- qb.add( indexer.constructQuery( OSGI.NAME, new UserInputSearchExpression( searchFields.getBundleName() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleRequireBundle() ) )
- {
- qb.add( indexer.constructQuery( OSGI.REQUIRE_BUNDLE,
- new UserInputSearchExpression( searchFields.getBundleRequireBundle() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getClassifier() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.CLASSIFIER, searchFields.isExactSearch() ? new SourcedSearchExpression(
- searchFields.getClassifier() ) : new UserInputSearchExpression( searchFields.getClassifier() ) ),
- Occur.MUST );
- }
- else if ( searchFields.isExactSearch() )
- {
- //TODO improvement in case of exact search and no classifier we must query for classifier with null value
- // currently it's done in DefaultSearchService with some filtering
- }
-
- BooleanQuery qu = qb.build();
- if ( qu.clauses() == null || qu.clauses().size() <= 0 )
- {
- throw new RepositorySearchException( "No search fields set." );
- }
- if (qu.clauses()!=null) {
-            log.debug("CLAUSES {}", qu.clauses());
-            for (BooleanClause cl : qu.clauses()) {
-                log.debug("Clause {}", cl);
- }
- }
-
- return search( limits, qu, indexingContextIds, Collections.<ArtifactInfoFilter>emptyList(),
- searchFields.getRepositories(), searchFields.isIncludePomArtifacts() );
- }
-
- private static class NullSearch
- implements SearchTyped, SearchExpression
- {
- private static final NullSearch INSTANCE = new NullSearch();
-
- @Override
- public String getStringValue()
- {
- return "[[NULL_VALUE]]";
- }
-
- @Override
- public SearchType getSearchType()
- {
- return SearchType.EXACT;
- }
- }
-
- private SearchResults search( SearchResultLimits limits, BooleanQuery q, List<String> indexingContextIds,
- List<? extends ArtifactInfoFilter> filters, List<String> selectedRepos,
- boolean includePoms )
- throws RepositorySearchException
- {
-
- try
- {
- FlatSearchRequest request = new FlatSearchRequest( q );
-
- request.setContexts( getIndexingContexts( indexingContextIds ) );
- if ( limits != null )
- {
- // we apply limits only when first page asked
- if ( limits.getSelectedPage() == 0 )
- {
- request.setCount( limits.getPageSize() * ( Math.max( 1, limits.getSelectedPage() ) ) );
- }
- }
-
- FlatSearchResponse response = indexer.searchFlat( request );
-
- if ( response == null || response.getTotalHitsCount() == 0 )
- {
- SearchResults results = new SearchResults();
- results.setLimits( limits );
- return results;
- }
-
- return convertToSearchResults( response, limits, filters, selectedRepos, includePoms );
- }
- catch ( IOException e )
- {
- throw new RepositorySearchException( e.getMessage(), e );
- }
-
- }
-
- private IndexingContext getIndexingContext(String id) {
- String repoId;
- if (StringUtils.startsWith(id, "remote-")) {
- repoId = StringUtils.substringAfter(id, "remote-");
- } else {
- repoId = id;
- }
- Repository repo = repositoryRegistry.getRepository(repoId);
- if (repo==null) {
- return null;
- } else {
- if (repo.getIndexingContext()!=null) {
- try {
- return repo.getIndexingContext().getBaseContext(IndexingContext.class);
- } catch (UnsupportedBaseContextException e) {
- return null;
- }
- } else {
- return null;
- }
- }
- }
-
- private List<IndexingContext> getIndexingContexts( List<String> ids )
- {
- List<IndexingContext> contexts = new ArrayList<>( ids.size() );
-
- for ( String id : ids )
- {
- IndexingContext context = getIndexingContext(id);
- if ( context != null )
- {
- contexts.add( context );
- }
- else
- {
-                log.warn( "The context with id {} does not exist", id );
- }
- }
-
- return contexts;
- }
-
- private void constructQuery( String term, BooleanQuery.Builder q )
- {
- q.add( indexer.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ), Occur.SHOULD );
- q.add( indexer.constructQuery( MAVEN.ARTIFACT_ID, new UserInputSearchExpression( term ) ), Occur.SHOULD );
- q.add( indexer.constructQuery( MAVEN.VERSION, new UserInputSearchExpression( term ) ), Occur.SHOULD );
- q.add( indexer.constructQuery( MAVEN.PACKAGING, new UserInputSearchExpression( term ) ), Occur.SHOULD );
- q.add( indexer.constructQuery( MAVEN.CLASSNAMES, new UserInputSearchExpression( term ) ), Occur.SHOULD );
-
- //Query query =
- // new WildcardQuery( new Term( MAVEN.CLASSNAMES.getFieldName(), "*" ) );
- //q.add( query, Occur.MUST_NOT );
- // olamy IMHO we could set this option as at least one must match
- //q.setMinimumNumberShouldMatch( 1 );
- }
-
-
- /**
- * @param selectedRepos
-     * @return the ids of the indexing contexts that were used
- */
- private List<String> addIndexingContexts( List<String> selectedRepos )
- {
- Set<String> indexingContextIds = new HashSet<>();
- for ( String repo : selectedRepos )
- {
- try
- {
- Repository rRepo = repositoryRegistry.getRepository(repo);
-
- if ( rRepo != null )
- {
-
- if (rRepo.getType().equals(RepositoryType.MAVEN)) {
- assert rRepo.getIndexingContext() != null;
- IndexingContext context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
- if (context.isSearchable()) {
- indexingContextIds.addAll(getRemoteIndexingContextIds(repo));
- indexingContextIds.add(context.getId());
- } else {
-                        log.warn("indexingContext with id {} is not searchable", rRepo.getId());
- }
- }
-
- }
- else
- {
- log.warn( "Repository '{}' not found in configuration.", repo );
- }
- }
- catch ( RepositorySearchException e )
- {
-                log.warn( "RepositorySearchException occurred while accessing index of repository '{}' : {}", repo,
- e.getMessage() );
- continue;
- } catch (UnsupportedBaseContextException e) {
- log.error("Fatal situation: Maven repository without IndexingContext found.");
- continue;
- }
- }
-
- return new ArrayList<>( indexingContextIds );
- }
-
-
- @Override
- public Set<String> getRemoteIndexingContextIds( String managedRepoId )
- throws RepositorySearchException
- {
- Set<String> ids = new HashSet<>();
-
-        List<ProxyConnector> proxyConnectors = proxyRegistry.getProxyConnectorAsMap( ).get( managedRepoId );
-
- if ( proxyConnectors == null || proxyConnectors.isEmpty() )
- {
- return ids;
- }
-
- for ( ProxyConnector proxyConnector : proxyConnectors )
- {
- String remoteId = "remote-" + proxyConnector.getTargetRepository().getId();
- RemoteRepository repo = repositoryRegistry.getRemoteRepository(proxyConnector.getTargetRepository().getId());
- if (repo.getType()==RepositoryType.MAVEN) {
- try {
- IndexingContext context = repo.getIndexingContext() != null ? repo.getIndexingContext().getBaseContext(IndexingContext.class) : null;
- if (context!=null && context.isSearchable()) {
- ids.add(remoteId);
- }
- } catch (UnsupportedBaseContextException e) {
- // Ignore this one
- }
- }
- }
-
- return ids;
- }
-
- @Override
- public Collection<String> getAllGroupIds( String principal, List<String> selectedRepos )
- throws RepositorySearchException
- {
- List<IndexingContext> indexContexts = getIndexingContexts( selectedRepos );
-
- if ( indexContexts == null || indexContexts.isEmpty() )
- {
- return Collections.emptyList();
- }
-
- try
- {
- Set<String> allGroupIds = new HashSet<>();
- for ( IndexingContext indexingContext : indexContexts )
- {
- allGroupIds.addAll( indexingContext.getAllGroups() );
- }
- return allGroupIds;
- }
- catch ( IOException e )
- {
- throw new RepositorySearchException( e.getMessage(), e );
- }
-
- }
-
- private SearchResults convertToSearchResults( FlatSearchResponse response, SearchResultLimits limits,
- List<? extends ArtifactInfoFilter> artifactInfoFilters,
- List<String> selectedRepos, boolean includePoms )
- {
- SearchResults results = new SearchResults();
- Set<ArtifactInfo> artifactInfos = response.getResults();
-
- for ( ArtifactInfo artifactInfo : artifactInfos )
- {
- if ( StringUtils.equalsIgnoreCase( "pom", artifactInfo.getFileExtension() ) && !includePoms )
- {
- continue;
- }
- String id = SearchUtil.getHitId( artifactInfo.getGroupId(), //
- artifactInfo.getArtifactId(), //
- artifactInfo.getClassifier(), //
- artifactInfo.getPackaging() );
- Map<String, SearchResultHit> hitsMap = results.getHitsMap();
-
-
- if ( !applyArtifactInfoFilters( artifactInfo, artifactInfoFilters, hitsMap ) )
- {
- continue;
- }
-
- SearchResultHit hit = hitsMap.get( id );
- if ( hit != null )
- {
- if ( !hit.getVersions().contains( artifactInfo.getVersion() ) )
- {
- hit.addVersion( artifactInfo.getVersion() );
- }
- }
- else
- {
- hit = new SearchResultHit();
- hit.setArtifactId( artifactInfo.getArtifactId() );
- hit.setGroupId( artifactInfo.getGroupId() );
- hit.setRepositoryId( artifactInfo.getRepository() );
- hit.addVersion( artifactInfo.getVersion() );
- hit.setBundleExportPackage( artifactInfo.getBundleExportPackage() );
- hit.setBundleExportService( artifactInfo.getBundleExportService() );
- hit.setBundleSymbolicName( artifactInfo.getBundleSymbolicName() );
- hit.setBundleVersion( artifactInfo.getBundleVersion() );
- hit.setBundleDescription( artifactInfo.getBundleDescription() );
- hit.setBundleDocUrl( artifactInfo.getBundleDocUrl() );
- hit.setBundleRequireBundle( artifactInfo.getBundleRequireBundle() );
- hit.setBundleImportPackage( artifactInfo.getBundleImportPackage() );
- hit.setBundleLicense( artifactInfo.getBundleLicense() );
- hit.setBundleName( artifactInfo.getBundleName() );
- hit.setContext( artifactInfo.getContext() );
- hit.setGoals( artifactInfo.getGoals() );
- hit.setPrefix( artifactInfo.getPrefix() );
- hit.setPackaging( artifactInfo.getPackaging() );
- hit.setClassifier( artifactInfo.getClassifier() );
- hit.setFileExtension( artifactInfo.getFileExtension() );
- hit.setUrl( getBaseUrl( artifactInfo, selectedRepos ) );
- }
-
- results.addHit( id, hit );
- }
-
- results.setTotalHits( response.getTotalHitsCount() );
- results.setTotalHitsMapSize( results.getHitsMap().values().size() );
- results.setReturnedHitsCount( response.getReturnedHitsCount() );
- results.setLimits( limits );
-
- if ( limits == null || limits.getSelectedPage() == SearchResultLimits.ALL_PAGES )
- {
- return results;
- }
- else
- {
- return paginate( results );
- }
- }
-
- /**
- * calculate baseUrl without the context and base Archiva Url
- *
- * @param artifactInfo
- * @return
- */
- protected String getBaseUrl( ArtifactInfo artifactInfo, List<String> selectedRepos )
- {
- StringBuilder sb = new StringBuilder();
- if ( StringUtils.startsWith( artifactInfo.getContext(), "remote-" ) )
- {
- // it's a remote index result we search a managed which proxying this remote and on which
- // current user has read karma
- String managedRepoId =
- getManagedRepoId( StringUtils.substringAfter( artifactInfo.getContext(), "remote-" ), selectedRepos );
- if ( managedRepoId != null )
- {
- sb.append( '/' ).append( managedRepoId );
- artifactInfo.setContext( managedRepoId );
- }
- }
- else
- {
- sb.append( '/' ).append( artifactInfo.getContext() );
- }
-
- sb.append( '/' ).append( StringUtils.replaceChars( artifactInfo.getGroupId(), '.', '/' ) );
- sb.append( '/' ).append( artifactInfo.getArtifactId() );
- sb.append( '/' ).append( artifactInfo.getVersion() );
- sb.append( '/' ).append( artifactInfo.getArtifactId() );
- sb.append( '-' ).append( artifactInfo.getVersion() );
- if ( StringUtils.isNotBlank( artifactInfo.getClassifier() ) )
- {
- sb.append( '-' ).append( artifactInfo.getClassifier() );
- }
- // maven-plugin packaging is a jar
- if ( StringUtils.equals( "maven-plugin", artifactInfo.getPackaging() ) )
- {
- sb.append( "jar" );
- }
- else
- {
- sb.append( '.' ).append( artifactInfo.getPackaging() );
- }
-
- return sb.toString();
- }
-
- /**
- * return a managed repo for a remote result
- *
- * @param remoteRepo
- * @param selectedRepos
- * @return
- */
- private String getManagedRepoId( String remoteRepo, List<String> selectedRepos )
- {
- Map<String, List<ProxyConnector>> proxyConnectorMap = proxyRegistry.getProxyConnectorAsMap();
- if ( proxyConnectorMap == null || proxyConnectorMap.isEmpty() )
- {
- return null;
- }
- if ( selectedRepos != null && !selectedRepos.isEmpty() )
- {
- for ( Map.Entry<String, List<ProxyConnector>> entry : proxyConnectorMap.entrySet() )
- {
- if ( selectedRepos.contains( entry.getKey() ) )
- {
- for ( ProxyConnector proxyConnector : entry.getValue() )
- {
- if ( StringUtils.equals( remoteRepo, proxyConnector.getTargetRepository().getId() ) )
- {
- return proxyConnector.getSourceRepository().getId();
- }
- }
- }
- }
- }
-
- // we don't find in search selected repos so return the first one
- for ( Map.Entry<String, List<ProxyConnector>> entry : proxyConnectorMap.entrySet() )
- {
-
- for ( ProxyConnector proxyConnector : entry.getValue() )
- {
- if ( StringUtils.equals( remoteRepo, proxyConnector.getTargetRepository().getId() ) )
- {
- return proxyConnector.getSourceRepository().getId();
- }
- }
-
- }
- return null;
- }
-
- private boolean applyArtifactInfoFilters( ArtifactInfo artifactInfo,
- List<? extends ArtifactInfoFilter> artifactInfoFilters,
- Map<String, SearchResultHit> currentResult )
- {
- if ( artifactInfoFilters == null || artifactInfoFilters.isEmpty() )
- {
- return true;
- }
-
- ArchivaArtifactModel artifact = new ArchivaArtifactModel();
- artifact.setArtifactId( artifactInfo.getArtifactId() );
- artifact.setClassifier( artifactInfo.getClassifier() );
- artifact.setGroupId( artifactInfo.getGroupId() );
- artifact.setRepositoryId( artifactInfo.getRepository() );
- artifact.setVersion( artifactInfo.getVersion() );
- artifact.setChecksumMD5( artifactInfo.getMd5() );
- artifact.setChecksumSHA1( artifactInfo.getSha1() );
- for ( ArtifactInfoFilter filter : artifactInfoFilters )
- {
- if ( !filter.addArtifactInResult( artifact, currentResult ) )
- {
- return false;
- }
- }
- return true;
- }
-
- protected SearchResults paginate( SearchResults results )
- {
- SearchResultLimits limits = results.getLimits();
- SearchResults paginated = new SearchResults();
-
- // ( limits.getPageSize() * ( Math.max( 1, limits.getSelectedPage() ) ) );
-
- int fetchCount = limits.getPageSize();
- int offset = ( limits.getSelectedPage() * limits.getPageSize() );
-
- if ( fetchCount > results.getTotalHits() )
- {
- fetchCount = results.getTotalHits();
- }
-
- // Goto offset.
- if ( offset < results.getTotalHits() )
- {
- // only process if the offset is within the hit count.
- for ( int i = 0; i < fetchCount; i++ )
- {
- // Stop fetching if we are past the total # of available hits.
- if ( offset + i >= results.getHits().size() )
- {
- break;
- }
-
- SearchResultHit hit = results.getHits().get( ( offset + i ) );
- if ( hit != null )
- {
- String id = SearchUtil.getHitId( hit.getGroupId(), hit.getArtifactId(), hit.getClassifier(),
- hit.getPackaging() );
- paginated.addHit( id, hit );
- }
- else
- {
- break;
- }
- }
- }
- paginated.setTotalHits( results.getTotalHits() );
- paginated.setReturnedHitsCount( paginated.getHits().size() );
- paginated.setTotalHitsMapSize( results.getTotalHitsMapSize() );
- paginated.setLimits( limits );
-
- return paginated;
- }
-
-
-}
+++ /dev/null
-package org.apache.archiva.indexer.search;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.model.ArchivaArtifactModel;
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author Olivier Lamy
- */
-public class NoClassifierArtifactInfoFilter
- implements ArtifactInfoFilter
-{
- public static final NoClassifierArtifactInfoFilter INSTANCE = new NoClassifierArtifactInfoFilter();
-
- public static final List<? extends ArtifactInfoFilter> LIST = Arrays.asList( INSTANCE );
-
- @Override
- public boolean addArtifactInResult( ArchivaArtifactModel artifact, Map<String, SearchResultHit> currentResult )
- {
- return StringUtils.isBlank( artifact.getClassifier() );
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.util;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang3.StringUtils;
-
-/**
- * SearchUtil - utility class for search.
- */
-public class SearchUtil
-{
- public static String getHitId( String groupId, String artifactId, String classifier, String packaging )
- {
- return ( StringUtils.isBlank( groupId ) ? "" : StringUtils.trim( groupId ) ) + ":" //
- + ( StringUtils.isBlank( artifactId ) ? "" : StringUtils.trim( artifactId ) ) + ":" //
- + ( StringUtils.isBlank( classifier ) ? "" : StringUtils.trim( classifier ) ) + ":" //
- + ( StringUtils.isBlank( packaging ) ? "" : StringUtils.trim( packaging ) );
- }
-}
--- /dev/null
+package org.apache.archiva.maven.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.updater.IndexUpdateSideEffect;
+import org.apache.maven.index_shaded.lucene.store.Directory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+/**
+ * Does not do much, but at least one implementation is required
+ *
+ * @since 3.0.0
+ */
+@Service("archivaIndexUpdater")
+public class DefaultIndexUpdateSideEffect
+ implements IndexUpdateSideEffect
+{
+ private static final Logger LOGGER = LoggerFactory.getLogger( DefaultIndexUpdateSideEffect.class );
+
+ @Override
+ public void updateIndex( Directory directory, IndexingContext indexingContext, boolean b )
+ {
+ LOGGER.info( "updating index: {} with directory: {}", //
+ indexingContext.getId(), //
+ directory.toString() );
+ }
+}
--- /dev/null
+package org.apache.archiva.maven.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.storage.fs.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.maven.index.context.IndexingContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
+import java.nio.file.Path;
+import java.sql.Date;
+import java.time.ZonedDateTime;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * Maven implementation of index context
+ */
+public class MavenIndexContext implements ArchivaIndexingContext {
+
+ private static final Logger log = LoggerFactory.getLogger(ArchivaIndexingContext.class);
+
+
+ private AtomicBoolean openStatus = new AtomicBoolean(false);
+ private IndexingContext delegate;
+ private Repository repository;
+ private StorageAsset dir = null;
+
+ protected MavenIndexContext(Repository repository, IndexingContext delegate) {
+ this.delegate = delegate;
+ this.repository = repository;
+ this.openStatus.set(true);
+
+ }
+
+ @Override
+ public String getId() {
+ return delegate.getId();
+ }
+
+ @Override
+ public Repository getRepository() {
+ return repository;
+ }
+
+ @Override
+ public StorageAsset getPath() {
+ if (dir==null) {
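+ // lazily resolve the index directory: if it lies below the repository root it is exposed
+ // as an asset of the repository, otherwise it gets its own filesystem storage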
+ StorageAsset repositoryDirAsset = repository.getRoot();
+ Path repositoryDir = repositoryDirAsset.getFilePath().toAbsolutePath();
+ Path indexDir = delegate.getIndexDirectoryFile().toPath();
+ if (indexDir.startsWith(repositoryDir)) {
+ dir = repository.getAsset(repositoryDir.relativize(indexDir).toString());
+ } else {
+ try {
+ FilesystemStorage storage = new FilesystemStorage(indexDir, new DefaultFileLockManager());
+ dir = storage.getRoot();
+ } catch (IOException e) {
+ log.error("Error occured while creating storage for index dir");
+ }
+ }
+ }
+ return dir;
+ }
+
+ @Override
+ public boolean isEmpty() throws IOException {
+ try ( java.util.stream.Stream<Path> files = Files.list( delegate.getIndexDirectoryFile( ).toPath( ) ) ) {
+ // the stream returned by Files.list holds an open directory handle and must be closed
+ return files.count( ) == 0;
+ }
+ }
+
+ @Override
+ public void commit() throws IOException {
+ delegate.commit();
+ }
+
+ @Override
+ public void rollback() throws IOException {
+ delegate.rollback();
+ }
+
+ @Override
+ public void optimize() throws IOException {
+ delegate.optimize();
+ }
+
+ @Override
+ public void close(boolean deleteFiles) throws IOException {
+ if (openStatus.compareAndSet(true,false)) {
+ try {
+ delegate.close(deleteFiles);
+ } catch (NoSuchFileException e) {
+ // Ignore missing directory
+ }
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (openStatus.compareAndSet(true,false)) {
+ try {
+ delegate.close(false);
+ } catch (NoSuchFileException e) {
+ // Ignore missing directory
+ }
+ }
+ }
+
+ @Override
+ public boolean isOpen() {
+ return openStatus.get();
+ }
+
+ @Override
+ public void purge() throws IOException {
+ delegate.purge();
+ }
+
+ @Override
+ public boolean supports(Class<?> clazz) {
+ return IndexingContext.class.equals(clazz);
+ }
+
+ @SuppressWarnings( "unchecked" )
+ @Override
+ public <T> T getBaseContext(Class<T> clazz) throws UnsupportedOperationException {
+ if (IndexingContext.class.equals(clazz)) {
+ return (T) delegate;
+ } else {
+ throw new UnsupportedOperationException("The class "+clazz+" is not supported by the maven indexer");
+ }
+ }
+
+ @Override
+ public Set<String> getGroups() throws IOException {
+ return delegate.getAllGroups();
+ }
+
+ @Override
+ public void updateTimestamp(boolean save) throws IOException {
+ delegate.updateTimestamp(save);
+ }
+
+ @Override
+ public void updateTimestamp(boolean save, ZonedDateTime time) throws IOException {
+ delegate.updateTimestamp(save, Date.from(time.toInstant()));
+ }
+
+
+}
--- /dev/null
+package org.apache.archiva.maven.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.common.utils.PathUtil;
+import org.apache.archiva.configuration.ArchivaConfiguration;
+import org.apache.archiva.indexer.ArchivaIndexManager;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.indexer.IndexCreationFailedException;
+import org.apache.archiva.indexer.IndexUpdateFailedException;
+import org.apache.archiva.indexer.UnsupportedBaseContextException;
+import org.apache.archiva.proxy.ProxyRegistry;
+import org.apache.archiva.proxy.maven.WagonFactory;
+import org.apache.archiva.proxy.maven.WagonFactoryException;
+import org.apache.archiva.proxy.maven.WagonFactoryRequest;
+import org.apache.archiva.proxy.model.NetworkProxy;
+import org.apache.archiva.repository.EditableRepository;
+import org.apache.archiva.repository.ManagedRepository;
+import org.apache.archiva.repository.base.PasswordCredentials;
+import org.apache.archiva.repository.RemoteRepository;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
+import org.apache.archiva.repository.storage.AssetType;
+import org.apache.archiva.repository.storage.fs.FilesystemStorage;
+import org.apache.archiva.repository.storage.RepositoryStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.maven.index.ArtifactContext;
+import org.apache.maven.index.ArtifactContextProducer;
+import org.apache.maven.index.DefaultScannerListener;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.IndexerEngine;
+import org.apache.maven.index.Scanner;
+import org.apache.maven.index.ScanningRequest;
+import org.apache.maven.index.ScanningResult;
+import org.apache.maven.index.context.ContextMemberProvider;
+import org.apache.maven.index.context.IndexCreator;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.context.StaticContextMemberProvider;
+import org.apache.maven.index.packer.IndexPacker;
+import org.apache.maven.index.packer.IndexPackingRequest;
+import org.apache.maven.index.updater.IndexUpdateRequest;
+import org.apache.maven.index.updater.IndexUpdater;
+import org.apache.maven.index.updater.ResourceFetcher;
+import org.apache.maven.index_shaded.lucene.index.IndexFormatTooOldException;
+import org.apache.maven.wagon.ConnectionException;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.StreamWagon;
+import org.apache.maven.wagon.TransferFailedException;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.authentication.AuthenticationException;
+import org.apache.maven.wagon.authentication.AuthenticationInfo;
+import org.apache.maven.wagon.authorization.AuthorizationException;
+import org.apache.maven.wagon.events.TransferEvent;
+import org.apache.maven.wagon.events.TransferListener;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+import org.apache.maven.wagon.shared.http.AbstractHttpClientWagon;
+import org.apache.maven.wagon.shared.http.HttpConfiguration;
+import org.apache.maven.wagon.shared.http.HttpMethodConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.ConcurrentSkipListSet;
+import java.util.stream.Collectors;
+
+/**
+ * Maven implementation of index manager.
+ * The index manager is a singleton, so we try to make sure that index operations do not run
+ * in parallel, by synchronizing on the index path.
+ * An update operation waits for parallel running methods to finish before starting, but after a certain
+ * number of retries an IndexUpdateFailedException is thrown.
+ */
+@Service( "archivaIndexManager#maven" )
+public class MavenIndexManager implements ArchivaIndexManager {
+
+ private static final Logger log = LoggerFactory.getLogger( MavenIndexManager.class );
+
+ @Inject
+ private Indexer indexer;
+
+ @Inject
+ private IndexerEngine indexerEngine;
+
+ @Inject
+ private List<? extends IndexCreator> indexCreators;
+
+ @Inject
+ private IndexPacker indexPacker;
+
+ @Inject
+ private Scanner scanner;
+
+ @Inject
+ private ArchivaConfiguration archivaConfiguration;
+
+ @Inject
+ private WagonFactory wagonFactory;
+
+ @Inject
+ private IndexUpdater indexUpdater;
+
+ @Inject
+ private ArtifactContextProducer artifactContextProducer;
+
+ @Inject
+ private ProxyRegistry proxyRegistry;
+
+
+ private ConcurrentSkipListSet<StorageAsset> activeContexts = new ConcurrentSkipListSet<>( );
+
+ private static final int WAIT_TIME = 100;
+ private static final int MAX_WAIT = 10;
+
+
+ public static IndexingContext getMvnContext( ArchivaIndexingContext context ) throws UnsupportedBaseContextException
+ {
+ if (context!=null)
+ {
+ if ( !context.supports( IndexingContext.class ) )
+ {
+ log.error( "The provided archiva index context does not support the maven IndexingContext" );
+ throw new UnsupportedBaseContextException( "The context does not support the Maven IndexingContext" );
+ }
+ return context.getBaseContext( IndexingContext.class );
+ } else {
+ return null;
+ }
+ }
+
+ private StorageAsset getIndexPath( ArchivaIndexingContext ctx )
+ {
+ return ctx.getPath( );
+ }
+
+ @FunctionalInterface
+ interface IndexUpdateConsumer
+ {
+
+ void accept( IndexingContext indexingContext ) throws IndexUpdateFailedException;
+ }
+
+ /*
+ * Wraps the update execution code with some common actions and makes sure that no other
+ * method is running on the same index at the same time.
+ */
+ private void executeUpdateFunction( ArchivaIndexingContext context, IndexUpdateConsumer function ) throws IndexUpdateFailedException
+ {
+ if (context==null) {
+ throw new IndexUpdateFailedException( "Given context is null" );
+ }
+ IndexingContext indexingContext = null;
+ try
+ {
+ indexingContext = getMvnContext( context );
+ }
+ catch ( UnsupportedBaseContextException e )
+ {
+ throw new IndexUpdateFailedException( "Maven index is not supported by this context", e );
+ }
+ final StorageAsset ctxPath = getIndexPath( context );
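+ // try to register the index path as active; if another operation holds it, retry up to
+ // MAX_WAIT times, sleeping WAIT_TIME milliseconds between attempts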
+ int loop = MAX_WAIT;
+ boolean active = false;
+ while ( loop-- > 0 && !active )
+ {
+ active = activeContexts.add( ctxPath );
+ try
+ {
+ Thread.sleep( WAIT_TIME );
+ }
+ catch ( InterruptedException e )
+ {
+ // Ignore this
+ }
+ }
+ if ( active )
+ {
+ try
+ {
+ function.accept( indexingContext );
+ }
+ finally
+ {
+ activeContexts.remove( ctxPath );
+ }
+ }
+ else
+ {
+ throw new IndexUpdateFailedException( "Timeout while waiting for index release on context " + context.getId( ) );
+ }
+ }
+
+ @Override
+ public void pack( final ArchivaIndexingContext context ) throws IndexUpdateFailedException
+ {
+ executeUpdateFunction( context, indexingContext -> {
+ try
+ {
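+ // pack the Lucene index into the transferable index files inside the index directory
+ // and refresh the index timestamp afterwards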
+ IndexPackingRequest request = new IndexPackingRequest( indexingContext,
+ indexingContext.acquireIndexSearcher( ).getIndexReader( ),
+ indexingContext.getIndexDirectoryFile( ) );
+ indexPacker.packIndex( request );
+ indexingContext.updateTimestamp( true );
+ }
+ catch ( IOException e )
+ {
+ log.error( "IOException while packing index of context " + context.getId( ) + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ) );
+ throw new IndexUpdateFailedException( "IOException during update of " + context.getId( ), e );
+ }
+ }
+ );
+
+ }
+
+ @Override
+ public void scan(final ArchivaIndexingContext context) throws IndexUpdateFailedException
+ {
+ executeUpdateFunction( context, indexingContext -> {
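+ // walk the repository content and feed every artifact found into the indexer engine;
+ // scan errors are not fatal, only the first five distinct messages are logged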
+ DefaultScannerListener listener = new DefaultScannerListener( indexingContext, indexerEngine, true, null );
+ ScanningRequest request = new ScanningRequest( indexingContext, listener );
+ ScanningResult result = scanner.scan( request );
+ if ( result.hasExceptions( ) )
+ {
+ log.error( "Exceptions occured during index scan of " + context.getId( ) );
+ result.getExceptions( ).stream( ).map( e -> e.getMessage( ) ).distinct( ).limit( 5 ).forEach(
+ s -> log.error( "Message: " + s )
+ );
+ }
+
+ } );
+ }
+
+ @Override
+ public void update(final ArchivaIndexingContext context, final boolean fullUpdate) throws IndexUpdateFailedException
+ {
+ log.info( "start download remote index for remote repository {}", context.getRepository( ).getId( ) );
+ URI remoteUpdateUri;
+ if ( !( context.getRepository( ) instanceof RemoteRepository ) || !(context.getRepository().supportsFeature(RemoteIndexFeature.class)) )
+ {
+ throw new IndexUpdateFailedException( "The context is not associated to a remote repository with remote index " + context.getId( ) );
+ } else {
+ RemoteIndexFeature rif = context.getRepository().getFeature(RemoteIndexFeature.class).get();
+ remoteUpdateUri = context.getRepository().getLocation().resolve(rif.getIndexUri());
+ }
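+ // the actual download runs under the index lock: fetch the remote index, optionally through
+ // a configured network proxy, into a temporary directory and merge it into the local context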
+ final RemoteRepository remoteRepository = (RemoteRepository) context.getRepository( );
+
+ executeUpdateFunction( context,
+ indexingContext -> {
+ try
+ {
+ // create a temp directory to download files
+ Path tempIndexDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".tmpIndex" );
+ Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".indexCache" );
+ Files.createDirectories( indexCacheDirectory );
+ if ( Files.exists( tempIndexDirectory ) )
+ {
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( tempIndexDirectory );
+ }
+ Files.createDirectories( tempIndexDirectory );
+ tempIndexDirectory.toFile( ).deleteOnExit( );
+ String baseIndexUrl = indexingContext.getIndexUpdateUrl( );
+
+ String wagonProtocol = remoteUpdateUri.toURL( ).getProtocol( );
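+ // choose the wagon implementation by the protocol of the remote index URL and configure
+ // proxy, timeouts and authentication before connecting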
+
+ NetworkProxy networkProxy = null;
+ if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
+ {
+ RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
+ if ( StringUtils.isNotBlank( rif.getProxyId( ) ) )
+ {
+ networkProxy = proxyRegistry.getNetworkProxy( rif.getProxyId( ) );
+ if ( networkProxy == null )
+ {
+ log.warn(
+ "your remote repository is configured to download remote index trought a proxy we cannot find id:{}",
+ rif.getProxyId( ) );
+ }
+ }
+
+ final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(
+ new WagonFactoryRequest( wagonProtocol, remoteRepository.getExtraHeaders( ) ).networkProxy(
+ networkProxy )
+ );
+ int readTimeout = (int) rif.getDownloadTimeout( ).toMillis( ) * 1000;
+ wagon.setReadTimeout( readTimeout );
+ wagon.setTimeout( (int) remoteRepository.getTimeout( ).toMillis( ) * 1000 );
+
+ if ( wagon instanceof AbstractHttpClientWagon )
+ {
+ HttpConfiguration httpConfiguration = new HttpConfiguration( );
+ HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration( );
+ httpMethodConfiguration.setUsePreemptive( true );
+ httpMethodConfiguration.setReadTimeout( readTimeout );
+ httpConfiguration.setGet( httpMethodConfiguration );
+ AbstractHttpClientWagon.class.cast( wagon ).setHttpConfiguration( httpConfiguration );
+ }
+
+ wagon.addTransferListener( new DownloadListener( ) );
+ ProxyInfo proxyInfo = null;
+ if ( networkProxy != null )
+ {
+ proxyInfo = new ProxyInfo( );
+ proxyInfo.setType( networkProxy.getProtocol( ) );
+ proxyInfo.setHost( networkProxy.getHost( ) );
+ proxyInfo.setPort( networkProxy.getPort( ) );
+ proxyInfo.setUserName( networkProxy.getUsername( ) );
+ proxyInfo.setPassword( new String(networkProxy.getPassword( )) );
+ }
+ AuthenticationInfo authenticationInfo = null;
+ if ( remoteRepository.getLoginCredentials( ) != null && ( remoteRepository.getLoginCredentials( ) instanceof PasswordCredentials ) )
+ {
+ PasswordCredentials creds = (PasswordCredentials) remoteRepository.getLoginCredentials( );
+ authenticationInfo = new AuthenticationInfo( );
+ authenticationInfo.setUserName( creds.getUsername( ) );
+ authenticationInfo.setPassword( new String( creds.getPassword( ) ) );
+ }
+ wagon.connect( new org.apache.maven.wagon.repository.Repository( remoteRepository.getId( ), baseIndexUrl ), authenticationInfo,
+ proxyInfo );
+
+ Path indexDirectory = indexingContext.getIndexDirectoryFile( ).toPath( );
+ if ( !Files.exists( indexDirectory ) )
+ {
+ Files.createDirectories( indexDirectory );
+ }
+
+ ResourceFetcher resourceFetcher =
+ new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
+ IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
+ request.setForceFullUpdate( fullUpdate );
+ request.setLocalIndexCacheDir( indexCacheDirectory.toFile( ) );
+
+ indexUpdater.fetchAndUpdateIndex( request );
+
+ indexingContext.updateTimestamp( true );
+ }
+
+ }
+ catch ( AuthenticationException e )
+ {
+ log.error( "Could not login to the remote proxy for updating index of {}", remoteRepository.getId( ), e );
+ throw new IndexUpdateFailedException( "Login in to proxy failed while updating remote repository " + remoteRepository.getId( ), e );
+ }
+ catch ( ConnectionException e )
+ {
+ log.error( "Connection error during index update for remote repository {}", remoteRepository.getId( ), e );
+ throw new IndexUpdateFailedException( "Connection error during index update for remote repository " + remoteRepository.getId( ), e );
+ }
+ catch ( MalformedURLException e )
+ {
+ log.error( "URL for remote index update of remote repository {} is not correct {}", remoteRepository.getId( ), remoteUpdateUri, e );
+ throw new IndexUpdateFailedException( "URL for remote index update of repository is not correct " + remoteUpdateUri, e );
+ }
+ catch ( IOException e )
+ {
+ log.error( "IOException during index update of remote repository {}: {}", remoteRepository.getId( ), e.getMessage( ), e );
+ throw new IndexUpdateFailedException( "IOException during index update of remote repository " + remoteRepository.getId( )
+ + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
+ }
+ catch ( WagonFactoryException e )
+ {
+ log.error( "Wagon for remote index download of {} could not be created: {}", remoteRepository.getId( ), e.getMessage( ), e );
+ throw new IndexUpdateFailedException( "Error while updating the remote index of " + remoteRepository.getId( ), e );
+ }
+ } );
+
+ }
+
+ @Override
+ public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
+ {
+ final StorageAsset ctxUri = context.getPath();
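+ // resolve each artifact URI against the index context root and convert it into an
+ // ArtifactContext before handing the whole batch to the indexer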
+ executeUpdateFunction(context, indexingContext -> {
+ Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
+ try {
+ indexer.addArtifactsToIndex(artifacts, indexingContext);
+ } catch (IOException e) {
+ log.error("IOException while adding artifact {}", e.getMessage(), e);
+ throw new IndexUpdateFailedException("Error occured while adding artifact to index of "+context.getId()
+ + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""));
+ }
+ });
+ }
+
+ @Override
+ public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
+ {
+ final StorageAsset ctxUri = context.getPath();
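+ // same resolution as when adding artifacts: build the ArtifactContext instances and remove them from the index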
+ executeUpdateFunction(context, indexingContext -> {
+ Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
+ try {
+ indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
+ } catch (IOException e) {
+ log.error("IOException while removing artifact {}", e.getMessage(), e);
+ throw new IndexUpdateFailedException("Error occured while removing artifact from index of "+context.getId()
+ + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""));
+ }
+ });
+
+ }
+
+ @Override
+ public boolean supportsRepository( RepositoryType type )
+ {
+ return type == RepositoryType.MAVEN;
+ }
+
+ @Override
+ public ArchivaIndexingContext createContext( Repository repository ) throws IndexCreationFailedException
+ {
+ log.debug("Creating context for repo {}, type: {}", repository.getId(), repository.getType());
+ if ( repository.getType( ) != RepositoryType.MAVEN )
+ {
+ throw new UnsupportedRepositoryTypeException( repository.getType( ) );
+ }
+ IndexingContext mvnCtx = null;
+ try
+ {
+ if ( repository instanceof RemoteRepository )
+ {
+ mvnCtx = createRemoteContext( (RemoteRepository) repository );
+ }
+ else if ( repository instanceof ManagedRepository )
+ {
+ mvnCtx = createManagedContext( (ManagedRepository) repository );
+ }
+ }
+ catch ( IOException e )
+ {
+ log.error( "IOException during context creation " + e.getMessage( ), e );
+ throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
+ + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
+ }
+
+ return new MavenIndexContext( repository, mvnCtx );
+ }
+
+ @Override
+ public ArchivaIndexingContext reset(ArchivaIndexingContext context) throws IndexUpdateFailedException {
+ ArchivaIndexingContext ctx;
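+ // close and delete the current index under the index lock, then create a fresh context
+ // and attach it to the repository again if the repository is editable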
+ executeUpdateFunction(context, indexingContext -> {
+ try {
+ indexingContext.close(true);
+ } catch (IOException e) {
+ log.warn("Index close failed");
+ }
+ org.apache.archiva.repository.storage.util.StorageUtil.deleteRecursively(context.getPath());
+ });
+ try {
+ Repository repo = context.getRepository();
+ ctx = createContext(context.getRepository());
+ if (repo instanceof EditableRepository) {
+ ((EditableRepository)repo).setIndexingContext(ctx);
+ }
+ } catch (IndexCreationFailedException e) {
+ throw new IndexUpdateFailedException("Could not create index");
+ }
+ return ctx;
+ }
+
+ @Override
+ public ArchivaIndexingContext move(ArchivaIndexingContext context, Repository repo) throws IndexCreationFailedException {
+ if (context==null) {
+ return null;
+ }
+ if (context.supports(IndexingContext.class)) {
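+ // if the index location is unchanged the context is kept; otherwise the old index files are
+ // moved (or copied into an existing target directory) and a new context is created there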
+ try {
+ StorageAsset newPath = getIndexPath(repo);
+ IndexingContext ctx = context.getBaseContext(IndexingContext.class);
+ Path oldPath = ctx.getIndexDirectoryFile().toPath();
+ Path newFilePath = newPath.getFilePath( );
+ if (oldPath.equals(newFilePath)) {
+ // Nothing to do, if path does not change
+ return context;
+ }
+ if (!Files.exists(oldPath)) {
+ return createContext(repo);
+ } else if (context.isEmpty()) {
+ context.close();
+ return createContext(repo);
+ } else {
+ context.close(false);
+ if (Files.exists( newFilePath )) {
+ FileUtils.copyContent( oldPath, newFilePath );
+ FileUtils.deleteDirectory( oldPath );
+ } else
+ {
+ Files.move( oldPath, newFilePath );
+ }
+ return createContext(repo);
+ }
+ } catch (IOException e) {
+ log.error("IOException while moving index directory {}", e.getMessage(), e);
+ throw new IndexCreationFailedException("Could not recreated the index.", e);
+ } catch (UnsupportedBaseContextException e) {
+ throw new IndexCreationFailedException("The given context, is not a maven context.");
+ }
+ } else {
+ throw new IndexCreationFailedException("Bad context type. This is not a maven context.");
+ }
+ }
+
+ @Override
+ public void updateLocalIndexPath(Repository repo) {
+ if (repo.supportsFeature(IndexCreationFeature.class)) {
+ IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
+ try {
+ icf.setLocalIndexPath(getIndexPath(repo));
+ icf.setLocalPackedIndexPath(getPackedIndexPath(repo));
+ } catch (IOException e) {
+ log.error("Could not set local index path for {}. New URI: {}", repo.getId(), icf.getIndexPath());
+ }
+ }
+ }
+
+ @Override
+ public ArchivaIndexingContext mergeContexts(Repository destinationRepo, List<ArchivaIndexingContext> contexts,
+ boolean packIndex) throws UnsupportedOperationException,
+ IndexCreationFailedException, IllegalArgumentException {
+ if (!destinationRepo.supportsFeature(IndexCreationFeature.class)) {
+ throw new IllegalArgumentException("The given repository does not support the indexcreation feature");
+ }
+ Path mergedIndexDirectory = null;
+ try {
+ mergedIndexDirectory = Files.createTempDirectory("archivaMergedIndex");
+ } catch (IOException e) {
+ log.error("Could not create temporary directory for merged index: {}", e.getMessage(), e);
+ throw new IndexCreationFailedException("IO error while creating temporary directory for merged index: "+e.getMessage(), e);
+ }
+ IndexCreationFeature indexCreationFeature = destinationRepo.getFeature(IndexCreationFeature.class).get();
+ if (indexCreationFeature.getLocalIndexPath()== null) {
+ throw new IllegalArgumentException("The given repository does not have a local index path");
+ }
+ StorageAsset destinationPath = indexCreationFeature.getLocalIndexPath();
+
+ String tempRepoId = mergedIndexDirectory.getFileName().toString();
+
+ try
+ {
+ Path indexLocation = destinationPath.getFilePath();
+
+ List<IndexingContext> members = contexts.stream( ).filter(ctx -> ctx.supports(IndexingContext.class)).map( ctx ->
+ {
+ try {
+ return ctx.getBaseContext(IndexingContext.class);
+ } catch (UnsupportedBaseContextException e) {
+ // does not happen here
+ return null;
+ }
+ }).filter( Objects::nonNull ).collect( Collectors.toList() );
+ ContextMemberProvider memberProvider = new StaticContextMemberProvider(members);
+ IndexingContext mergedCtx = indexer.createMergedIndexingContext( tempRepoId, tempRepoId, mergedIndexDirectory.toFile(),
+ indexLocation.toFile(), true, memberProvider);
+ mergedCtx.optimize();
+
+ if ( packIndex )
+ {
+ IndexPackingRequest request = new IndexPackingRequest( mergedCtx, //
+ mergedCtx.acquireIndexSearcher().getIndexReader(), //
+ indexLocation.toFile() );
+ indexPacker.packIndex( request );
+ }
+
+ return new MavenIndexContext(destinationRepo, mergedCtx);
+ }
+ catch ( IOException e)
+ {
+ throw new IndexCreationFailedException( "IO Error during index merge: "+ e.getMessage(), e );
+ }
+ }
+
+ private StorageAsset getIndexPath(URI indexDirUri, RepositoryStorage repoStorage, String defaultDir) throws IOException
+ {
+ StorageAsset rootAsset = repoStorage.getRoot();
+ RepositoryStorage storage = rootAsset.getStorage();
+ Path indexDirectory;
+ Path repositoryPath = rootAsset.getFilePath().toAbsolutePath();
+ StorageAsset indexDir;
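+ // three cases: an absolute path outside the repository gets its own filesystem storage,
+ // an absolute path inside the repository is relativized against the repository root,
+ // and a relative or empty path is resolved against the repository storage (default directory if empty)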
+ if ( ! StringUtils.isEmpty(indexDirUri.toString( ) ) )
+ {
+
+ indexDirectory = PathUtil.getPathFromUri( indexDirUri );
+ // absolute path outside the repository: use a separate filesystem storage for the index
+ if ( indexDirectory.isAbsolute( ) && !indexDirectory.startsWith(repositoryPath))
+ {
+ if (storage instanceof FilesystemStorage) {
+ FilesystemStorage fsStorage = (FilesystemStorage) storage;
+ FilesystemStorage indexStorage = new FilesystemStorage(indexDirectory.getParent(), fsStorage.getFileLockManager());
+ indexDir = indexStorage.getAsset(indexDirectory.getFileName().toString());
+ } else {
+ throw new IOException("The given storage is not file based.");
+ }
+ } else if (indexDirectory.isAbsolute()) {
+ indexDir = storage.getAsset(repositoryPath.relativize(indexDirectory).toString());
+ }
+ else
+ {
+ indexDir = storage.getAsset(indexDirectory.toString());
+ }
+ }
+ else
+ {
+ indexDir = storage.getAsset( defaultDir );
+ }
+
+ if ( !indexDir.exists() )
+ {
+ indexDir.create( AssetType.CONTAINER );
+ }
+ return indexDir;
+ }
+
+ private StorageAsset getIndexPath( Repository repo) throws IOException {
+ IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
+ return getIndexPath( icf.getIndexPath(), repo, DEFAULT_INDEX_PATH);
+ }
+
+ private StorageAsset getPackedIndexPath(Repository repo) throws IOException {
+ IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
+ return getIndexPath(icf.getPackedIndexPath(), repo, DEFAULT_PACKED_INDEX_PATH);
+ }
+
+ private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
+ {
+ String contextKey = "remote-" + remoteRepository.getId( );
+
+
+ // create remote repository path
+ Path repoDir = remoteRepository.getRoot().getFilePath();
+ if ( !Files.exists( repoDir ) )
+ {
+ Files.createDirectories( repoDir );
+ }
+
+ StorageAsset indexDirectory;
+
+ // is there configured indexDirectory ?
+ if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
+ {
+ RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
+ indexDirectory = getIndexPath(remoteRepository);
+ String remoteIndexUrl = calculateIndexRemoteUrl( remoteRepository.getLocation( ), rif );
+ try
+ {
+
+ return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
+ }
+ catch ( IndexFormatTooOldException e )
+ {
+ // the existing index uses an old lucene format, so we need to delete it
+ // and recreate it afterwards
+ log.warn( "the index of repository {} is too old, we have to delete and recreate it", //
+ remoteRepository.getId( ) );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( indexDirectory.getFilePath() );
+ return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
+
+ }
+ }
+ else
+ {
+ throw new IOException( "No remote index defined" );
+ }
+ }
+
+ private IndexingContext getIndexingContext( Repository repository, String contextKey, Path repoDir, StorageAsset indexDirectory, String indexUrl ) throws IOException
+ {
+ try
+ {
+ if (!Files.exists(indexDirectory.getFilePath())) {
+ Files.createDirectories(indexDirectory.getFilePath());
+ }
+ return indexer.createIndexingContext( contextKey, repository.getId( ), repoDir.toFile( ), indexDirectory.getFilePath( ).toFile( ),
+ repository.getLocation( ) == null ? null : repository.getLocation( ).toString( ),
+ indexUrl,
+ true, false,
+ indexCreators );
+ } catch (Exception e) {
+ log.error("Could not create index for asset {}", indexDirectory);
+ throw new IOException(e);
+ }
+ }
+
+ private IndexingContext createManagedContext( ManagedRepository repository ) throws IOException
+ {
+
+ IndexingContext context;
+ // take care first about repository location as can be relative
+ Path repositoryDirectory = repository.getRoot().getFilePath();
+
+ if ( !Files.exists( repositoryDirectory ) )
+ {
+ try
+ {
+ Files.createDirectories( repositoryDirectory );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Could not create directory {}", repositoryDirectory );
+ }
+ }
+
+ StorageAsset indexDirectory;
+
+ if ( repository.supportsFeature( IndexCreationFeature.class ) )
+ {
+ indexDirectory = getIndexPath(repository);
+ log.debug( "Preparing index at {}", indexDirectory );
+
+ String indexUrl = repositoryDirectory.toUri( ).toURL( ).toExternalForm( );
+ try
+ {
+ context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
+ context.setSearchable( repository.isScanned( ) );
+ }
+ catch ( IndexFormatTooOldException e )
+ {
+ // the existing index uses an old lucene format, so we need to delete it
+ // and recreate it afterwards
+ log.warn( "the index of repository {} is too old, we have to delete and recreate it", //
+ repository.getId( ) );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( indexDirectory.getFilePath() );
+ context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
+ context.setSearchable( repository.isScanned( ) );
+ }
+ return context;
+ }
+ else
+ {
+ throw new IOException( "No repository index defined" );
+ }
+ }
+
+ private String calculateIndexRemoteUrl( URI baseUri, RemoteIndexFeature rif )
+ {
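+ // without a configured index URI, the default index path below the base URI is used;
+ // absolute index URIs are taken as-is, relative ones are resolved against the base URI
+ // (with special handling for file based locations)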
+ if ( rif.getIndexUri( ) == null )
+ {
+ return baseUri.resolve( "/"+DEFAULT_INDEX_PATH ).toString( );
+ }
+ else
+ {
+ URI rifUri = rif.getIndexUri( );
+ if (rifUri.isAbsolute()) {
+ return rifUri.toString( );
+ } else
+ {
+ if (baseUri.getScheme().toLowerCase().equals( "file" )) {
+ return Paths.get( baseUri ).resolve( rifUri.getPath() ).toUri( ).toString( );
+ } else
+ {
+ String pathString = rifUri.getPath( ).startsWith( "/" ) ? rifUri.getPath( ) : "/" + rifUri.getPath( );
+ return baseUri.resolve( pathString ).toString( );
+ }
+ }
+ }
+ }
+
+ private static final class DownloadListener
+ implements TransferListener
+ {
+ private Logger log = LoggerFactory.getLogger( getClass( ) );
+
+ private String resourceName;
+
+ private long startTime;
+
+ private int totalLength = 0;
+
+ @Override
+ public void transferInitiated( TransferEvent transferEvent )
+ {
+ startTime = System.currentTimeMillis( );
+ resourceName = transferEvent.getResource( ).getName( );
+ log.debug( "initiate transfer of {}", resourceName );
+ }
+
+ @Override
+ public void transferStarted( TransferEvent transferEvent )
+ {
+ this.totalLength = 0;
+ resourceName = transferEvent.getResource( ).getName( );
+ log.info( "start transfer of {}", transferEvent.getResource( ).getName( ) );
+ }
+
+ @Override
+ public void transferProgress( TransferEvent transferEvent, byte[] buffer, int length )
+ {
+ log.debug( "transfer of {} : {}/{}", transferEvent.getResource( ).getName( ), buffer.length, length );
+ this.totalLength += length;
+ }
+
+ @Override
+ public void transferCompleted( TransferEvent transferEvent )
+ {
+ resourceName = transferEvent.getResource( ).getName( );
+ long endTime = System.currentTimeMillis( );
+ log.info( "end of transfer file {} {} kb: {}s", transferEvent.getResource( ).getName( ),
+ this.totalLength / 1024, ( endTime - startTime ) / 1000 );
+ }
+
+ @Override
+ public void transferError( TransferEvent transferEvent )
+ {
+ log.info( "error of transfer file {}: {}", transferEvent.getResource( ).getName( ),
+ transferEvent.getException( ).getMessage( ), transferEvent.getException( ) );
+ }
+
+ @Override
+ public void debug( String message )
+ {
+ log.debug( "transfer debug {}", message );
+ }
+ }
+
+ private static class WagonResourceFetcher
+ implements ResourceFetcher
+ {
+
+ Logger log;
+
+ Path tempIndexDirectory;
+
+ Wagon wagon;
+
+ RemoteRepository remoteRepository;
+
+ private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
+ RemoteRepository remoteRepository )
+ {
+ this.log = log;
+ this.tempIndexDirectory = tempIndexDirectory;
+ this.wagon = wagon;
+ this.remoteRepository = remoteRepository;
+ }
+
+ @Override
+ public void connect( String id, String url ) {
+ //no op
+ }
+
+ @Override
+ public void disconnect( ) {
+ // no op
+ }
+
+ @Override
+ public InputStream retrieve( String name )
+ throws IOException {
+ try
+ {
+ log.info( "index update retrieve file, name:{}", name );
+ Path file = tempIndexDirectory.resolve( name );
+ Files.deleteIfExists( file );
+ file.toFile( ).deleteOnExit( );
+ wagon.get( addParameters( name, remoteRepository ), file.toFile( ) );
+ return Files.newInputStream( file );
+ }
+ catch ( AuthorizationException | TransferFailedException e )
+ {
+ throw new IOException( e.getMessage( ), e );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ FileNotFoundException fnfe = new FileNotFoundException( e.getMessage( ) );
+ fnfe.initCause( e );
+ throw fnfe;
+ }
+ }
+
+ // FIXME remove crappy copy/paste
+ protected String addParameters( String path, RemoteRepository remoteRepository )
+ {
+ if ( remoteRepository.getExtraParameters( ).isEmpty( ) )
+ {
+ return path;
+ }
+
+ boolean question = false;
+
+ StringBuilder res = new StringBuilder( path == null ? "" : path );
+
+ for ( Map.Entry<String, String> entry : remoteRepository.getExtraParameters( ).entrySet( ) )
+ {
+ // the first parameter is appended with '?', all following parameters with '&'
+ if ( !question )
+ {
+ res.append( '?' ).append( entry.getKey( ) ).append( '=' ).append( entry.getValue( ) );
+ question = true;
+ }
+ else
+ {
+ res.append( '&' ).append( entry.getKey( ) ).append( '=' ).append( entry.getValue( ) );
+ }
+ }
+
+ return res.toString( );
+ }
+
+ }
+}
--- /dev/null
+package org.apache.archiva.maven.indexer.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.indexer.UnsupportedBaseContextException;
+import org.apache.archiva.indexer.search.ArtifactInfoFilter;
+import org.apache.archiva.maven.indexer.search.NoClassifierArtifactInfoFilter;
+import org.apache.archiva.indexer.search.RepositorySearch;
+import org.apache.archiva.indexer.search.RepositorySearchException;
+import org.apache.archiva.indexer.search.SearchFields;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.indexer.search.SearchResultLimits;
+import org.apache.archiva.indexer.search.SearchResults;
+import org.apache.archiva.maven.indexer.util.SearchUtil;
+import org.apache.archiva.model.ArchivaArtifactModel;
+import org.apache.archiva.proxy.ProxyRegistry;
+import org.apache.archiva.proxy.model.ProxyConnector;
+import org.apache.archiva.repository.RemoteRepository;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.maven.index.ArtifactInfo;
+import org.apache.maven.index.FlatSearchRequest;
+import org.apache.maven.index.FlatSearchResponse;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.MAVEN;
+import org.apache.maven.index.OSGI;
+import org.apache.maven.index.QueryCreator;
+import org.apache.maven.index.SearchType;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.expr.SearchExpression;
+import org.apache.maven.index.expr.SearchTyped;
+import org.apache.maven.index.expr.SourcedSearchExpression;
+import org.apache.maven.index.expr.UserInputSearchExpression;
+import org.apache.maven.index_shaded.lucene.search.BooleanClause;
+import org.apache.maven.index_shaded.lucene.search.BooleanClause.Occur;
+import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * RepositorySearch implementation which uses the Maven Indexer for searching.
+ */
+@Service( "repositorySearch#maven" )
+public class MavenRepositorySearch
+ implements RepositorySearch
+{
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+ private Indexer indexer;
+
+ private QueryCreator queryCreator;
+
+
+ private RepositoryRegistry repositoryRegistry;
+
+ private ProxyRegistry proxyRegistry;
+
+ protected MavenRepositorySearch()
+ {
+ // for test purposes
+ }
+
+ @Inject
+ public MavenRepositorySearch( Indexer nexusIndexer, RepositoryRegistry repositoryRegistry,
+ ProxyRegistry proxyRegistry, QueryCreator queryCreator )
+ {
+ this.indexer = nexusIndexer;
+ this.queryCreator = queryCreator;
+ this.repositoryRegistry = repositoryRegistry;
+ this.proxyRegistry = proxyRegistry;
+ }
+
+ /**
+ * @see RepositorySearch#search(String, List, String, SearchResultLimits, List)
+ */
+ @Override
+ public SearchResults search(String principal, List<String> selectedRepos, String term, SearchResultLimits limits,
+ List<String> previousSearchTerms )
+ throws RepositorySearchException
+ {
+ List<String> indexingContextIds = addIndexingContexts( selectedRepos );
+
+ // since upgrade to nexus 2.0.0, query has changed from g:[QUERIED TERM]* to g:*[QUERIED TERM]*
+ // resulting in more wildcard searches, so we need to increase the max clause count
+ BooleanQuery.setMaxClauseCount( Integer.MAX_VALUE );
+ BooleanQuery.Builder qb = new BooleanQuery.Builder();
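+ // every previous search term becomes its own MUST sub-query, so the new term further
+ // narrows down the results of the previous searches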
+
+ if ( previousSearchTerms == null || previousSearchTerms.isEmpty() )
+ {
+ constructQuery( term, qb );
+ }
+ else
+ {
+ for ( String previousTerm : previousSearchTerms )
+ {
+ BooleanQuery.Builder iQuery = new BooleanQuery.Builder();
+ constructQuery( previousTerm, iQuery );
+
+ qb.add( iQuery.build(), BooleanClause.Occur.MUST );
+ }
+
+ BooleanQuery.Builder iQuery = new BooleanQuery.Builder();
+ constructQuery( term, iQuery );
+ qb.add( iQuery.build(), BooleanClause.Occur.MUST );
+ }
+
+ // we return only artifacts without a classifier in quick search; olamy cannot find a way to query for this field being empty
+ // FIXME cannot find a way currently to setup this in constructQuery !!!
+ return search( limits, qb.build(), indexingContextIds, NoClassifierArtifactInfoFilter.LIST, selectedRepos, true );
+
+ }
+
+ /**
+ * @see RepositorySearch#search(String, SearchFields, SearchResultLimits)
+ */
+ @SuppressWarnings( "deprecation" )
+ @Override
+ public SearchResults search( String principal, SearchFields searchFields, SearchResultLimits limits )
+ throws RepositorySearchException
+ {
+ if ( searchFields.getRepositories() == null )
+ {
+ throw new RepositorySearchException( "Repositories cannot be null." );
+ }
+
+ List<String> indexingContextIds = addIndexingContexts( searchFields.getRepositories() );
+
+ // if no index is found among the specified repositories, return an empty search result instead of searching all indexes
+ // olamy: IMHO doesn't make sense
+ if ( !searchFields.getRepositories().isEmpty() && ( indexingContextIds == null
+ || indexingContextIds.isEmpty() ) )
+ {
+ return new SearchResults();
+ }
+
+ BooleanQuery.Builder qb = new BooleanQuery.Builder();
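+ // build one MUST clause per populated search field; exact search uses the value as-is
+ // (SourcedSearchExpression), otherwise the user input is tokenized (UserInputSearchExpression)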
+ if ( StringUtils.isNotBlank( searchFields.getGroupId() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.GROUP_ID, searchFields.isExactSearch() ? new SourcedSearchExpression(
+ searchFields.getGroupId() ) : new UserInputSearchExpression( searchFields.getGroupId() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getArtifactId() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.ARTIFACT_ID,
+ searchFields.isExactSearch()
+ ? new SourcedSearchExpression( searchFields.getArtifactId() )
+ : new UserInputSearchExpression( searchFields.getArtifactId() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getVersion() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.VERSION, searchFields.isExactSearch() ? new SourcedSearchExpression(
+ searchFields.getVersion() ) : new SourcedSearchExpression( searchFields.getVersion() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getPackaging() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.PACKAGING, searchFields.isExactSearch() ? new SourcedSearchExpression(
+ searchFields.getPackaging() ) : new UserInputSearchExpression( searchFields.getPackaging() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getClassName() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.CLASSNAMES,
+ new UserInputSearchExpression( searchFields.getClassName() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleSymbolicName() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.SYMBOLIC_NAME,
+ new UserInputSearchExpression( searchFields.getBundleSymbolicName() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleVersion() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.VERSION,
+ new UserInputSearchExpression( searchFields.getBundleVersion() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleExportPackage() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.EXPORT_PACKAGE,
+ new UserInputSearchExpression( searchFields.getBundleExportPackage() ) ),
+ Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleExportService() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.EXPORT_SERVICE,
+ new UserInputSearchExpression( searchFields.getBundleExportService() ) ),
+ Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleImportPackage() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.IMPORT_PACKAGE,
+ new UserInputSearchExpression( searchFields.getBundleImportPackage() ) ),
+ Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleName() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.NAME, new UserInputSearchExpression( searchFields.getBundleName() ) ),
+ Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleRequireBundle() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.REQUIRE_BUNDLE,
+ new UserInputSearchExpression( searchFields.getBundleRequireBundle() ) ),
+ Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getClassifier() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.CLASSIFIER, searchFields.isExactSearch() ? new SourcedSearchExpression(
+ searchFields.getClassifier() ) : new UserInputSearchExpression( searchFields.getClassifier() ) ),
+ Occur.MUST );
+ }
+ else if ( searchFields.isExactSearch() )
+ {
+ //TODO improvement: in the case of an exact search with no classifier, we should query for a classifier with a null value;
+ // currently it's done in DefaultSearchService with some filtering
+ }
+
+ BooleanQuery qu = qb.build();
+ if ( qu.clauses() == null || qu.clauses().size() <= 0 )
+ {
+ throw new RepositorySearchException( "No search fields set." );
+ }
+ if (qu.clauses()!=null) {
+ log.debug("CLAUSES ", qu.clauses());
+ for (BooleanClause cl : qu.clauses()) {
+ log.debug("Clause ",cl);
+ }
+ }
+
+ return search( limits, qu, indexingContextIds, Collections.<ArtifactInfoFilter>emptyList(),
+ searchFields.getRepositories(), searchFields.isIncludePomArtifacts() );
+ }
+
+ private static class NullSearch
+ implements SearchTyped, SearchExpression
+ {
+ private static final NullSearch INSTANCE = new NullSearch();
+
+ @Override
+ public String getStringValue()
+ {
+ return "[[NULL_VALUE]]";
+ }
+
+ @Override
+ public SearchType getSearchType()
+ {
+ return SearchType.EXACT;
+ }
+ }
+
+ private SearchResults search( SearchResultLimits limits, BooleanQuery q, List<String> indexingContextIds,
+ List<? extends ArtifactInfoFilter> filters, List<String> selectedRepos,
+ boolean includePoms )
+ throws RepositorySearchException
+ {
+
+ try
+ {
+ FlatSearchRequest request = new FlatSearchRequest( q );
+
+ request.setContexts( getIndexingContexts( indexingContextIds ) );
+ if ( limits != null )
+ {
+ // we apply the limit only when the first page is requested
+ if ( limits.getSelectedPage() == 0 )
+ {
+ request.setCount( limits.getPageSize() * ( Math.max( 1, limits.getSelectedPage() ) ) );
+ }
+ }
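+ // deeper pages are not limited here; convertToSearchResults() applies paginate() when a specific page is selected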
+
+ FlatSearchResponse response = indexer.searchFlat( request );
+
+ if ( response == null || response.getTotalHitsCount() == 0 )
+ {
+ SearchResults results = new SearchResults();
+ results.setLimits( limits );
+ return results;
+ }
+
+ return convertToSearchResults( response, limits, filters, selectedRepos, includePoms );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositorySearchException( e.getMessage(), e );
+ }
+
+ }
+
+ private IndexingContext getIndexingContext(String id) {
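+ // ids produced by getRemoteIndexingContextIds() carry a "remote-" prefix; strip it to resolve the
+ // underlying repository and then unwrap its maven-indexer base context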
+ String repoId;
+ if (StringUtils.startsWith(id, "remote-")) {
+ repoId = StringUtils.substringAfter(id, "remote-");
+ } else {
+ repoId = id;
+ }
+ Repository repo = repositoryRegistry.getRepository(repoId);
+ if (repo==null) {
+ return null;
+ } else {
+ if (repo.getIndexingContext()!=null) {
+ try {
+ return repo.getIndexingContext().getBaseContext(IndexingContext.class);
+ } catch (UnsupportedBaseContextException e) {
+ return null;
+ }
+ } else {
+ return null;
+ }
+ }
+ }
+
+ private List<IndexingContext> getIndexingContexts( List<String> ids )
+ {
+ List<IndexingContext> contexts = new ArrayList<>( ids.size() );
+
+ for ( String id : ids )
+ {
+ IndexingContext context = getIndexingContext(id);
+ if ( context != null )
+ {
+ contexts.add( context );
+ }
+ else
+ {
+ log.warn( "context with id {} not exists", id );
+ }
+ }
+
+ return contexts;
+ }
+
+ private void constructQuery( String term, BooleanQuery.Builder q )
+ {
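+ // quick search: the term is OR'ed (SHOULD) across groupId, artifactId, version, packaging and class names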
+ q.add( indexer.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ), Occur.SHOULD );
+ q.add( indexer.constructQuery( MAVEN.ARTIFACT_ID, new UserInputSearchExpression( term ) ), Occur.SHOULD );
+ q.add( indexer.constructQuery( MAVEN.VERSION, new UserInputSearchExpression( term ) ), Occur.SHOULD );
+ q.add( indexer.constructQuery( MAVEN.PACKAGING, new UserInputSearchExpression( term ) ), Occur.SHOULD );
+ q.add( indexer.constructQuery( MAVEN.CLASSNAMES, new UserInputSearchExpression( term ) ), Occur.SHOULD );
+
+ //Query query =
+ // new WildcardQuery( new Term( MAVEN.CLASSNAMES.getFieldName(), "*" ) );
+ //q.add( query, Occur.MUST_NOT );
+ // olamy IMHO we could set this option as at least one must match
+ //q.setMinimumNumberShouldMatch( 1 );
+ }
+
+
+ /**
+ * @param selectedRepos the repositories selected for the search
+ * @return the ids of the indexing contexts that will be used
+ */
+ private List<String> addIndexingContexts( List<String> selectedRepos )
+ {
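+ // for every selected managed Maven repository with a searchable index, collect its own context id
+ // plus the "remote-" context ids of the remote repositories it proxies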
+ Set<String> indexingContextIds = new HashSet<>();
+ for ( String repo : selectedRepos )
+ {
+ try
+ {
+ Repository rRepo = repositoryRegistry.getRepository(repo);
+
+ if ( rRepo != null )
+ {
+
+ if (rRepo.getType().equals(RepositoryType.MAVEN)) {
+ assert rRepo.getIndexingContext() != null;
+ IndexingContext context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
+ if (context.isSearchable()) {
+ indexingContextIds.addAll(getRemoteIndexingContextIds(repo));
+ indexingContextIds.add(context.getId());
+ } else {
+ log.warn("indexingContext with id {} not searchable", rRepo.getId());
+ }
+ }
+
+ }
+ else
+ {
+ log.warn( "Repository '{}' not found in configuration.", repo );
+ }
+ }
+ catch ( RepositorySearchException e )
+ {
+ log.warn( "RepositorySearchException occured while accessing index of repository '{}' : {}", repo,
+ e.getMessage() );
+ continue;
+ } catch (UnsupportedBaseContextException e) {
+ log.error("Fatal situation: Maven repository without IndexingContext found.");
+ continue;
+ }
+ }
+
+ return new ArrayList<>( indexingContextIds );
+ }
+
+
+ @Override
+ public Set<String> getRemoteIndexingContextIds( String managedRepoId )
+ throws RepositorySearchException
+ {
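+ // collect "remote-<repoId>" ids for every searchable remote Maven index proxied by the given managed repository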
+ Set<String> ids = new HashSet<>();
+
+ List<ProxyConnector> proxyConnectors = null;
+ proxyConnectors = proxyRegistry.getProxyConnectorAsMap( ).get( managedRepoId );
+
+ if ( proxyConnectors == null || proxyConnectors.isEmpty() )
+ {
+ return ids;
+ }
+
+ for ( ProxyConnector proxyConnector : proxyConnectors )
+ {
+ String remoteId = "remote-" + proxyConnector.getTargetRepository().getId();
+ RemoteRepository repo = repositoryRegistry.getRemoteRepository(proxyConnector.getTargetRepository().getId());
+ if (repo.getType()==RepositoryType.MAVEN) {
+ try {
+ IndexingContext context = repo.getIndexingContext() != null ? repo.getIndexingContext().getBaseContext(IndexingContext.class) : null;
+ if (context!=null && context.isSearchable()) {
+ ids.add(remoteId);
+ }
+ } catch (UnsupportedBaseContextException e) {
+ // Ignore this one
+ }
+ }
+ }
+
+ return ids;
+ }
+
+ @Override
+ public Collection<String> getAllGroupIds( String principal, List<String> selectedRepos )
+ throws RepositorySearchException
+ {
+ List<IndexingContext> indexContexts = getIndexingContexts( selectedRepos );
+
+ if ( indexContexts == null || indexContexts.isEmpty() )
+ {
+ return Collections.emptyList();
+ }
+
+ try
+ {
+ Set<String> allGroupIds = new HashSet<>();
+ for ( IndexingContext indexingContext : indexContexts )
+ {
+ allGroupIds.addAll( indexingContext.getAllGroups() );
+ }
+ return allGroupIds;
+ }
+ catch ( IOException e )
+ {
+ throw new RepositorySearchException( e.getMessage(), e );
+ }
+
+ }
+
+ private SearchResults convertToSearchResults( FlatSearchResponse response, SearchResultLimits limits,
+ List<? extends ArtifactInfoFilter> artifactInfoFilters,
+ List<String> selectedRepos, boolean includePoms )
+ {
+ SearchResults results = new SearchResults();
+ Set<ArtifactInfo> artifactInfos = response.getResults();
+
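+ // group the raw index records by groupId:artifactId:classifier:packaging and accumulate the versions per hit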
+ for ( ArtifactInfo artifactInfo : artifactInfos )
+ {
+ if ( StringUtils.equalsIgnoreCase( "pom", artifactInfo.getFileExtension() ) && !includePoms )
+ {
+ continue;
+ }
+ String id = SearchUtil.getHitId( artifactInfo.getGroupId(), //
+ artifactInfo.getArtifactId(), //
+ artifactInfo.getClassifier(), //
+ artifactInfo.getPackaging() );
+ Map<String, SearchResultHit> hitsMap = results.getHitsMap();
+
+
+ if ( !applyArtifactInfoFilters( artifactInfo, artifactInfoFilters, hitsMap ) )
+ {
+ continue;
+ }
+
+ SearchResultHit hit = hitsMap.get( id );
+ if ( hit != null )
+ {
+ if ( !hit.getVersions().contains( artifactInfo.getVersion() ) )
+ {
+ hit.addVersion( artifactInfo.getVersion() );
+ }
+ }
+ else
+ {
+ hit = new SearchResultHit();
+ hit.setArtifactId( artifactInfo.getArtifactId() );
+ hit.setGroupId( artifactInfo.getGroupId() );
+ hit.setRepositoryId( artifactInfo.getRepository() );
+ hit.addVersion( artifactInfo.getVersion() );
+ hit.setBundleExportPackage( artifactInfo.getBundleExportPackage() );
+ hit.setBundleExportService( artifactInfo.getBundleExportService() );
+ hit.setBundleSymbolicName( artifactInfo.getBundleSymbolicName() );
+ hit.setBundleVersion( artifactInfo.getBundleVersion() );
+ hit.setBundleDescription( artifactInfo.getBundleDescription() );
+ hit.setBundleDocUrl( artifactInfo.getBundleDocUrl() );
+ hit.setBundleRequireBundle( artifactInfo.getBundleRequireBundle() );
+ hit.setBundleImportPackage( artifactInfo.getBundleImportPackage() );
+ hit.setBundleLicense( artifactInfo.getBundleLicense() );
+ hit.setBundleName( artifactInfo.getBundleName() );
+ hit.setContext( artifactInfo.getContext() );
+ hit.setGoals( artifactInfo.getGoals() );
+ hit.setPrefix( artifactInfo.getPrefix() );
+ hit.setPackaging( artifactInfo.getPackaging() );
+ hit.setClassifier( artifactInfo.getClassifier() );
+ hit.setFileExtension( artifactInfo.getFileExtension() );
+ hit.setUrl( getBaseUrl( artifactInfo, selectedRepos ) );
+ }
+
+ results.addHit( id, hit );
+ }
+
+ results.setTotalHits( response.getTotalHitsCount() );
+ results.setTotalHitsMapSize( results.getHitsMap().values().size() );
+ results.setReturnedHitsCount( response.getReturnedHitsCount() );
+ results.setLimits( limits );
+
+ if ( limits == null || limits.getSelectedPage() == SearchResultLimits.ALL_PAGES )
+ {
+ return results;
+ }
+ else
+ {
+ return paginate( results );
+ }
+ }
+
+ /**
+ * Calculates the repository-relative artifact path (without the Archiva base URL and web context).
+ *
+ * @param artifactInfo the artifact hit
+ * @param selectedRepos the repositories selected for the search
+ * @return the relative path to the artifact
+ */
+ protected String getBaseUrl( ArtifactInfo artifactInfo, List<String> selectedRepos )
+ {
+ StringBuilder sb = new StringBuilder();
+ if ( StringUtils.startsWith( artifactInfo.getContext(), "remote-" ) )
+ {
+ // it's a remote index result: we look for a managed repository that proxies this remote repository
+ // and on which the current user has read permission
+ String managedRepoId =
+ getManagedRepoId( StringUtils.substringAfter( artifactInfo.getContext(), "remote-" ), selectedRepos );
+ if ( managedRepoId != null )
+ {
+ sb.append( '/' ).append( managedRepoId );
+ artifactInfo.setContext( managedRepoId );
+ }
+ }
+ else
+ {
+ sb.append( '/' ).append( artifactInfo.getContext() );
+ }
+
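+ // resulting form: /<repoId>/<groupId as path>/<artifactId>/<version>/<artifactId>-<version>[-<classifier>].<extension>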
+ sb.append( '/' ).append( StringUtils.replaceChars( artifactInfo.getGroupId(), '.', '/' ) );
+ sb.append( '/' ).append( artifactInfo.getArtifactId() );
+ sb.append( '/' ).append( artifactInfo.getVersion() );
+ sb.append( '/' ).append( artifactInfo.getArtifactId() );
+ sb.append( '-' ).append( artifactInfo.getVersion() );
+ if ( StringUtils.isNotBlank( artifactInfo.getClassifier() ) )
+ {
+ sb.append( '-' ).append( artifactInfo.getClassifier() );
+ }
+ // maven-plugin packaging is a jar
+ if ( StringUtils.equals( "maven-plugin", artifactInfo.getPackaging() ) )
+ {
+ sb.append( "jar" );
+ }
+ else
+ {
+ sb.append( '.' ).append( artifactInfo.getPackaging() );
+ }
+
+ return sb.toString();
+ }
+
+ /**
+ * Returns the id of a managed repository that proxies the given remote repository.
+ *
+ * @param remoteRepo the id of the remote repository
+ * @param selectedRepos the repositories selected for the search
+ * @return the id of a matching managed repository, or <code>null</code> if none is found
+ */
+ private String getManagedRepoId( String remoteRepo, List<String> selectedRepos )
+ {
+ Map<String, List<ProxyConnector>> proxyConnectorMap = proxyRegistry.getProxyConnectorAsMap();
+ if ( proxyConnectorMap == null || proxyConnectorMap.isEmpty() )
+ {
+ return null;
+ }
+ if ( selectedRepos != null && !selectedRepos.isEmpty() )
+ {
+ for ( Map.Entry<String, List<ProxyConnector>> entry : proxyConnectorMap.entrySet() )
+ {
+ if ( selectedRepos.contains( entry.getKey() ) )
+ {
+ for ( ProxyConnector proxyConnector : entry.getValue() )
+ {
+ if ( StringUtils.equals( remoteRepo, proxyConnector.getTargetRepository().getId() ) )
+ {
+ return proxyConnector.getSourceRepository().getId();
+ }
+ }
+ }
+ }
+ }
+
+ // not found among the selected repositories, so return the first managed repository that proxies this remote
+ for ( Map.Entry<String, List<ProxyConnector>> entry : proxyConnectorMap.entrySet() )
+ {
+
+ for ( ProxyConnector proxyConnector : entry.getValue() )
+ {
+ if ( StringUtils.equals( remoteRepo, proxyConnector.getTargetRepository().getId() ) )
+ {
+ return proxyConnector.getSourceRepository().getId();
+ }
+ }
+
+ }
+ return null;
+ }
+
+ private boolean applyArtifactInfoFilters( ArtifactInfo artifactInfo,
+ List<? extends ArtifactInfoFilter> artifactInfoFilters,
+ Map<String, SearchResultHit> currentResult )
+ {
+ if ( artifactInfoFilters == null || artifactInfoFilters.isEmpty() )
+ {
+ return true;
+ }
+
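+ // map the index record to an ArchivaArtifactModel so that the generic filters can veto the hit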
+ ArchivaArtifactModel artifact = new ArchivaArtifactModel();
+ artifact.setArtifactId( artifactInfo.getArtifactId() );
+ artifact.setClassifier( artifactInfo.getClassifier() );
+ artifact.setGroupId( artifactInfo.getGroupId() );
+ artifact.setRepositoryId( artifactInfo.getRepository() );
+ artifact.setVersion( artifactInfo.getVersion() );
+ artifact.setChecksumMD5( artifactInfo.getMd5() );
+ artifact.setChecksumSHA1( artifactInfo.getSha1() );
+ for ( ArtifactInfoFilter filter : artifactInfoFilters )
+ {
+ if ( !filter.addArtifactInResult( artifact, currentResult ) )
+ {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ protected SearchResults paginate( SearchResults results )
+ {
+ SearchResultLimits limits = results.getLimits();
+ SearchResults paginated = new SearchResults();
+
+ // ( limits.getPageSize() * ( Math.max( 1, limits.getSelectedPage() ) ) );
+
+ int fetchCount = limits.getPageSize();
+ int offset = ( limits.getSelectedPage() * limits.getPageSize() );
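+ // e.g. pageSize=30 and selectedPage=1 select hits 30..59 of the overall result list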
+
+ if ( fetchCount > results.getTotalHits() )
+ {
+ fetchCount = results.getTotalHits();
+ }
+
+ // Goto offset.
+ if ( offset < results.getTotalHits() )
+ {
+ // only process if the offset is within the hit count.
+ for ( int i = 0; i < fetchCount; i++ )
+ {
+ // Stop fetching if we are past the total # of available hits.
+ if ( offset + i >= results.getHits().size() )
+ {
+ break;
+ }
+
+ SearchResultHit hit = results.getHits().get( ( offset + i ) );
+ if ( hit != null )
+ {
+ String id = SearchUtil.getHitId( hit.getGroupId(), hit.getArtifactId(), hit.getClassifier(),
+ hit.getPackaging() );
+ paginated.addHit( id, hit );
+ }
+ else
+ {
+ break;
+ }
+ }
+ }
+ paginated.setTotalHits( results.getTotalHits() );
+ paginated.setReturnedHitsCount( paginated.getHits().size() );
+ paginated.setTotalHitsMapSize( results.getTotalHitsMapSize() );
+ paginated.setLimits( limits );
+
+ return paginated;
+ }
+
+
+}
--- /dev/null
+package org.apache.archiva.maven.indexer.search;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.indexer.search.ArtifactInfoFilter;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.model.ArchivaArtifactModel;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @author Olivier Lamy
+ */
+public class NoClassifierArtifactInfoFilter
+ implements ArtifactInfoFilter
+{
+ public static final NoClassifierArtifactInfoFilter INSTANCE = new NoClassifierArtifactInfoFilter();
+
+ public static final List<? extends ArtifactInfoFilter> LIST = Arrays.asList( INSTANCE );
+
+ @Override
+ public boolean addArtifactInResult( ArchivaArtifactModel artifact, Map<String, SearchResultHit> currentResult )
+ {
+ return StringUtils.isBlank( artifact.getClassifier() );
+ }
+}
--- /dev/null
+package org.apache.archiva.maven.indexer.util;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang3.StringUtils;
+
+/**
+ * SearchUtil - utility class for search.
+ */
+public class SearchUtil
+{
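+ /**
+ * Builds the key used to group search hits: groupId:artifactId:classifier:packaging,
+ * e.g. {@code org.apache.archiva:archiva-search::jar} (blank parts become empty strings).
+ */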
+ public static String getHitId( String groupId, String artifactId, String classifier, String packaging )
+ {
+ return ( StringUtils.isBlank( groupId ) ? "" : StringUtils.trim( groupId ) ) + ":" //
+ + ( StringUtils.isBlank( artifactId ) ? "" : StringUtils.trim( artifactId ) ) + ":" //
+ + ( StringUtils.isBlank( classifier ) ? "" : StringUtils.trim( classifier ) ) + ":" //
+ + ( StringUtils.isBlank( packaging ) ? "" : StringUtils.trim( packaging ) );
+ }
+}
default-lazy-init="false">
<context:annotation-config/>
- <context:component-scan base-package="org.apache.archiva.indexer.maven,org.apache.maven.index"/>
+ <context:component-scan base-package="org.apache.archiva.maven.indexer,org.apache.maven.index"/>
<bean name="taskScheduler#mergeRemoteIndexes"
+++ /dev/null
-package org.apache.archiva.indexer.maven;
-
-/*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements. See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership. The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License. You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied. See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*/
-
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.indexer.IndexCreationFailedException;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
-import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.features.RemoteIndexFeature;
-import org.apache.archiva.repository.maven.MavenManagedRepository;
-import org.apache.archiva.repository.maven.MavenRemoteRepository;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.apache.maven.index.MAVEN;
-import org.apache.maven.index.QueryCreator;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.expr.UserInputSearchExpression;
-import org.apache.maven.index_shaded.lucene.search.BooleanClause;
-import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
-import org.apache.maven.index_shaded.lucene.search.Query;
-import org.junit.After;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.test.context.ContextConfiguration;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-
-import static org.junit.Assert.*;
-
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
-@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
-public class MavenIndexManagerTest {
-
- @Inject
- ArchivaRepositoryRegistry repositoryRegistry;
-
- @SuppressWarnings( "unused" )
- @Inject
- RepositoryHandlerDependencies repositoryHandlerDependencies;
-
- private Path indexPath;
- private MavenManagedRepository repository;
- private ArchivaIndexingContext ctx;
- private MavenRemoteRepository repositoryRemote;
-
- @Inject
- MavenIndexManager mavenIndexManager;
-
- @Inject
- QueryCreator queryCreator;
-
-
- @After
- public void tearDown() {
- repositoryRegistry.destroy();
- if (ctx!=null) {
- try {
- ctx.close(true);
- } catch (IOException e) {
- //
- }
- }
- if (indexPath!=null && Files.exists(indexPath)) {
- FileUtils.deleteQuietly(indexPath);
- }
-
- }
-
- @Test
- public void pack() throws Exception {
- createTestContext();
- Path destDir = repository.getRoot().getFilePath().resolve("org/apache/archiva/archiva-webapp/1.0");
- Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-webapp/1.0");
- org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(),destDir.toFile());
- mavenIndexManager.scan(ctx);
- mavenIndexManager.pack(ctx);
- assertTrue(Files.list(indexPath).filter(path -> {
- try {
- return path.getFileName().toString().endsWith(".gz") && Files.size(path) > 0;
- } catch (IOException e) {
- return false;
- }
- }).findAny().isPresent());
- }
-
- @Test
- public void scan() throws Exception {
- createTestContext();
- Path destDir = repository.getRoot().getFilePath().resolve("org/apache/archiva/archiva-webapp/1.0");
- Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-webapp/1.0");
- org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(),destDir.toFile());
- mavenIndexManager.scan(ctx);
-
- IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
- String term = "org.apache.archiva";
- Query q = new BooleanQuery.Builder().add( queryCreator.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ),
- BooleanClause.Occur.SHOULD ).build();
- assertEquals(4, mvnCtx.acquireIndexSearcher().count(q));
- }
-
- /*
- * Does only a index update via file uri, no HTTP uri
- */
- @Test
- public void update() throws Exception {
- createTestContext();
- mavenIndexManager.pack(ctx);
- ctx.close(false);
- createTestContextForRemote();
- mavenIndexManager.update(ctx, true);
- }
-
- @Test
- public void addArtifactsToIndex() throws Exception {
-
- ArchivaIndexingContext ctx = createTestContext();
- try {
- Path destDir = repository.getRoot().getFilePath().resolve("org/apache/archiva/archiva-search/1.0");
- Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-search/1.0");
- org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(), destDir.toFile());
- List<URI> uriList = new ArrayList<>();
- uriList.add(destDir.resolve("archiva-search-1.0.jar").toUri());
- uriList.add(destDir.resolve("archiva-search-1.0-sources.jar").toUri());
- mavenIndexManager.addArtifactsToIndex(ctx, uriList);
-
- IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
- String term = "org.apache.archiva";
- Query q = new BooleanQuery.Builder().add(queryCreator.constructQuery(MAVEN.GROUP_ID, new UserInputSearchExpression(term)),
- BooleanClause.Occur.SHOULD).build();
- assertEquals(2, mvnCtx.acquireIndexSearcher().count(q));
- } finally {
- try {
- ctx.close(true);
- } catch (IOException e) {
- // Ignore
- }
- }
- }
-
- @Test
- public void removeArtifactsFromIndex() throws Exception {
- ArchivaIndexingContext ctx = createTestContext();
- Path destDir = repository.getRoot().getFilePath().resolve("org/apache/archiva/archiva-search/1.0");
- Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-search/1.0");
- org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(), destDir.toFile());
- List<URI> uriList = new ArrayList<>();
- uriList.add(destDir.resolve("archiva-search-1.0.jar").toUri());
- uriList.add(destDir.resolve("archiva-search-1.0-sources.jar").toUri());
- mavenIndexManager.addArtifactsToIndex(ctx, uriList);
-
- IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
- String term = "org.apache.archiva";
- Query q = new BooleanQuery.Builder().add( queryCreator.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ),
- BooleanClause.Occur.SHOULD ).build();
- assertEquals(2, mvnCtx.acquireIndexSearcher().count(q));
- uriList.remove(0);
- mavenIndexManager.removeArtifactsFromIndex(ctx, uriList);
- assertEquals(1, mvnCtx.acquireIndexSearcher().count(q));
- }
-
- @Test
- public void supportsRepository() throws Exception {
- assertTrue(mavenIndexManager.supportsRepository(RepositoryType.MAVEN));
- assertFalse(mavenIndexManager.supportsRepository(RepositoryType.NPM));
- }
-
- private ArchivaIndexingContext createTestContext() throws URISyntaxException, IndexCreationFailedException, IOException {
- String indexPathName = ".index-test." + System.nanoTime();
- indexPath = Paths.get("target/repositories/test-repo" ).resolve(indexPathName);
- if (Files.exists(indexPath)) {
-
- try {
- FileUtils.deleteDirectory(indexPath);
- } catch (IOException e) {
- String destName = indexPath.getFileName().toString() + "." + System.currentTimeMillis();
- Files.move(indexPath, indexPath.getParent().resolve(destName));
- }
- }
- repository = MavenManagedRepository.newLocalInstance("test-repo", "Test Repo", Paths.get("target/repositories"));
- // repository.setLocation(new URI("test-repo"));
- IndexCreationFeature icf = repository.getFeature(IndexCreationFeature.class).get();
- icf.setIndexPath(new URI(indexPathName));
- ctx = mavenIndexManager.createContext(repository);
- return ctx;
- }
-
- private ArchivaIndexingContext createTestContextForRemote() throws URISyntaxException, IndexCreationFailedException, IOException {
- // indexPath = Paths.get("target/repositories/test-repo/.index-test");
- Path repoPath = Paths.get("target/repositories").toAbsolutePath();
- repositoryRemote = MavenRemoteRepository.newLocalInstance("test-repo", "Test Repo", repoPath);
- repositoryRemote.setLocation(repoPath.resolve("test-repo").toUri());
- RemoteIndexFeature icf = repositoryRemote.getFeature(RemoteIndexFeature.class).get();
- icf.setIndexUri(new URI(indexPath.getFileName().toString()));
- ctx = mavenIndexManager.createContext(repositoryRemote);
- return ctx;
- }
-
- @Test
- public void createContext() throws Exception {
- ArchivaIndexingContext ctx = createTestContext();
- assertNotNull(ctx);
- assertEquals(repository, ctx.getRepository());
- assertEquals("test-repo", ctx.getId());
- assertEquals(indexPath.toAbsolutePath(), ctx.getPath().getFilePath().toAbsolutePath());
- assertTrue(Files.exists(indexPath));
- List<Path> li = Files.list(indexPath).collect(Collectors.toList());
- assertTrue(li.size()>0);
-
- }
-
-}
\ No newline at end of file
+++ /dev/null
-package org.apache.archiva.indexer.maven.search;
-
-/*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements. See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership. The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License. You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied. See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*/
-
-import junit.framework.TestCase;
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.configuration.ArchivaConfiguration;
-import org.apache.archiva.configuration.Configuration;
-import org.apache.archiva.configuration.ConfigurationListener;
-import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.indexer.search.SearchResultHit;
-import org.apache.archiva.indexer.search.SearchResults;
-import org.apache.archiva.proxy.ProxyRegistry;
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
-import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.apache.commons.lang3.SystemUtils;
-import org.apache.maven.index.ArtifactContext;
-import org.apache.maven.index.ArtifactContextProducer;
-import org.apache.maven.index.ArtifactScanningListener;
-import org.apache.maven.index.DefaultScannerListener;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.IndexerEngine;
-import org.apache.maven.index.QueryCreator;
-import org.apache.maven.index.Scanner;
-import org.apache.maven.index.ScanningRequest;
-import org.apache.maven.index.ScanningResult;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index_shaded.lucene.index.IndexUpgrader;
-import org.easymock.EasyMock;
-import org.easymock.IMocksControl;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.runner.RunWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.test.context.ContextConfiguration;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.List;
-import java.util.Locale;
-
-import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
-
-/**
- * @author Olivier Lamy
- */
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
-@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
-public abstract class AbstractMavenRepositorySearch
- extends TestCase
-{
-
- protected Logger log = LoggerFactory.getLogger( getClass() );
-
- public static String TEST_REPO_1 = "maven-search-test-repo";
-
- public static String TEST_REPO_2 = "maven-search-test-repo-2";
-
-
- public static String REPO_RELEASE = "repo-release";
-
- MavenRepositorySearch search;
-
- ArchivaConfiguration archivaConfig;
-
- @Inject
- ArtifactContextProducer artifactContextProducer;
-
- @Inject
- ArchivaRepositoryRegistry repositoryRegistry;
-
- @SuppressWarnings( "unused" )
- @Inject
- RepositoryHandlerDependencies repositoryHandlerDependencies;
-
- @Inject
- ProxyRegistry proxyRegistry;
-
- @Inject
- private IndexerEngine indexerEngine;
-
- IMocksControl archivaConfigControl;
-
- Configuration config;
-
- @Inject
- Indexer indexer;
-
- @Inject
- Scanner scanner;
-
- @Inject
- QueryCreator queryCreator;
-
- @Before
- @Override
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" ) );
- assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" )) );
- Files.createDirectories( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" ) );
-
- FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" ) );
- assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" )) );
- Files.createDirectories( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" ) );
-
- archivaConfigControl = EasyMock.createControl();
-
- archivaConfig = archivaConfigControl.createMock( ArchivaConfiguration.class );
-
- repositoryRegistry.setArchivaConfiguration( archivaConfig );
-
- search = new MavenRepositorySearch( indexer, repositoryRegistry, proxyRegistry,
- queryCreator );
-
- assertNotNull( repositoryRegistry );
-
- config = new Configuration();
- config.addManagedRepository( createRepositoryConfig( TEST_REPO_1 ) );
- config.addManagedRepository( createRepositoryConfig( TEST_REPO_2 ) );
- config.addManagedRepository( createRepositoryConfig( REPO_RELEASE ) );
-
- archivaConfig.addListener( EasyMock.anyObject( ConfigurationListener.class ) );
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
- archivaConfig.save(EasyMock.anyObject(Configuration.class), EasyMock.anyString());
- EasyMock.expectLastCall().anyTimes();
- archivaConfigControl.replay();
- repositoryRegistry.reload();
-
- }
-
- @After
- @Override
- public void tearDown()
- throws Exception
- {
- archivaConfigControl.reset();
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
- archivaConfig.save(EasyMock.anyObject(Configuration.class), EasyMock.anyString());
- EasyMock.expectLastCall().anyTimes();
- archivaConfigControl.replay();
- repositoryRegistry.removeRepository(TEST_REPO_1);
- repositoryRegistry.removeRepository(TEST_REPO_2);
- repositoryRegistry.removeRepository(REPO_RELEASE);
- repositoryRegistry.destroy();
- FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 ) );
- assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 )) );
-
- FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 ) );
- assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 )) );
-
- super.tearDown();
- }
-
- protected ManagedRepositoryConfiguration createRepositoryConfig( String repository )
- {
- ManagedRepositoryConfiguration repositoryConfig = new ManagedRepositoryConfiguration();
- repositoryConfig.setId( repository );
- repositoryConfig.setLocation( org.apache.archiva.common.utils.FileUtils.getBasedir() + "/target/repos/" + repository );
- Path f = Paths.get( repositoryConfig.getLocation() );
- if ( !Files.exists(f) )
- {
- try
- {
- Files.createDirectories( f );
- }
- catch ( IOException e )
- {
- log.error("Could not create directories for {}", f);
- }
- }
- repositoryConfig.setLayout( "default" );
- repositoryConfig.setName( repository );
- repositoryConfig.setScanned( true );
- repositoryConfig.setSnapshots( false );
- repositoryConfig.setReleases( true );
- repositoryConfig.setIndexDir(DEFAULT_INDEX_PATH);
-
- return repositoryConfig;
- }
-
- protected void createIndex( String repository, List<Path> filesToBeIndexed, boolean scan) throws Exception {
- createIndex(repository, filesToBeIndexed, scan, null, true);
- }
-
- protected void createIndex( String repository, List<Path> filesToBeIndexed, boolean scan, Path indexDir, boolean copyFiles)
- throws Exception
- {
- final Repository rRepo = repositoryRegistry.getRepository(repository);
- IndexCreationFeature icf = rRepo.getFeature(IndexCreationFeature.class).get();
-
-
- ArchivaIndexingContext archivaCtx = rRepo.getIndexingContext();
- IndexingContext context = archivaCtx.getBaseContext(IndexingContext.class);
-
- if ( archivaCtx != null )
- {
- archivaCtx.close(true);
- }
-
- Path repoDir = Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir()).resolve("target").resolve("repos").resolve(repository);
-
- Path indexerDirectory = repoDir.resolve(".indexer" );
-
- if ( indexDir == null && Files.exists(indexerDirectory) )
- {
- FileUtils.deleteDirectory( indexerDirectory );
- assertFalse( Files.exists(indexerDirectory) );
- }
-
-
- Path lockFile = repoDir.resolve(".indexer/write.lock" );
- if ( Files.exists(lockFile) )
- {
- Files.delete(lockFile);
- }
- assertFalse( Files.exists(lockFile) );
- if (indexDir==null) {
- Path indexDirectory =
- Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/index/test-" + Long.toString(System.currentTimeMillis()));
- indexDirectory.toFile().deleteOnExit();
- FileUtils.deleteDirectory(indexDirectory);
- icf.setIndexPath(indexDirectory.toUri());
- config.getManagedRepositories( ).stream( ).filter( r -> r.getId( ).equals( rRepo.getId( ) ) ).findFirst( ).ifPresent( r ->
- r.setIndexDir( indexDirectory.toAbsolutePath( ).toString( ) )
- );
- // IndexUpgrader.main(new String[]{indexDirectory.toAbsolutePath().toString()});
- } else {
-
- icf.setIndexPath(indexDir.toUri());
- Files.createDirectories( indexDir );
- config.getManagedRepositories( ).stream( ).filter( r -> r.getId( ).equals( rRepo.getId( ) ) ).findFirst( ).ifPresent( r ->
- r.setIndexDir( indexDir.toAbsolutePath( ).toString( ) )
- );
- IndexUpgrader.main(new String[]{indexDir.toAbsolutePath().toString()});
-
- }
-
- if (copyFiles) {
- Path repo = Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + repository);
- assertTrue(Files.exists(repo));
- org.apache.commons.io.FileUtils.copyDirectory(repo.toFile(), repoDir.toFile());
- }
-
-
-
-
- archivaConfigControl.reset();
- archivaConfig.addListener( EasyMock.anyObject( ConfigurationListener.class ) );
- EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
- archivaConfig.save(EasyMock.anyObject(Configuration.class), EasyMock.anyString());
- EasyMock.expectLastCall().anyTimes();
- archivaConfigControl.replay();
- repositoryRegistry.reload();
- archivaConfigControl.reset();
-
- Repository rRepo2 = repositoryRegistry.getRepository( repository );
- icf = rRepo2.getFeature(IndexCreationFeature.class).get();
-
-
- archivaCtx = rRepo2.getIndexingContext();
- context = archivaCtx.getBaseContext(IndexingContext.class);
-
-
- // minimize datas in memory
-// context.getIndexWriter().setMaxBufferedDocs( -1 );
-// context.getIndexWriter().setRAMBufferSizeMB( 1 );
- for ( Path artifactFile : filesToBeIndexed )
- {
- assertTrue( "file not exists " + artifactFile, Files.exists(artifactFile) );
- ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile.toFile() );
-
- if ( artifactFile.toString().endsWith( ".pom" ) )
- {
- ac.getArtifactInfo().setFileExtension( "pom" );
- ac.getArtifactInfo().setPackaging( "pom" );
- ac.getArtifactInfo().setClassifier( "pom" );
- }
- indexer.addArtifactToIndex( ac, context );
- context.updateTimestamp( true );
- }
-
- if ( scan )
- {
- DefaultScannerListener listener = new DefaultScannerListener( context, indexerEngine, true, new ArtifactScanListener());
- ScanningRequest req = new ScanningRequest(context, listener );
- scanner.scan( req );
- context.commit();
- }
- // force flushing
- context.commit();
- // context.getIndexWriter().commit();
- context.setSearchable( true );
-
- }
-
- static class ArtifactScanListener
- implements ArtifactScanningListener
- {
- protected Logger log = LoggerFactory.getLogger( getClass() );
-
- @Override
- public void scanningStarted( IndexingContext ctx )
- {
- //
- }
-
- @Override
- public void scanningFinished( IndexingContext ctx, ScanningResult result )
- {
- // no op
- }
-
- @Override
- public void artifactError( ArtifactContext ac, Exception e )
- {
- log.debug( "artifactError {}", ac.getArtifact().getPath(), e );
- }
-
- @Override
- public void artifactDiscovered( ArtifactContext ac )
- {
- log.debug( "artifactDiscovered {}:{}", //
- ac.getArtifact() == null ? "" : ac.getArtifact().getPath(), //
- ac.getArtifact() == null ? "" : ac.getArtifactInfo() );
- }
- }
-
- public String niceDisplay( SearchResults searchResults )
- throws Exception
- {
- StringBuilder sb = new StringBuilder();
- for ( SearchResultHit hit : searchResults.getHits() )
- {
- sb.append( hit.toString() ).append( SystemUtils.LINE_SEPARATOR );
- }
- return sb.toString();
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.indexer.search.SearchFields;
-import org.apache.archiva.indexer.search.SearchResultHit;
-import org.apache.archiva.indexer.search.SearchResults;
-import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
-import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
-import org.easymock.EasyMock;
-import org.junit.After;
-import org.junit.Test;
-
-import javax.inject.Inject;
-import java.nio.file.Path;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * @author Olivier Lamy
- */
-public class MavenRepositorySearchOSGITest
- extends AbstractMavenRepositorySearch
-{
-
- @Inject
- ArchivaRepositoryRegistry repositoryRegistry;
-
- @SuppressWarnings( "unused" )
- @Inject
- RepositoryHandlerDependencies repositoryHandlerDependencies;
-
-
- @After
- @Override
- public void tearDown() throws Exception {
- super.tearDown();
- repositoryRegistry.destroy();
- }
-
- @Test
- public void searchFelixWithSymbolicName()
- throws Exception
- {
-
- createIndex( TEST_REPO_1, Collections.<Path>emptyList(), true );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- // search artifactId
- // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchFields searchFields = new SearchFields();
- searchFields.setBundleSymbolicName( "org.apache.felix.bundlerepository" );
- searchFields.setBundleVersion( "1.6.6" );
- searchFields.setRepositories( selectedRepos );
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.felix", hit.getGroupId() );
- assertEquals( "org.apache.felix.bundlerepository", hit.getArtifactId() );
- assertEquals( "1.6.6", hit.getVersions().get( 0 ) );
-
- assertEquals( "org.apache.felix.bundlerepository;uses:=\"org.osgi.framework\";version=\"2.0\"",
- hit.getBundleExportPackage() );
- assertEquals( "org.apache.felix.bundlerepository.RepositoryAdmin,org.osgi.service.obr.RepositoryAdmin",
- hit.getBundleExportService() );
- assertEquals( "org.apache.felix.bundlerepository", hit.getBundleSymbolicName() );
- assertEquals( "1.6.6", hit.getBundleVersion() );
- }
-
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven.search;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import junit.framework.TestCase;
-import org.apache.archiva.indexer.search.SearchResultHit;
-import org.apache.archiva.indexer.search.SearchResultLimits;
-import org.apache.archiva.indexer.search.SearchResults;
-import org.apache.archiva.indexer.util.SearchUtil;
-import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
-import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.junit.After;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.test.context.ContextConfiguration;
-
-import javax.inject.Inject;
-import java.util.Arrays;
-
-/**
- * @author Olivier Lamy
- */
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
-@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
-public class MavenRepositorySearchPaginateTest
- extends TestCase
-{
-
- @Inject
- ArchivaRepositoryRegistry repositoryRegistry;
-
- @SuppressWarnings( "unused" )
- @Inject
- RepositoryHandlerDependencies repositoryHandlerDependencies;
-
- @After
- public void endTests() {
- assert repositoryRegistry!=null;
- repositoryRegistry.destroy();
- }
-
- @Test
- public void nonPaginatedResult()
- throws Exception
- {
- MavenRepositorySearch search = new MavenRepositorySearch();
-
- SearchResults searchResults = build( 10, new SearchResultLimits( 0 ) );
-
- searchResults = search.paginate( searchResults );
-
- assertEquals( 10, searchResults.getReturnedHitsCount() );
-
- }
-
- @Test
- public void nonPaginatedHugeResult()
- throws Exception
- {
- MavenRepositorySearch search = new MavenRepositorySearch();
-
- SearchResults origSearchResults = build( 63, new SearchResultLimits( 0 ) );
-
- SearchResults searchResults = search.paginate( origSearchResults );
-
- assertEquals( 30, searchResults.getReturnedHitsCount() );
-
- origSearchResults = build( 63, new SearchResultLimits( 1 ) );
-
- searchResults = search.paginate( origSearchResults );
-
- assertEquals( 30, searchResults.getReturnedHitsCount() );
-
- }
-
- @Test
- public void paginatedResult()
- throws Exception
- {
- MavenRepositorySearch search = new MavenRepositorySearch();
-
- SearchResults searchResults = build( 32, new SearchResultLimits( 1 ) );
-
- searchResults = search.paginate( searchResults );
-
- assertEquals( 2, searchResults.getReturnedHitsCount() );
-
- }
-
-
- SearchResults build( int number, SearchResultLimits limits )
- {
- SearchResults searchResults = new SearchResults();
- searchResults.setLimits( limits );
- for ( int i = 0; i < number; i++ )
- {
- SearchResultHit hit = new SearchResultHit();
- hit.setGroupId( "commons-foo" );
- hit.setArtifactId( "commons-bar-" + i );
- hit.setPackaging( "jar" );
- hit.setVersions( Arrays.asList( "1.0" ) );
- String id =
- SearchUtil.getHitId( hit.getGroupId(), hit.getArtifactId(), hit.getClassifier(), hit.getPackaging() );
- searchResults.addHit( id, hit );
- }
-
- searchResults.setTotalHits( number );
- return searchResults;
-
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.indexer.search.RepositorySearchException;
-import org.apache.archiva.indexer.search.SearchFields;
-import org.apache.archiva.indexer.search.SearchResultHit;
-import org.apache.archiva.indexer.search.SearchResultLimits;
-import org.apache.archiva.indexer.search.SearchResults;
-import org.apache.archiva.indexer.util.SearchUtil;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.apache.maven.index_shaded.lucene.index.IndexUpgrader;
-import org.easymock.EasyMock;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.test.context.ContextConfiguration;
-
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Locale;
-
-
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
-@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
-public class MavenRepositorySearchTest
- extends AbstractMavenRepositorySearch
-{
-
-
- private void createSimpleIndex( boolean scan )
- throws Exception
- {
- List<Path> files = new ArrayList<>();
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
- "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
- "/org/apache/archiva/archiva-test/1.0/archiva-test-1.0.jar" ));
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
- "org/apache/archiva/archiva-test/2.0/archiva-test-2.0.jar" ));
-
- createIndex( TEST_REPO_1, files, scan );
- }
-
- private void createIndexContainingMoreArtifacts( boolean scan )
- throws Exception
- {
- List<Path> files = new ArrayList<>();
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-test/1.0/archiva-test-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-test/2.0/archiva-test-2.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-webapp/1.0/archiva-webapp-1.0.war" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
- "target/repos/" + TEST_REPO_1 + "/com/artifactid-numeric/1.0/artifactid-numeric-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/com/artifactid-numeric123/1.0/artifactid-numeric123-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
- "target/repos/" + TEST_REPO_1 + "/com/classname-search/1.0/classname-search-1.0.jar" ) );
-
- createIndex( TEST_REPO_1, files, scan );
- }
-
- private void createIndexContainingMultipleArtifactsSameVersion( boolean scan )
- throws Exception
- {
- List<Path> files = new ArrayList<>();
-
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
-
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.pom" ) );
-
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0-sources.jar" ) );
-
- createIndex( TEST_REPO_1, files, scan );
- }
-
- @Test
- public void testQuickSearch()
- throws Exception
- {
- createSimpleIndex( false );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- // search artifactId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-
- SearchResultHit hit =
- results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
- assertNotNull( "hit null in result " + results.getHits(), hit );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
-
- archivaConfigControl.reset();
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- // search groupId
- archivaConfigControl.replay();
-
- results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( "total hints not 3", 3, results.getTotalHits() );
-
- //TODO: search for class & package names
- }
-
- @Test
- public void testQuickSearchNotWithClassifier()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- // search artifactId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-
- SearchResultHit hit =
- results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
- assertNotNull( "hit null in result " + results.getHits(), hit );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
-
- archivaConfigControl.reset();
-
- // search groupId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- results = search.search( "user", selectedRepos, "archiva-search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-        assertEquals( "total hits not 3, hits " + results.getHits(), 3, results.getTotalHits() );
-
- //TODO: search for class & package names
- }
-
- @Test
- public void testQuickSearchMultipleArtifactsSameVersion()
- throws Exception
- {
- createIndexContainingMultipleArtifactsSameVersion( false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- // search artifactId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 3, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
-
- //only 1 version of 1.0 is retrieved
- assertEquals( 1, hit.getVersions().size() );
- }
-
- @Test
- public void testMultipleArtifactsSameVersionWithClassifier()
- throws Exception
- {
- createIndexContainingMultipleArtifactsSameVersion( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- // search artifactId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setArtifactId( "archiva-search" );
- searchFields.setClassifier( "sources" );
- searchFields.setRepositories( selectedRepos );
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
-
- //only 1 version of 1.0 is retrieved
- assertEquals( 1, hit.getVersions().size() );
- }
-
- // search for existing artifact using multiple keywords
- @Test
- public void testQuickSearchWithMultipleKeywords()
- throws Exception
- {
- createSimpleIndex( false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "archiva search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getTotalHits() );
- }
-
- @Test
- public void testQuickSearchWithPagination()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- // page 1
- SearchResultLimits limits = new SearchResultLimits( 0 );
- limits.setPageSize( 1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "org", limits, Collections.emptyList() );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getHits().size() );
- assertEquals( "total hits not 9 for page1 " + results, 9, results.getTotalHits() );
- assertEquals( "returned hits not 1 for page1 " + results, 1, results.getReturnedHitsCount() );
- assertEquals( limits, results.getLimits() );
-
- archivaConfigControl.reset();
-
- // page 2
- limits = new SearchResultLimits( 1 );
- limits.setPageSize( 1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- results = search.search( "user", selectedRepos, "org", limits, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-
- assertEquals( "hits not 1", 1, results.getHits().size() );
- assertEquals( "total hits not 9 for page 2 " + results, 9, results.getTotalHits() );
- assertEquals( "returned hits not 1 for page2 " + results, 1, results.getReturnedHitsCount() );
- assertEquals( limits, results.getLimits() );
- }
-
- @Test
- public void testArtifactFoundInMultipleRepositories()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<Path> files = new ArrayList<>();
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
- + "/org/apache/archiva/archiva-search/1.1/archiva-search-1.1.jar" ) );
- createIndex( TEST_REPO_2, files, false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
- selectedRepos.add( TEST_REPO_2 );
-
- config.addManagedRepository( createRepositoryConfig( TEST_REPO_2 ) );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- // wait lucene flush.....
- Thread.sleep( 2000 );
-
- SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-
- SearchResultHit hit =
- results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
- assertNotNull(hit);
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "not 2 version for hit " + hit + "::" + niceDisplay( results ), 2, hit.getVersions().size() );
- assertTrue( hit.getVersions().contains( "1.0" ) );
- assertTrue( hit.getVersions().contains( "1.1" ) );
-
- archivaConfigControl.reset();
-
- // TODO: [BROWSE] in artifact info from browse, display all the repositories where the artifact is found
- }
-
- @Test
- public void testNoMatchFound()
- throws Exception
- {
- createSimpleIndex( false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "dfghdfkweriuasndsaie", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 0, results.getTotalHits() );
- }
-
- @Test
- public void testNoIndexFound()
- throws Exception
- {
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- // archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
- assertNotNull( results );
- assertEquals( 0, results.getTotalHits() );
-
- archivaConfigControl.verify();
- }
-
- @Test
- public void testRepositoryNotFound()
- throws Exception
- {
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( "non-existing-repo" );
-
- // archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
- assertNotNull( results );
- assertEquals( 0, results.getTotalHits() );
-
- archivaConfigControl.verify();
- }
-
- @Test
- public void testSearchWithinSearchResults()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- List<String> previousSearchTerms = new ArrayList<>();
- previousSearchTerms.add( "archiva-test" );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "1.0", null, previousSearchTerms );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-        assertEquals( "total hits not 1", 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-test", hit.getArtifactId() );
- assertEquals( "versions not 1", 1, hit.getVersions().size() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
- }
-
- // tests for advanced search
- @Test
- public void testAdvancedSearch()
- throws Exception
- {
- List<Path> files = new ArrayList<>();
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
- + "/org/apache/archiva/archiva-search/1.1/archiva-search-1.1.jar" ) );
- createIndex( TEST_REPO_2, files, false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_2 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setVersion( "1.0" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
- }
-
- @Test
- public void testAdvancedSearchWithPagination()
- throws Exception
- {
- createIndexContainingMoreArtifacts( false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setRepositories( selectedRepos );
-
- // page 1
-
- SearchResultLimits limits = new SearchResultLimits( 0 );
- limits.setPageSize( 1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, limits );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 4, results.getTotalHits() );
- assertEquals( 1, results.getHits().size() );
-
- // page 2
- archivaConfigControl.reset();
-
- limits = new SearchResultLimits( 1 );
- limits.setPageSize( 1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- results = search.search( "user", searchFields, limits );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 4, results.getTotalHits() );
- assertEquals( 1, results.getHits().size() );
- }
-
- // MRM-981 - artifactIds with numeric characters aren't found in advanced search
- @Test
- public void testAdvancedSearchArtifactIdHasNumericChar()
- throws Exception
- {
- List<Path> files = new ArrayList<>();
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
- "target/repos/" + TEST_REPO_1 + "/com/artifactid-numeric/1.0/artifactid-numeric-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/com/artifactid-numeric123/1.0/artifactid-numeric123-1.0.jar" ) );
- createIndex( TEST_REPO_1, files, true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setArtifactId( "artifactid-numeric" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 2, results.getTotalHits() );
- }
-
- @Test
- public void testAdvancedSearchNoRepositoriesConfigured()
- throws Exception
- {
- SearchFields searchFields = new SearchFields();
- searchFields.setArtifactId( "archiva" );
- searchFields.setRepositories( null );
-
- try
- {
- search.search( "user", searchFields, null );
-            fail( "A RepositorySearchException should have been thrown." );
- }
- catch ( RepositorySearchException e )
- {
- assertEquals( "Repositories cannot be null.", e.getMessage() );
- }
- }
-
- @Test
- public void testAdvancedSearchSearchFieldsAreNull()
- throws Exception
- {
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setRepositories( selectedRepos );
-
- try
- {
- // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- // archivaConfigControl.replay();
-
- search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
-            fail( "A RepositorySearchException should have been thrown." );
- }
- catch ( RepositorySearchException e )
- {
- assertEquals( "No search fields set.", e.getMessage() );
- }
- }
-
- @Test
- public void testAdvancedSearchSearchFieldsAreBlank()
- throws Exception
- {
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "" );
- searchFields.setArtifactId( "" );
- searchFields.setVersion( "" );
- searchFields.setPackaging( "" );
- searchFields.setClassName( "" );
-
- searchFields.setRepositories( selectedRepos );
-
- try
- {
-
- // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
- // archivaConfigControl.replay();
-
- search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- fail( "A RepositorySearchException should have been thrown." );
- }
- catch ( RepositorySearchException e )
- {
- assertEquals( "No search fields set.", e.getMessage() );
- }
- }
-
- @Test
- public void testAdvancedSearchAllSearchCriteriaSpecified()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setArtifactId( "archiva-test" );
- searchFields.setVersion( "2.0" );
- searchFields.setPackaging( "jar" );
- searchFields.setClassName( "org.apache.archiva.test.App" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-
-        assertEquals( "total hits not 1 " + results, 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-test", hit.getArtifactId() );
- assertEquals( "version not 2.0", "2.0", hit.getVersions().get( 0 ) );
- }
-
- @Test
- public void testAdvancedSearchJarArtifacts()
- throws Exception
- {
- createIndexContainingMoreArtifacts( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setPackaging( "jar" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( "not 8 but " + results.getTotalHits() + ":" + niceDisplay( results ), 8, results.getTotalHits() );
- }
-
- @Test
- public void testAdvancedSearchWithIncorrectPackaging()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setArtifactId( "archiva-test" );
- searchFields.setVersion( "2.0" );
- searchFields.setPackaging( "war" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 0, results.getTotalHits() );
- }
-
- @Test
- public void testAdvancedSearchClassname()
- throws Exception
- {
- createIndexContainingMoreArtifacts( true );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setClassName( "com.classname.search.App" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( "totalHits not 1 results " + results, 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "groupId not com", "com", hit.getGroupId() );
-        assertEquals( "artifactId not classname-search", "classname-search", hit.getArtifactId() );
- assertEquals( " hits.version(0) not 1.0", "1.0", hit.getVersions().get( 0 ) );
- }
-
- @Test
- public void testAdvancedSearchNoIndexFound()
- throws Exception
- {
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setRepositories( selectedRepos );
-
- // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- // archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 0, results.getTotalHits() );
- }
-
- @Test
- public void testAdvancedSearchClassNameInWar()
- throws Exception
- {
- createIndexContainingMoreArtifacts( true );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setClassName( "SomeClass" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getHits().size() );
- assertEquals( "test-webapp", results.getHits().get( 0 ).getArtifactId() );
- }
-
- @Test
- public void getAllGroupIds()
- throws Exception
- {
- createIndexContainingMoreArtifacts( true );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- EasyMock.expect( archivaConfig.getConfiguration() ).andReturn( config ).times( 0, 2 );
-
- archivaConfigControl.replay();
-
- Collection<String> groupIds = search.getAllGroupIds( "user", selectedRepos );
-
- archivaConfigControl.verify();
-
- log.info( "groupIds: {}", groupIds );
-
- assertEquals( 3, groupIds.size() );
- assertTrue( groupIds.contains( "com" ) );
- assertTrue( groupIds.contains( "org.apache.felix" ) );
- assertTrue( groupIds.contains( "org.apache.archiva" ) );
- }
-
- @Test
- public void testSearchWithUnknownRepo()
- throws Exception
- {
- createIndexContainingMoreArtifacts( true );
-
- List<String> selectedRepos = Arrays.asList( "foo" );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setClassName( "SomeClass" );
- searchFields.setRepositories( selectedRepos );
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 0, results.getHits().size() );
- }
-
- @Test
- public void nolimitedResult()
- throws Exception
- {
-
- Path repo = Paths.get("target/repo-release-index-test/repo-release");
- try {
- Path indexDirectory = repo.resolve(".indexer");
- Path zipFile = Paths.get(Thread.currentThread().getContextClassLoader().getResource("repo-release.zip").toURI());
- FileUtils.unzip(zipFile, repo.getParent());
-// IndexUpgrader.main(new String[]{indexDirectory.toAbsolutePath().toString(), "-delete-prior-commits"});
- createIndex(REPO_RELEASE, Collections.emptyList(), false, indexDirectory, false);
-
-// indexer.addIndexingContext( REPO_RELEASE, REPO_RELEASE, repo.toFile(), indexDirectory.toFile(),
-// repo.toUri().toURL().toExternalForm(),
-// indexDirectory.toUri().toURL().toString(), indexCreators );
-
-
- SearchResultLimits limits = new SearchResultLimits(SearchResultLimits.ALL_PAGES);
- limits.setPageSize(300);
-
- // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect(archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults searchResults = search.search(null, Arrays.asList(REPO_RELEASE), //
- "org.example", limits, //
- Collections.emptyList());
-
- log.info("results: {}", searchResults.getHits().size());
-
- assertEquals(255, searchResults.getHits().size());
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId("org.example");
- searchFields.setRepositories(Arrays.asList(REPO_RELEASE));
-
- searchResults = search.search(null, searchFields, limits);
-
- log.info("results: {}", searchResults.getHits().size());
-
- assertEquals(255, searchResults.getHits().size());
-
- archivaConfigControl.verify();
- } finally {
- FileUtils.deleteQuietly(repo);
- }
- }
-}
--- /dev/null
+package org.apache.archiva.maven.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.indexer.IndexCreationFailedException;
+import org.apache.archiva.maven.indexer.MavenIndexManager;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
+import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.archiva.repository.maven.MavenManagedRepository;
+import org.apache.archiva.repository.maven.MavenRemoteRepository;
+import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
+import org.apache.maven.index.MAVEN;
+import org.apache.maven.index.QueryCreator;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.expr.UserInputSearchExpression;
+import org.apache.maven.index_shaded.lucene.search.BooleanClause;
+import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
+import org.apache.maven.index_shaded.lucene.search.Query;
+import org.junit.After;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.test.context.ContextConfiguration;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.junit.Assert.*;
+
+@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
+public class MavenIndexManagerTest {
+
+ @Inject
+ ArchivaRepositoryRegistry repositoryRegistry;
+
+ @SuppressWarnings( "unused" )
+ @Inject
+ RepositoryHandlerDependencies repositoryHandlerDependencies;
+
+ private Path indexPath;
+ private MavenManagedRepository repository;
+ private ArchivaIndexingContext ctx;
+ private MavenRemoteRepository repositoryRemote;
+
+ @Inject
+ MavenIndexManager mavenIndexManager;
+
+ @Inject
+ QueryCreator queryCreator;
+
+
+ @After
+ public void tearDown() {
+ repositoryRegistry.destroy();
+ if (ctx!=null) {
+ try {
+ ctx.close(true);
+ } catch (IOException e) {
+ //
+ }
+ }
+ if (indexPath!=null && Files.exists(indexPath)) {
+ FileUtils.deleteQuietly(indexPath);
+ }
+
+ }
+
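+    // pack() publishes the index as transferable chunk files; the assertion below checks
+    // that at least one non-empty gzip chunk was written to the index directory.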
+ @Test
+ public void pack() throws Exception {
+ createTestContext();
+ Path destDir = repository.getRoot().getFilePath().resolve("org/apache/archiva/archiva-webapp/1.0");
+ Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-webapp/1.0");
+ org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(),destDir.toFile());
+ mavenIndexManager.scan(ctx);
+ mavenIndexManager.pack(ctx);
+ assertTrue(Files.list(indexPath).filter(path -> {
+ try {
+ return path.getFileName().toString().endsWith(".gz") && Files.size(path) > 0;
+ } catch (IOException e) {
+ return false;
+ }
+ }).findAny().isPresent());
+ }
+
+ @Test
+ public void scan() throws Exception {
+ createTestContext();
+ Path destDir = repository.getRoot().getFilePath().resolve("org/apache/archiva/archiva-webapp/1.0");
+ Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-webapp/1.0");
+ org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(),destDir.toFile());
+ mavenIndexManager.scan(ctx);
+
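+        // Query the underlying maven-indexer context by groupId and verify that the scan
+        // picked up the expected number of index documents.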
+ IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
+ String term = "org.apache.archiva";
+ Query q = new BooleanQuery.Builder().add( queryCreator.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ),
+ BooleanClause.Occur.SHOULD ).build();
+ assertEquals(4, mvnCtx.acquireIndexSearcher().count(q));
+ }
+
+ /*
+     * Performs only an index update via a file URI; no HTTP URI is used.
+ */
+ @Test
+ public void update() throws Exception {
+ createTestContext();
+ mavenIndexManager.pack(ctx);
+ ctx.close(false);
+ createTestContextForRemote();
+ mavenIndexManager.update(ctx, true);
+ }
+
+ @Test
+ public void addArtifactsToIndex() throws Exception {
+
+ ArchivaIndexingContext ctx = createTestContext();
+ try {
+ Path destDir = repository.getRoot().getFilePath().resolve("org/apache/archiva/archiva-search/1.0");
+ Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-search/1.0");
+ org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(), destDir.toFile());
+ List<URI> uriList = new ArrayList<>();
+ uriList.add(destDir.resolve("archiva-search-1.0.jar").toUri());
+ uriList.add(destDir.resolve("archiva-search-1.0-sources.jar").toUri());
+ mavenIndexManager.addArtifactsToIndex(ctx, uriList);
+
+ IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
+ String term = "org.apache.archiva";
+ Query q = new BooleanQuery.Builder().add(queryCreator.constructQuery(MAVEN.GROUP_ID, new UserInputSearchExpression(term)),
+ BooleanClause.Occur.SHOULD).build();
+ assertEquals(2, mvnCtx.acquireIndexSearcher().count(q));
+ } finally {
+ try {
+ ctx.close(true);
+ } catch (IOException e) {
+ // Ignore
+ }
+ }
+ }
+
+ @Test
+ public void removeArtifactsFromIndex() throws Exception {
+ ArchivaIndexingContext ctx = createTestContext();
+ Path destDir = repository.getRoot().getFilePath().resolve("org/apache/archiva/archiva-search/1.0");
+ Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-search/1.0");
+ org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(), destDir.toFile());
+ List<URI> uriList = new ArrayList<>();
+ uriList.add(destDir.resolve("archiva-search-1.0.jar").toUri());
+ uriList.add(destDir.resolve("archiva-search-1.0-sources.jar").toUri());
+ mavenIndexManager.addArtifactsToIndex(ctx, uriList);
+
+ IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
+ String term = "org.apache.archiva";
+ Query q = new BooleanQuery.Builder().add( queryCreator.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ),
+ BooleanClause.Occur.SHOULD ).build();
+ assertEquals(2, mvnCtx.acquireIndexSearcher().count(q));
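+        // Drop the main jar URI from the list so that only the sources artifact is removed
+        // from the index; one document (the main jar) should remain.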
+ uriList.remove(0);
+ mavenIndexManager.removeArtifactsFromIndex(ctx, uriList);
+ assertEquals(1, mvnCtx.acquireIndexSearcher().count(q));
+ }
+
+ @Test
+ public void supportsRepository() throws Exception {
+ assertTrue(mavenIndexManager.supportsRepository(RepositoryType.MAVEN));
+ assertFalse(mavenIndexManager.supportsRepository(RepositoryType.NPM));
+ }
+
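+    // Creates a managed test repository with a unique, timestamped index directory and opens
+    // an indexing context for it.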
+ private ArchivaIndexingContext createTestContext() throws URISyntaxException, IndexCreationFailedException, IOException {
+ String indexPathName = ".index-test." + System.nanoTime();
+ indexPath = Paths.get("target/repositories/test-repo" ).resolve(indexPathName);
+ if (Files.exists(indexPath)) {
+
+ try {
+ FileUtils.deleteDirectory(indexPath);
+ } catch (IOException e) {
+ String destName = indexPath.getFileName().toString() + "." + System.currentTimeMillis();
+ Files.move(indexPath, indexPath.getParent().resolve(destName));
+ }
+ }
+ repository = MavenManagedRepository.newLocalInstance("test-repo", "Test Repo", Paths.get("target/repositories"));
+ // repository.setLocation(new URI("test-repo"));
+ IndexCreationFeature icf = repository.getFeature(IndexCreationFeature.class).get();
+ icf.setIndexPath(new URI(indexPathName));
+ ctx = mavenIndexManager.createContext(repository);
+ return ctx;
+ }
+
+ private ArchivaIndexingContext createTestContextForRemote() throws URISyntaxException, IndexCreationFailedException, IOException {
+ // indexPath = Paths.get("target/repositories/test-repo/.index-test");
+ Path repoPath = Paths.get("target/repositories").toAbsolutePath();
+ repositoryRemote = MavenRemoteRepository.newLocalInstance("test-repo", "Test Repo", repoPath);
+ repositoryRemote.setLocation(repoPath.resolve("test-repo").toUri());
+ RemoteIndexFeature icf = repositoryRemote.getFeature(RemoteIndexFeature.class).get();
+ icf.setIndexUri(new URI(indexPath.getFileName().toString()));
+ ctx = mavenIndexManager.createContext(repositoryRemote);
+ return ctx;
+ }
+
+ @Test
+ public void createContext() throws Exception {
+ ArchivaIndexingContext ctx = createTestContext();
+ assertNotNull(ctx);
+ assertEquals(repository, ctx.getRepository());
+ assertEquals("test-repo", ctx.getId());
+ assertEquals(indexPath.toAbsolutePath(), ctx.getPath().getFilePath().toAbsolutePath());
+ assertTrue(Files.exists(indexPath));
+ List<Path> li = Files.list(indexPath).collect(Collectors.toList());
+ assertTrue(li.size()>0);
+
+ }
+
+}
\ No newline at end of file
--- /dev/null
+package org.apache.archiva.maven.indexer.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.TestCase;
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.configuration.ArchivaConfiguration;
+import org.apache.archiva.configuration.Configuration;
+import org.apache.archiva.configuration.ConfigurationListener;
+import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.indexer.search.SearchResults;
+import org.apache.archiva.maven.indexer.search.MavenRepositorySearch;
+import org.apache.archiva.proxy.ProxyRegistry;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
+import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
+import org.apache.commons.lang3.SystemUtils;
+import org.apache.maven.index.ArtifactContext;
+import org.apache.maven.index.ArtifactContextProducer;
+import org.apache.maven.index.ArtifactScanningListener;
+import org.apache.maven.index.DefaultScannerListener;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.IndexerEngine;
+import org.apache.maven.index.QueryCreator;
+import org.apache.maven.index.Scanner;
+import org.apache.maven.index.ScanningRequest;
+import org.apache.maven.index.ScanningResult;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index_shaded.lucene.index.IndexUpgrader;
+import org.easymock.EasyMock;
+import org.easymock.IMocksControl;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.runner.RunWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.test.context.ContextConfiguration;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.Locale;
+
+import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
+
+/**
+ * @author Olivier Lamy
+ */
+@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
+public abstract class AbstractMavenRepositorySearch
+ extends TestCase
+{
+
+ protected Logger log = LoggerFactory.getLogger( getClass() );
+
+ public static String TEST_REPO_1 = "maven-search-test-repo";
+
+ public static String TEST_REPO_2 = "maven-search-test-repo-2";
+
+
+ public static String REPO_RELEASE = "repo-release";
+
+ MavenRepositorySearch search;
+
+ ArchivaConfiguration archivaConfig;
+
+ @Inject
+ ArtifactContextProducer artifactContextProducer;
+
+ @Inject
+ ArchivaRepositoryRegistry repositoryRegistry;
+
+ @SuppressWarnings( "unused" )
+ @Inject
+ RepositoryHandlerDependencies repositoryHandlerDependencies;
+
+ @Inject
+ ProxyRegistry proxyRegistry;
+
+ @Inject
+ private IndexerEngine indexerEngine;
+
+ IMocksControl archivaConfigControl;
+
+ Configuration config;
+
+ @Inject
+ Indexer indexer;
+
+ @Inject
+ Scanner scanner;
+
+ @Inject
+ QueryCreator queryCreator;
+
+ @Before
+ @Override
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" ) );
+ assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" )) );
+ Files.createDirectories( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" ) );
+
+ FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" ) );
+ assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" )) );
+ Files.createDirectories( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" ) );
+
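+        // The Archiva configuration is mocked so each test can record its own expectations
+        // and verify how the search implementation reads the configuration.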
+ archivaConfigControl = EasyMock.createControl();
+
+ archivaConfig = archivaConfigControl.createMock( ArchivaConfiguration.class );
+
+ repositoryRegistry.setArchivaConfiguration( archivaConfig );
+
+ search = new MavenRepositorySearch( indexer, repositoryRegistry, proxyRegistry,
+ queryCreator );
+
+ assertNotNull( repositoryRegistry );
+
+ config = new Configuration();
+ config.addManagedRepository( createRepositoryConfig( TEST_REPO_1 ) );
+ config.addManagedRepository( createRepositoryConfig( TEST_REPO_2 ) );
+ config.addManagedRepository( createRepositoryConfig( REPO_RELEASE ) );
+
+ archivaConfig.addListener( EasyMock.anyObject( ConfigurationListener.class ) );
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
+ archivaConfig.save(EasyMock.anyObject(Configuration.class), EasyMock.anyString());
+ EasyMock.expectLastCall().anyTimes();
+ archivaConfigControl.replay();
+ repositoryRegistry.reload();
+
+ }
+
+ @After
+ @Override
+ public void tearDown()
+ throws Exception
+ {
+ archivaConfigControl.reset();
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
+ archivaConfig.save(EasyMock.anyObject(Configuration.class), EasyMock.anyString());
+ EasyMock.expectLastCall().anyTimes();
+ archivaConfigControl.replay();
+ repositoryRegistry.removeRepository(TEST_REPO_1);
+ repositoryRegistry.removeRepository(TEST_REPO_2);
+ repositoryRegistry.removeRepository(REPO_RELEASE);
+ repositoryRegistry.destroy();
+ FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 ) );
+ assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 )) );
+
+ FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 ) );
+ assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 )) );
+
+ super.tearDown();
+ }
+
+ protected ManagedRepositoryConfiguration createRepositoryConfig( String repository )
+ {
+ ManagedRepositoryConfiguration repositoryConfig = new ManagedRepositoryConfiguration();
+ repositoryConfig.setId( repository );
+ repositoryConfig.setLocation( org.apache.archiva.common.utils.FileUtils.getBasedir() + "/target/repos/" + repository );
+ Path f = Paths.get( repositoryConfig.getLocation() );
+ if ( !Files.exists(f) )
+ {
+ try
+ {
+ Files.createDirectories( f );
+ }
+ catch ( IOException e )
+ {
+ log.error("Could not create directories for {}", f);
+ }
+ }
+ repositoryConfig.setLayout( "default" );
+ repositoryConfig.setName( repository );
+ repositoryConfig.setScanned( true );
+ repositoryConfig.setSnapshots( false );
+ repositoryConfig.setReleases( true );
+ repositoryConfig.setIndexDir(DEFAULT_INDEX_PATH);
+
+ return repositoryConfig;
+ }
+
+ protected void createIndex( String repository, List<Path> filesToBeIndexed, boolean scan) throws Exception {
+ createIndex(repository, filesToBeIndexed, scan, null, true);
+ }
+
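+    /**
+     * Rebuilds the index of the given repository: resets the index directory (or uses the
+     * supplied one), optionally copies the test artifacts from src/test, adds the given
+     * files to the index and, if requested, runs a full repository scan.
+     */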
+ protected void createIndex( String repository, List<Path> filesToBeIndexed, boolean scan, Path indexDir, boolean copyFiles)
+ throws Exception
+ {
+ final Repository rRepo = repositoryRegistry.getRepository(repository);
+ IndexCreationFeature icf = rRepo.getFeature(IndexCreationFeature.class).get();
+
+
+        ArchivaIndexingContext archivaCtx = rRepo.getIndexingContext();
+        // Close any existing context before the index directory is reset. The base context
+        // is fetched again after the registry reload further below.
+        IndexingContext context;
+
+        if ( archivaCtx != null )
+        {
+            archivaCtx.close(true);
+        }
+
+ Path repoDir = Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir()).resolve("target").resolve("repos").resolve(repository);
+
+ Path indexerDirectory = repoDir.resolve(".indexer" );
+
+ if ( indexDir == null && Files.exists(indexerDirectory) )
+ {
+ FileUtils.deleteDirectory( indexerDirectory );
+ assertFalse( Files.exists(indexerDirectory) );
+ }
+
+
+ Path lockFile = repoDir.resolve(".indexer/write.lock" );
+ if ( Files.exists(lockFile) )
+ {
+ Files.delete(lockFile);
+ }
+ assertFalse( Files.exists(lockFile) );
+ if (indexDir==null) {
+ Path indexDirectory =
+ Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/index/test-" + Long.toString(System.currentTimeMillis()));
+ indexDirectory.toFile().deleteOnExit();
+ FileUtils.deleteDirectory(indexDirectory);
+ icf.setIndexPath(indexDirectory.toUri());
+ config.getManagedRepositories( ).stream( ).filter( r -> r.getId( ).equals( rRepo.getId( ) ) ).findFirst( ).ifPresent( r ->
+ r.setIndexDir( indexDirectory.toAbsolutePath( ).toString( ) )
+ );
+ // IndexUpgrader.main(new String[]{indexDirectory.toAbsolutePath().toString()});
+ } else {
+
+ icf.setIndexPath(indexDir.toUri());
+ Files.createDirectories( indexDir );
+ config.getManagedRepositories( ).stream( ).filter( r -> r.getId( ).equals( rRepo.getId( ) ) ).findFirst( ).ifPresent( r ->
+ r.setIndexDir( indexDir.toAbsolutePath( ).toString( ) )
+ );
+ IndexUpgrader.main(new String[]{indexDir.toAbsolutePath().toString()});
+
+ }
+
+ if (copyFiles) {
+ Path repo = Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + repository);
+ assertTrue(Files.exists(repo));
+ org.apache.commons.io.FileUtils.copyDirectory(repo.toFile(), repoDir.toFile());
+ }
+
+
+
+
+ archivaConfigControl.reset();
+ archivaConfig.addListener( EasyMock.anyObject( ConfigurationListener.class ) );
+ EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
+ archivaConfig.save(EasyMock.anyObject(Configuration.class), EasyMock.anyString());
+ EasyMock.expectLastCall().anyTimes();
+ archivaConfigControl.replay();
+ repositoryRegistry.reload();
+ archivaConfigControl.reset();
+
+ Repository rRepo2 = repositoryRegistry.getRepository( repository );
+ icf = rRepo2.getFeature(IndexCreationFeature.class).get();
+
+
+ archivaCtx = rRepo2.getIndexingContext();
+ context = archivaCtx.getBaseContext(IndexingContext.class);
+
+
+        // minimize data kept in memory
+// context.getIndexWriter().setMaxBufferedDocs( -1 );
+// context.getIndexWriter().setRAMBufferSizeMB( 1 );
+ for ( Path artifactFile : filesToBeIndexed )
+ {
+            assertTrue( "file does not exist " + artifactFile, Files.exists(artifactFile) );
+ ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile.toFile() );
+
+ if ( artifactFile.toString().endsWith( ".pom" ) )
+ {
+ ac.getArtifactInfo().setFileExtension( "pom" );
+ ac.getArtifactInfo().setPackaging( "pom" );
+ ac.getArtifactInfo().setClassifier( "pom" );
+ }
+ indexer.addArtifactToIndex( ac, context );
+ context.updateTimestamp( true );
+ }
+
+ if ( scan )
+ {
+ DefaultScannerListener listener = new DefaultScannerListener( context, indexerEngine, true, new ArtifactScanListener());
+ ScanningRequest req = new ScanningRequest(context, listener );
+ scanner.scan( req );
+ context.commit();
+ }
+ // force flushing
+ context.commit();
+ // context.getIndexWriter().commit();
+ context.setSearchable( true );
+
+ }
+
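+    // Scanning listener used by createIndex() that only logs discovered artifacts and errors.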
+ static class ArtifactScanListener
+ implements ArtifactScanningListener
+ {
+ protected Logger log = LoggerFactory.getLogger( getClass() );
+
+ @Override
+ public void scanningStarted( IndexingContext ctx )
+ {
+ //
+ }
+
+ @Override
+ public void scanningFinished( IndexingContext ctx, ScanningResult result )
+ {
+ // no op
+ }
+
+ @Override
+ public void artifactError( ArtifactContext ac, Exception e )
+ {
+ log.debug( "artifactError {}", ac.getArtifact().getPath(), e );
+ }
+
+ @Override
+ public void artifactDiscovered( ArtifactContext ac )
+ {
+ log.debug( "artifactDiscovered {}:{}", //
+ ac.getArtifact() == null ? "" : ac.getArtifact().getPath(), //
+ ac.getArtifact() == null ? "" : ac.getArtifactInfo() );
+ }
+ }
+
+ public String niceDisplay( SearchResults searchResults )
+ throws Exception
+ {
+ StringBuilder sb = new StringBuilder();
+ for ( SearchResultHit hit : searchResults.getHits() )
+ {
+ sb.append( hit.toString() ).append( SystemUtils.LINE_SEPARATOR );
+ }
+ return sb.toString();
+ }
+}
--- /dev/null
+package org.apache.archiva.maven.indexer.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.indexer.search.SearchFields;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.indexer.search.SearchResults;
+import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
+import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Test;
+
+import javax.inject.Inject;
+import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * @author Olivier Lamy
+ */
+public class MavenRepositorySearchOSGITest
+ extends AbstractMavenRepositorySearch
+{
+
+ @Inject
+ ArchivaRepositoryRegistry repositoryRegistry;
+
+ @SuppressWarnings( "unused" )
+ @Inject
+ RepositoryHandlerDependencies repositoryHandlerDependencies;
+
+
+ @After
+ @Override
+ public void tearDown() throws Exception {
+ super.tearDown();
+ repositoryRegistry.destroy();
+ }
+
+ @Test
+ public void searchFelixWithSymbolicName()
+ throws Exception
+ {
+
+ createIndex( TEST_REPO_1, Collections.<Path>emptyList(), true );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+        // advanced search by OSGi bundle symbolic name and version
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setBundleSymbolicName( "org.apache.felix.bundlerepository" );
+ searchFields.setBundleVersion( "1.6.6" );
+ searchFields.setRepositories( selectedRepos );
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.felix", hit.getGroupId() );
+ assertEquals( "org.apache.felix.bundlerepository", hit.getArtifactId() );
+ assertEquals( "1.6.6", hit.getVersions().get( 0 ) );
+
+ assertEquals( "org.apache.felix.bundlerepository;uses:=\"org.osgi.framework\";version=\"2.0\"",
+ hit.getBundleExportPackage() );
+ assertEquals( "org.apache.felix.bundlerepository.RepositoryAdmin,org.osgi.service.obr.RepositoryAdmin",
+ hit.getBundleExportService() );
+ assertEquals( "org.apache.felix.bundlerepository", hit.getBundleSymbolicName() );
+ assertEquals( "1.6.6", hit.getBundleVersion() );
+ }
+
+}
--- /dev/null
+package org.apache.archiva.maven.indexer.search;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.TestCase;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.indexer.search.SearchResultLimits;
+import org.apache.archiva.indexer.search.SearchResults;
+import org.apache.archiva.maven.indexer.search.MavenRepositorySearch;
+import org.apache.archiva.maven.indexer.util.SearchUtil;
+import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
+import org.apache.archiva.repository.base.RepositoryHandlerDependencies;
+import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
+import org.junit.After;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.test.context.ContextConfiguration;
+
+import javax.inject.Inject;
+import java.util.Arrays;
+
+/**
+ * @author Olivier Lamy
+ */
+@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
+public class MavenRepositorySearchPaginateTest
+ extends TestCase
+{
+
+ @Inject
+ ArchivaRepositoryRegistry repositoryRegistry;
+
+ @SuppressWarnings( "unused" )
+ @Inject
+ RepositoryHandlerDependencies repositoryHandlerDependencies;
+
+ @After
+ public void endTests() {
+ assert repositoryRegistry!=null;
+ repositoryRegistry.destroy();
+ }
+
+ @Test
+ public void nonPaginatedResult()
+ throws Exception
+ {
+ MavenRepositorySearch search = new MavenRepositorySearch();
+
+ SearchResults searchResults = build( 10, new SearchResultLimits( 0 ) );
+
+ searchResults = search.paginate( searchResults );
+
+ assertEquals( 10, searchResults.getReturnedHitsCount() );
+
+ }
+
+ @Test
+ public void nonPaginatedHugeResult()
+ throws Exception
+ {
+ MavenRepositorySearch search = new MavenRepositorySearch();
+
+ SearchResults origSearchResults = build( 63, new SearchResultLimits( 0 ) );
+
+ SearchResults searchResults = search.paginate( origSearchResults );
+
+ assertEquals( 30, searchResults.getReturnedHitsCount() );
+
+ origSearchResults = build( 63, new SearchResultLimits( 1 ) );
+
+ searchResults = search.paginate( origSearchResults );
+
+ assertEquals( 30, searchResults.getReturnedHitsCount() );
+
+ }
+
+ @Test
+ public void paginatedResult()
+ throws Exception
+ {
+ MavenRepositorySearch search = new MavenRepositorySearch();
+
+ SearchResults searchResults = build( 32, new SearchResultLimits( 1 ) );
+
+ searchResults = search.paginate( searchResults );
+
+ assertEquals( 2, searchResults.getReturnedHitsCount() );
+
+ }
+
+
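+    // Builds a synthetic result set with the given number of hits so pagination can be
+    // verified without a real index.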
+ SearchResults build( int number, SearchResultLimits limits )
+ {
+ SearchResults searchResults = new SearchResults();
+ searchResults.setLimits( limits );
+ for ( int i = 0; i < number; i++ )
+ {
+ SearchResultHit hit = new SearchResultHit();
+ hit.setGroupId( "commons-foo" );
+ hit.setArtifactId( "commons-bar-" + i );
+ hit.setPackaging( "jar" );
+ hit.setVersions( Arrays.asList( "1.0" ) );
+ String id =
+ SearchUtil.getHitId( hit.getGroupId(), hit.getArtifactId(), hit.getClassifier(), hit.getPackaging() );
+ searchResults.addHit( id, hit );
+ }
+
+ searchResults.setTotalHits( number );
+ return searchResults;
+
+ }
+}
--- /dev/null
+package org.apache.archiva.maven.indexer.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.indexer.search.RepositorySearchException;
+import org.apache.archiva.indexer.search.SearchFields;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.indexer.search.SearchResultLimits;
+import org.apache.archiva.indexer.search.SearchResults;
+import org.apache.archiva.maven.indexer.util.SearchUtil;
+import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
+import org.easymock.EasyMock;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.test.context.ContextConfiguration;
+
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+
+
+@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
+public class MavenRepositorySearchTest
+ extends AbstractMavenRepositorySearch
+{
+
+
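+ // indexes three artifacts into TEST_REPO_1: archiva-search 1.0, archiva-test 1.0 and archiva-test 2.0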
+ private void createSimpleIndex( boolean scan )
+ throws Exception
+ {
+ List<Path> files = new ArrayList<>();
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
+ "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
+ "/org/apache/archiva/archiva-test/1.0/archiva-test-1.0.jar" ));
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
+ "org/apache/archiva/archiva-test/2.0/archiva-test-2.0.jar" ));
+
+ createIndex( TEST_REPO_1, files, scan );
+ }
+
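+ // indexes a larger artifact set into TEST_REPO_1, including a webapp war, artifactIds with numeric characters and a classname-search jar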
+ private void createIndexContainingMoreArtifacts( boolean scan )
+ throws Exception
+ {
+ List<Path> files = new ArrayList<>();
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-test/1.0/archiva-test-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-test/2.0/archiva-test-2.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-webapp/1.0/archiva-webapp-1.0.war" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
+ "target/repos/" + TEST_REPO_1 + "/com/artifactid-numeric/1.0/artifactid-numeric-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/com/artifactid-numeric123/1.0/artifactid-numeric123-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
+ "target/repos/" + TEST_REPO_1 + "/com/classname-search/1.0/classname-search-1.0.jar" ) );
+
+ createIndex( TEST_REPO_1, files, scan );
+ }
+
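+ // indexes the jar, pom and sources artifacts of archiva-search 1.0 into TEST_REPO_1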
+ private void createIndexContainingMultipleArtifactsSameVersion( boolean scan )
+ throws Exception
+ {
+ List<Path> files = new ArrayList<>();
+
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
+
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.pom" ) );
+
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0-sources.jar" ) );
+
+ createIndex( TEST_REPO_1, files, scan );
+ }
+
+ @Test
+ public void testQuickSearch()
+ throws Exception
+ {
+ createSimpleIndex( false );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ // search artifactId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+
+ SearchResultHit hit =
+ results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
+ assertNotNull( "hit null in result " + results.getHits(), hit );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+
+ archivaConfigControl.reset();
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ // search groupId
+ archivaConfigControl.replay();
+
+ results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( "total hints not 3", 3, results.getTotalHits() );
+
+ //TODO: search for class & package names
+ }
+
+ @Test
+ public void testQuickSearchNotWithClassifier()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ // search artifactId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+
+ SearchResultHit hit =
+ results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
+ assertNotNull( "hit null in result " + results.getHits(), hit );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+
+ archivaConfigControl.reset();
+
+ // search groupId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ results = search.search( "user", selectedRepos, "archiva-search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( "total hints not 3 hits " + results.getHits(), 3, results.getTotalHits() );
+
+ //TODO: search for class & package names
+ }
+
+ @Test
+ public void testQuickSearchMultipleArtifactsSameVersion()
+ throws Exception
+ {
+ createIndexContainingMultipleArtifactsSameVersion( false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ // search artifactId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 3, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+
+ //only 1 version of 1.0 is retrieved
+ assertEquals( 1, hit.getVersions().size() );
+ }
+
+ @Test
+ public void testMultipleArtifactsSameVersionWithClassifier()
+ throws Exception
+ {
+ createIndexContainingMultipleArtifactsSameVersion( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ // search artifactId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setArtifactId( "archiva-search" );
+ searchFields.setClassifier( "sources" );
+ searchFields.setRepositories( selectedRepos );
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+
+ //only 1 version of 1.0 is retrieved
+ assertEquals( 1, hit.getVersions().size() );
+ }
+
+ // search for existing artifact using multiple keywords
+ @Test
+ public void testQuickSearchWithMultipleKeywords()
+ throws Exception
+ {
+ createSimpleIndex( false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "archiva search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getTotalHits() );
+ }
+
+ @Test
+ public void testQuickSearchWithPagination()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ // page 1
+ SearchResultLimits limits = new SearchResultLimits( 0 );
+ limits.setPageSize( 1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "org", limits, Collections.emptyList() );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getHits().size() );
+ assertEquals( "total hits not 9 for page1 " + results, 9, results.getTotalHits() );
+ assertEquals( "returned hits not 1 for page1 " + results, 1, results.getReturnedHitsCount() );
+ assertEquals( limits, results.getLimits() );
+
+ archivaConfigControl.reset();
+
+ // page 2
+ limits = new SearchResultLimits( 1 );
+ limits.setPageSize( 1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ results = search.search( "user", selectedRepos, "org", limits, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+
+ assertEquals( "hits not 1", 1, results.getHits().size() );
+ assertEquals( "total hits not 9 for page 2 " + results, 9, results.getTotalHits() );
+ assertEquals( "returned hits not 1 for page2 " + results, 1, results.getReturnedHitsCount() );
+ assertEquals( limits, results.getLimits() );
+ }
+
+ @Test
+ public void testArtifactFoundInMultipleRepositories()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<Path> files = new ArrayList<>();
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
+ + "/org/apache/archiva/archiva-search/1.1/archiva-search-1.1.jar" ) );
+ createIndex( TEST_REPO_2, files, false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+ selectedRepos.add( TEST_REPO_2 );
+
+ config.addManagedRepository( createRepositoryConfig( TEST_REPO_2 ) );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ // wait for the Lucene index flush to complete
+ Thread.sleep( 2000 );
+
+ SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+
+ SearchResultHit hit =
+ results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
+ assertNotNull(hit);
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "not 2 version for hit " + hit + "::" + niceDisplay( results ), 2, hit.getVersions().size() );
+ assertTrue( hit.getVersions().contains( "1.0" ) );
+ assertTrue( hit.getVersions().contains( "1.1" ) );
+
+ archivaConfigControl.reset();
+
+ // TODO: [BROWSE] in artifact info from browse, display all the repositories where the artifact is found
+ }
+
+ @Test
+ public void testNoMatchFound()
+ throws Exception
+ {
+ createSimpleIndex( false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "dfghdfkweriuasndsaie", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 0, results.getTotalHits() );
+ }
+
+ @Test
+ public void testNoIndexFound()
+ throws Exception
+ {
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ // archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
+ assertNotNull( results );
+ assertEquals( 0, results.getTotalHits() );
+
+ archivaConfigControl.verify();
+ }
+
+ @Test
+ public void testRepositoryNotFound()
+ throws Exception
+ {
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( "non-existing-repo" );
+
+ // archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
+ assertNotNull( results );
+ assertEquals( 0, results.getTotalHits() );
+
+ archivaConfigControl.verify();
+ }
+
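+ // narrows a search for "1.0" using the previous search term "archiva-test"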
+ @Test
+ public void testSearchWithinSearchResults()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ List<String> previousSearchTerms = new ArrayList<>();
+ previousSearchTerms.add( "archiva-test" );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "1.0", null, previousSearchTerms );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( "total hints not 1", 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-test", hit.getArtifactId() );
+ assertEquals( "versions not 1", 1, hit.getVersions().size() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+ }
+
+ // tests for advanced search
+ @Test
+ public void testAdvancedSearch()
+ throws Exception
+ {
+ List<Path> files = new ArrayList<>();
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
+ + "/org/apache/archiva/archiva-search/1.1/archiva-search-1.1.jar" ) );
+ createIndex( TEST_REPO_2, files, false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_2 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setVersion( "1.0" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+ }
+
+ @Test
+ public void testAdvancedSearchWithPagination()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setRepositories( selectedRepos );
+
+ // page 1
+
+ SearchResultLimits limits = new SearchResultLimits( 0 );
+ limits.setPageSize( 1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, limits );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 4, results.getTotalHits() );
+ assertEquals( 1, results.getHits().size() );
+
+ // page 2
+ archivaConfigControl.reset();
+
+ limits = new SearchResultLimits( 1 );
+ limits.setPageSize( 1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ results = search.search( "user", searchFields, limits );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 4, results.getTotalHits() );
+ assertEquals( 1, results.getHits().size() );
+ }
+
+ // MRM-981 - artifactIds with numeric characters aren't found in advanced search
+ @Test
+ public void testAdvancedSearchArtifactIdHasNumericChar()
+ throws Exception
+ {
+ List<Path> files = new ArrayList<>();
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
+ "target/repos/" + TEST_REPO_1 + "/com/artifactid-numeric/1.0/artifactid-numeric-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/com/artifactid-numeric123/1.0/artifactid-numeric123-1.0.jar" ) );
+ createIndex( TEST_REPO_1, files, true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setArtifactId( "artifactid-numeric" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 2, results.getTotalHits() );
+ }
+
+ @Test
+ public void testAdvancedSearchNoRepositoriesConfigured()
+ throws Exception
+ {
+ SearchFields searchFields = new SearchFields();
+ searchFields.setArtifactId( "archiva" );
+ searchFields.setRepositories( null );
+
+ try
+ {
+ search.search( "user", searchFields, null );
+ fail( "A RepositorySearchExcecption should have been thrown." );
+ }
+ catch ( RepositorySearchException e )
+ {
+ assertEquals( "Repositories cannot be null.", e.getMessage() );
+ }
+ }
+
+ @Test
+ public void testAdvancedSearchSearchFieldsAreNull()
+ throws Exception
+ {
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setRepositories( selectedRepos );
+
+ try
+ {
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ // archivaConfigControl.replay();
+
+ search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ fail( "A RepositorySearchExcecption should have been thrown." );
+ }
+ catch ( RepositorySearchException e )
+ {
+ assertEquals( "No search fields set.", e.getMessage() );
+ }
+ }
+
+ @Test
+ public void testAdvancedSearchSearchFieldsAreBlank()
+ throws Exception
+ {
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "" );
+ searchFields.setArtifactId( "" );
+ searchFields.setVersion( "" );
+ searchFields.setPackaging( "" );
+ searchFields.setClassName( "" );
+
+ searchFields.setRepositories( selectedRepos );
+
+ try
+ {
+
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+ // archivaConfigControl.replay();
+
+ search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ fail( "A RepositorySearchException should have been thrown." );
+ }
+ catch ( RepositorySearchException e )
+ {
+ assertEquals( "No search fields set.", e.getMessage() );
+ }
+ }
+
+ @Test
+ public void testAdvancedSearchAllSearchCriteriaSpecified()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setArtifactId( "archiva-test" );
+ searchFields.setVersion( "2.0" );
+ searchFields.setPackaging( "jar" );
+ searchFields.setClassName( "org.apache.archiva.test.App" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+
+ assertEquals( "total hints not 1" + results, 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-test", hit.getArtifactId() );
+ assertEquals( "version not 2.0", "2.0", hit.getVersions().get( 0 ) );
+ }
+
+ @Test
+ public void testAdvancedSearchJarArtifacts()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setPackaging( "jar" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( "not 8 but " + results.getTotalHits() + ":" + niceDisplay( results ), 8, results.getTotalHits() );
+ }
+
+ @Test
+ public void testAdvancedSearchWithIncorrectPackaging()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setArtifactId( "archiva-test" );
+ searchFields.setVersion( "2.0" );
+ searchFields.setPackaging( "war" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 0, results.getTotalHits() );
+ }
+
+ @Test
+ public void testAdvancedSearchClassname()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( true );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setClassName( "com.classname.search.App" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( "totalHits not 1 results " + results, 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "groupId not com", "com", hit.getGroupId() );
+ assertEquals( "arttifactId not classname-search", "classname-search", hit.getArtifactId() );
+ assertEquals( " hits.version(0) not 1.0", "1.0", hit.getVersions().get( 0 ) );
+ }
+
+ @Test
+ public void testAdvancedSearchNoIndexFound()
+ throws Exception
+ {
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setRepositories( selectedRepos );
+
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ // archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 0, results.getTotalHits() );
+ }
+
+ @Test
+ public void testAdvancedSearchClassNameInWar()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( true );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setClassName( "SomeClass" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getHits().size() );
+ assertEquals( "test-webapp", results.getHits().get( 0 ).getArtifactId() );
+ }
+
+ @Test
+ public void getAllGroupIds()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( true );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ EasyMock.expect( archivaConfig.getConfiguration() ).andReturn( config ).times( 0, 2 );
+
+ archivaConfigControl.replay();
+
+ Collection<String> groupIds = search.getAllGroupIds( "user", selectedRepos );
+
+ archivaConfigControl.verify();
+
+ log.info( "groupIds: {}", groupIds );
+
+ assertEquals( 3, groupIds.size() );
+ assertTrue( groupIds.contains( "com" ) );
+ assertTrue( groupIds.contains( "org.apache.felix" ) );
+ assertTrue( groupIds.contains( "org.apache.archiva" ) );
+ }
+
+ @Test
+ public void testSearchWithUnknownRepo()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( true );
+
+ List<String> selectedRepos = Arrays.asList( "foo" );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setClassName( "SomeClass" );
+ searchFields.setRepositories( selectedRepos );
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 0, results.getHits().size() );
+ }
+
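+ // SearchResultLimits.ALL_PAGES with a large page size should return all 255 hits from the pre-built repo-release index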
+ @Test
+ public void nolimitedResult()
+ throws Exception
+ {
+
+ Path repo = Paths.get("target/repo-release-index-test/repo-release");
+ try {
+ Path indexDirectory = repo.resolve(".indexer");
+ Path zipFile = Paths.get(Thread.currentThread().getContextClassLoader().getResource("repo-release.zip").toURI());
+ FileUtils.unzip(zipFile, repo.getParent());
+// IndexUpgrader.main(new String[]{indexDirectory.toAbsolutePath().toString(), "-delete-prior-commits"});
+ createIndex(REPO_RELEASE, Collections.emptyList(), false, indexDirectory, false);
+
+// indexer.addIndexingContext( REPO_RELEASE, REPO_RELEASE, repo.toFile(), indexDirectory.toFile(),
+// repo.toUri().toURL().toExternalForm(),
+// indexDirectory.toUri().toURL().toString(), indexCreators );
+
+
+ SearchResultLimits limits = new SearchResultLimits(SearchResultLimits.ALL_PAGES);
+ limits.setPageSize(300);
+
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect(archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults searchResults = search.search(null, Arrays.asList(REPO_RELEASE), //
+ "org.example", limits, //
+ Collections.emptyList());
+
+ log.info("results: {}", searchResults.getHits().size());
+
+ assertEquals(255, searchResults.getHits().size());
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId("org.example");
+ searchFields.setRepositories(Arrays.asList(REPO_RELEASE));
+
+ searchResults = search.search(null, searchFields, limits);
+
+ log.info("results: {}", searchResults.getHits().size());
+
+ assertEquals(255, searchResults.getHits().size());
+
+ archivaConfigControl.verify();
+ } finally {
+ FileUtils.deleteQuietly(repo);
+ }
+ }
+}
<logger name="org.apache.http.headers" level="debug"/>
-->
- <logger name="org.apache.archiva.indexer.maven.search.MavenRepositorySearch" level="info"/>
+ <logger name="org.apache.archiva.maven.indexer.search.MavenRepositorySearch" level="info"/>
<logger name="org.apache.archiva.common.plexusbridge.MavenIndexerUtils" level="info"/>