+++ /dev/null
-package org.apache.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.configuration.ArchivaConfiguration;
-import org.apache.archiva.configuration.NetworkProxyConfiguration;
-import org.apache.archiva.configuration.ProxyConnectorConfiguration;
-import org.apache.archiva.policies.Policy;
-import org.apache.archiva.policies.PolicyOption;
-import org.apache.archiva.policies.PolicyUtil;
-import org.apache.archiva.proxy.model.NetworkProxy;
-import org.apache.archiva.proxy.model.ProxyConnector;
-import org.apache.archiva.proxy.model.RepositoryProxyHandler;
-import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
-import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.RemoteRepository;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.event.EventHandler;
-import org.apache.archiva.repository.event.RepositoryRegistryEvent;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-import javax.annotation.PostConstruct;
-import javax.inject.Inject;
-import java.util.*;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-
-/**
- * Default proxy registry implementation. Uses the archiva configuration for accessing and storing the
- * proxy information.
- *
- */
-@SuppressWarnings( "SpringJavaInjectionPointsAutowiringInspection" )
-@Service("proxyRegistry#default")
-public class ArchivaProxyRegistry implements ProxyRegistry, EventHandler<RepositoryRegistryEvent> {
-
- private static final Logger log = LoggerFactory.getLogger(ArchivaProxyRegistry.class);
-
- @Inject
- ArchivaConfiguration archivaConfiguration;
-
- @Inject
- List<RepositoryProxyHandler> repositoryProxyHandlers;
-
- @Inject
- List<Policy> policies;
-
- @Inject
- ArchivaRepositoryRegistry repositoryRegistry;
-
- private Map<String, NetworkProxy> networkProxyMap = new HashMap<>();
- private Map<RepositoryType, List<RepositoryProxyHandler>> handlerMap = new HashMap<>();
- private ProxyConnectorOrderComparator comparator = ProxyConnectorOrderComparator.getInstance();
-
- private Map<String, List<ProxyConnector>> connectorMap = new HashMap<>();
- private List<ProxyConnector> connectorList = new ArrayList<>();
- private Map<Policy, PolicyOption> policyMap = new HashMap<>( );
-
-
- @PostConstruct
- private void init() {
- if (repositoryProxyHandlers == null) {
- repositoryProxyHandlers = new ArrayList<>();
- }
- updateHandler();
- updateConnectors();
- updateNetworkProxies();
- repositoryRegistry.registerEventHandler(RepositoryRegistryEvent.RELOADED, this);
- }
-
- private ArchivaConfiguration getArchivaConfiguration() {
- return archivaConfiguration;
- }
-
- private void updateNetworkProxies() {
- this.networkProxyMap.clear();
- List<NetworkProxyConfiguration> networkProxies = getArchivaConfiguration().getConfiguration().getNetworkProxies();
- for (NetworkProxyConfiguration networkProxyConfig : networkProxies) {
- String key = networkProxyConfig.getId();
-
- NetworkProxy proxy = new NetworkProxy();
-
- proxy.setProtocol(networkProxyConfig.getProtocol());
- proxy.setHost(networkProxyConfig.getHost());
- proxy.setPort(networkProxyConfig.getPort());
- proxy.setUsername(networkProxyConfig.getUsername());
- proxy.setPassword(networkProxyConfig.getPassword()==null? new char[0] : networkProxyConfig.getPassword().toCharArray());
- proxy.setUseNtlm(networkProxyConfig.isUseNtlm());
-
- this.networkProxyMap.put(key, proxy);
- }
- for (RepositoryProxyHandler proxyHandler : repositoryProxyHandlers) {
- proxyHandler.setNetworkProxies(this.networkProxyMap);
- }
- }
-
- private void updateHandler( ) {
-
- for (RepositoryProxyHandler handler : repositoryProxyHandlers) {
- List<RepositoryType> types = handler.supports();
- for (RepositoryType type : types) {
- if (!handlerMap.containsKey(type)) {
- handlerMap.put(type, new ArrayList<>());
- }
- handlerMap.get(type).add(handler);
- }
- handler.setPolicies( policies );
- }
- }
-
- private void updateConnectors() {
- List<ProxyConnectorConfiguration> proxyConnectorConfigurations =
- getArchivaConfiguration().getConfiguration().getProxyConnectors();
-
- connectorList = proxyConnectorConfigurations.stream()
- .map(this::buildProxyConnector)
- .filter(Optional::isPresent)
- .map(Optional::get)
- .sorted(comparator).collect(Collectors.toList());
- connectorMap = connectorList.stream().collect(Collectors.groupingBy(a -> a.getSourceRepository().getId()));
- for (RepositoryProxyHandler handler : repositoryProxyHandlers) {
- handler.setProxyConnectors( connectorList );
- }
- }
-
-
- private Map<Policy, PolicyOption> getPolicyMap(ProxyConnectorConfiguration configuration) {
- Map<String, String> policyConfig = configuration.getPolicies( );
- return policies.stream().collect( Collectors.toMap( Function.identity(), p -> PolicyUtil.findOption( policyConfig.get(p.getId()), p ) ) );
- }
-
- private Optional<ProxyConnector> buildProxyConnector(ProxyConnectorConfiguration configuration) {
- ProxyConnector proxyConnector = new ProxyConnector();
- proxyConnector.setOrder(configuration.getOrder());
- proxyConnector.setBlacklist(configuration.getBlackListPatterns());
- proxyConnector.setWhitelist(configuration.getWhiteListPatterns());
- if (configuration.isDisabled()) {
- proxyConnector.disable();
- } else {
- proxyConnector.enable();
- }
- proxyConnector.setPolicies(getPolicyMap( configuration ));
- proxyConnector.setProperties(configuration.getProperties());
- proxyConnector.setProxyId(configuration.getProxyId());
- ManagedRepository srcRepo = repositoryRegistry.getManagedRepository(configuration.getSourceRepoId());
- if (srcRepo==null) {
- return Optional.empty();
- }
- proxyConnector.setSourceRepository(srcRepo);
- RemoteRepository targetRepo = repositoryRegistry.getRemoteRepository(configuration.getTargetRepoId());
- if (targetRepo==null) {
- return Optional.empty();
- }
- proxyConnector.setTargetRepository(targetRepo);
- return Optional.of(proxyConnector);
- }
-
- @Override
- public NetworkProxy getNetworkProxy(String id) {
- return this.networkProxyMap.get(id);
- }
-
- @Override
- public Map<RepositoryType, List<RepositoryProxyHandler>> getAllHandler() {
- return this.handlerMap;
- }
-
- @Override
- public List<RepositoryProxyHandler> getHandler(RepositoryType type) {
- if (this.handlerMap.containsKey(type)) {
- return this.handlerMap.get(type);
- } else {
- return new ArrayList<>();
- }
- }
-
- @Override
- public boolean hasHandler(RepositoryType type) {
- return this.handlerMap.containsKey(type);
- }
-
-
- @Override
- public List<ProxyConnector> getProxyConnectors() {
- return connectorList;
-
- }
-
- @Override
- public Map<String, List<ProxyConnector>> getProxyConnectorAsMap() {
- return connectorMap;
- }
-
- @Override
- public void reload( )
- {
- init();
- }
-
- @Override
- public void handle(RepositoryRegistryEvent event) {
- log.debug("Reload happened, updating proxy list");
- if (event.getType()== RepositoryRegistryEvent.RELOADED) {
- init();
- }
- }
-}
+++ /dev/null
-package org.apache.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.checksum.ChecksumAlgorithm;
-import org.apache.archiva.checksum.ChecksumUtil;
-import org.apache.archiva.common.filelock.FileLockManager;
-import org.apache.archiva.common.utils.PathUtil;
-import org.apache.archiva.components.taskqueue.TaskQueueException;
-import org.apache.archiva.configuration.ArchivaConfiguration;
-import org.apache.archiva.configuration.ProxyConnectorConfiguration;
-import org.apache.archiva.configuration.ProxyConnectorRuleConfiguration;
-import org.apache.archiva.policies.DownloadErrorPolicy;
-import org.apache.archiva.policies.DownloadPolicy;
-import org.apache.archiva.policies.Policy;
-import org.apache.archiva.policies.PolicyConfigurationException;
-import org.apache.archiva.policies.PolicyOption;
-import org.apache.archiva.policies.PolicyViolationException;
-import org.apache.archiva.policies.PostDownloadPolicy;
-import org.apache.archiva.policies.PreDownloadPolicy;
-import org.apache.archiva.policies.ProxyDownloadException;
-import org.apache.archiva.policies.urlcache.UrlFailureCache;
-import org.apache.archiva.proxy.model.NetworkProxy;
-import org.apache.archiva.proxy.model.ProxyConnector;
-import org.apache.archiva.proxy.model.ProxyFetchResult;
-import org.apache.archiva.proxy.model.RepositoryProxyHandler;
-import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.RemoteRepository;
-import org.apache.archiva.repository.RemoteRepositoryContent;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.repository.content.Artifact;
-import org.apache.archiva.repository.content.ContentItem;
-import org.apache.archiva.repository.content.ItemSelector;
-import org.apache.archiva.repository.metadata.RepositoryMetadataException;
-import org.apache.archiva.repository.metadata.base.MetadataTools;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.archiva.repository.storage.fs.FilesystemStorage;
-import org.apache.archiva.repository.storage.fs.FsStorageUtil;
-import org.apache.archiva.scheduler.ArchivaTaskScheduler;
-import org.apache.archiva.scheduler.repository.model.RepositoryTask;
-import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MarkerFactory;
-
-import javax.annotation.PostConstruct;
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardCopyOption;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-public abstract class DefaultRepositoryProxyHandler implements RepositoryProxyHandler {
-
- protected Logger log = LoggerFactory.getLogger( DefaultRepositoryProxyHandler.class );
- @Inject
- protected UrlFailureCache urlFailureCache;
-
- @Inject
- @Named(value = "metadataTools#default")
- private MetadataTools metadataTools;
-
- private Map<String, PreDownloadPolicy> preDownloadPolicies = new HashMap<>( );
- private Map<String, PostDownloadPolicy> postDownloadPolicies = new HashMap<>( );
- private Map<String, DownloadErrorPolicy> downloadErrorPolicies = new HashMap<>( );
- private ConcurrentMap<String, List<ProxyConnector>> proxyConnectorMap = new ConcurrentHashMap<>();
-
- @Inject
- @Named(value = "archivaTaskScheduler#repository")
- private ArchivaTaskScheduler<RepositoryTask> scheduler;
-
- @Inject
- private ArchivaConfiguration archivaConfiguration;
-
- @Inject
- @Named(value = "fileLockManager#default")
- private FileLockManager fileLockManager;
-
- private Map<String, NetworkProxy> networkProxyMap = new ConcurrentHashMap<>();
- private List<ChecksumAlgorithm> checksumAlgorithms;
-
- @PostConstruct
- public void initialize()
- {
- checksumAlgorithms = ChecksumUtil.getAlgorithms(archivaConfiguration.getConfiguration().getArchivaRuntimeConfiguration().getChecksumTypes());
- }
-
- private List<ProxyConnectorRuleConfiguration> findProxyConnectorRules(String sourceRepository,
- String targetRepository,
- List<ProxyConnectorRuleConfiguration> all )
- {
- List<ProxyConnectorRuleConfiguration> proxyConnectorRuleConfigurations = new ArrayList<>();
-
- for ( ProxyConnectorRuleConfiguration proxyConnectorRuleConfiguration : all )
- {
- for ( ProxyConnectorConfiguration proxyConnector : proxyConnectorRuleConfiguration.getProxyConnectors() )
- {
- if ( StringUtils.equals( sourceRepository, proxyConnector.getSourceRepoId() ) && StringUtils.equals(
- targetRepository, proxyConnector.getTargetRepoId() ) )
- {
- proxyConnectorRuleConfigurations.add( proxyConnectorRuleConfiguration );
- }
- }
- }
-
- return proxyConnectorRuleConfigurations;
- }
-
- @Override
- public StorageAsset fetchFromProxies( ManagedRepository repository, Artifact artifact )
- throws ProxyDownloadException
- {
- Map<String, Exception> previousExceptions = new LinkedHashMap<>();
- StorageAsset localFile = artifact.getAsset( );
-
- Properties requestProperties = new Properties();
- requestProperties.setProperty( "filetype", "artifact" );
- requestProperties.setProperty( "version", artifact.getVersion().getId() );
- requestProperties.setProperty( "managedRepositoryId", repository.getId() );
-
- List<ProxyConnector> connectors = getProxyConnectors( repository );
- for ( ProxyConnector connector : connectors )
- {
- if ( !connector.isEnabled() )
- {
- continue;
- }
-
- RemoteRepository targetRepository = connector.getTargetRepository();
- requestProperties.setProperty( "remoteRepositoryId", targetRepository.getId() );
-
- StorageAsset targetFile = targetRepository.getAsset( localFile.getPath( ) );
- // Removing the leading '/' from the path
- String targetPath = targetFile.getPath( ).substring( 1 );
- try
- {
- StorageAsset downloadedFile =
- transferFile( connector, targetRepository, targetPath, repository, localFile, requestProperties,
- true );
-
- if ( fileExists(downloadedFile) )
- {
- log.debug( "Successfully transferred: {}", downloadedFile.getPath() );
- return downloadedFile;
- }
- }
- catch ( NotFoundException e )
- {
- log.debug( "Artifact {} not found on repository \"{}\".", artifact.getId(),
- targetRepository.getId() );
- }
- catch ( NotModifiedException e )
- {
- log.debug( "Artifact {} not updated on repository \"{}\".", artifact.getId(),
- targetRepository.getId() );
- }
- catch ( ProxyException e )
- {
- validatePolicies( this.downloadErrorPolicies, connector.getPolicies(), requestProperties, artifact,
- targetRepository.getContent(), localFile, e, previousExceptions );
- }
- }
-
- if ( !previousExceptions.isEmpty() )
- {
- throw new ProxyDownloadException( "Failures occurred downloading from some remote repositories",
- previousExceptions );
- }
-
- log.debug( "Exhausted all target repositories, artifact {} not found.", artifact.getId() );
-
- return null;
- }
-
- @Override
- public StorageAsset fetchFromProxies( ManagedRepository repository, ItemSelector artifactSelector )
- throws ProxyDownloadException
- {
- Map<String, Exception> previousExceptions = new LinkedHashMap<>();
- ContentItem item = repository.getContent( ).getItem( artifactSelector );
- StorageAsset localFile = item.getAsset( );
-
- Properties requestProperties = new Properties();
- requestProperties.setProperty( "filetype", "artifact" );
- requestProperties.setProperty( "version", artifactSelector.getVersion() );
- requestProperties.setProperty( "managedRepositoryId", repository.getId() );
-
- List<ProxyConnector> connectors = getProxyConnectors( repository );
- for ( ProxyConnector connector : connectors )
- {
- if ( !connector.isEnabled() )
- {
- continue;
- }
-
- RemoteRepository targetRepository = connector.getTargetRepository();
- requestProperties.setProperty( "remoteRepositoryId", targetRepository.getId() );
-
- StorageAsset targetFile = targetRepository.getAsset( localFile.getPath( ) );
- // Removing the leading '/' from the path
- String targetPath = targetFile.getPath( ).substring( 1 );
- try
- {
- StorageAsset downloadedFile =
- transferFile( connector, targetRepository, targetPath, repository, localFile, requestProperties,
- true );
-
- if ( fileExists(downloadedFile) )
- {
- log.debug( "Successfully transferred: {}", downloadedFile.getPath() );
- return downloadedFile;
- }
- }
- catch ( NotFoundException e )
- {
- log.debug( "Artifact {} not found on repository \"{}\".", item,
- targetRepository.getId() );
- }
- catch ( NotModifiedException e )
- {
- log.debug( "Artifact {} not updated on repository \"{}\".", item,
- targetRepository.getId() );
- }
- catch ( ProxyException e )
- {
- validatePolicies( this.downloadErrorPolicies, connector.getPolicies(), requestProperties, item,
- targetRepository.getContent(), localFile, e, previousExceptions );
- }
- }
-
- if ( !previousExceptions.isEmpty() )
- {
- throw new ProxyDownloadException( "Failures occurred downloading from some remote repositories",
- previousExceptions );
- }
-
- log.debug( "Exhausted all target repositories, artifact {} not found.", item );
-
- return null;
- }
-
- @Override
- public StorageAsset fetchFromProxies( ManagedRepository repository, String path )
- {
- StorageAsset localFile = repository.getAsset( path );
-
- // no update policies for these paths
- if ( localFile.exists() )
- {
- return null;
- }
-
- Properties requestProperties = new Properties();
- requestProperties.setProperty( "filetype", "resource" );
- requestProperties.setProperty( "managedRepositoryId", repository.getId() );
-
- List<ProxyConnector> connectors = getProxyConnectors( repository );
- for ( ProxyConnector connector : connectors )
- {
- if ( !connector.isEnabled() )
- {
- continue;
- }
-
- RemoteRepository targetRepository = connector.getTargetRepository();
- requestProperties.setProperty( "remoteRepositoryId", targetRepository.getId() );
-
- String targetPath = path;
-
- try
- {
- StorageAsset downloadedFile =
- transferFile( connector, targetRepository, targetPath, repository, localFile, requestProperties,
- false );
-
- if ( fileExists( downloadedFile ) )
- {
- log.debug( "Successfully transferred: {}", downloadedFile.getPath() );
- return downloadedFile;
- }
- }
- catch ( NotFoundException e )
- {
- log.debug( "Resource {} not found on repository \"{}\".", path,
- targetRepository.getId() );
- }
- catch ( NotModifiedException e )
- {
- log.debug( "Resource {} not updated on repository \"{}\".", path,
- targetRepository.getId() );
- }
- catch ( ProxyException e )
- {
- log.warn(
- "Transfer error from repository {} for resource {}, continuing to next repository. Error message: {}",
- targetRepository.getId(), path, e.getMessage() );
- log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ),
- "Transfer error from repository \"{}"
- + "\" for resource {}, continuing to next repository. Error message: {}",
- targetRepository.getId(), path, e.getMessage(), e );
- }
-
- }
-
- log.debug( "Exhausted all target repositories, resource {} not found.", path );
-
- return null;
- }
-
- @Override
- public ProxyFetchResult fetchMetadataFromProxies( ManagedRepository repository, String rawLogicalPath )
- {
- String logicalPath;
- if (rawLogicalPath.startsWith( "/" )){
- logicalPath = rawLogicalPath.substring( 1 );
- } else {
- logicalPath = rawLogicalPath;
- }
- StorageAsset localFile = repository.getAsset( logicalPath );
-
- Properties requestProperties = new Properties();
- requestProperties.setProperty( "filetype", "metadata" );
- boolean metadataNeedsUpdating = false;
- long originalTimestamp = getLastModified( localFile );
-
- List<ProxyConnector> connectors = new ArrayList<>( getProxyConnectors( repository ) );
- for ( ProxyConnector connector : connectors )
- {
- if ( !connector.isEnabled() )
- {
- continue;
- }
-
- RemoteRepository targetRepository = connector.getTargetRepository();
-
- StorageAsset localRepoFile = toLocalRepoFile( repository, targetRepository.getContent(), logicalPath );
- long originalMetadataTimestamp = getLastModified( localRepoFile );
-
- try
- {
- transferFile( connector, targetRepository, logicalPath, repository, localRepoFile, requestProperties,
- true );
-
- if ( hasBeenUpdated( localRepoFile, originalMetadataTimestamp ) )
- {
- metadataNeedsUpdating = true;
- }
- }
- catch ( NotFoundException e )
- {
-
- log.debug( "Metadata {} not found on remote repository '{}'.", logicalPath,
- targetRepository.getId(), e );
-
- }
- catch ( NotModifiedException e )
- {
-
- log.debug( "Metadata {} not updated on remote repository '{}'.", logicalPath,
- targetRepository.getId(), e );
-
- }
- catch ( ProxyException e )
- {
- log.warn(
- "Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
- targetRepository.getId(), logicalPath, e.getMessage() );
- log.debug( "Full stack trace", e );
- }
- }
-
- if ( hasBeenUpdated( localFile, originalTimestamp ) )
- {
- metadataNeedsUpdating = true;
- }
-
- if ( metadataNeedsUpdating || !localFile.exists())
- {
- try
- {
- metadataTools.updateMetadata( repository.getContent(), logicalPath );
- }
- catch ( RepositoryMetadataException e )
- {
- log.warn( "Unable to update metadata {}:{}", localFile.getPath(), e.getMessage(), e );
- }
-
- }
-
- if ( fileExists( localFile ) )
- {
- return new ProxyFetchResult( localFile, metadataNeedsUpdating );
- }
-
- return new ProxyFetchResult( null, false );
- }
-
- private long getLastModified(StorageAsset file )
- {
- if ( !file.exists() || file.isContainer() )
- {
- return 0;
- }
-
- return file.getModificationTime().toEpochMilli();
- }
-
- private boolean hasBeenUpdated(StorageAsset file, long originalLastModified )
- {
- if ( !file.exists() || file.isContainer() )
- {
- return false;
- }
-
- long currentLastModified = getLastModified( file );
- return ( currentLastModified > originalLastModified );
- }
-
- private StorageAsset toLocalRepoFile( ManagedRepository repository, RemoteRepositoryContent targetRepository,
- String targetPath )
- {
- String repoPath = metadataTools.getRepositorySpecificName( targetRepository, targetPath );
- return repository.getAsset( repoPath );
- }
-
- /**
- * Test if the provided ManagedRepositoryContent has any proxies configured for it.
- * @param repository
- */
- @Override
- public boolean hasProxies( ManagedRepository repository )
- {
- synchronized ( this.proxyConnectorMap )
- {
- return this.proxyConnectorMap.containsKey( repository.getId() );
- }
- }
-
- /**
- * Simple method to test if the file exists on the local disk.
- *
- * @param file the file to test. (may be null)
- * @return true if file exists. false if the file param is null, doesn't exist, or is not of type File.
- */
- private boolean fileExists( StorageAsset file )
- {
- if ( file == null )
- {
- return false;
- }
-
- if ( !file.exists())
- {
- return false;
- }
-
- return !file.isContainer();
- }
-
- /**
- * Perform the transfer of the file.
- *
- * @param connector the connector configuration to use.
- * @param remoteRepository the remote repository get the resource from.
- * @param remotePath the path in the remote repository to the resource to get.
- * @param repository the managed repository that will hold the file
- * @param resource the path relative to the repository storage where the file should be downloaded to
- * @param requestProperties the request properties to utilize for policy handling.
- * @param executeConsumers whether to execute the consumers after proxying
- * @return the local file that was downloaded, or null if not downloaded.
- * @throws NotFoundException if the file was not found on the remote repository.
- * @throws NotModifiedException if the localFile was present, and the resource was present on remote repository, but
- * the remote resource is not newer than the local File.
- * @throws ProxyException if transfer was unsuccessful.
- */
- protected StorageAsset transferFile( ProxyConnector connector, RemoteRepository remoteRepository, String remotePath,
- ManagedRepository repository, StorageAsset resource, Properties requestProperties,
- boolean executeConsumers )
- throws ProxyException, NotModifiedException
- {
- String url = null;
- try
- {
- url = remoteRepository.getLocation().toURL().toString();
- }
- catch ( MalformedURLException e )
- {
- throw new ProxyException( e.getMessage(), e );
- }
- if ( !url.endsWith( "/" ) )
- {
- url = url + "/";
- }
- if (remotePath.startsWith( "/" )) {
- url = url + remotePath.substring( 1 );
- } else {
- url = url + remotePath;
- }
- requestProperties.setProperty( "url", url );
-
- // Is a whitelist defined?
- if ( CollectionUtils.isNotEmpty( connector.getWhitelist() ) )
- {
- // Path must belong to whitelist.
- if ( !matchesPattern( remotePath, connector.getWhitelist() ) )
- {
- log.debug( "Path [{}] is not part of defined whitelist (skipping transfer from repository [{}]).",
- remotePath, remoteRepository.getId() );
- return null;
- }
- }
-
- // Is target path part of blacklist?
- if ( matchesPattern( remotePath, connector.getBlacklist() ) )
- {
- log.debug( "Path [{}] is part of blacklist (skipping transfer from repository [{}]).", remotePath,
- remoteRepository.getId() );
- return null;
- }
-
- // Handle pre-download policy
- try
- {
- validatePolicies( this.preDownloadPolicies, connector.getPolicies(), requestProperties, resource );
- }
- catch ( PolicyViolationException e )
- {
- String emsg = "Transfer not attempted on " + url + " : " + e.getMessage();
- if ( resource.exists() )
- {
- log.debug( "{} : using already present local file.", emsg );
- return resource;
- }
-
- log.debug( emsg );
- return null;
- }
-
- Path workingDirectory = createWorkingDirectory( repository );
- FilesystemStorage tmpStorage = null;
- try
- {
- tmpStorage = new FilesystemStorage( workingDirectory, fileLockManager );
- }
- catch ( IOException e )
- {
- throw new ProxyException( "Could not create tmp storage" );
- }
- StorageAsset tmpResource = tmpStorage.getAsset( resource.getName( ) );
- StorageAsset[] tmpChecksumFiles = new StorageAsset[checksumAlgorithms.size()];
- for(int i=0; i<checksumAlgorithms.size(); i++) {
- ChecksumAlgorithm alg = checksumAlgorithms.get( i );
- tmpChecksumFiles[i] = tmpStorage.getAsset( resource.getName() + "." + alg.getDefaultExtension() );
- }
-
- try
- {
-
- transferResources( connector, remoteRepository, tmpResource,tmpChecksumFiles , url, remotePath,
- resource, workingDirectory, repository );
-
- // Handle post-download policies.
- try
- {
- validatePolicies( this.postDownloadPolicies, connector.getPolicies(), requestProperties, tmpResource );
- }
- catch ( PolicyViolationException e )
- {
- log.warn( "Transfer invalidated from {} : {}", url, e.getMessage() );
- executeConsumers = false;
- if ( !fileExists( tmpResource ) )
- {
- resource = null;
- }
- }
-
- if ( resource != null )
- {
- synchronized ( resource.getPath().intern() )
- {
- StorageAsset directory = resource.getParent();
- for (int i=0; i<tmpChecksumFiles.length; i++) {
- moveFileIfExists( tmpChecksumFiles[i], directory );
- }
- moveFileIfExists( tmpResource, directory );
- }
- }
- }
- finally
- {
- org.apache.archiva.common.utils.FileUtils.deleteQuietly( workingDirectory );
- }
-
- if ( executeConsumers )
- {
- // Just-in-time update of the index and database by executing the consumers for this artifact
- //consumers.executeConsumers( connector.getSourceRepository().getRepository(), resource );
- queueRepositoryTask( connector.getSourceRepository().getId(), resource );
- }
-
- return resource;
- }
-
- protected abstract void transferResources( ProxyConnector connector, RemoteRepository remoteRepository,
- StorageAsset tmpResource, StorageAsset[] checksumFiles, String url, String remotePath, StorageAsset resource, Path workingDirectory,
- ManagedRepository repository ) throws ProxyException;
-
- private void queueRepositoryTask(String repositoryId, StorageAsset localFile )
- {
- RepositoryTask task = new RepositoryTask();
- task.setRepositoryId( repositoryId );
- task.setResourceFile( localFile );
- task.setUpdateRelatedArtifacts( true );
- task.setScanAll( true );
-
- try
- {
- scheduler.queueTask( task );
- }
- catch ( TaskQueueException e )
- {
- log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
- + "'].", localFile.getName() );
- }
- }
-
- /**
- * Moves the file into repository location if it exists
- *
- * @param fileToMove this could be either the main artifact, sha1 or md5 checksum file.
- * @param directory directory to write files to
- */
- private void moveFileIfExists( StorageAsset fileToMove, StorageAsset directory )
- throws ProxyException
- {
- if ( fileToMove != null && fileToMove.exists() )
- {
- StorageAsset newLocation = directory.getStorage().getAsset( directory.getPath()+ "/" + fileToMove.getName());
- moveTempToTarget( fileToMove, newLocation );
- }
- }
-
- /**
- * Apply the policies.
- *
- * @param policies the map of policies to execute. (Map of String policy keys, to {@link DownloadPolicy} objects)
- * @param settings the map of settings for the policies to execute. (Map of String policy keys, to String policy
- * setting)
- * @param request the request properties (utilized by the {@link DownloadPolicy#applyPolicy(PolicyOption, Properties, StorageAsset)}
- * )
- * @param localFile the local file (utilized by the {@link DownloadPolicy#applyPolicy(PolicyOption, Properties, StorageAsset)})
- * @throws PolicyViolationException
- */
- private void validatePolicies( Map<String, ? extends DownloadPolicy> policies, Map<Policy, PolicyOption> settings,
- Properties request, StorageAsset localFile )
- throws PolicyViolationException
- {
- for ( Map.Entry<String, ? extends DownloadPolicy> entry : policies.entrySet() )
- {
- // olamy with spring rolehint is now downloadPolicy#hint
- // so substring after last # to get the hint as with plexus
- String key = entry.getValue( ).getId( );
- DownloadPolicy policy = entry.getValue();
- PolicyOption option = settings.containsKey(policy ) ? settings.get(policy) : policy.getDefaultOption();
-
- log.debug( "Applying [{}] policy with [{}]", key, option );
- try
- {
- policy.applyPolicy( option, request, localFile );
- }
- catch ( PolicyConfigurationException e )
- {
- log.error( e.getMessage(), e );
- }
- }
- }
-
- private void validatePolicies( Map<String, DownloadErrorPolicy> policies, Map<Policy, PolicyOption> settings,
- Properties request, ContentItem artifact, RemoteRepositoryContent content,
- StorageAsset localFile, Exception exception, Map<String, Exception> previousExceptions )
- throws ProxyDownloadException
- {
- boolean process = true;
- for ( Map.Entry<String, ? extends DownloadErrorPolicy> entry : policies.entrySet() )
- {
-
- // olamy with spring rolehint is now downloadPolicy#hint
- // so substring after last # to get the hint as with plexus
- String key = entry.getValue( ).getId( );
- DownloadErrorPolicy policy = entry.getValue();
- PolicyOption option = settings.containsKey( policy ) ? settings.get(policy) : policy.getDefaultOption();
-
- log.debug( "Applying [{}] policy with [{}]", key, option );
- try
- {
- // all policies must approve the exception, any can cancel
- process = policy.applyPolicy( option, request, localFile, exception, previousExceptions );
- if ( !process )
- {
- break;
- }
- }
- catch ( PolicyConfigurationException e )
- {
- log.error( e.getMessage(), e );
- }
- }
-
- if ( process )
- {
- // if the exception was queued, don't throw it
- if ( !previousExceptions.containsKey( content.getId() ) )
- {
- throw new ProxyDownloadException(
- "An error occurred in downloading from the remote repository, and the policy is to fail immediately",
- content.getId(), exception );
- }
- }
- else
- {
- // if the exception was queued, but cancelled, remove it
- previousExceptions.remove( content.getId() );
- }
-
- log.warn(
- "Transfer error from repository {} for artifact {} , continuing to next repository. Error message: {}",
- content.getRepository().getId(), artifact, exception.getMessage() );
- log.debug( "Full stack trace", exception );
- }
-
- /**
- * Creates a working directory
- *
- * @param repository
- * @return file location of working directory
- */
- private Path createWorkingDirectory( ManagedRepository repository )
- {
- try
- {
- return Files.createTempDirectory( "temp" );
- }
- catch ( IOException e )
- {
- throw new RuntimeException( e.getMessage(), e );
- }
-
- }
-
- /**
- * Used to move the temporary file to its real destination. This is patterned from the way WagonManager handles its
- * downloaded files.
- *
- * @param temp The completed download file
- * @param target The final location of the downloaded file
- * @throws ProxyException when the temp file cannot replace the target file
- */
- private void moveTempToTarget( StorageAsset temp, StorageAsset target )
- throws ProxyException
- {
-
- try
- {
- org.apache.archiva.repository.storage.util.StorageUtil.moveAsset( temp, target, true , StandardCopyOption.REPLACE_EXISTING);
- }
- catch ( IOException e )
- {
- log.error( "Move failed from {} to {}, trying copy.", temp, target );
- try
- {
- FsStorageUtil.copyAsset( temp, target, true );
- if (temp.exists()) {
- temp.getStorage( ).removeAsset( temp );
- }
- }
- catch ( IOException ex )
- {
- log.error("Copy failed from {} to {}: ({}) {}", temp, target, e.getClass(), e.getMessage());
- throw new ProxyException("Could not move temp file "+temp.getPath()+" to target "+target.getPath()+": ("+e.getClass()+") "+e.getMessage(), e);
- }
- }
- }
-
- /**
- * Tests whitelist and blacklist patterns against path.
- *
- * @param path the path to test.
- * @param patterns the list of patterns to check.
- * @return true if the path matches at least 1 pattern in the provided patterns list.
- */
- private boolean matchesPattern( String path, List<String> patterns )
- {
- if ( CollectionUtils.isEmpty( patterns ) )
- {
- return false;
- }
-
- if ( !path.startsWith( "/" ) )
- {
- path = "/" + path;
- }
-
- for ( String pattern : patterns )
- {
- if ( !pattern.startsWith( "/" ) )
- {
- pattern = "/" + pattern;
- }
-
- if ( PathUtil.matchPath( pattern, path, false ) )
- {
- return true;
- }
- }
-
- return false;
- }
-
- /**
- * TODO: Ensure that list is correctly ordered based on configuration. See MRM-477
- * @param repository
- */
- @Override
- public List<ProxyConnector> getProxyConnectors( ManagedRepository repository )
- {
-
- if ( !this.proxyConnectorMap.containsKey( repository.getId() ) )
- {
- return Collections.emptyList();
- }
- List<ProxyConnector> ret = new ArrayList<>( this.proxyConnectorMap.get( repository.getId() ) );
-
- Collections.sort( ret, ProxyConnectorOrderComparator.getInstance() );
- return ret;
-
- }
-
-
- protected String addParameters(String path, RemoteRepository remoteRepository )
- {
- if ( remoteRepository.getExtraParameters().isEmpty() )
- {
- return path;
- }
-
- boolean question = false;
-
- StringBuilder res = new StringBuilder( path == null ? "" : path );
-
- for ( Map.Entry<String, String> entry : remoteRepository.getExtraParameters().entrySet() )
- {
- if ( !question )
- {
- res.append( '?' ).append( entry.getKey() ).append( '=' ).append( entry.getValue() );
- }
- }
-
- return res.toString();
- }
-
- public void setArchivaConfiguration(ArchivaConfiguration archivaConfiguration )
- {
- this.archivaConfiguration = archivaConfiguration;
- }
-
- public MetadataTools getMetadataTools()
- {
- return metadataTools;
- }
-
- public void setMetadataTools(MetadataTools metadataTools )
- {
- this.metadataTools = metadataTools;
- }
-
- public UrlFailureCache getUrlFailureCache()
- {
- return urlFailureCache;
- }
-
- public void setUrlFailureCache(UrlFailureCache urlFailureCache )
- {
- this.urlFailureCache = urlFailureCache;
- }
-
- public Map<String, PreDownloadPolicy> getPreDownloadPolicies()
- {
- return preDownloadPolicies;
- }
-
- public void setPreDownloadPolicies(Map<String, PreDownloadPolicy> preDownloadPolicies )
- {
- this.preDownloadPolicies = preDownloadPolicies;
- }
-
- public Map<String, PostDownloadPolicy> getPostDownloadPolicies()
- {
- return postDownloadPolicies;
- }
-
- public void setPostDownloadPolicies(Map<String, PostDownloadPolicy> postDownloadPolicies )
- {
- this.postDownloadPolicies = postDownloadPolicies;
- }
-
- public Map<String, DownloadErrorPolicy> getDownloadErrorPolicies()
- {
- return downloadErrorPolicies;
- }
-
- public void setDownloadErrorPolicies(Map<String, DownloadErrorPolicy> downloadErrorPolicies )
- {
- this.downloadErrorPolicies = downloadErrorPolicies;
- }
-
- @Override
- public void setNetworkProxies(Map<String, NetworkProxy> networkProxies ) {
- this.networkProxyMap.clear();
- this.networkProxyMap.putAll( networkProxies );
- }
-
- @Override
- public NetworkProxy getNetworkProxy(String id) {
- return this.networkProxyMap.get(id);
- }
-
- @Override
- public Map<String, NetworkProxy> getNetworkProxies() {
- return this.networkProxyMap;
- }
-
- @Override
- public abstract List<RepositoryType> supports();
-
- @Override
- public void setPolicies( List<Policy> policyList )
- {
- preDownloadPolicies.clear();
- postDownloadPolicies.clear();
- downloadErrorPolicies.clear();
- for (Policy policy : policyList) {
- addPolicy( policy );
- }
- }
-
- void addPolicy(PreDownloadPolicy policy) {
- preDownloadPolicies.put( policy.getId( ), policy );
- }
-
- void addPolicy(PostDownloadPolicy policy) {
- postDownloadPolicies.put( policy.getId( ), policy );
- }
- void addPolicy(DownloadErrorPolicy policy) {
- downloadErrorPolicies.put( policy.getId( ), policy );
- }
-
- @Override
- public void addPolicy( Policy policy )
- {
- if (policy instanceof PreDownloadPolicy) {
- addPolicy( (PreDownloadPolicy)policy );
- } else if (policy instanceof PostDownloadPolicy) {
- addPolicy( (PostDownloadPolicy) policy );
- } else if (policy instanceof DownloadErrorPolicy) {
- addPolicy( (DownloadErrorPolicy) policy );
- } else {
- log.warn( "Policy not known: {}, {}", policy.getId( ), policy.getClass( ).getName( ) );
- }
- }
-
- @Override
- public void removePolicy( Policy policy )
- {
- final String id = policy.getId();
- if (preDownloadPolicies.containsKey( id )) {
- preDownloadPolicies.remove( id );
- } else if (postDownloadPolicies.containsKey( id )) {
- postDownloadPolicies.remove( id );
- } else if (downloadErrorPolicies.containsKey( id )) {
- downloadErrorPolicies.remove( id );
- }
- }
-
- @Override
- public void addProxyConnector( ProxyConnector connector )
- {
- final String sourceId = connector.getSourceRepository( ).getId( );
- List<ProxyConnector> connectors;
- if (proxyConnectorMap.containsKey( sourceId )) {
- connectors = proxyConnectorMap.get( sourceId );
- } else {
- connectors = new ArrayList<>( );
- proxyConnectorMap.put( sourceId, connectors );
- }
- connectors.add( connector );
- }
-
- @Override
- public void setProxyConnectors( List<ProxyConnector> proxyConnectors )
- {
- proxyConnectorMap.clear();
- for ( ProxyConnector connector : proxyConnectors )
- {
- addProxyConnector( connector );
- }
- }
-}
+++ /dev/null
-package org.apache.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * NotFoundException - thrown when the resource requested was not found on the remote repository.
- *
- *
- */
-public class NotFoundException
- extends ProxyException
-{
- public NotFoundException( String message, Throwable t )
- {
- super( message, t );
- }
-
- public NotFoundException( String message )
- {
- super( message );
- }
-}
+++ /dev/null
-package org.apache.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * <p>
- * NotModifiedException - thrown when the resource requested was found on the remote repository, but
- * the remote repository reported that the copy we have in our managed repository is newer than
- * the one present on the remote repository.
- * </p>
- * <p>
- * Similar in scope to the <code>HTTP 304 Not Modified</code> response code.
- * </p>
- *
- *
- */
-public class NotModifiedException
- extends ProxyException
-{
-
- public NotModifiedException( String message )
- {
- super( message );
- }
-
- public NotModifiedException( String message, Throwable t )
- {
- super( message, t );
- }
-}
+++ /dev/null
-package org.apache.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.proxy.model.ProxyConnector;
-
-import java.util.Comparator;
-
-/**
- * ProxyConnectorOrderComparator
- *
- *
- */
-public class ProxyConnectorOrderComparator
- implements Comparator<ProxyConnector>
-{
- private static ProxyConnectorOrderComparator INSTANCE = new ProxyConnectorOrderComparator();
-
- public static ProxyConnectorOrderComparator getInstance()
- {
- return INSTANCE;
- }
-
- @Override
- public int compare( ProxyConnector o1, ProxyConnector o2 )
- {
- if ( o1 == null && o2 == null )
- {
- return 0;
- }
-
- // Ensure null goes to end of list.
- if ( o1 == null && o2 != null )
- {
- return 1;
- }
-
- if ( o1 != null && o2 == null )
- {
- return -1;
- }
-
- // Ensure 0 (unordered) goes to end of list.
- if ( o1.getOrder() == 0 && o2.getOrder() != 0 )
- {
- return 1;
- }
-
- if ( o1.getOrder() != 0 && o2.getOrder() == 0 )
- {
- return -1;
- }
-
- return o1.getOrder() - o2.getOrder();
- }
-}
+++ /dev/null
-package org.apache.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- */
-public class ProxyException
- extends Exception
-{
- public ProxyException( String message )
- {
- super( message );
- }
-
- public ProxyException( String message, Throwable t )
- {
- super( message, t );
- }
-}
--- /dev/null
+package org.apache.archiva.proxy.base;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.configuration.ArchivaConfiguration;
+import org.apache.archiva.configuration.NetworkProxyConfiguration;
+import org.apache.archiva.configuration.ProxyConnectorConfiguration;
+import org.apache.archiva.policies.Policy;
+import org.apache.archiva.policies.PolicyOption;
+import org.apache.archiva.policies.PolicyUtil;
+import org.apache.archiva.proxy.ProxyRegistry;
+import org.apache.archiva.proxy.model.NetworkProxy;
+import org.apache.archiva.proxy.model.ProxyConnector;
+import org.apache.archiva.proxy.model.RepositoryProxyHandler;
+import org.apache.archiva.repository.base.ArchivaRepositoryRegistry;
+import org.apache.archiva.repository.ManagedRepository;
+import org.apache.archiva.repository.RemoteRepository;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.archiva.event.EventHandler;
+import org.apache.archiva.repository.event.RepositoryRegistryEvent;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+import java.util.*;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * Default proxy registry implementation. Uses the archiva configuration for accessing and storing the
+ * proxy information.
+ *
+ */
+@SuppressWarnings( "SpringJavaInjectionPointsAutowiringInspection" )
+@Service("proxyRegistry#default")
+public class ArchivaProxyRegistry implements ProxyRegistry, EventHandler<RepositoryRegistryEvent> {
+
+ private static final Logger log = LoggerFactory.getLogger(ArchivaProxyRegistry.class);
+
+ @Inject
+ ArchivaConfiguration archivaConfiguration;
+
+ @Inject
+ List<RepositoryProxyHandler> repositoryProxyHandlers;
+
+ @Inject
+ List<Policy> policies;
+
+ @Inject
+ ArchivaRepositoryRegistry repositoryRegistry;
+
+ private Map<String, NetworkProxy> networkProxyMap = new HashMap<>();
+ private Map<RepositoryType, List<RepositoryProxyHandler>> handlerMap = new HashMap<>();
+ private ProxyConnectorOrderComparator comparator = ProxyConnectorOrderComparator.getInstance();
+
+ private Map<String, List<ProxyConnector>> connectorMap = new HashMap<>();
+ private List<ProxyConnector> connectorList = new ArrayList<>();
+ private Map<Policy, PolicyOption> policyMap = new HashMap<>( );
+
+
+ @PostConstruct
+ private void init() {
+ if (repositoryProxyHandlers == null) {
+ repositoryProxyHandlers = new ArrayList<>();
+ }
+ updateHandler();
+ updateConnectors();
+ updateNetworkProxies();
+ repositoryRegistry.registerEventHandler(RepositoryRegistryEvent.RELOADED, this);
+ }
+
+ private ArchivaConfiguration getArchivaConfiguration() {
+ return archivaConfiguration;
+ }
+
+ private void updateNetworkProxies() {
+ this.networkProxyMap.clear();
+ List<NetworkProxyConfiguration> networkProxies = getArchivaConfiguration().getConfiguration().getNetworkProxies();
+ for (NetworkProxyConfiguration networkProxyConfig : networkProxies) {
+ String key = networkProxyConfig.getId();
+
+ NetworkProxy proxy = new NetworkProxy();
+
+ proxy.setProtocol(networkProxyConfig.getProtocol());
+ proxy.setHost(networkProxyConfig.getHost());
+ proxy.setPort(networkProxyConfig.getPort());
+ proxy.setUsername(networkProxyConfig.getUsername());
+ proxy.setPassword(networkProxyConfig.getPassword()==null? new char[0] : networkProxyConfig.getPassword().toCharArray());
+ proxy.setUseNtlm(networkProxyConfig.isUseNtlm());
+
+ this.networkProxyMap.put(key, proxy);
+ }
+ for (RepositoryProxyHandler proxyHandler : repositoryProxyHandlers) {
+ proxyHandler.setNetworkProxies(this.networkProxyMap);
+ }
+ }
+
+ private void updateHandler( ) {
+
+ for (RepositoryProxyHandler handler : repositoryProxyHandlers) {
+ List<RepositoryType> types = handler.supports();
+ for (RepositoryType type : types) {
+ if (!handlerMap.containsKey(type)) {
+ handlerMap.put(type, new ArrayList<>());
+ }
+ handlerMap.get(type).add(handler);
+ }
+ handler.setPolicies( policies );
+ }
+ }
+
+ private void updateConnectors() {
+ List<ProxyConnectorConfiguration> proxyConnectorConfigurations =
+ getArchivaConfiguration().getConfiguration().getProxyConnectors();
+
+ connectorList = proxyConnectorConfigurations.stream()
+ .map(this::buildProxyConnector)
+ .filter(Optional::isPresent)
+ .map(Optional::get)
+ .sorted(comparator).collect(Collectors.toList());
+ connectorMap = connectorList.stream().collect(Collectors.groupingBy(a -> a.getSourceRepository().getId()));
+ for (RepositoryProxyHandler handler : repositoryProxyHandlers) {
+ handler.setProxyConnectors( connectorList );
+ }
+ }
+
+
+ private Map<Policy, PolicyOption> getPolicyMap(ProxyConnectorConfiguration configuration) {
+ Map<String, String> policyConfig = configuration.getPolicies( );
+ return policies.stream().collect( Collectors.toMap( Function.identity(), p -> PolicyUtil.findOption( policyConfig.get(p.getId()), p ) ) );
+ }
+
+ private Optional<ProxyConnector> buildProxyConnector(ProxyConnectorConfiguration configuration) {
+ ProxyConnector proxyConnector = new ProxyConnector();
+ proxyConnector.setOrder(configuration.getOrder());
+ proxyConnector.setBlacklist(configuration.getBlackListPatterns());
+ proxyConnector.setWhitelist(configuration.getWhiteListPatterns());
+ if (configuration.isDisabled()) {
+ proxyConnector.disable();
+ } else {
+ proxyConnector.enable();
+ }
+ proxyConnector.setPolicies(getPolicyMap( configuration ));
+ proxyConnector.setProperties(configuration.getProperties());
+ proxyConnector.setProxyId(configuration.getProxyId());
+ ManagedRepository srcRepo = repositoryRegistry.getManagedRepository(configuration.getSourceRepoId());
+ if (srcRepo==null) {
+ return Optional.empty();
+ }
+ proxyConnector.setSourceRepository(srcRepo);
+ RemoteRepository targetRepo = repositoryRegistry.getRemoteRepository(configuration.getTargetRepoId());
+ if (targetRepo==null) {
+ return Optional.empty();
+ }
+ proxyConnector.setTargetRepository(targetRepo);
+ return Optional.of(proxyConnector);
+ }
+
+ @Override
+ public NetworkProxy getNetworkProxy(String id) {
+ return this.networkProxyMap.get(id);
+ }
+
+ @Override
+ public Map<RepositoryType, List<RepositoryProxyHandler>> getAllHandler() {
+ return this.handlerMap;
+ }
+
+ @Override
+ public List<RepositoryProxyHandler> getHandler(RepositoryType type) {
+ if (this.handlerMap.containsKey(type)) {
+ return this.handlerMap.get(type);
+ } else {
+ return new ArrayList<>();
+ }
+ }
+
+ @Override
+ public boolean hasHandler(RepositoryType type) {
+ return this.handlerMap.containsKey(type);
+ }
+
+
+ @Override
+ public List<ProxyConnector> getProxyConnectors() {
+ return connectorList;
+
+ }
+
+ @Override
+ public Map<String, List<ProxyConnector>> getProxyConnectorAsMap() {
+ return connectorMap;
+ }
+
+ @Override
+ public void reload( )
+ {
+ init();
+ }
+
+ @Override
+ public void handle(RepositoryRegistryEvent event) {
+ log.debug("Reload happened, updating proxy list");
+ if (event.getType()== RepositoryRegistryEvent.RELOADED) {
+ init();
+ }
+ }
+}
--- /dev/null
+package org.apache.archiva.proxy.base;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.checksum.ChecksumAlgorithm;
+import org.apache.archiva.checksum.ChecksumUtil;
+import org.apache.archiva.common.filelock.FileLockManager;
+import org.apache.archiva.common.utils.PathUtil;
+import org.apache.archiva.components.taskqueue.TaskQueueException;
+import org.apache.archiva.configuration.ArchivaConfiguration;
+import org.apache.archiva.configuration.ProxyConnectorConfiguration;
+import org.apache.archiva.configuration.ProxyConnectorRuleConfiguration;
+import org.apache.archiva.policies.DownloadErrorPolicy;
+import org.apache.archiva.policies.DownloadPolicy;
+import org.apache.archiva.policies.Policy;
+import org.apache.archiva.policies.PolicyConfigurationException;
+import org.apache.archiva.policies.PolicyOption;
+import org.apache.archiva.policies.PolicyViolationException;
+import org.apache.archiva.policies.PostDownloadPolicy;
+import org.apache.archiva.policies.PreDownloadPolicy;
+import org.apache.archiva.policies.ProxyDownloadException;
+import org.apache.archiva.policies.urlcache.UrlFailureCache;
+import org.apache.archiva.proxy.model.NetworkProxy;
+import org.apache.archiva.proxy.model.ProxyConnector;
+import org.apache.archiva.proxy.model.ProxyFetchResult;
+import org.apache.archiva.proxy.model.RepositoryProxyHandler;
+import org.apache.archiva.repository.ManagedRepository;
+import org.apache.archiva.repository.RemoteRepository;
+import org.apache.archiva.repository.RemoteRepositoryContent;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.archiva.repository.content.Artifact;
+import org.apache.archiva.repository.content.ContentItem;
+import org.apache.archiva.repository.content.ItemSelector;
+import org.apache.archiva.repository.metadata.RepositoryMetadataException;
+import org.apache.archiva.repository.metadata.base.MetadataTools;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.storage.fs.FilesystemStorage;
+import org.apache.archiva.repository.storage.fs.FsStorageUtil;
+import org.apache.archiva.scheduler.ArchivaTaskScheduler;
+import org.apache.archiva.scheduler.repository.model.RepositoryTask;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MarkerFactory;
+
+import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+import javax.inject.Named;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+public abstract class DefaultRepositoryProxyHandler implements RepositoryProxyHandler {
+
+ protected Logger log = LoggerFactory.getLogger( DefaultRepositoryProxyHandler.class );
+ @Inject
+ protected UrlFailureCache urlFailureCache;
+
+ @Inject
+ @Named(value = "metadataTools#default")
+ private MetadataTools metadataTools;
+
+ private Map<String, PreDownloadPolicy> preDownloadPolicies = new HashMap<>( );
+ private Map<String, PostDownloadPolicy> postDownloadPolicies = new HashMap<>( );
+ private Map<String, DownloadErrorPolicy> downloadErrorPolicies = new HashMap<>( );
+ private ConcurrentMap<String, List<ProxyConnector>> proxyConnectorMap = new ConcurrentHashMap<>();
+
+ @Inject
+ @Named(value = "archivaTaskScheduler#repository")
+ private ArchivaTaskScheduler<RepositoryTask> scheduler;
+
+ @Inject
+ private ArchivaConfiguration archivaConfiguration;
+
+ @Inject
+ @Named(value = "fileLockManager#default")
+ private FileLockManager fileLockManager;
+
+ private Map<String, NetworkProxy> networkProxyMap = new ConcurrentHashMap<>();
+ private List<ChecksumAlgorithm> checksumAlgorithms;
+
    /**
     * Resolves the checksum algorithms configured for the Archiva runtime; these are used
     * when transferring and validating files fetched from remote repositories.
     */
    @PostConstruct
    public void initialize()
    {
        checksumAlgorithms = ChecksumUtil.getAlgorithms(archivaConfiguration.getConfiguration().getArchivaRuntimeConfiguration().getChecksumTypes());
    }
+
+ private List<ProxyConnectorRuleConfiguration> findProxyConnectorRules(String sourceRepository,
+ String targetRepository,
+ List<ProxyConnectorRuleConfiguration> all )
+ {
+ List<ProxyConnectorRuleConfiguration> proxyConnectorRuleConfigurations = new ArrayList<>();
+
+ for ( ProxyConnectorRuleConfiguration proxyConnectorRuleConfiguration : all )
+ {
+ for ( ProxyConnectorConfiguration proxyConnector : proxyConnectorRuleConfiguration.getProxyConnectors() )
+ {
+ if ( StringUtils.equals( sourceRepository, proxyConnector.getSourceRepoId() ) && StringUtils.equals(
+ targetRepository, proxyConnector.getTargetRepoId() ) )
+ {
+ proxyConnectorRuleConfigurations.add( proxyConnectorRuleConfiguration );
+ }
+ }
+ }
+
+ return proxyConnectorRuleConfigurations;
+ }
+
    /**
     * Tries to download the given artifact from all remote repositories that are connected to
     * the managed repository through enabled proxy connectors. Connectors are processed in
     * order; the first successful transfer wins.
     *
     * @param repository the managed repository that should receive the artifact
     * @param artifact the artifact to download
     * @return the asset of the transferred file, or {@code null} if no remote repository
     *         provided the artifact
     * @throws ProxyDownloadException if transfer errors occurred and the configured download
     *         error policies decided to propagate them
     */
    @Override
    public StorageAsset fetchFromProxies( ManagedRepository repository, Artifact artifact )
        throws ProxyDownloadException
    {
        // Errors per remote repository id; the error policies (applied in validatePolicies)
        // decide whether an error is queued here, thrown immediately, or discarded.
        Map<String, Exception> previousExceptions = new LinkedHashMap<>();
        StorageAsset localFile = artifact.getAsset( );

        // Request metadata handed to the download policies.
        Properties requestProperties = new Properties();
        requestProperties.setProperty( "filetype", "artifact" );
        requestProperties.setProperty( "version", artifact.getVersion().getId() );
        requestProperties.setProperty( "managedRepositoryId", repository.getId() );

        List<ProxyConnector> connectors = getProxyConnectors( repository );
        for ( ProxyConnector connector : connectors )
        {
            if ( !connector.isEnabled() )
            {
                continue;
            }

            RemoteRepository targetRepository = connector.getTargetRepository();
            requestProperties.setProperty( "remoteRepositoryId", targetRepository.getId() );

            StorageAsset targetFile = targetRepository.getAsset( localFile.getPath( ) );
            // Removing the leading '/' from the path
            String targetPath = targetFile.getPath( ).substring( 1 );
            try
            {
                StorageAsset downloadedFile =
                    transferFile( connector, targetRepository, targetPath, repository, localFile, requestProperties,
                                  true );

                if ( fileExists(downloadedFile) )
                {
                    log.debug( "Successfully transferred: {}", downloadedFile.getPath() );
                    return downloadedFile;
                }
            }
            catch ( NotFoundException e )
            {
                // Not an error: try the next connector.
                log.debug( "Artifact {} not found on repository \"{}\".", artifact.getId(),
                           targetRepository.getId() );
            }
            catch ( NotModifiedException e )
            {
                // Local copy is up to date: try the next connector.
                log.debug( "Artifact {} not updated on repository \"{}\".", artifact.getId(),
                           targetRepository.getId() );
            }
            catch ( ProxyException e )
            {
                // May throw ProxyDownloadException right away, or queue the error into
                // previousExceptions, depending on the configured error policies.
                validatePolicies( this.downloadErrorPolicies, connector.getPolicies(), requestProperties, artifact,
                                  targetRepository.getContent(), localFile, e, previousExceptions );
            }
        }

        if ( !previousExceptions.isEmpty() )
        {
            throw new ProxyDownloadException( "Failures occurred downloading from some remote repositories",
                                              previousExceptions );
        }

        log.debug( "Exhausted all target repositories, artifact {} not found.", artifact.getId() );

        return null;
    }
+
    /**
     * Tries to download the artifact described by the given selector from all remote
     * repositories that are connected to the managed repository through enabled proxy
     * connectors. Connectors are processed in order; the first successful transfer wins.
     *
     * @param repository the managed repository that should receive the artifact
     * @param artifactSelector selector describing the artifact to download
     * @return the asset of the transferred file, or {@code null} if no remote repository
     *         provided the artifact
     * @throws ProxyDownloadException if transfer errors occurred and the configured download
     *         error policies decided to propagate them
     */
    @Override
    public StorageAsset fetchFromProxies( ManagedRepository repository, ItemSelector artifactSelector )
        throws ProxyDownloadException
    {
        // Errors per remote repository id; the error policies (applied in validatePolicies)
        // decide whether an error is queued here, thrown immediately, or discarded.
        Map<String, Exception> previousExceptions = new LinkedHashMap<>();
        // Resolve the selector to a concrete content item of the managed repository.
        ContentItem item = repository.getContent( ).getItem( artifactSelector );
        StorageAsset localFile = item.getAsset( );

        // Request metadata handed to the download policies.
        Properties requestProperties = new Properties();
        requestProperties.setProperty( "filetype", "artifact" );
        requestProperties.setProperty( "version", artifactSelector.getVersion() );
        requestProperties.setProperty( "managedRepositoryId", repository.getId() );

        List<ProxyConnector> connectors = getProxyConnectors( repository );
        for ( ProxyConnector connector : connectors )
        {
            if ( !connector.isEnabled() )
            {
                continue;
            }

            RemoteRepository targetRepository = connector.getTargetRepository();
            requestProperties.setProperty( "remoteRepositoryId", targetRepository.getId() );

            StorageAsset targetFile = targetRepository.getAsset( localFile.getPath( ) );
            // Removing the leading '/' from the path
            String targetPath = targetFile.getPath( ).substring( 1 );
            try
            {
                StorageAsset downloadedFile =
                    transferFile( connector, targetRepository, targetPath, repository, localFile, requestProperties,
                                  true );

                if ( fileExists(downloadedFile) )
                {
                    log.debug( "Successfully transferred: {}", downloadedFile.getPath() );
                    return downloadedFile;
                }
            }
            catch ( NotFoundException e )
            {
                // Not an error: try the next connector.
                log.debug( "Artifact {} not found on repository \"{}\".", item,
                           targetRepository.getId() );
            }
            catch ( NotModifiedException e )
            {
                // Local copy is up to date: try the next connector.
                log.debug( "Artifact {} not updated on repository \"{}\".", item,
                           targetRepository.getId() );
            }
            catch ( ProxyException e )
            {
                // May throw ProxyDownloadException right away, or queue the error into
                // previousExceptions, depending on the configured error policies.
                validatePolicies( this.downloadErrorPolicies, connector.getPolicies(), requestProperties, item,
                                  targetRepository.getContent(), localFile, e, previousExceptions );
            }
        }

        if ( !previousExceptions.isEmpty() )
        {
            throw new ProxyDownloadException( "Failures occurred downloading from some remote repositories",
                                              previousExceptions );
        }

        log.debug( "Exhausted all target repositories, artifact {} not found.", item );

        return null;
    }
+
+ /**
+ * Tries to download the plain resource at {@code path} from the remote repositories
+ * configured for the given managed repository. Connectors are tried in order and the
+ * first successful transfer wins; failures are logged and the next connector is tried.
+ *
+ * @param repository the managed repository the resource belongs to
+ * @param path the repository-relative path of the resource
+ * @return the downloaded local asset, or null if the file already exists locally
+ *         or no remote repository provided it
+ */
+ @Override
+ public StorageAsset fetchFromProxies( ManagedRepository repository, String path )
+ {
+ StorageAsset localFile = repository.getAsset( path );
+
+ // no update policies for these paths
+ if ( localFile.exists() )
+ {
+ return null;
+ }
+
+ Properties requestProperties = new Properties();
+ requestProperties.setProperty( "filetype", "resource" );
+ requestProperties.setProperty( "managedRepositoryId", repository.getId() );
+
+ // Ask each enabled connector in turn; the first successful download is returned.
+ List<ProxyConnector> connectors = getProxyConnectors( repository );
+ for ( ProxyConnector connector : connectors )
+ {
+ if ( !connector.isEnabled() )
+ {
+ continue;
+ }
+
+ RemoteRepository targetRepository = connector.getTargetRepository();
+ requestProperties.setProperty( "remoteRepositoryId", targetRepository.getId() );
+
+ String targetPath = path;
+
+ try
+ {
+ StorageAsset downloadedFile =
+ transferFile( connector, targetRepository, targetPath, repository, localFile, requestProperties,
+ false );
+
+ if ( fileExists( downloadedFile ) )
+ {
+ log.debug( "Successfully transferred: {}", downloadedFile.getPath() );
+ return downloadedFile;
+ }
+ }
+ catch ( NotFoundException e )
+ {
+ log.debug( "Resource {} not found on repository \"{}\".", path,
+ targetRepository.getId() );
+ }
+ catch ( NotModifiedException e )
+ {
+ log.debug( "Resource {} not updated on repository \"{}\".", path,
+ targetRepository.getId() );
+ }
+ catch ( ProxyException e )
+ {
+ // Transfer errors are non-fatal here: log and fall through to the next connector.
+ log.warn(
+ "Transfer error from repository {} for resource {}, continuing to next repository. Error message: {}",
+ targetRepository.getId(), path, e.getMessage() );
+ log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ),
+ "Transfer error from repository \"{}"
+ + "\" for resource {}, continuing to next repository. Error message: {}",
+ targetRepository.getId(), path, e.getMessage(), e );
+ }
+
+ }
+
+ log.debug( "Exhausted all target repositories, resource {} not found.", path );
+
+ return null;
+ }
+
+ /**
+ * Fetches repository metadata for {@code rawLogicalPath} from every enabled proxy
+ * connector of the repository (unlike resource fetching, all connectors are consulted,
+ * not just the first hit), then regenerates the merged local metadata if any remote
+ * copy was newer or the local file is missing.
+ *
+ * @param repository the managed repository
+ * @param rawLogicalPath logical metadata path; a leading '/' is stripped
+ * @return a result holding the local metadata file (or null if absent) and whether it was updated
+ */
+ @Override
+ public ProxyFetchResult fetchMetadataFromProxies( ManagedRepository repository, String rawLogicalPath )
+ {
+ String logicalPath;
+ if (rawLogicalPath.startsWith( "/" )){
+ logicalPath = rawLogicalPath.substring( 1 );
+ } else {
+ logicalPath = rawLogicalPath;
+ }
+ StorageAsset localFile = repository.getAsset( logicalPath );
+
+ Properties requestProperties = new Properties();
+ requestProperties.setProperty( "filetype", "metadata" );
+ boolean metadataNeedsUpdating = false;
+ // Remember the local timestamp so we can detect whether any transfer changed it.
+ long originalTimestamp = getLastModified( localFile );
+
+ List<ProxyConnector> connectors = new ArrayList<>( getProxyConnectors( repository ) );
+ for ( ProxyConnector connector : connectors )
+ {
+ if ( !connector.isEnabled() )
+ {
+ continue;
+ }
+
+ RemoteRepository targetRepository = connector.getTargetRepository();
+
+ // Each remote's metadata is stored under a repository-specific local name.
+ StorageAsset localRepoFile = toLocalRepoFile( repository, targetRepository.getContent(), logicalPath );
+ long originalMetadataTimestamp = getLastModified( localRepoFile );
+
+ try
+ {
+ transferFile( connector, targetRepository, logicalPath, repository, localRepoFile, requestProperties,
+ true );
+
+ if ( hasBeenUpdated( localRepoFile, originalMetadataTimestamp ) )
+ {
+ metadataNeedsUpdating = true;
+ }
+ }
+ catch ( NotFoundException e )
+ {
+
+ log.debug( "Metadata {} not found on remote repository '{}'.", logicalPath,
+ targetRepository.getId(), e );
+
+ }
+ catch ( NotModifiedException e )
+ {
+
+ log.debug( "Metadata {} not updated on remote repository '{}'.", logicalPath,
+ targetRepository.getId(), e );
+
+ }
+ catch ( ProxyException e )
+ {
+ log.warn(
+ "Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
+ targetRepository.getId(), logicalPath, e.getMessage() );
+ log.debug( "Full stack trace", e );
+ }
+ }
+
+ if ( hasBeenUpdated( localFile, originalTimestamp ) )
+ {
+ metadataNeedsUpdating = true;
+ }
+
+ // Regenerate the merged metadata when any remote copy changed or nothing exists locally.
+ if ( metadataNeedsUpdating || !localFile.exists())
+ {
+ try
+ {
+ metadataTools.updateMetadata( repository.getContent(), logicalPath );
+ }
+ catch ( RepositoryMetadataException e )
+ {
+ log.warn( "Unable to update metadata {}:{}", localFile.getPath(), e.getMessage(), e );
+ }
+
+ }
+
+ if ( fileExists( localFile ) )
+ {
+ return new ProxyFetchResult( localFile, metadataNeedsUpdating );
+ }
+
+ return new ProxyFetchResult( null, false );
+ }
+
+ /**
+ * Returns the last-modified time of the asset in epoch milliseconds,
+ * or 0 when the asset is missing or is a container (directory-like asset).
+ */
+ private long getLastModified(StorageAsset file )
+ {
+ boolean plainExistingFile = file.exists() && !file.isContainer();
+ return plainExistingFile ? file.getModificationTime().toEpochMilli() : 0;
+ }
+
+ /**
+ * True when the asset exists, is not a container, and its modification time is
+ * strictly newer than the supplied reference timestamp.
+ */
+ private boolean hasBeenUpdated(StorageAsset file, long originalLastModified )
+ {
+ if ( file.exists() && !file.isContainer() )
+ {
+ return getLastModified( file ) > originalLastModified;
+ }
+ return false;
+ }
+
+ /**
+ * Maps a logical path to the local asset used to cache content coming from a
+ * specific remote repository (repository-specific file name).
+ */
+ private StorageAsset toLocalRepoFile( ManagedRepository repository, RemoteRepositoryContent targetRepository,
+ String targetPath )
+ {
+ return repository.getAsset( metadataTools.getRepositorySpecificName( targetRepository, targetPath ) );
+ }
+
+ /**
+ * Test if the provided ManagedRepository has any proxy connectors configured for it.
+ *
+ * @param repository the managed repository to check
+ * @return true if at least one proxy connector is registered for the repository's id
+ */
+ @Override
+ public boolean hasProxies( ManagedRepository repository )
+ {
+ synchronized ( this.proxyConnectorMap )
+ {
+ return this.proxyConnectorMap.containsKey( repository.getId() );
+ }
+ }
+
+ /**
+ * Simple method to test if the file exists on the local disk.
+ *
+ * @param file the file to test. (may be null)
+ * @return true if file exists. false if the file param is null, doesn't exist, or is not of type File.
+ */
+ private boolean fileExists( StorageAsset file )
+ {
+ return file != null && file.exists() && !file.isContainer();
+ }
+
+ /**
+ * Perform the transfer of the file.
+ *
+ * @param connector the connector configuration to use.
+ * @param remoteRepository the remote repository get the resource from.
+ * @param remotePath the path in the remote repository to the resource to get.
+ * @param repository the managed repository that will hold the file
+ * @param resource the path relative to the repository storage where the file should be downloaded to
+ * @param requestProperties the request properties to utilize for policy handling.
+ * @param executeConsumers whether to execute the consumers after proxying
+ * @return the local file that was downloaded, or null if not downloaded.
+ * @throws NotFoundException if the file was not found on the remote repository.
+ * @throws NotModifiedException if the localFile was present, and the resource was present on remote repository, but
+ * the remote resource is not newer than the local File.
+ * @throws ProxyException if transfer was unsuccessful.
+ */
+ protected StorageAsset transferFile( ProxyConnector connector, RemoteRepository remoteRepository, String remotePath,
+ ManagedRepository repository, StorageAsset resource, Properties requestProperties,
+ boolean executeConsumers )
+ throws ProxyException, NotModifiedException
+ {
+ // Build the full remote URL, normalizing the '/' between base location and path.
+ String url = null;
+ try
+ {
+ url = remoteRepository.getLocation().toURL().toString();
+ }
+ catch ( MalformedURLException e )
+ {
+ throw new ProxyException( e.getMessage(), e );
+ }
+ if ( !url.endsWith( "/" ) )
+ {
+ url = url + "/";
+ }
+ if (remotePath.startsWith( "/" )) {
+ url = url + remotePath.substring( 1 );
+ } else {
+ url = url + remotePath;
+ }
+ requestProperties.setProperty( "url", url );
+
+ // Is a whitelist defined?
+ if ( CollectionUtils.isNotEmpty( connector.getWhitelist() ) )
+ {
+ // Path must belong to whitelist.
+ if ( !matchesPattern( remotePath, connector.getWhitelist() ) )
+ {
+ log.debug( "Path [{}] is not part of defined whitelist (skipping transfer from repository [{}]).",
+ remotePath, remoteRepository.getId() );
+ return null;
+ }
+ }
+
+ // Is target path part of blacklist?
+ if ( matchesPattern( remotePath, connector.getBlacklist() ) )
+ {
+ log.debug( "Path [{}] is part of blacklist (skipping transfer from repository [{}]).", remotePath,
+ remoteRepository.getId() );
+ return null;
+ }
+
+ // Handle pre-download policy
+ try
+ {
+ validatePolicies( this.preDownloadPolicies, connector.getPolicies(), requestProperties, resource );
+ }
+ catch ( PolicyViolationException e )
+ {
+ String emsg = "Transfer not attempted on " + url + " : " + e.getMessage();
+ if ( resource.exists() )
+ {
+ log.debug( "{} : using already present local file.", emsg );
+ return resource;
+ }
+
+ log.debug( emsg );
+ return null;
+ }
+
+ // Download into a temporary working directory first, so a failed or invalidated
+ // transfer never leaves a partial file in the repository.
+ Path workingDirectory = createWorkingDirectory( repository );
+ FilesystemStorage tmpStorage = null;
+ try
+ {
+ tmpStorage = new FilesystemStorage( workingDirectory, fileLockManager );
+ }
+ catch ( IOException e )
+ {
+ // Preserve the cause so callers can diagnose why the storage could not be created.
+ throw new ProxyException( "Could not create tmp storage", e );
+ }
+ StorageAsset tmpResource = tmpStorage.getAsset( resource.getName( ) );
+ StorageAsset[] tmpChecksumFiles = new StorageAsset[checksumAlgorithms.size()];
+ for(int i=0; i<checksumAlgorithms.size(); i++) {
+ ChecksumAlgorithm alg = checksumAlgorithms.get( i );
+ tmpChecksumFiles[i] = tmpStorage.getAsset( resource.getName() + "." + alg.getDefaultExtension() );
+ }
+
+ try
+ {
+
+ transferResources( connector, remoteRepository, tmpResource,tmpChecksumFiles , url, remotePath,
+ resource, workingDirectory, repository );
+
+ // Handle post-download policies.
+ try
+ {
+ validatePolicies( this.postDownloadPolicies, connector.getPolicies(), requestProperties, tmpResource );
+ }
+ catch ( PolicyViolationException e )
+ {
+ log.warn( "Transfer invalidated from {} : {}", url, e.getMessage() );
+ executeConsumers = false;
+ if ( !fileExists( tmpResource ) )
+ {
+ resource = null;
+ }
+ }
+
+ if ( resource != null )
+ {
+ // Serialize moves per target path to avoid concurrent writers clobbering each other.
+ synchronized ( resource.getPath().intern() )
+ {
+ StorageAsset directory = resource.getParent();
+ for (int i=0; i<tmpChecksumFiles.length; i++) {
+ moveFileIfExists( tmpChecksumFiles[i], directory );
+ }
+ moveFileIfExists( tmpResource, directory );
+ }
+ }
+ }
+ finally
+ {
+ org.apache.archiva.common.utils.FileUtils.deleteQuietly( workingDirectory );
+ }
+
+ if ( executeConsumers )
+ {
+ // Just-in-time update of the index and database by executing the consumers for this artifact
+ //consumers.executeConsumers( connector.getSourceRepository().getRepository(), resource );
+ queueRepositoryTask( connector.getSourceRepository().getId(), resource );
+ }
+
+ return resource;
+ }
+
+ /**
+ * Performs the protocol-specific download of the resource and its checksum files
+ * into the temporary working directory. Implemented by subclasses per repository type.
+ *
+ * @throws ProxyException if the transfer failed.
+ */
+ protected abstract void transferResources( ProxyConnector connector, RemoteRepository remoteRepository,
+ StorageAsset tmpResource, StorageAsset[] checksumFiles, String url, String remotePath, StorageAsset resource, Path workingDirectory,
+ ManagedRepository repository ) throws ProxyException;
+
+ /**
+ * Queues a repository scan task for the downloaded file so consumers (index,
+ * database) are updated just-in-time. Queueing failures are logged, not rethrown.
+ *
+ * @param repositoryId the id of the repository the file was downloaded into
+ * @param localFile the downloaded asset to process
+ */
+ private void queueRepositoryTask(String repositoryId, StorageAsset localFile )
+ {
+ RepositoryTask task = new RepositoryTask();
+ task.setRepositoryId( repositoryId );
+ task.setResourceFile( localFile );
+ task.setUpdateRelatedArtifacts( true );
+ task.setScanAll( true );
+
+ try
+ {
+ scheduler.queueTask( task );
+ }
+ catch ( TaskQueueException e )
+ {
+ // Pass the exception to the logger so the failure cause is not lost.
+ log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
+ + "'].", localFile.getName(), e );
+ }
+ }
+
+ /**
+ * Moves the file into repository location if it exists
+ *
+ * @param fileToMove this could be either the main artifact, sha1 or md5 checksum file.
+ * @param directory directory to write files to
+ * @throws ProxyException when the temp file cannot be moved to its final location
+ */
+ private void moveFileIfExists( StorageAsset fileToMove, StorageAsset directory )
+ throws ProxyException
+ {
+ if ( fileToMove != null && fileToMove.exists() )
+ {
+ // Target keeps the original file name, placed directly under the given directory.
+ StorageAsset newLocation = directory.getStorage().getAsset( directory.getPath()+ "/" + fileToMove.getName());
+ moveTempToTarget( fileToMove, newLocation );
+ }
+ }
+
+ /**
+ * Apply the policies.
+ *
+ * @param policies the map of policies to execute. (Map of String policy keys, to {@link DownloadPolicy} objects)
+ * @param settings the map of settings for the policies to execute. (Map of String policy keys, to String policy
+ * setting)
+ * @param request the request properties (utilized by the {@link DownloadPolicy#applyPolicy(PolicyOption, Properties, StorageAsset)}
+ * )
+ * @param localFile the local file (utilized by the {@link DownloadPolicy#applyPolicy(PolicyOption, Properties, StorageAsset)})
+ * @throws PolicyViolationException when a policy rejects the download
+ */
+ private void validatePolicies( Map<String, ? extends DownloadPolicy> policies, Map<Policy, PolicyOption> settings,
+ Properties request, StorageAsset localFile )
+ throws PolicyViolationException
+ {
+ for ( DownloadPolicy policy : policies.values() )
+ {
+ String key = policy.getId();
+ // Use the connector-specific setting when present, the policy default otherwise.
+ PolicyOption option = settings.getOrDefault( policy, policy.getDefaultOption() );
+
+ log.debug( "Applying [{}] policy with [{}]", key, option );
+ try
+ {
+ policy.applyPolicy( option, request, localFile );
+ }
+ catch ( PolicyConfigurationException e )
+ {
+ // A misconfigured policy is logged but does not block the transfer.
+ log.error( e.getMessage(), e );
+ }
+ }
+ }
+
+ /**
+ * Applies the download-error policies to a transfer failure. All policies must
+ * approve queueing the exception for later; any single policy can cancel it.
+ * When the exception is neither queued nor cancelled, it is rethrown immediately
+ * as a {@link ProxyDownloadException}.
+ *
+ * @param policies the download-error policies to consult
+ * @param settings the connector-specific policy options
+ * @param request the request properties passed to each policy
+ * @param artifact the artifact whose download failed (for logging)
+ * @param content the remote repository content the failure came from
+ * @param localFile the local target file
+ * @param exception the transfer failure being evaluated
+ * @param previousExceptions accumulator of failures keyed by remote repository id
+ * @throws ProxyDownloadException when the policies decide the error must fail immediately
+ */
+ private void validatePolicies( Map<String, DownloadErrorPolicy> policies, Map<Policy, PolicyOption> settings,
+ Properties request, ContentItem artifact, RemoteRepositoryContent content,
+ StorageAsset localFile, Exception exception, Map<String, Exception> previousExceptions )
+ throws ProxyDownloadException
+ {
+ boolean process = true;
+ for ( Map.Entry<String, ? extends DownloadErrorPolicy> entry : policies.entrySet() )
+ {
+
+ // olamy with spring rolehint is now downloadPolicy#hint
+ // so substring after last # to get the hint as with plexus
+ String key = entry.getValue( ).getId( );
+ DownloadErrorPolicy policy = entry.getValue();
+ PolicyOption option = settings.containsKey( policy ) ? settings.get(policy) : policy.getDefaultOption();
+
+ log.debug( "Applying [{}] policy with [{}]", key, option );
+ try
+ {
+ // all policies must approve the exception, any can cancel
+ process = policy.applyPolicy( option, request, localFile, exception, previousExceptions );
+ if ( !process )
+ {
+ break;
+ }
+ }
+ catch ( PolicyConfigurationException e )
+ {
+ log.error( e.getMessage(), e );
+ }
+ }
+
+ if ( process )
+ {
+ // if the exception was queued, don't throw it
+ if ( !previousExceptions.containsKey( content.getId() ) )
+ {
+ throw new ProxyDownloadException(
+ "An error occurred in downloading from the remote repository, and the policy is to fail immediately",
+ content.getId(), exception );
+ }
+ }
+ else
+ {
+ // if the exception was queued, but cancelled, remove it
+ previousExceptions.remove( content.getId() );
+ }
+
+ log.warn(
+ "Transfer error from repository {} for artifact {} , continuing to next repository. Error message: {}",
+ content.getRepository().getId(), artifact, exception.getMessage() );
+ log.debug( "Full stack trace", exception );
+ }
+
+ /**
+ * Creates a working directory
+ *
+ * NOTE(review): the {@code repository} parameter is currently unused — the temp
+ * directory is created in the system default temp location, not under the
+ * repository. Confirm whether a repository-local temp dir was intended.
+ *
+ * @param repository the managed repository the download is destined for (currently unused)
+ * @return file location of working directory
+ */
+ private Path createWorkingDirectory( ManagedRepository repository )
+ {
+ try
+ {
+ return Files.createTempDirectory( "temp" );
+ }
+ catch ( IOException e )
+ {
+ throw new RuntimeException( e.getMessage(), e );
+ }
+
+ }
+
+ /**
+ * Used to move the temporary file to its real destination. This is patterned from the way WagonManager handles its
+ * downloaded files.
+ *
+ * @param temp The completed download file
+ * @param target The final location of the downloaded file
+ * @throws ProxyException when the temp file cannot replace the target file
+ */
+ private void moveTempToTarget( StorageAsset temp, StorageAsset target )
+ throws ProxyException
+ {
+
+ try
+ {
+ org.apache.archiva.repository.storage.util.StorageUtil.moveAsset( temp, target, true , StandardCopyOption.REPLACE_EXISTING);
+ }
+ catch ( IOException e )
+ {
+ log.error( "Move failed from {} to {}, trying copy.", temp, target );
+ try
+ {
+ FsStorageUtil.copyAsset( temp, target, true );
+ if (temp.exists()) {
+ temp.getStorage( ).removeAsset( temp );
+ }
+ }
+ catch ( IOException ex )
+ {
+ log.error("Copy failed from {} to {}: ({}) {}", temp, target, e.getClass(), e.getMessage());
+ throw new ProxyException("Could not move temp file "+temp.getPath()+" to target "+target.getPath()+": ("+e.getClass()+") "+e.getMessage(), e);
+ }
+ }
+ }
+
+ /**
+ * Tests whitelist and blacklist patterns against path.
+ *
+ * @param path the path to test.
+ * @param patterns the list of patterns to check.
+ * @return true if the path matches at least 1 pattern in the provided patterns list.
+ */
+ private boolean matchesPattern( String path, List<String> patterns )
+ {
+ if ( CollectionUtils.isEmpty( patterns ) )
+ {
+ return false;
+ }
+
+ if ( !path.startsWith( "/" ) )
+ {
+ path = "/" + path;
+ }
+
+ for ( String pattern : patterns )
+ {
+ if ( !pattern.startsWith( "/" ) )
+ {
+ pattern = "/" + pattern;
+ }
+
+ if ( PathUtil.matchPath( pattern, path, false ) )
+ {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * TODO: Ensure that list is correctly ordered based on configuration. See MRM-477
+ * @param repository
+ */
+ @Override
+ public List<ProxyConnector> getProxyConnectors( ManagedRepository repository )
+ {
+
+ if ( !this.proxyConnectorMap.containsKey( repository.getId() ) )
+ {
+ return Collections.emptyList();
+ }
+ List<ProxyConnector> ret = new ArrayList<>( this.proxyConnectorMap.get( repository.getId() ) );
+
+ Collections.sort( ret, ProxyConnectorOrderComparator.getInstance() );
+ return ret;
+
+ }
+
+
+ /**
+ * Appends the remote repository's extra parameters to the path as a query string:
+ * '?' before the first parameter, '&' before each subsequent one.
+ *
+ * Fixes the original loop, which never set the separator flag and therefore
+ * prefixed every parameter with '?' (producing an invalid query string for
+ * more than one parameter).
+ *
+ * @param path the request path (may be null, treated as empty)
+ * @param remoteRepository the repository whose extra parameters are appended
+ * @return the path with the query string appended, or the path unchanged if there are no parameters
+ */
+ protected String addParameters(String path, RemoteRepository remoteRepository )
+ {
+ if ( remoteRepository.getExtraParameters().isEmpty() )
+ {
+ return path;
+ }
+
+ boolean question = false;
+
+ StringBuilder res = new StringBuilder( path == null ? "" : path );
+
+ for ( Map.Entry<String, String> entry : remoteRepository.getExtraParameters().entrySet() )
+ {
+ res.append( question ? '&' : '?' ).append( entry.getKey() ).append( '=' ).append( entry.getValue() );
+ question = true;
+ }
+
+ return res.toString();
+ }
+
+ /** Sets the {@link ArchivaConfiguration} instance used by this handler. */
+ public void setArchivaConfiguration(ArchivaConfiguration archivaConfiguration )
+ {
+ this.archivaConfiguration = archivaConfiguration;
+ }
+
+ /** @return the {@link MetadataTools} instance used for metadata path handling and updates. */
+ public MetadataTools getMetadataTools()
+ {
+ return metadataTools;
+ }
+
+ /** Sets the {@link MetadataTools} instance used for metadata path handling and updates. */
+ public void setMetadataTools(MetadataTools metadataTools )
+ {
+ this.metadataTools = metadataTools;
+ }
+
+ /** @return the cache of failed download URLs. */
+ public UrlFailureCache getUrlFailureCache()
+ {
+ return urlFailureCache;
+ }
+
+ /** Sets the cache of failed download URLs. */
+ public void setUrlFailureCache(UrlFailureCache urlFailureCache )
+ {
+ this.urlFailureCache = urlFailureCache;
+ }
+
+ /** @return the pre-download policies, keyed by policy id. */
+ public Map<String, PreDownloadPolicy> getPreDownloadPolicies()
+ {
+ return preDownloadPolicies;
+ }
+
+ /** Replaces the pre-download policy map (keyed by policy id). */
+ public void setPreDownloadPolicies(Map<String, PreDownloadPolicy> preDownloadPolicies )
+ {
+ this.preDownloadPolicies = preDownloadPolicies;
+ }
+
+ /** @return the post-download policies, keyed by policy id. */
+ public Map<String, PostDownloadPolicy> getPostDownloadPolicies()
+ {
+ return postDownloadPolicies;
+ }
+
+ /** Replaces the post-download policy map (keyed by policy id). */
+ public void setPostDownloadPolicies(Map<String, PostDownloadPolicy> postDownloadPolicies )
+ {
+ this.postDownloadPolicies = postDownloadPolicies;
+ }
+
+ /** @return the download-error policies, keyed by policy id. */
+ public Map<String, DownloadErrorPolicy> getDownloadErrorPolicies()
+ {
+ return downloadErrorPolicies;
+ }
+
+ /** Replaces the download-error policy map (keyed by policy id). */
+ public void setDownloadErrorPolicies(Map<String, DownloadErrorPolicy> downloadErrorPolicies )
+ {
+ this.downloadErrorPolicies = downloadErrorPolicies;
+ }
+
+ /** Replaces all registered network proxies with the given map (clear, then copy all entries). */
+ @Override
+ public void setNetworkProxies(Map<String, NetworkProxy> networkProxies ) {
+ this.networkProxyMap.clear();
+ this.networkProxyMap.putAll( networkProxies );
+ }
+
+ /** @return the network proxy registered under the given id, or null if none. */
+ @Override
+ public NetworkProxy getNetworkProxy(String id) {
+ return this.networkProxyMap.get(id);
+ }
+
+ /**
+ * @return the map of registered network proxies, keyed by id.
+ * NOTE(review): this returns the internal mutable map, not a copy — callers can
+ * modify this handler's state through it. Confirm whether that is intended.
+ */
+ @Override
+ public Map<String, NetworkProxy> getNetworkProxies() {
+ return this.networkProxyMap;
+ }
+
+ /** @return the repository types supported by the concrete proxy handler implementation. */
+ @Override
+ public abstract List<RepositoryType> supports();
+
+ /**
+ * Replaces all registered policies: clears the pre-download, post-download and
+ * download-error maps, then re-registers every policy from the given list.
+ */
+ @Override
+ public void setPolicies( List<Policy> policyList )
+ {
+ preDownloadPolicies.clear();
+ postDownloadPolicies.clear();
+ downloadErrorPolicies.clear();
+ policyList.forEach( this::addPolicy );
+ }
+
+ /** Registers a pre-download policy under its id. */
+ void addPolicy(PreDownloadPolicy policy) {
+ preDownloadPolicies.put( policy.getId( ), policy );
+ }
+
+ /** Registers a post-download policy under its id. */
+ void addPolicy(PostDownloadPolicy policy) {
+ postDownloadPolicies.put( policy.getId( ), policy );
+ }
+ /** Registers a download-error policy under its id. */
+ void addPolicy(DownloadErrorPolicy policy) {
+ downloadErrorPolicies.put( policy.getId( ), policy );
+ }
+
+ /**
+ * Dispatches a generic policy to the matching typed registration method.
+ * Policies of an unknown subtype are logged and ignored.
+ */
+ @Override
+ public void addPolicy( Policy policy )
+ {
+ if (policy instanceof PreDownloadPolicy) {
+ addPolicy( (PreDownloadPolicy)policy );
+ } else if (policy instanceof PostDownloadPolicy) {
+ addPolicy( (PostDownloadPolicy) policy );
+ } else if (policy instanceof DownloadErrorPolicy) {
+ addPolicy( (DownloadErrorPolicy) policy );
+ } else {
+ log.warn( "Policy not known: {}, {}", policy.getId( ), policy.getClass( ).getName( ) );
+ }
+ }
+
+ /**
+ * Removes the policy (by id) from whichever policy map contains it, checking the
+ * pre-download, post-download and download-error maps in that order.
+ */
+ @Override
+ public void removePolicy( Policy policy )
+ {
+ final String id = policy.getId();
+ // Map.remove returns null when the key was absent, so each map is tried in turn.
+ if ( preDownloadPolicies.remove( id ) == null
+ && postDownloadPolicies.remove( id ) == null )
+ {
+ downloadErrorPolicies.remove( id );
+ }
+ }
+
+ /**
+ * Registers a proxy connector under its source repository id, creating the
+ * connector list for that repository on first use.
+ */
+ @Override
+ public void addProxyConnector( ProxyConnector connector )
+ {
+ final String sourceId = connector.getSourceRepository( ).getId( );
+ // computeIfAbsent replaces the manual containsKey/get/put sequence.
+ proxyConnectorMap.computeIfAbsent( sourceId, key -> new ArrayList<>( ) ).add( connector );
+ }
+
+ /**
+ * Replaces all registered proxy connectors: clears the map, then re-registers
+ * each connector from the given list under its source repository id.
+ */
+ @Override
+ public void setProxyConnectors( List<ProxyConnector> proxyConnectors )
+ {
+ proxyConnectorMap.clear();
+ proxyConnectors.forEach( this::addProxyConnector );
+ }
+}
--- /dev/null
+package org.apache.archiva.proxy.base;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * NotFoundException - thrown when the resource requested was not found on the remote repository.
+ *
+ *
+ */
+ public class NotFoundException
+     extends ProxyException
+ {
+     /** Creates the exception with a message and the underlying cause. */
+     public NotFoundException( String message, Throwable t )
+     {
+         super( message, t );
+     }
+
+     /** Creates the exception with a message only. */
+     public NotFoundException( String message )
+     {
+         super( message );
+     }
+ }
--- /dev/null
+package org.apache.archiva.proxy.base;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * <p>
+ * NotModifiedException - thrown when the resource requested was found on the remote repository, but
+ * the remote repository reported that the copy we have in our managed repository is newer than
+ * the one present on the remote repository.
+ * </p>
+ * <p>
+ * Similar in scope to the <code>HTTP 304 Not Modified</code> response code.
+ * </p>
+ *
+ *
+ */
+ public class NotModifiedException
+     extends ProxyException
+ {
+
+     /** Creates the exception with a message only. */
+     public NotModifiedException( String message )
+     {
+         super( message );
+     }
+
+     /** Creates the exception with a message and the underlying cause. */
+     public NotModifiedException( String message, Throwable t )
+     {
+         super( message, t );
+     }
+ }
--- /dev/null
+package org.apache.archiva.proxy.base;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.proxy.model.ProxyConnector;
+
+import java.util.Comparator;
+
+/**
+ * ProxyConnectorOrderComparator
+ *
+ *
+ */
+ /**
+  * Orders {@link ProxyConnector}s by their configured order value. Null connectors
+  * and connectors with order 0 (unordered) sort to the end of the list.
+  */
+ public class ProxyConnectorOrderComparator
+     implements Comparator<ProxyConnector>
+ {
+     // final: the singleton is stateless and must never be reassigned.
+     private static final ProxyConnectorOrderComparator INSTANCE = new ProxyConnectorOrderComparator();
+
+     /** @return the shared, thread-safe comparator instance. */
+     public static ProxyConnectorOrderComparator getInstance()
+     {
+         return INSTANCE;
+     }
+
+     @Override
+     public int compare( ProxyConnector o1, ProxyConnector o2 )
+     {
+         if ( o1 == null && o2 == null )
+         {
+             return 0;
+         }
+
+         // Ensure null goes to end of list.
+         if ( o1 == null )
+         {
+             return 1;
+         }
+
+         if ( o2 == null )
+         {
+             return -1;
+         }
+
+         // Ensure 0 (unordered) goes to end of list.
+         if ( o1.getOrder() == 0 && o2.getOrder() != 0 )
+         {
+             return 1;
+         }
+
+         if ( o1.getOrder() != 0 && o2.getOrder() == 0 )
+         {
+             return -1;
+         }
+
+         // Integer.compare avoids the overflow risk of subtracting the order values.
+         return Integer.compare( o1.getOrder(), o2.getOrder() );
+     }
+ }
--- /dev/null
+package org.apache.archiva.proxy.base;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ */
+ /**
+  * Base exception for failures while proxying content from a remote repository.
+  */
+ public class ProxyException
+     extends Exception
+ {
+     /** Creates the exception with a message only. */
+     public ProxyException( String message )
+     {
+         super( message );
+     }
+
+     /** Creates the exception with a message and the underlying cause. */
+     public ProxyException( String message, Throwable t )
+     {
+         super( message, t );
+     }
+ }
default-lazy-init="true">
<context:annotation-config/>
- <context:component-scan base-package="org.apache.archiva.proxy"/>
+ <context:component-scan base-package="org.apache.archiva.proxy.base"/>
</beans>
\ No newline at end of file
import org.apache.archiva.maven.common.proxy.WagonFactory;
import org.apache.archiva.maven.common.proxy.WagonFactoryException;
import org.apache.archiva.maven.common.proxy.WagonFactoryRequest;
-import org.apache.archiva.proxy.DefaultRepositoryProxyHandler;
-import org.apache.archiva.proxy.NotFoundException;
-import org.apache.archiva.proxy.NotModifiedException;
-import org.apache.archiva.proxy.ProxyException;
+import org.apache.archiva.proxy.base.DefaultRepositoryProxyHandler;
+import org.apache.archiva.proxy.base.NotFoundException;
+import org.apache.archiva.proxy.base.NotModifiedException;
+import org.apache.archiva.proxy.base.ProxyException;
import org.apache.archiva.proxy.model.NetworkProxy;
import org.apache.archiva.proxy.model.ProxyConnector;
import org.apache.archiva.proxy.model.RepositoryProxyHandler;
+++ /dev/null
-package org.apache.archiva.mock;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.filelock.DefaultFileLockManager;
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.common.utils.PathUtil;
-import org.apache.archiva.configuration.ArchivaConfiguration;
-import org.apache.archiva.indexer.ArchivaIndexManager;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.indexer.IndexCreationFailedException;
-import org.apache.archiva.indexer.IndexUpdateFailedException;
-import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.maven.common.proxy.WagonFactory;
-import org.apache.archiva.maven.common.proxy.WagonFactoryException;
-import org.apache.archiva.maven.common.proxy.WagonFactoryRequest;
-import org.apache.archiva.proxy.model.NetworkProxy;
-import org.apache.archiva.repository.EditableRepository;
-import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.base.PasswordCredentials;
-import org.apache.archiva.repository.RemoteRepository;
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
-import org.apache.archiva.repository.storage.fs.FilesystemAsset;
-import org.apache.archiva.repository.storage.fs.FilesystemStorage;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.features.RemoteIndexFeature;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.maven.index.ArtifactContext;
-import org.apache.maven.index.ArtifactContextProducer;
-import org.apache.maven.index.DefaultScannerListener;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.IndexerEngine;
-import org.apache.maven.index.Scanner;
-import org.apache.maven.index.ScanningRequest;
-import org.apache.maven.index.ScanningResult;
-import org.apache.maven.index.context.IndexCreator;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.packer.IndexPacker;
-import org.apache.maven.index.packer.IndexPackingRequest;
-import org.apache.maven.index.updater.IndexUpdateRequest;
-import org.apache.maven.index.updater.ResourceFetcher;
-import org.apache.maven.index_shaded.lucene.index.IndexFormatTooOldException;
-import org.apache.maven.wagon.ConnectionException;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.StreamWagon;
-import org.apache.maven.wagon.TransferFailedException;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.authentication.AuthenticationException;
-import org.apache.maven.wagon.authentication.AuthenticationInfo;
-import org.apache.maven.wagon.authorization.AuthorizationException;
-import org.apache.maven.wagon.events.TransferEvent;
-import org.apache.maven.wagon.events.TransferListener;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-import org.apache.maven.wagon.shared.http.AbstractHttpClientWagon;
-import org.apache.maven.wagon.shared.http.HttpConfiguration;
-import org.apache.maven.wagon.shared.http.HttpMethodConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentSkipListSet;
-import java.util.stream.Collectors;
-
-@Service("archivaIndexManager#maven")
-public class ArchivaIndexManagerMock implements ArchivaIndexManager {
-
- private static final Logger log = LoggerFactory.getLogger( ArchivaIndexManagerMock.class );
-
- @Inject
- private Indexer indexer;
-
- @Inject
- private IndexerEngine indexerEngine;
-
- @Inject
- private List<? extends IndexCreator> indexCreators;
-
- @Inject
- private IndexPacker indexPacker;
-
- @Inject
- private Scanner scanner;
-
- @Inject
- private ArchivaConfiguration archivaConfiguration;
-
- @Inject
- private WagonFactory wagonFactory;
-
-
- @Inject
- private ArtifactContextProducer artifactContextProducer;
-
- private ConcurrentSkipListSet<Path> activeContexts = new ConcurrentSkipListSet<>( );
-
- private static final int WAIT_TIME = 100;
- private static final int MAX_WAIT = 10;
-
-
- public static IndexingContext getMvnContext(ArchivaIndexingContext context ) throws UnsupportedBaseContextException
- {
- if ( !context.supports( IndexingContext.class ) )
- {
- log.error( "The provided archiva index context does not support the maven IndexingContext" );
- throw new UnsupportedBaseContextException( "The context does not support the Maven IndexingContext" );
- }
- return context.getBaseContext( IndexingContext.class );
- }
-
- private Path getIndexPath( ArchivaIndexingContext ctx )
- {
- return ctx.getPath().getFilePath();
- }
-
- @FunctionalInterface
- interface IndexUpdateConsumer
- {
-
- void accept( IndexingContext indexingContext ) throws IndexUpdateFailedException;
- }
-
- /*
- * This method is used to do some actions around the update execution code. And to make sure, that no other
- * method is running on the same index.
- */
- private void executeUpdateFunction( ArchivaIndexingContext context, IndexUpdateConsumer function ) throws IndexUpdateFailedException
- {
- IndexingContext indexingContext = null;
- try
- {
- indexingContext = getMvnContext( context );
- }
- catch ( UnsupportedBaseContextException e )
- {
- throw new IndexUpdateFailedException( "Maven index is not supported by this context", e );
- }
- final Path ctxPath = getIndexPath( context );
- int loop = MAX_WAIT;
- boolean active = false;
- while ( loop-- > 0 && !active )
- {
- active = activeContexts.add( ctxPath );
- try
- {
- Thread.currentThread( ).sleep( WAIT_TIME );
- }
- catch ( InterruptedException e )
- {
- // Ignore this
- }
- }
- if ( active )
- {
- try
- {
- function.accept( indexingContext );
- }
- finally
- {
- activeContexts.remove( ctxPath );
- }
- }
- else
- {
- throw new IndexUpdateFailedException( "Timeout while waiting for index release on context " + context.getId( ) );
- }
- }
-
- @Override
- public void pack( final ArchivaIndexingContext context ) throws IndexUpdateFailedException
- {
- executeUpdateFunction( context, indexingContext -> {
- try
- {
- IndexPackingRequest request = new IndexPackingRequest( indexingContext,
- indexingContext.acquireIndexSearcher( ).getIndexReader( ),
- indexingContext.getIndexDirectoryFile( ) );
- indexPacker.packIndex( request );
- indexingContext.updateTimestamp( true );
- }
- catch ( IOException e )
- {
- log.error( "IOException while packing index of context " + context.getId( ) + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ) );
- throw new IndexUpdateFailedException( "IOException during update of " + context.getId( ), e );
- }
- }
- );
-
- }
-
- @Override
- public void scan(final ArchivaIndexingContext context) throws IndexUpdateFailedException
- {
- executeUpdateFunction( context, indexingContext -> {
- DefaultScannerListener listener = new DefaultScannerListener( indexingContext, indexerEngine, true, null );
- ScanningRequest request = new ScanningRequest( indexingContext, listener );
- ScanningResult result = scanner.scan( request );
- if ( result.hasExceptions( ) )
- {
- log.error( "Exceptions occured during index scan of " + context.getId( ) );
- result.getExceptions( ).stream( ).map( e -> e.getMessage( ) ).distinct( ).limit( 5 ).forEach(
- s -> log.error( "Message: " + s )
- );
- }
-
- } );
- }
-
- @Override
- public void update(final ArchivaIndexingContext context, final boolean fullUpdate) throws IndexUpdateFailedException
- {
- log.info( "start download remote index for remote repository {}", context.getRepository( ).getId( ) );
- URI remoteUpdateUri;
- if ( !( context.getRepository( ) instanceof RemoteRepository) || !(context.getRepository().supportsFeature(RemoteIndexFeature.class)) )
- {
- throw new IndexUpdateFailedException( "The context is not associated to a remote repository with remote index " + context.getId( ) );
- } else {
- RemoteIndexFeature rif = context.getRepository().getFeature(RemoteIndexFeature.class).get();
- remoteUpdateUri = context.getRepository().getLocation().resolve(rif.getIndexUri());
- }
- final RemoteRepository remoteRepository = (RemoteRepository) context.getRepository( );
-
- executeUpdateFunction( context,
- indexingContext -> {
- try
- {
- // create a temp directory to download files
- Path tempIndexDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".tmpIndex" );
- Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".indexCache" );
- Files.createDirectories( indexCacheDirectory );
- if ( Files.exists( tempIndexDirectory ) )
- {
- FileUtils.deleteDirectory( tempIndexDirectory );
- }
- Files.createDirectories( tempIndexDirectory );
- tempIndexDirectory.toFile( ).deleteOnExit( );
- String baseIndexUrl = indexingContext.getIndexUpdateUrl( );
-
- String wagonProtocol = remoteUpdateUri.toURL( ).getProtocol( );
-
- NetworkProxy networkProxy = null;
- if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
- {
- RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
-
- final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(
- new WagonFactoryRequest( wagonProtocol, remoteRepository.getExtraHeaders( ) ).networkProxy(
- networkProxy )
- );
- int readTimeout = (int) rif.getDownloadTimeout( ).toMillis( ) * 1000;
- wagon.setReadTimeout( readTimeout );
- wagon.setTimeout( (int) remoteRepository.getTimeout( ).toMillis( ) * 1000 );
-
- if ( wagon instanceof AbstractHttpClientWagon)
- {
- HttpConfiguration httpConfiguration = new HttpConfiguration( );
- HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration( );
- httpMethodConfiguration.setUsePreemptive( true );
- httpMethodConfiguration.setReadTimeout( readTimeout );
- httpConfiguration.setGet( httpMethodConfiguration );
- AbstractHttpClientWagon.class.cast( wagon ).setHttpConfiguration( httpConfiguration );
- }
-
- wagon.addTransferListener( new DownloadListener( ) );
- ProxyInfo proxyInfo = null;
- if ( networkProxy != null )
- {
- proxyInfo = new ProxyInfo( );
- proxyInfo.setType( networkProxy.getProtocol( ) );
- proxyInfo.setHost( networkProxy.getHost( ) );
- proxyInfo.setPort( networkProxy.getPort( ) );
- proxyInfo.setUserName( networkProxy.getUsername( ) );
- proxyInfo.setPassword(new String(networkProxy.getPassword()));
- }
- AuthenticationInfo authenticationInfo = null;
- if ( remoteRepository.getLoginCredentials( ) != null && ( remoteRepository.getLoginCredentials( ) instanceof PasswordCredentials) )
- {
- PasswordCredentials creds = (PasswordCredentials) remoteRepository.getLoginCredentials( );
- authenticationInfo = new AuthenticationInfo( );
- authenticationInfo.setUserName( creds.getUsername( ) );
- authenticationInfo.setPassword( new String( creds.getPassword( ) ) );
- }
- wagon.connect( new org.apache.maven.wagon.repository.Repository( remoteRepository.getId( ), baseIndexUrl ), authenticationInfo,
- proxyInfo );
-
- Path indexDirectory = indexingContext.getIndexDirectoryFile( ).toPath( );
- if ( !Files.exists( indexDirectory ) )
- {
- Files.createDirectories( indexDirectory );
- }
-
- ResourceFetcher resourceFetcher =
- new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
- IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
- request.setForceFullUpdate( fullUpdate );
- request.setLocalIndexCacheDir( indexCacheDirectory.toFile( ) );
-
- // indexUpdater.fetchAndUpdateIndex( request );
-
- indexingContext.updateTimestamp( true );
- }
-
- }
- catch ( AuthenticationException e )
- {
- log.error( "Could not login to the remote proxy for updating index of {}", remoteRepository.getId( ), e );
- throw new IndexUpdateFailedException( "Login in to proxy failed while updating remote repository " + remoteRepository.getId( ), e );
- }
- catch ( ConnectionException e )
- {
- log.error( "Connection error during index update for remote repository {}", remoteRepository.getId( ), e );
- throw new IndexUpdateFailedException( "Connection error during index update for remote repository " + remoteRepository.getId( ), e );
- }
- catch ( MalformedURLException e )
- {
- log.error( "URL for remote index update of remote repository {} is not correct {}", remoteRepository.getId( ), remoteUpdateUri, e );
- throw new IndexUpdateFailedException( "URL for remote index update of repository is not correct " + remoteUpdateUri, e );
- }
- catch ( IOException e )
- {
- log.error( "IOException during index update of remote repository {}: {}", remoteRepository.getId( ), e.getMessage( ), e );
- throw new IndexUpdateFailedException( "IOException during index update of remote repository " + remoteRepository.getId( )
- + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
- }
- catch ( WagonFactoryException e )
- {
- log.error( "Wagon for remote index download of {} could not be created: {}", remoteRepository.getId( ), e.getMessage( ), e );
- throw new IndexUpdateFailedException( "Error while updating the remote index of " + remoteRepository.getId( ), e );
- }
- } );
-
- }
-
- @Override
- public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
- {
- StorageAsset ctxUri = context.getPath();
- executeUpdateFunction(context, indexingContext -> {
- Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
- try {
- indexer.addArtifactsToIndex(artifacts, indexingContext);
- } catch (IOException e) {
- log.error("IOException while adding artifact {}", e.getMessage(), e);
- throw new IndexUpdateFailedException("Error occured while adding artifact to index of "+context.getId()
- + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""));
- }
- });
- }
-
- @Override
- public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
- {
- final StorageAsset ctxUri = context.getPath();
- executeUpdateFunction(context, indexingContext -> {
- Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
- try {
- indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
- } catch (IOException e) {
- log.error("IOException while removing artifact {}", e.getMessage(), e);
- throw new IndexUpdateFailedException("Error occured while removing artifact from index of "+context.getId()
- + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""));
- }
- });
-
- }
-
- @Override
- public boolean supportsRepository( RepositoryType type )
- {
- return type == RepositoryType.MAVEN;
- }
-
- @Override
- public ArchivaIndexingContext createContext( Repository repository ) throws IndexCreationFailedException
- {
- log.debug("Creating context for repo {}, type: {}", repository.getId(), repository.getType());
- if ( repository.getType( ) != RepositoryType.MAVEN )
- {
- throw new UnsupportedRepositoryTypeException( repository.getType( ) );
- }
- IndexingContext mvnCtx = null;
- try
- {
- if ( repository instanceof RemoteRepository )
- {
- mvnCtx = createRemoteContext( (RemoteRepository) repository );
- }
- else if ( repository instanceof ManagedRepository )
- {
- mvnCtx = createManagedContext( (ManagedRepository) repository );
- }
- }
- catch ( IOException e )
- {
- log.error( "IOException during context creation " + e.getMessage( ), e );
- throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
- + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
- }
- MavenIndexContextMock context = new MavenIndexContextMock( repository, mvnCtx );
-
- return context;
- }
-
- @Override
- public ArchivaIndexingContext reset(ArchivaIndexingContext context) throws IndexUpdateFailedException {
- ArchivaIndexingContext ctx;
- executeUpdateFunction(context, indexingContext -> {
- try {
- indexingContext.close(true);
- } catch (IOException e) {
- log.warn("Index close failed");
- }
- org.apache.archiva.repository.storage.util.StorageUtil.deleteRecursively(context.getPath());
- });
- try {
- Repository repo = context.getRepository();
- ctx = createContext(context.getRepository());
- if (repo instanceof EditableRepository) {
- ((EditableRepository)repo).setIndexingContext(ctx);
- }
- } catch (IndexCreationFailedException e) {
- throw new IndexUpdateFailedException("Could not create index");
- }
- return ctx;
- }
-
- @Override
- public ArchivaIndexingContext move(ArchivaIndexingContext context, Repository repo) throws IndexCreationFailedException {
- if (context==null) {
- return null;
- }
- if (context.supports(IndexingContext.class)) {
- try {
- StorageAsset newPath = getIndexPath(repo);
- IndexingContext ctx = context.getBaseContext(IndexingContext.class);
- Path oldPath = ctx.getIndexDirectoryFile().toPath();
- if (oldPath.equals(newPath)) {
- // Nothing to do, if path does not change
- return context;
- }
- if (!Files.exists(oldPath)) {
- return createContext(repo);
- } else if (context.isEmpty()) {
- context.close();
- return createContext(repo);
- } else {
- context.close(false);
- Files.move(oldPath, newPath.getFilePath());
- return createContext(repo);
- }
- } catch (IOException e) {
- log.error("IOException while moving index directory {}", e.getMessage(), e);
- throw new IndexCreationFailedException("Could not recreated the index.", e);
- } catch (UnsupportedBaseContextException e) {
- throw new IndexCreationFailedException("The given context, is not a maven context.");
- }
- } else {
- throw new IndexCreationFailedException("Bad context type. This is not a maven context.");
- }
- }
-
- @Override
- public void updateLocalIndexPath(Repository repo) {
- if (repo.supportsFeature(IndexCreationFeature.class)) {
- IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
- try {
- icf.setLocalIndexPath(getIndexPath(repo));
- } catch (IOException e) {
- log.error("Could not set local index path for {}. New URI: {}", repo.getId(), icf.getIndexPath());
- }
- }
- }
-
- @Override
- public ArchivaIndexingContext mergeContexts(Repository destinationRepo, List<ArchivaIndexingContext> contexts, boolean packIndex) throws UnsupportedOperationException, IndexCreationFailedException {
- return null;
- }
-
- private StorageAsset getIndexPath( Repository repo) throws IOException {
- IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
- Path repoDir = repo.getRoot().getFilePath();
- URI indexDir = icf.getIndexPath();
- String indexPath = indexDir.getPath();
- Path indexDirectory = null;
- FilesystemStorage filesystemStorage = (FilesystemStorage) repo.getRoot().getStorage();
- if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
- {
-
- indexDirectory = PathUtil.getPathFromUri( indexDir );
- // not absolute so create it in repository directory
- if ( indexDirectory.isAbsolute( ) )
- {
- indexPath = indexDirectory.getFileName().toString();
- filesystemStorage = new FilesystemStorage(indexDirectory, new DefaultFileLockManager());
- }
- else
- {
- indexDirectory = repoDir.resolve( indexDirectory );
- }
- }
- else
- {
- indexDirectory = repoDir.resolve( ".index" );
- indexPath = ".index";
- }
-
- if ( !Files.exists( indexDirectory ) )
- {
- Files.createDirectories( indexDirectory );
- }
- return new FilesystemAsset( filesystemStorage, indexPath, indexDirectory);
- }
-
- private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
- {
- Path appServerBase = archivaConfiguration.getAppServerBaseDir( );
-
- String contextKey = "remote-" + remoteRepository.getId( );
-
-
- // create remote repository path
- Path repoDir = remoteRepository.getRoot().getFilePath();
- if ( !Files.exists( repoDir ) )
- {
- Files.createDirectories( repoDir );
- }
-
- StorageAsset indexDirectory = null;
-
- // is there configured indexDirectory ?
- if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
- {
- RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
- indexDirectory = getIndexPath(remoteRepository);
- String remoteIndexUrl = calculateIndexRemoteUrl( remoteRepository.getLocation( ), rif );
- try
- {
-
- return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
- }
- catch ( IndexFormatTooOldException e )
- {
- // existing index with an old lucene format so we need to delete it!!!
- // delete it first then recreate it.
- log.warn( "the index of repository {} is too old we have to delete and recreate it", //
- remoteRepository.getId( ) );
- FileUtils.deleteDirectory( indexDirectory.getFilePath() );
- return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
-
- }
- }
- else
- {
- throw new IOException( "No remote index defined" );
- }
- }
-
- private IndexingContext getIndexingContext( Repository repository, String contextKey, Path repoDir, StorageAsset indexDirectory, String indexUrl ) throws IOException
- {
- return indexer.createIndexingContext( contextKey, repository.getId( ), repoDir.toFile( ), indexDirectory.getFilePath().toFile( ),
- repository.getLocation( ) == null ? null : repository.getLocation( ).toString( ),
- indexUrl,
- true, false,
- indexCreators );
- }
-
- private IndexingContext createManagedContext( ManagedRepository repository ) throws IOException
- {
-
- IndexingContext context;
- // take care first about repository location as can be relative
- Path repositoryDirectory = repository.getRoot().getFilePath();
-
- if ( !Files.exists( repositoryDirectory ) )
- {
- try
- {
- Files.createDirectories( repositoryDirectory );
- }
- catch ( IOException e )
- {
- log.error( "Could not create directory {}", repositoryDirectory );
- }
- }
-
- StorageAsset indexDirectory = null;
-
- if ( repository.supportsFeature( IndexCreationFeature.class ) )
- {
- indexDirectory = getIndexPath(repository);
-
- String indexUrl = repositoryDirectory.toUri( ).toURL( ).toExternalForm( );
- try
- {
- context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
- context.setSearchable( repository.isScanned( ) );
- }
- catch ( IndexFormatTooOldException e )
- {
- // existing index with an old lucene format so we need to delete it!!!
- // delete it first then recreate it.
- log.warn( "the index of repository {} is too old we have to delete and recreate it", //
- repository.getId( ) );
- FileUtils.deleteDirectory( indexDirectory.getFilePath() );
- context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
- context.setSearchable( repository.isScanned( ) );
- }
- return context;
- }
- else
- {
- throw new IOException( "No repository index defined" );
- }
- }
-
- private String calculateIndexRemoteUrl( URI baseUri, RemoteIndexFeature rif )
- {
- if ( rif.getIndexUri( ) == null )
- {
- return baseUri.resolve( ".index" ).toString( );
- }
- else
- {
- return baseUri.resolve( rif.getIndexUri( ) ).toString( );
- }
- }
-
- private static final class DownloadListener
- implements TransferListener
- {
- private Logger log = LoggerFactory.getLogger( getClass( ) );
-
- private String resourceName;
-
- private long startTime;
-
- private int totalLength = 0;
-
- @Override
- public void transferInitiated( TransferEvent transferEvent )
- {
- startTime = System.currentTimeMillis( );
- resourceName = transferEvent.getResource( ).getName( );
- log.debug( "initiate transfer of {}", resourceName );
- }
-
- @Override
- public void transferStarted( TransferEvent transferEvent )
- {
- this.totalLength = 0;
- resourceName = transferEvent.getResource( ).getName( );
- log.info( "start transfer of {}", transferEvent.getResource( ).getName( ) );
- }
-
- @Override
- public void transferProgress( TransferEvent transferEvent, byte[] buffer, int length )
- {
- log.debug( "transfer of {} : {}/{}", transferEvent.getResource( ).getName( ), buffer.length, length );
- this.totalLength += length;
- }
-
- @Override
- public void transferCompleted( TransferEvent transferEvent )
- {
- resourceName = transferEvent.getResource( ).getName( );
- long endTime = System.currentTimeMillis( );
- log.info( "end of transfer file {} {} kb: {}s", transferEvent.getResource( ).getName( ),
- this.totalLength / 1024, ( endTime - startTime ) / 1000 );
- }
-
- @Override
- public void transferError( TransferEvent transferEvent )
- {
- log.info( "error of transfer file {}: {}", transferEvent.getResource( ).getName( ),
- transferEvent.getException( ).getMessage( ), transferEvent.getException( ) );
- }
-
- @Override
- public void debug( String message )
- {
- log.debug( "transfer debug {}", message );
- }
- }
-
- private static class WagonResourceFetcher
- implements ResourceFetcher
- {
-
- Logger log;
-
- Path tempIndexDirectory;
-
- Wagon wagon;
-
- RemoteRepository remoteRepository;
-
- private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
- RemoteRepository remoteRepository )
- {
- this.log = log;
- this.tempIndexDirectory = tempIndexDirectory;
- this.wagon = wagon;
- this.remoteRepository = remoteRepository;
- }
-
- @Override
- public void connect( String id, String url )
- throws IOException
- {
- //no op
- }
-
- @Override
- public void disconnect( )
- throws IOException
- {
- // no op
- }
-
- @Override
- public InputStream retrieve(String name )
- throws IOException, FileNotFoundException
- {
- try
- {
- log.info( "index update retrieve file, name:{}", name );
- Path file = tempIndexDirectory.resolve( name );
- Files.deleteIfExists( file );
- file.toFile( ).deleteOnExit( );
- wagon.get( addParameters( name, remoteRepository ), file.toFile( ) );
- return Files.newInputStream( file );
- }
- catch ( AuthorizationException | TransferFailedException e )
- {
- throw new IOException( e.getMessage( ), e );
- }
- catch ( ResourceDoesNotExistException e )
- {
- FileNotFoundException fnfe = new FileNotFoundException( e.getMessage( ) );
- fnfe.initCause( e );
- throw fnfe;
- }
- }
-
- // FIXME remove crappy copy/paste
- protected String addParameters( String path, RemoteRepository remoteRepository )
- {
- if ( remoteRepository.getExtraParameters( ).isEmpty( ) )
- {
- return path;
- }
-
- boolean question = false;
-
- StringBuilder res = new StringBuilder( path == null ? "" : path );
-
- for ( Map.Entry<String, String> entry : remoteRepository.getExtraParameters( ).entrySet( ) )
- {
- if ( !question )
- {
- res.append( '?' ).append( entry.getKey( ) ).append( '=' ).append( entry.getValue( ) );
- }
- }
-
- return res.toString( );
- }
-
- }
-}
+++ /dev/null
-package org.apache.archiva.mock;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.filelock.DefaultFileLockManager;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.storage.fs.FilesystemAsset;
-import org.apache.archiva.repository.storage.fs.FilesystemStorage;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.maven.index.context.IndexingContext;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.NoSuchFileException;
-import java.sql.Date;
-import java.time.ZonedDateTime;
-import java.util.Set;
-
-/**
- * Maven implementation of index context
- */
-public class MavenIndexContextMock implements ArchivaIndexingContext {
-
- private boolean open = true;
-
- private IndexingContext delegate;
- private Repository repository;
- private FilesystemStorage filesystemStorage;
-
- MavenIndexContextMock( Repository repository, IndexingContext delegate) {
- this.delegate = delegate;
- this.repository = repository;
- try {
- filesystemStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath().getParent(), new DefaultFileLockManager());
- } catch (IOException e) {
- e.printStackTrace();
- }
-
- }
-
- @Override
- public String getId() {
- return delegate.getId();
- }
-
- @Override
- public Repository getRepository() {
- return repository;
- }
-
- @Override
- public StorageAsset getPath() {
- return new FilesystemAsset(filesystemStorage, delegate.getIndexDirectoryFile().toPath().getFileName().toString(), delegate.getIndexDirectoryFile().toPath());
-
- }
-
- @Override
- public boolean isEmpty() throws IOException {
- return Files.list(delegate.getIndexDirectoryFile().toPath()).count()==0;
- }
-
- @Override
- public void commit() throws IOException {
- delegate.commit();
- }
-
- @Override
- public void rollback() throws IOException {
- delegate.rollback();
- }
-
- @Override
- public void optimize() throws IOException {
- delegate.optimize();
- }
-
- @Override
- public void close(boolean deleteFiles) throws IOException {
- this.open = false;
- try {
- delegate.close(deleteFiles);
- } catch (NoSuchFileException e) {
- // Ignore missing directory
- }
- }
-
- @Override
- public void close() throws IOException {
- this.open = false;
- try {
- delegate.close(false);
- } catch (NoSuchFileException e) {
- // Ignore missing directory
- }
- }
-
- @Override
- public boolean isOpen() {
- return open;
- }
-
- @Override
- public void purge() throws IOException {
- delegate.purge();
- }
-
- @Override
- public boolean supports(Class<?> clazz) {
- return IndexingContext.class.equals(clazz);
- }
-
- @SuppressWarnings( "unchecked" )
- @Override
- public <T> T getBaseContext(Class<T> clazz) throws UnsupportedOperationException {
- if (IndexingContext.class.equals(clazz)) {
- return (T) delegate;
- } else {
- throw new UnsupportedOperationException("The class "+clazz+" is not supported by the maven indexer");
- }
- }
-
- @Override
- public Set<String> getGroups() throws IOException {
- return delegate.getAllGroups();
- }
-
- @Override
- public void updateTimestamp(boolean save) throws IOException {
- delegate.updateTimestamp(save);
- }
-
- @Override
- public void updateTimestamp(boolean save, ZonedDateTime time) throws IOException {
- delegate.updateTimestamp(save, Date.from(time.toInstant()));
- }
-
-
-}
+++ /dev/null
-package org.apache.archiva.mock;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.admin.model.AuditInformation;
-import org.apache.archiva.admin.model.RepositoryAdminException;
-import org.apache.archiva.admin.model.admin.ArchivaAdministration;
-import org.apache.archiva.admin.model.beans.FileType;
-import org.apache.archiva.admin.model.beans.LegacyArtifactPath;
-import org.apache.archiva.admin.model.beans.NetworkConfiguration;
-import org.apache.archiva.admin.model.beans.OrganisationInformation;
-import org.apache.archiva.admin.model.beans.UiConfiguration;
-import org.apache.archiva.configuration.ArchivaConfiguration;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * @author Olivier Lamy
- */
-public class MockArchivaAdministration
- implements ArchivaAdministration
-{
- private ArchivaConfiguration archivaConfiguration;
-
- @Override
- public List<LegacyArtifactPath> getLegacyArtifactPaths()
- throws RepositoryAdminException
- {
- return null;
- }
-
- @Override
- public void addLegacyArtifactPath( LegacyArtifactPath legacyArtifactPath, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public void deleteLegacyArtifactPath( String path, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public void addFileTypePattern( String fileTypeId, String pattern, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public void removeFileTypePattern( String fileTypeId, String pattern, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public List<FileType> getFileTypes()
- throws RepositoryAdminException
- {
- return null;
- }
-
- @Override
- public FileType getFileType( String fileTypeId )
- throws RepositoryAdminException
- {
- return null;
- }
-
- @Override
- public void addFileType( FileType fileType, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public void removeFileType( String fileTypeId, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public void addKnownContentConsumer( String knownContentConsumer, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public void setKnownContentConsumers( List<String> knownContentConsumers, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public List<String> getKnownContentConsumers()
- throws RepositoryAdminException
- {
- return new ArrayList<>(
- getArchivaConfiguration().getConfiguration().getRepositoryScanning().getKnownContentConsumers() );
- }
-
- @Override
- public void removeKnownContentConsumer( String knownContentConsumer, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public void addInvalidContentConsumer( String invalidContentConsumer, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public void setInvalidContentConsumers( List<String> invalidContentConsumers, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public List<String> getInvalidContentConsumers()
- throws RepositoryAdminException
- {
- return new ArrayList<>(
- getArchivaConfiguration().getConfiguration().getRepositoryScanning().getInvalidContentConsumers() );
- }
-
- @Override
- public void removeInvalidContentConsumer( String invalidContentConsumer, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public OrganisationInformation getOrganisationInformation()
- throws RepositoryAdminException
- {
- return null;
- }
-
- @Override
- public void setOrganisationInformation( OrganisationInformation organisationInformation )
- throws RepositoryAdminException
- {
-
- }
-
- public ArchivaConfiguration getArchivaConfiguration()
- {
- return archivaConfiguration;
- }
-
- public void setArchivaConfiguration( ArchivaConfiguration archivaConfiguration )
- {
- this.archivaConfiguration = archivaConfiguration;
- }
-
- @Override
- public UiConfiguration getUiConfiguration()
- throws RepositoryAdminException
- {
- return null;
- }
-
- @Override
- public void updateUiConfiguration( UiConfiguration uiConfiguration )
- throws RepositoryAdminException
- {
-
- }
-
- @Override
- public NetworkConfiguration getNetworkConfiguration()
- throws RepositoryAdminException
- {
- return null;
- }
-
- @Override
- public void setNetworkConfiguration( NetworkConfiguration networkConfiguration )
- throws RepositoryAdminException
- {
-
- }
-}
+++ /dev/null
-package org.apache.archiva.mock;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.admin.model.AuditInformation;
-import org.apache.archiva.admin.model.RepositoryAdminException;
-import org.apache.archiva.admin.model.beans.RemoteRepository;
-import org.apache.archiva.admin.model.remote.RemoteRepositoryAdmin;
-import org.apache.archiva.configuration.ArchivaConfiguration;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author Olivier Lamy
- */
-public class MockRemoteRepositoryAdmin
- implements RemoteRepositoryAdmin
-{
- private ArchivaConfiguration archivaConfiguration;
-
- @Override
- public List<RemoteRepository> getRemoteRepositories()
- throws RepositoryAdminException
- {
- return null; //To change body of implemented methods use File | Settings | File Templates.
- }
-
- @Override
- public RemoteRepository getRemoteRepository( String repositoryId )
- throws RepositoryAdminException
- {
- return null; //To change body of implemented methods use File | Settings | File Templates.
- }
-
- @Override
- public Boolean deleteRemoteRepository( String repositoryId, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
- return null; //To change body of implemented methods use File | Settings | File Templates.
- }
-
- @Override
- public Boolean addRemoteRepository( RemoteRepository remoteRepository, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
- return null; //To change body of implemented methods use File | Settings | File Templates.
- }
-
- @Override
- public Boolean updateRemoteRepository( RemoteRepository remoteRepository, AuditInformation auditInformation )
- throws RepositoryAdminException
- {
- return null; //To change body of implemented methods use File | Settings | File Templates.
- }
-
- @Override
- public Map<String, RemoteRepository> getRemoteRepositoriesAsMap()
- throws RepositoryAdminException
- {
- return null; //To change body of implemented methods use File | Settings | File Templates.
- }
-
- public ArchivaConfiguration getArchivaConfiguration()
- {
- return archivaConfiguration;
- }
-
- public void setArchivaConfiguration( ArchivaConfiguration archivaConfiguration )
- {
- this.archivaConfiguration = archivaConfiguration;
- }
-
-}
--- /dev/null
+package org.apache.archiva.scheduler.repository.mock;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.common.utils.PathUtil;
+import org.apache.archiva.configuration.ArchivaConfiguration;
+import org.apache.archiva.indexer.ArchivaIndexManager;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.indexer.IndexCreationFailedException;
+import org.apache.archiva.indexer.IndexUpdateFailedException;
+import org.apache.archiva.indexer.UnsupportedBaseContextException;
+import org.apache.archiva.maven.common.proxy.WagonFactory;
+import org.apache.archiva.maven.common.proxy.WagonFactoryException;
+import org.apache.archiva.maven.common.proxy.WagonFactoryRequest;
+import org.apache.archiva.proxy.model.NetworkProxy;
+import org.apache.archiva.repository.EditableRepository;
+import org.apache.archiva.repository.ManagedRepository;
+import org.apache.archiva.repository.base.PasswordCredentials;
+import org.apache.archiva.repository.RemoteRepository;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
+import org.apache.archiva.repository.storage.fs.FilesystemAsset;
+import org.apache.archiva.repository.storage.fs.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.maven.index.ArtifactContext;
+import org.apache.maven.index.ArtifactContextProducer;
+import org.apache.maven.index.DefaultScannerListener;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.IndexerEngine;
+import org.apache.maven.index.Scanner;
+import org.apache.maven.index.ScanningRequest;
+import org.apache.maven.index.ScanningResult;
+import org.apache.maven.index.context.IndexCreator;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.packer.IndexPacker;
+import org.apache.maven.index.packer.IndexPackingRequest;
+import org.apache.maven.index.updater.IndexUpdateRequest;
+import org.apache.maven.index.updater.ResourceFetcher;
+import org.apache.maven.index_shaded.lucene.index.IndexFormatTooOldException;
+import org.apache.maven.wagon.ConnectionException;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.StreamWagon;
+import org.apache.maven.wagon.TransferFailedException;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.authentication.AuthenticationException;
+import org.apache.maven.wagon.authentication.AuthenticationInfo;
+import org.apache.maven.wagon.authorization.AuthorizationException;
+import org.apache.maven.wagon.events.TransferEvent;
+import org.apache.maven.wagon.events.TransferListener;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+import org.apache.maven.wagon.shared.http.AbstractHttpClientWagon;
+import org.apache.maven.wagon.shared.http.HttpConfiguration;
+import org.apache.maven.wagon.shared.http.HttpMethodConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentSkipListSet;
+import java.util.stream.Collectors;
+
+@Service("archivaIndexManager#maven")
+public class ArchivaIndexManagerMock implements ArchivaIndexManager {
+
+ private static final Logger log = LoggerFactory.getLogger( ArchivaIndexManagerMock.class );
+
+ // Maven indexer entry point used to add/remove artifacts and create contexts.
+ @Inject
+ private Indexer indexer;
+
+ // Low-level engine handed to the scanner listener during full scans.
+ @Inject
+ private IndexerEngine indexerEngine;
+
+ // Index field creators passed to every newly created IndexingContext.
+ @Inject
+ private List<? extends IndexCreator> indexCreators;
+
+ // Packs an index directory into the transferable chunk format.
+ @Inject
+ private IndexPacker indexPacker;
+
+ // Repository scanner used by scan() to (re)index artifacts on disk.
+ @Inject
+ private Scanner scanner;
+
+ @Inject
+ private ArchivaConfiguration archivaConfiguration;
+
+ // Creates wagon instances for downloading remote index files in update().
+ @Inject
+ private WagonFactory wagonFactory;
+
+
+ @Inject
+ private ArtifactContextProducer artifactContextProducer;
+
+ // Index directories currently being updated; acts as a simple per-path lock
+ // (see executeUpdateFunction). ConcurrentSkipListSet gives atomic add/remove.
+ private ConcurrentSkipListSet<Path> activeContexts = new ConcurrentSkipListSet<>( );
+
+ // Milliseconds to wait between lock-acquisition attempts, and the number of attempts.
+ private static final int WAIT_TIME = 100;
+ private static final int MAX_WAIT = 10;
+
+
+ /**
+  * Unwraps the Maven {@link IndexingContext} from the given Archiva indexing context.
+  *
+  * @param context the archiva context to unwrap; must support {@code IndexingContext}
+  * @return the underlying Maven indexing context
+  * @throws UnsupportedBaseContextException if the context is not backed by a Maven IndexingContext
+  */
+ public static IndexingContext getMvnContext(ArchivaIndexingContext context ) throws UnsupportedBaseContextException
+ {
+ if ( !context.supports( IndexingContext.class ) )
+ {
+ log.error( "The provided archiva index context does not support the maven IndexingContext" );
+ throw new UnsupportedBaseContextException( "The context does not support the Maven IndexingContext" );
+ }
+ return context.getBaseContext( IndexingContext.class );
+ }
+
+ /** Returns the filesystem path of the context's index directory (used as the lock key). */
+ private Path getIndexPath( ArchivaIndexingContext ctx )
+ {
+ return ctx.getPath().getFilePath();
+ }
+
+ /**
+  * Callback invoked by {@link #executeUpdateFunction} while the per-index lock is held.
+  * Unlike {@code Consumer}, it may throw {@link IndexUpdateFailedException}.
+  */
+ @FunctionalInterface
+ interface IndexUpdateConsumer
+ {
+
+ void accept( IndexingContext indexingContext ) throws IndexUpdateFailedException;
+ }
+
+ /*
+  * This method is used to do some actions around the update execution code. And to make sure, that no other
+  * method is running on the same index.
+  *
+  * Acquires a per-index-path lock via activeContexts, retrying up to MAX_WAIT times with
+  * WAIT_TIME ms pauses, runs the given function, and always releases the lock afterwards.
+  */
+ private void executeUpdateFunction( ArchivaIndexingContext context, IndexUpdateConsumer function ) throws IndexUpdateFailedException
+ {
+ IndexingContext indexingContext = null;
+ try
+ {
+ indexingContext = getMvnContext( context );
+ }
+ catch ( UnsupportedBaseContextException e )
+ {
+ throw new IndexUpdateFailedException( "Maven index is not supported by this context", e );
+ }
+ final Path ctxPath = getIndexPath( context );
+ int loop = MAX_WAIT;
+ boolean active = false;
+ while ( loop-- > 0 && !active )
+ {
+ active = activeContexts.add( ctxPath );
+ if ( !active )
+ {
+ try
+ {
+ // Only pause when the lock was NOT acquired (the old code also slept after a
+ // successful acquisition). Thread.sleep is static — calling it via
+ // Thread.currentThread() was misleading.
+ Thread.sleep( WAIT_TIME );
+ }
+ catch ( InterruptedException e )
+ {
+ // Restore the interrupt flag so callers further up the stack can react to it.
+ Thread.currentThread( ).interrupt( );
+ }
+ }
+ }
+ if ( active )
+ {
+ try
+ {
+ function.accept( indexingContext );
+ }
+ finally
+ {
+ // Always release the lock, even if the update function threw.
+ activeContexts.remove( ctxPath );
+ }
+ }
+ else
+ {
+ throw new IndexUpdateFailedException( "Timeout while waiting for index release on context " + context.getId( ) );
+ }
+ }
+
+ /**
+  * Packs the index of the given context into its index directory and refreshes the
+  * index timestamp. Runs under the per-index lock of executeUpdateFunction.
+  *
+  * @throws IndexUpdateFailedException if packing fails with an IOException
+  */
+ @Override
+ public void pack( final ArchivaIndexingContext context ) throws IndexUpdateFailedException
+ {
+ executeUpdateFunction( context, indexingContext -> {
+ try
+ {
+ // NOTE(review): the searcher acquired here is never released back to the
+ // context — looks like a potential reader leak; confirm against the
+ // maven-indexer API contract.
+ IndexPackingRequest request = new IndexPackingRequest( indexingContext,
+ indexingContext.acquireIndexSearcher( ).getIndexReader( ),
+ indexingContext.getIndexDirectoryFile( ) );
+ indexPacker.packIndex( request );
+ indexingContext.updateTimestamp( true );
+ }
+ catch ( IOException e )
+ {
+ log.error( "IOException while packing index of context " + context.getId( ) + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ) );
+ throw new IndexUpdateFailedException( "IOException during update of " + context.getId( ), e );
+ }
+ }
+ );
+
+ }
+
+ /**
+  * Scans the repository content of the given context and feeds it to the indexer engine.
+  * Scan exceptions are logged (at most 5 distinct messages) but not rethrown.
+  */
+ @Override
+ public void scan(final ArchivaIndexingContext context) throws IndexUpdateFailedException
+ {
+ executeUpdateFunction( context, indexingContext -> {
+ DefaultScannerListener listener = new DefaultScannerListener( indexingContext, indexerEngine, true, null );
+ ScanningRequest request = new ScanningRequest( indexingContext, listener );
+ ScanningResult result = scanner.scan( request );
+ if ( result.hasExceptions( ) )
+ {
+ log.error( "Exceptions occured during index scan of " + context.getId( ) );
+ // Log only a bounded, de-duplicated sample of the failure messages.
+ result.getExceptions( ).stream( ).map( e -> e.getMessage( ) ).distinct( ).limit( 5 ).forEach(
+ s -> log.error( "Message: " + s )
+ );
+ }
+
+ } );
+ }
+
+ /**
+  * Downloads the remote index of the repository backing the given context into a temp
+  * directory via wagon and refreshes the context timestamp. The actual index fetch is
+  * commented out in this mock (request is built but never executed).
+  *
+  * @param context    context of a remote repository that supports RemoteIndexFeature
+  * @param fullUpdate whether a full (rather than incremental) update is requested
+  * @throws IndexUpdateFailedException on auth, connection, URL or I/O failures
+  */
+ @Override
+ public void update(final ArchivaIndexingContext context, final boolean fullUpdate) throws IndexUpdateFailedException
+ {
+ log.info( "start download remote index for remote repository {}", context.getRepository( ).getId( ) );
+ URI remoteUpdateUri;
+ if ( !( context.getRepository( ) instanceof RemoteRepository) || !(context.getRepository().supportsFeature(RemoteIndexFeature.class)) )
+ {
+ throw new IndexUpdateFailedException( "The context is not associated to a remote repository with remote index " + context.getId( ) );
+ } else {
+ RemoteIndexFeature rif = context.getRepository().getFeature(RemoteIndexFeature.class).get();
+ remoteUpdateUri = context.getRepository().getLocation().resolve(rif.getIndexUri());
+ }
+ final RemoteRepository remoteRepository = (RemoteRepository) context.getRepository( );
+
+ executeUpdateFunction( context,
+ indexingContext -> {
+ try
+ {
+ // create a temp directory to download files
+ Path tempIndexDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".tmpIndex" );
+ Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".indexCache" );
+ Files.createDirectories( indexCacheDirectory );
+ if ( Files.exists( tempIndexDirectory ) )
+ {
+ FileUtils.deleteDirectory( tempIndexDirectory );
+ }
+ Files.createDirectories( tempIndexDirectory );
+ tempIndexDirectory.toFile( ).deleteOnExit( );
+ String baseIndexUrl = indexingContext.getIndexUpdateUrl( );
+
+ String wagonProtocol = remoteUpdateUri.toURL( ).getProtocol( );
+
+ NetworkProxy networkProxy = null;
+ if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
+ {
+ RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
+
+ final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(
+ new WagonFactoryRequest( wagonProtocol, remoteRepository.getExtraHeaders( ) ).networkProxy(
+ networkProxy )
+ );
+ // BUGFIX: Duration.toMillis() already yields milliseconds — the previous
+ // "* 1000" inflated both timeouts by a factor of 1000 (and risked int overflow).
+ int readTimeout = (int) rif.getDownloadTimeout( ).toMillis( );
+ wagon.setReadTimeout( readTimeout );
+ wagon.setTimeout( (int) remoteRepository.getTimeout( ).toMillis( ) );
+
+ if ( wagon instanceof AbstractHttpClientWagon)
+ {
+ HttpConfiguration httpConfiguration = new HttpConfiguration( );
+ HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration( );
+ httpMethodConfiguration.setUsePreemptive( true );
+ httpMethodConfiguration.setReadTimeout( readTimeout );
+ httpConfiguration.setGet( httpMethodConfiguration );
+ AbstractHttpClientWagon.class.cast( wagon ).setHttpConfiguration( httpConfiguration );
+ }
+
+ wagon.addTransferListener( new DownloadListener( ) );
+ ProxyInfo proxyInfo = null;
+ if ( networkProxy != null )
+ {
+ proxyInfo = new ProxyInfo( );
+ proxyInfo.setType( networkProxy.getProtocol( ) );
+ proxyInfo.setHost( networkProxy.getHost( ) );
+ proxyInfo.setPort( networkProxy.getPort( ) );
+ proxyInfo.setUserName( networkProxy.getUsername( ) );
+ proxyInfo.setPassword(new String(networkProxy.getPassword()));
+ }
+ AuthenticationInfo authenticationInfo = null;
+ if ( remoteRepository.getLoginCredentials( ) != null && ( remoteRepository.getLoginCredentials( ) instanceof PasswordCredentials) )
+ {
+ PasswordCredentials creds = (PasswordCredentials) remoteRepository.getLoginCredentials( );
+ authenticationInfo = new AuthenticationInfo( );
+ authenticationInfo.setUserName( creds.getUsername( ) );
+ authenticationInfo.setPassword( new String( creds.getPassword( ) ) );
+ }
+ wagon.connect( new org.apache.maven.wagon.repository.Repository( remoteRepository.getId( ), baseIndexUrl ), authenticationInfo,
+ proxyInfo );
+
+ Path indexDirectory = indexingContext.getIndexDirectoryFile( ).toPath( );
+ if ( !Files.exists( indexDirectory ) )
+ {
+ Files.createDirectories( indexDirectory );
+ }
+
+ ResourceFetcher resourceFetcher =
+ new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
+ IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
+ request.setForceFullUpdate( fullUpdate );
+ request.setLocalIndexCacheDir( indexCacheDirectory.toFile( ) );
+
+ // indexUpdater.fetchAndUpdateIndex( request );
+
+ indexingContext.updateTimestamp( true );
+ }
+
+ }
+ catch ( AuthenticationException e )
+ {
+ log.error( "Could not login to the remote proxy for updating index of {}", remoteRepository.getId( ), e );
+ throw new IndexUpdateFailedException( "Login in to proxy failed while updating remote repository " + remoteRepository.getId( ), e );
+ }
+ catch ( ConnectionException e )
+ {
+ log.error( "Connection error during index update for remote repository {}", remoteRepository.getId( ), e );
+ throw new IndexUpdateFailedException( "Connection error during index update for remote repository " + remoteRepository.getId( ), e );
+ }
+ catch ( MalformedURLException e )
+ {
+ log.error( "URL for remote index update of remote repository {} is not correct {}", remoteRepository.getId( ), remoteUpdateUri, e );
+ throw new IndexUpdateFailedException( "URL for remote index update of repository is not correct " + remoteUpdateUri, e );
+ }
+ catch ( IOException e )
+ {
+ log.error( "IOException during index update of remote repository {}: {}", remoteRepository.getId( ), e.getMessage( ), e );
+ throw new IndexUpdateFailedException( "IOException during index update of remote repository " + remoteRepository.getId( )
+ + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
+ }
+ catch ( WagonFactoryException e )
+ {
+ log.error( "Wagon for remote index download of {} could not be created: {}", remoteRepository.getId( ), e.getMessage( ), e );
+ throw new IndexUpdateFailedException( "Error while updating the remote index of " + remoteRepository.getId( ), e );
+ }
+ } );
+
+ }
+
+ /**
+  * Adds the artifacts identified by the given URIs (resolved against the context's
+  * index path) to the Maven index of the context.
+  *
+  * @throws IndexUpdateFailedException if the underlying indexer fails with an IOException
+  */
+ @Override
+ public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
+ {
+ StorageAsset ctxUri = context.getPath();
+ executeUpdateFunction(context, indexingContext -> {
+ Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
+ try {
+ indexer.addArtifactsToIndex(artifacts, indexingContext);
+ } catch (IOException e) {
+ log.error("IOException while adding artifact {}", e.getMessage(), e);
+ // BUGFIX: propagate the original IOException as the cause instead of discarding it.
+ throw new IndexUpdateFailedException("Error occured while adding artifact to index of "+context.getId()
+ + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""), e);
+ }
+ });
+ }
+
+ /**
+  * Removes the artifacts identified by the given URIs (resolved against the context's
+  * index path) from the Maven index of the context.
+  *
+  * @throws IndexUpdateFailedException if the underlying indexer fails with an IOException
+  */
+ @Override
+ public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
+ {
+ final StorageAsset ctxUri = context.getPath();
+ executeUpdateFunction(context, indexingContext -> {
+ Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
+ try {
+ indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
+ } catch (IOException e) {
+ log.error("IOException while removing artifact {}", e.getMessage(), e);
+ // BUGFIX: propagate the original IOException as the cause instead of discarding it.
+ throw new IndexUpdateFailedException("Error occured while removing artifact from index of "+context.getId()
+ + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""), e);
+ }
+ });
+
+ }
+
+ /** This mock index manager only handles MAVEN-type repositories. */
+ @Override
+ public boolean supportsRepository( RepositoryType type )
+ {
+ return type == RepositoryType.MAVEN;
+ }
+
+ /**
+  * Creates an indexing context for the given Maven repository, dispatching to the
+  * remote or managed context factory depending on the repository kind.
+  *
+  * @throws IndexCreationFailedException on I/O failure during context creation
+  * @throws UnsupportedRepositoryTypeException (unchecked) if the repository is not MAVEN-typed
+  */
+ @Override
+ public ArchivaIndexingContext createContext( Repository repository ) throws IndexCreationFailedException
+ {
+ log.debug("Creating context for repo {}, type: {}", repository.getId(), repository.getType());
+ if ( repository.getType( ) != RepositoryType.MAVEN )
+ {
+ throw new UnsupportedRepositoryTypeException( repository.getType( ) );
+ }
+ IndexingContext mvnCtx = null;
+ try
+ {
+ if ( repository instanceof RemoteRepository )
+ {
+ mvnCtx = createRemoteContext( (RemoteRepository) repository );
+ }
+ else if ( repository instanceof ManagedRepository )
+ {
+ mvnCtx = createManagedContext( (ManagedRepository) repository );
+ }
+ }
+ catch ( IOException e )
+ {
+ log.error( "IOException during context creation " + e.getMessage( ), e );
+ throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
+ + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
+ }
+ // NOTE(review): if the repository is neither remote nor managed, mvnCtx stays null
+ // and a mock context wrapping null is returned — confirm callers tolerate this.
+ MavenIndexContextMock context = new MavenIndexContextMock( repository, mvnCtx );
+
+ return context;
+ }
+
+ /**
+  * Resets the index of the context: closes it, deletes the index files on disk and
+  * creates a fresh context, re-attaching it to the repository when it is editable.
+  *
+  * @return the newly created context
+  * @throws IndexUpdateFailedException if the replacement context cannot be created
+  */
+ @Override
+ public ArchivaIndexingContext reset(ArchivaIndexingContext context) throws IndexUpdateFailedException {
+ ArchivaIndexingContext ctx;
+ executeUpdateFunction(context, indexingContext -> {
+ try {
+ indexingContext.close(true);
+ } catch (IOException e) {
+ // Log the exception itself so the failure reason is not silently dropped.
+ log.warn("Index close failed", e);
+ }
+ org.apache.archiva.repository.storage.util.StorageUtil.deleteRecursively(context.getPath());
+ });
+ try {
+ Repository repo = context.getRepository();
+ ctx = createContext(context.getRepository());
+ if (repo instanceof EditableRepository) {
+ ((EditableRepository)repo).setIndexingContext(ctx);
+ }
+ } catch (IndexCreationFailedException e) {
+ // BUGFIX: preserve the creation failure as the cause of the rethrown exception.
+ throw new IndexUpdateFailedException("Could not create index", e);
+ }
+ return ctx;
+ }
+
+ /**
+  * Moves the index of the given context to the index location of the target repository
+  * and returns a context for the new location. No-op when the location is unchanged;
+  * empty or missing indexes are simply recreated at the new location.
+  *
+  * @throws IndexCreationFailedException if the context is not Maven-based or the move fails
+  */
+ @Override
+ public ArchivaIndexingContext move(ArchivaIndexingContext context, Repository repo) throws IndexCreationFailedException {
+ if (context==null) {
+ return null;
+ }
+ if (context.supports(IndexingContext.class)) {
+ try {
+ StorageAsset newPath = getIndexPath(repo);
+ IndexingContext ctx = context.getBaseContext(IndexingContext.class);
+ Path oldPath = ctx.getIndexDirectoryFile().toPath();
+ // BUGFIX: compare Path with Path. The previous code compared the Path to the
+ // StorageAsset itself, which can never be equal, so this short-circuit never fired.
+ if (oldPath.equals(newPath.getFilePath())) {
+ // Nothing to do, if path does not change
+ return context;
+ }
+ if (!Files.exists(oldPath)) {
+ return createContext(repo);
+ } else if (context.isEmpty()) {
+ context.close();
+ return createContext(repo);
+ } else {
+ context.close(false);
+ Files.move(oldPath, newPath.getFilePath());
+ return createContext(repo);
+ }
+ } catch (IOException e) {
+ log.error("IOException while moving index directory {}", e.getMessage(), e);
+ throw new IndexCreationFailedException("Could not recreated the index.", e);
+ } catch (UnsupportedBaseContextException e) {
+ throw new IndexCreationFailedException("The given context, is not a maven context.");
+ }
+ } else {
+ throw new IndexCreationFailedException("Bad context type. This is not a maven context.");
+ }
+ }
+
+ /**
+  * Refreshes the local index path stored on the repository's IndexCreationFeature
+  * so it matches the current on-disk index location. Failures are logged only.
+  */
+ @Override
+ public void updateLocalIndexPath(Repository repo) {
+ if (repo.supportsFeature(IndexCreationFeature.class)) {
+ IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
+ try {
+ icf.setLocalIndexPath(getIndexPath(repo));
+ } catch (IOException e) {
+ // Pass the exception to the logger so the stack trace is not lost.
+ log.error("Could not set local index path for {}. New URI: {}", repo.getId(), icf.getIndexPath(), e);
+ }
+ }
+ }
+
+ /** Not implemented in this mock — always returns {@code null}. */
+ @Override
+ public ArchivaIndexingContext mergeContexts(Repository destinationRepo, List<ArchivaIndexingContext> contexts, boolean packIndex) throws UnsupportedOperationException, IndexCreationFailedException {
+ return null;
+ }
+
+ /**
+  * Resolves the index directory of the repository from its IndexCreationFeature,
+  * creating the directory if needed, and wraps it as a filesystem StorageAsset.
+  * Falls back to {@code <repo>/.index} when no index path is configured.
+  *
+  * @throws IOException if the index directory cannot be created
+  */
+ private StorageAsset getIndexPath( Repository repo) throws IOException {
+ IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
+ Path repoDir = repo.getRoot().getFilePath();
+ URI indexDir = icf.getIndexPath();
+ String indexPath = indexDir.getPath();
+ Path indexDirectory = null;
+ FilesystemStorage filesystemStorage = (FilesystemStorage) repo.getRoot().getStorage();
+ if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
+ {
+
+ indexDirectory = PathUtil.getPathFromUri( indexDir );
+ // absolute path: give it its own storage; otherwise resolve it relative to the repository directory
+ if ( indexDirectory.isAbsolute( ) )
+ {
+ indexPath = indexDirectory.getFileName().toString();
+ filesystemStorage = new FilesystemStorage(indexDirectory, new DefaultFileLockManager());
+ }
+ else
+ {
+ indexDirectory = repoDir.resolve( indexDirectory );
+ }
+ }
+ else
+ {
+ indexDirectory = repoDir.resolve( ".index" );
+ indexPath = ".index";
+ }
+
+ if ( !Files.exists( indexDirectory ) )
+ {
+ Files.createDirectories( indexDirectory );
+ }
+ return new FilesystemAsset( filesystemStorage, indexPath, indexDirectory);
+ }
+
+ /**
+  * Creates an indexing context for a remote repository, deleting and recreating the
+  * local index when its Lucene format is too old to be read.
+  *
+  * @throws IOException if the repository has no remote index feature or directory creation fails
+  */
+ private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
+ {
+ // Context keys for remote repositories are namespaced with a "remote-" prefix.
+ String contextKey = "remote-" + remoteRepository.getId( );
+
+
+ // create remote repository path
+ Path repoDir = remoteRepository.getRoot().getFilePath();
+ if ( !Files.exists( repoDir ) )
+ {
+ Files.createDirectories( repoDir );
+ }
+
+ StorageAsset indexDirectory = null;
+
+ // is there configured indexDirectory ?
+ if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
+ {
+ RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
+ indexDirectory = getIndexPath(remoteRepository);
+ String remoteIndexUrl = calculateIndexRemoteUrl( remoteRepository.getLocation( ), rif );
+ try
+ {
+
+ return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
+ }
+ catch ( IndexFormatTooOldException e )
+ {
+ // existing index with an old lucene format so we need to delete it!!!
+ // delete it first then recreate it.
+ log.warn( "the index of repository {} is too old we have to delete and recreate it", //
+ remoteRepository.getId( ) );
+ FileUtils.deleteDirectory( indexDirectory.getFilePath() );
+ return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
+
+ }
+ }
+ else
+ {
+ throw new IOException( "No remote index defined" );
+ }
+ }
+
+ /**
+  * Thin wrapper around {@code indexer.createIndexingContext} that adapts repository and
+  * storage objects to the file/URL parameters the Maven indexer expects.
+  * The flags passed are: searchable=true, reclaimIndex=false.
+  */
+ private IndexingContext getIndexingContext( Repository repository, String contextKey, Path repoDir, StorageAsset indexDirectory, String indexUrl ) throws IOException
+ {
+ return indexer.createIndexingContext( contextKey, repository.getId( ), repoDir.toFile( ), indexDirectory.getFilePath().toFile( ),
+ repository.getLocation( ) == null ? null : repository.getLocation( ).toString( ),
+ indexUrl,
+ true, false,
+ indexCreators );
+ }
+
+ /**
+  * Creates an indexing context for a managed repository, recreating the index from
+  * scratch when its Lucene format is too old. The context's searchable flag follows
+  * the repository's scanned setting.
+  *
+  * @throws IOException if the repository lacks the IndexCreationFeature
+  */
+ private IndexingContext createManagedContext( ManagedRepository repository ) throws IOException
+ {
+
+ IndexingContext context;
+ // take care first about repository location as can be relative
+ Path repositoryDirectory = repository.getRoot().getFilePath();
+
+ if ( !Files.exists( repositoryDirectory ) )
+ {
+ try
+ {
+ Files.createDirectories( repositoryDirectory );
+ }
+ catch ( IOException e )
+ {
+ // Creation failure is only logged; the indexer call below will surface any real problem.
+ log.error( "Could not create directory {}", repositoryDirectory );
+ }
+ }
+
+ StorageAsset indexDirectory = null;
+
+ if ( repository.supportsFeature( IndexCreationFeature.class ) )
+ {
+ indexDirectory = getIndexPath(repository);
+
+ String indexUrl = repositoryDirectory.toUri( ).toURL( ).toExternalForm( );
+ try
+ {
+ context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
+ context.setSearchable( repository.isScanned( ) );
+ }
+ catch ( IndexFormatTooOldException e )
+ {
+ // existing index with an old lucene format so we need to delete it!!!
+ // delete it first then recreate it.
+ log.warn( "the index of repository {} is too old we have to delete and recreate it", //
+ repository.getId( ) );
+ FileUtils.deleteDirectory( indexDirectory.getFilePath() );
+ context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
+ context.setSearchable( repository.isScanned( ) );
+ }
+ return context;
+ }
+ else
+ {
+ throw new IOException( "No repository index defined" );
+ }
+ }
+
+    /**
+     * Resolves the remote index URL against the repository base URI.
+     * Falls back to the conventional ".index" path when no index URI is configured.
+     */
+    private String calculateIndexRemoteUrl( URI baseUri, RemoteIndexFeature rif )
+    {
+        return rif.getIndexUri( ) == null
+            ? baseUri.resolve( ".index" ).toString( )
+            : baseUri.resolve( rif.getIndexUri( ) ).toString( );
+    }
+
+ private static final class DownloadListener
+ implements TransferListener
+ {
+ private Logger log = LoggerFactory.getLogger( getClass( ) );
+
+ private String resourceName;
+
+ private long startTime;
+
+ private int totalLength = 0;
+
+ @Override
+ public void transferInitiated( TransferEvent transferEvent )
+ {
+ startTime = System.currentTimeMillis( );
+ resourceName = transferEvent.getResource( ).getName( );
+ log.debug( "initiate transfer of {}", resourceName );
+ }
+
+ @Override
+ public void transferStarted( TransferEvent transferEvent )
+ {
+ this.totalLength = 0;
+ resourceName = transferEvent.getResource( ).getName( );
+ log.info( "start transfer of {}", transferEvent.getResource( ).getName( ) );
+ }
+
+ @Override
+ public void transferProgress( TransferEvent transferEvent, byte[] buffer, int length )
+ {
+ log.debug( "transfer of {} : {}/{}", transferEvent.getResource( ).getName( ), buffer.length, length );
+ this.totalLength += length;
+ }
+
+ @Override
+ public void transferCompleted( TransferEvent transferEvent )
+ {
+ resourceName = transferEvent.getResource( ).getName( );
+ long endTime = System.currentTimeMillis( );
+ log.info( "end of transfer file {} {} kb: {}s", transferEvent.getResource( ).getName( ),
+ this.totalLength / 1024, ( endTime - startTime ) / 1000 );
+ }
+
+ @Override
+ public void transferError( TransferEvent transferEvent )
+ {
+ log.info( "error of transfer file {}: {}", transferEvent.getResource( ).getName( ),
+ transferEvent.getException( ).getMessage( ), transferEvent.getException( ) );
+ }
+
+ @Override
+ public void debug( String message )
+ {
+ log.debug( "transfer debug {}", message );
+ }
+ }
+
+    /**
+     * ResourceFetcher that downloads index resources through an already-connected wagon
+     * into a temporary index directory.
+     */
+    private static class WagonResourceFetcher
+        implements ResourceFetcher
+    {
+
+        Logger log;
+
+        Path tempIndexDirectory;
+
+        Wagon wagon;
+
+        RemoteRepository remoteRepository;
+
+        private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
+                                      RemoteRepository remoteRepository )
+        {
+            this.log = log;
+            this.tempIndexDirectory = tempIndexDirectory;
+            this.wagon = wagon;
+            this.remoteRepository = remoteRepository;
+        }
+
+        @Override
+        public void connect( String id, String url )
+            throws IOException
+        {
+            // no op: the wagon is connected by the caller
+        }
+
+        @Override
+        public void disconnect( )
+            throws IOException
+        {
+            // no op: the wagon is disconnected by the caller
+        }
+
+        /**
+         * Downloads the named resource into the temp index directory and returns a stream on it.
+         *
+         * @throws FileNotFoundException if the remote resource does not exist
+         * @throws IOException on authorization or transfer failures (cause preserved)
+         */
+        @Override
+        public InputStream retrieve( String name )
+            throws IOException, FileNotFoundException
+        {
+            try
+            {
+                log.info( "index update retrieve file, name:{}", name );
+                Path file = tempIndexDirectory.resolve( name );
+                Files.deleteIfExists( file );
+                file.toFile( ).deleteOnExit( );
+                wagon.get( addParameters( name, remoteRepository ), file.toFile( ) );
+                return Files.newInputStream( file );
+            }
+            catch ( AuthorizationException | TransferFailedException e )
+            {
+                throw new IOException( e.getMessage( ), e );
+            }
+            catch ( ResourceDoesNotExistException e )
+            {
+                FileNotFoundException fnfe = new FileNotFoundException( e.getMessage( ) );
+                fnfe.initCause( e );
+                throw fnfe;
+            }
+        }
+
+        /**
+         * Appends the repository's extra parameters to the path as a query string.
+         * The first parameter is prefixed with '?', subsequent ones are joined with '&amp;'.
+         * (The previous implementation never flipped its flag and prefixed every entry
+         * with '?', producing malformed URLs for more than one parameter.)
+         */
+        protected String addParameters( String path, RemoteRepository remoteRepository )
+        {
+            Map<String, String> extraParameters = remoteRepository.getExtraParameters( );
+            if ( extraParameters.isEmpty( ) )
+            {
+                return path;
+            }
+
+            StringBuilder res = new StringBuilder( path == null ? "" : path );
+            boolean first = true;
+            for ( Map.Entry<String, String> entry : extraParameters.entrySet( ) )
+            {
+                res.append( first ? '?' : '&' )
+                    .append( entry.getKey( ) ).append( '=' ).append( entry.getValue( ) );
+                first = false;
+            }
+
+            return res.toString( );
+        }
+
+    }
+}
--- /dev/null
+package org.apache.archiva.scheduler.repository.mock;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.storage.fs.FilesystemAsset;
+import org.apache.archiva.repository.storage.fs.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.maven.index.context.IndexingContext;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
+import java.sql.Date;
+import java.time.ZonedDateTime;
+import java.util.Set;
+
+/**
+ * Maven implementation of the index context, delegating to a maven-indexer
+ * {@link IndexingContext}. Used as a mock in the repository scheduler tests.
+ */
+public class MavenIndexContextMock implements ArchivaIndexingContext {
+
+    private boolean open = true;
+
+    private IndexingContext delegate;
+    private Repository repository;
+    private FilesystemStorage filesystemStorage;
+
+    MavenIndexContextMock( Repository repository, IndexingContext delegate) {
+        this.delegate = delegate;
+        this.repository = repository;
+        try {
+            filesystemStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath().getParent(), new DefaultFileLockManager());
+        } catch (IOException e) {
+            // test mock: keep the instance constructible even if the storage cannot be created
+            e.printStackTrace();
+        }
+
+    }
+
+    @Override
+    public String getId() {
+        return delegate.getId();
+    }
+
+    @Override
+    public Repository getRepository() {
+        return repository;
+    }
+
+    @Override
+    public StorageAsset getPath() {
+        return new FilesystemAsset(filesystemStorage, delegate.getIndexDirectoryFile().toPath().getFileName().toString(), delegate.getIndexDirectoryFile().toPath());
+
+    }
+
+    @Override
+    public boolean isEmpty() throws IOException {
+        // Files.list keeps a directory handle open until the stream is closed;
+        // use try-with-resources to avoid leaking it.
+        try (java.util.stream.Stream<java.nio.file.Path> files =
+                 Files.list(delegate.getIndexDirectoryFile().toPath())) {
+            return !files.findAny().isPresent();
+        }
+    }
+
+    @Override
+    public void commit() throws IOException {
+        delegate.commit();
+    }
+
+    @Override
+    public void rollback() throws IOException {
+        delegate.rollback();
+    }
+
+    @Override
+    public void optimize() throws IOException {
+        delegate.optimize();
+    }
+
+    @Override
+    public void close(boolean deleteFiles) throws IOException {
+        this.open = false;
+        try {
+            delegate.close(deleteFiles);
+        } catch (NoSuchFileException e) {
+            // Ignore missing directory
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        this.open = false;
+        try {
+            delegate.close(false);
+        } catch (NoSuchFileException e) {
+            // Ignore missing directory
+        }
+    }
+
+    @Override
+    public boolean isOpen() {
+        return open;
+    }
+
+    @Override
+    public void purge() throws IOException {
+        delegate.purge();
+    }
+
+    @Override
+    public boolean supports(Class<?> clazz) {
+        return IndexingContext.class.equals(clazz);
+    }
+
+    @SuppressWarnings( "unchecked" )
+    @Override
+    public <T> T getBaseContext(Class<T> clazz) throws UnsupportedOperationException {
+        if (IndexingContext.class.equals(clazz)) {
+            return (T) delegate;
+        } else {
+            throw new UnsupportedOperationException("The class "+clazz+" is not supported by the maven indexer");
+        }
+    }
+
+    @Override
+    public Set<String> getGroups() throws IOException {
+        return delegate.getAllGroups();
+    }
+
+    @Override
+    public void updateTimestamp(boolean save) throws IOException {
+        delegate.updateTimestamp(save);
+    }
+
+    @Override
+    public void updateTimestamp(boolean save, ZonedDateTime time) throws IOException {
+        // Date.from is the static java.util.Date factory, reached through the java.sql.Date import
+        delegate.updateTimestamp(save, Date.from(time.toInstant()));
+    }
+
+
+}
--- /dev/null
+package org.apache.archiva.scheduler.repository.mock;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.admin.model.AuditInformation;
+import org.apache.archiva.admin.model.RepositoryAdminException;
+import org.apache.archiva.admin.model.admin.ArchivaAdministration;
+import org.apache.archiva.admin.model.beans.FileType;
+import org.apache.archiva.admin.model.beans.LegacyArtifactPath;
+import org.apache.archiva.admin.model.beans.NetworkConfiguration;
+import org.apache.archiva.admin.model.beans.OrganisationInformation;
+import org.apache.archiva.admin.model.beans.UiConfiguration;
+import org.apache.archiva.configuration.ArchivaConfiguration;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Mock {@link ArchivaAdministration} used by the repository scanning tests.
+ * Only the content consumer accessors read from the injected {@link ArchivaConfiguration};
+ * every other operation is a no-op returning {@code null}.
+ *
+ * @author Olivier Lamy
+ */
+public class MockArchivaAdministration
+    implements ArchivaAdministration
+{
+    private ArchivaConfiguration archivaConfiguration;
+
+    @Override
+    public List<LegacyArtifactPath> getLegacyArtifactPaths() throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public void addLegacyArtifactPath( LegacyArtifactPath legacyArtifactPath, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public void deleteLegacyArtifactPath( String path, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public void addFileTypePattern( String fileTypeId, String pattern, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public void removeFileTypePattern( String fileTypeId, String pattern, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public List<FileType> getFileTypes() throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public FileType getFileType( String fileTypeId ) throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public void addFileType( FileType fileType, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public void removeFileType( String fileTypeId, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public void addKnownContentConsumer( String knownContentConsumer, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public void setKnownContentConsumers( List<String> knownContentConsumers, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public List<String> getKnownContentConsumers() throws RepositoryAdminException {
+        // delegate to the injected test configuration
+        return new ArrayList<>(
+            getArchivaConfiguration().getConfiguration().getRepositoryScanning().getKnownContentConsumers() );
+    }
+
+    @Override
+    public void removeKnownContentConsumer( String knownContentConsumer, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public void addInvalidContentConsumer( String invalidContentConsumer, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public void setInvalidContentConsumers( List<String> invalidContentConsumers, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public List<String> getInvalidContentConsumers() throws RepositoryAdminException {
+        // delegate to the injected test configuration
+        return new ArrayList<>(
+            getArchivaConfiguration().getConfiguration().getRepositoryScanning().getInvalidContentConsumers() );
+    }
+
+    @Override
+    public void removeInvalidContentConsumer( String invalidContentConsumer, AuditInformation auditInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public OrganisationInformation getOrganisationInformation() throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public void setOrganisationInformation( OrganisationInformation organisationInformation ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    public ArchivaConfiguration getArchivaConfiguration() {
+        return archivaConfiguration;
+    }
+
+    public void setArchivaConfiguration( ArchivaConfiguration archivaConfiguration ) {
+        this.archivaConfiguration = archivaConfiguration;
+    }
+
+    @Override
+    public UiConfiguration getUiConfiguration() throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public void updateUiConfiguration( UiConfiguration uiConfiguration ) throws RepositoryAdminException {
+        // no-op
+    }
+
+    @Override
+    public NetworkConfiguration getNetworkConfiguration() throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public void setNetworkConfiguration( NetworkConfiguration networkConfiguration ) throws RepositoryAdminException {
+        // no-op
+    }
+}
--- /dev/null
+package org.apache.archiva.scheduler.repository.mock;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.admin.model.AuditInformation;
+import org.apache.archiva.admin.model.RepositoryAdminException;
+import org.apache.archiva.admin.model.beans.RemoteRepository;
+import org.apache.archiva.admin.model.remote.RemoteRepositoryAdmin;
+import org.apache.archiva.configuration.ArchivaConfiguration;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Mock {@link RemoteRepositoryAdmin} used by the repository scanning tests.
+ * All repository operations are no-ops returning {@code null}; only the
+ * {@link ArchivaConfiguration} property is functional.
+ *
+ * @author Olivier Lamy
+ */
+public class MockRemoteRepositoryAdmin
+    implements RemoteRepositoryAdmin
+{
+    private ArchivaConfiguration archivaConfiguration;
+
+    @Override
+    public List<RemoteRepository> getRemoteRepositories() throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public RemoteRepository getRemoteRepository( String repositoryId ) throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public Boolean deleteRemoteRepository( String repositoryId, AuditInformation auditInformation ) throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public Boolean addRemoteRepository( RemoteRepository remoteRepository, AuditInformation auditInformation ) throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public Boolean updateRemoteRepository( RemoteRepository remoteRepository, AuditInformation auditInformation ) throws RepositoryAdminException {
+        return null;
+    }
+
+    @Override
+    public Map<String, RemoteRepository> getRemoteRepositoriesAsMap() throws RepositoryAdminException {
+        return null;
+    }
+
+    public ArchivaConfiguration getArchivaConfiguration() {
+        return archivaConfiguration;
+    }
+
+    public void setArchivaConfiguration( ArchivaConfiguration archivaConfiguration ) {
+        this.archivaConfiguration = archivaConfiguration;
+    }
+
+}
<context:annotation-config/>
- <bean name="archivaAdministration#mock" class="org.apache.archiva.mock.MockArchivaAdministration">
+ <bean name="archivaAdministration#mock" class="org.apache.archiva.scheduler.repository.mock.MockArchivaAdministration">
<property name="archivaConfiguration" ref="archivaConfiguration#test-repository-scanning"/>
</bean>
- <bean name="remoteRepositoryAdmin#mock" class="org.apache.archiva.mock.MockRemoteRepositoryAdmin">
+ <bean name="remoteRepositoryAdmin#mock" class="org.apache.archiva.scheduler.repository.mock.MockRemoteRepositoryAdmin">
<property name="archivaConfiguration" ref="archivaConfiguration#test-repository-scanning"/>
</bean>