*/
public class MavenIndexContextMock implements ArchivaIndexingContext {
+ private boolean open = true;
private IndexingContext delegate;
private Repository repository;
private FilesystemStorage filesystemStorage;
@Override
public void close(boolean deleteFiles) throws IOException {
+ open = false;
try {
delegate.close(deleteFiles);
} catch (NoSuchFileException e) {
@Override
public void close() throws IOException {
+ open = false;
try {
delegate.close(false);
} catch (NoSuchFileException e) {
}
}
+ @Override
+ public boolean isOpen() {
+ return open;
+ }
+
@Override
public void purge() throws IOException {
delegate.purge();
import org.apache.archiva.admin.model.networkproxy.NetworkProxyAdmin;
import org.apache.archiva.admin.repository.AbstractRepositoryAdminTest;
import org.apache.archiva.metadata.model.facets.AuditEvent;
+import org.apache.commons.lang.StringUtils;
import org.junit.Test;
import javax.inject.Inject;
remoteRepository = remoteRepositoryAdmin.getRemoteRepository( getRemoteRepository().getId() );
- assertNull( remoteRepository.getRemoteDownloadNetworkProxyId() );
+ assertTrue(StringUtils.isEmpty(remoteRepository.getRemoteDownloadNetworkProxyId()) );
remoteRepositoryAdmin.deleteRemoteRepository( getRemoteRepository().getId(), getFakeAuditInformation() );
}
*/
void close() throws IOException;
+ /**
+ * Returns the status of this context. This method will return <code>false</code> after the {@link #close()} method
+ * has been called.
+ *
+ * @return <code>true</code>, if the <code>close()</code> method has not been called, otherwise <code>false</code>
+ */
+ boolean isOpen();
+
/**
* Removes all entries from the index. After this method finished,
* isEmpty() should return true.
*/
void close();
+ /**
+ * Returns the current status of this repository.
+ *
+ * @return <code>true</code>, if repository has not been closed, otherwise <code>false</code>
+ */
+ boolean isOpen();
+
}
final EventType type;
final LocalDateTime instant;
- public Event(EventType type, O originator) {
+ public <OO extends O> Event(EventType type, OO originator) {
this.originator = originator;
this.type = type;
this.instant = LocalDateTime.now();
--- /dev/null
+package org.apache.archiva.repository.events;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.repository.Repository;
+
+import java.net.URI;
+
+/**
+ * Event that is fired when the URI of a repository index (normal or packed) changes.
+ *
+ * @param <O> the type of the event originator
+ */
+public class IndexCreationEvent<O> extends RepositoryValueEvent<O, URI> {
+
+    /** The index event types: change of the normal index URI, or of the packed index URI. */
+    public enum Index implements EventType {
+        INDEX_URI_CHANGE, PACKED_INDEX_URI_CHANGE
+    }
+
+    IndexCreationEvent(Repository repo, O origin, URI oldValue, URI value) {
+        super(Index.INDEX_URI_CHANGE, origin, repo, oldValue, value);
+    }
+
+    IndexCreationEvent(Index type, O origin, Repository repo, URI oldValue, URI value) {
+        super(type, origin, repo, oldValue, value);
+    }
+
+    /**
+     * Creates a new event for a change of the normal index URI.
+     *
+     * @param origin the originator of the event
+     * @param repo the repository whose index URI changed
+     * @param oldValue the previous index URI
+     * @param newValue the new index URI
+     * @return the new event instance
+     */
+    public static <O> IndexCreationEvent<O> indexUriChange(O origin, Repository repo, URI oldValue, URI newValue) {
+        // Typed factory: returning IndexCreationEvent<O> instead of the raw type
+        // avoids unchecked warnings at the call sites.
+        return new IndexCreationEvent<>(Index.INDEX_URI_CHANGE, origin, repo, oldValue, newValue);
+    }
+
+    /**
+     * Creates a new event for a change of the packed index URI.
+     *
+     * @param origin the originator of the event
+     * @param repo the repository whose packed index URI changed
+     * @param oldValue the previous packed index URI
+     * @param newValue the new packed index URI
+     * @return the new event instance
+     */
+    public static <O> IndexCreationEvent<O> packedIndexUriChange(O origin, Repository repo, URI oldValue, URI newValue) {
+        return new IndexCreationEvent<>(Index.PACKED_INDEX_URI_CHANGE, origin, repo, oldValue, newValue);
+    }
+}
REGISTERED,UNREGISTERED,UPDATED
}
- public LifecycleEvent(LifecycleEventType type, O origin, Repository repository) {
+ public <OO extends O> LifecycleEvent(LifecycleEventType type, OO origin, Repository repository) {
super(type, origin, repository);
}
}
private final Repository repository;
- public RepositoryEvent(EventType type, O origin, Repository repository) {
+ public <OO extends O> RepositoryEvent(EventType type, OO origin, Repository repository) {
super(type, origin);
this.repository = repository;
}
* under the License.
*/
-public class RepositoryRegistryEvent<O> extends Event {
+public class RepositoryRegistryEvent<O> extends Event<O> {
public enum RegistryEventType implements EventType {
RELOADED,DESTROYED
}
- public RepositoryRegistryEvent(RegistryEventType type, O origin) {
+ public <OO extends O> RepositoryRegistryEvent(RegistryEventType type, OO origin) {
super(type, origin);
}
}
+++ /dev/null
-package org.apache.archiva.repository.features;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.events.EventType;
-import org.apache.archiva.repository.events.RepositoryValueEvent;
-
-import java.net.URI;
-
-public class IndexCreationEvent<O> extends RepositoryValueEvent<O, URI> {
-
- public enum Index implements EventType {
- INDEX_URI_CHANGE, PACKED_INDEX_URI_CHANGE
- }
-
- IndexCreationEvent(Repository repo, O origin, URI oldValue, URI value) {
- super(Index.INDEX_URI_CHANGE, origin, repo, oldValue, value);
- }
-
- IndexCreationEvent(Index type, O origin, Repository repo, URI oldValue, URI value) {
- super(type, origin, repo, oldValue, value);
- }
-
- public static final <O> IndexCreationEvent indexUriChange(O origin, Repository repo, URI oldValue, URI newValue) {
- return new IndexCreationEvent(Index.INDEX_URI_CHANGE, origin, repo, oldValue, newValue);
- }
-
- public static final <O> IndexCreationEvent packedIndexUriChange(O origin, Repository repo, URI oldValue, URI newValue) {
- return new IndexCreationEvent(Index.PACKED_INDEX_URI_CHANGE, origin, repo, oldValue, newValue);
- }
-}
import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.events.IndexCreationEvent;
import org.apache.archiva.repository.events.RepositoryEventListener;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang3.StringUtils;
/**
*
- * This feature provides some information about index creation.
+ * This feature provides information about index creation.
+ *
+ * Repositories that support this feature are able to create indexes and download them from remote repositories.
+ *
+ * Repositories may have a normal and a packed index. A normal index is used by repository search utilities, the packed
+ * index is for downloading purposes.
+ *
+ * An index may have a remote and a local representation. The remote representation is used for downloading and
+ * updating the local representation.
+ *
+ * The feature throws an {@link IndexCreationEvent}, if the URI of the index has been changed.
*
*/
public class IndexCreationFeature extends AbstractFeature implements RepositoryFeature<IndexCreationFeature>{
private Repository repo;
- public IndexCreationFeature(Repository repoId, RepositoryEventListener listener) {
+ public IndexCreationFeature(Repository repository, RepositoryEventListener listener) {
super(listener);
- this.repo = repoId;
+ this.repo = repository;
try {
this.indexPath = new URI(DEFAULT_INDEX_PATH);
this.packedIndexPath = new URI(DEFAULT_PACKED_INDEX_PATH);
}
/**
- * Returns the path that is used to store the index.
+ * Returns the path that is used to store the index. The path may be an absolute URI or relative to the
+ * base URI of the repository.
+ *
* @return the uri (may be relative or absolute)
*/
public URI getIndexPath( )
}
/**
- * Sets the path that is used to store the index.
+ * Sets the path that is used to store the index. The path may be either absolute or a
+ * path that is relative to the repository storage path (either a local or remote path).
+ *
* @param indexPath the uri to the index path (may be relative)
*/
public void setIndexPath( URI indexPath )
{
- URI oldVal = this.indexPath;
- this.indexPath = indexPath;
- pushEvent(IndexCreationEvent.indexUriChange(this, repo, oldVal, this.indexPath));
+ if ((this.indexPath==null && indexPath!=null) || !this.indexPath.equals(indexPath)) {
+ URI oldVal = this.indexPath;
+ this.indexPath = indexPath;
+ pushEvent(IndexCreationEvent.indexUriChange(this, repo, oldVal, this.indexPath));
+ }
}
-
+ /**
+ * Returns true, if this repository has an index defined.
+ *
+ * @return <code>true</code>, if an index path is set, otherwise <code>false</code>
+ */
public boolean hasIndex() {
return this.indexPath!=null && !StringUtils.isEmpty( this.indexPath.getPath() );
}
}
/**
- * Sets the path where the index is stored physically. This method should only be used by the
- * MavenIndexProvider implementations.
+ * Sets the path where the index is stored locally.
*
* @param localIndexPath
*/
/**
* Sets the path (relative or absolute) of the packed index.
- * @param packedIndexPath
+ *
+ * Fires an event of type {@link IndexCreationEvent.Index#PACKED_INDEX_URI_CHANGE}, if the value changes.
+ *
+ * @param packedIndexPath the new path uri for the packed index
*/
public void setPackedIndexPath(URI packedIndexPath) {
URI oldVal = this.packedIndexPath;
/**
- * Created by martin on 30.09.17.
+ *
+ * The repository feature holds information about specific features. They may not be available in all repository implementations.
+ * Features should be simple objects for storing additional data, they should not implement too much functionality.
+ * Additional functionality that uses the information in the feature objects should be implemented in the specific repository
+ * provider and repository implementations, or in the repository registry if it is generic.
+ *
+ * But features may throw events, if their data is changed.
+ *
+ *
+ * This interface is to get access to a concrete feature by accessing the generic interface.
+ *
+ * @param <T> the concrete feature implementation.
+ *
+ * @author Martin Stockhammer <martin_s@apache.org>
+ * @since 3.0
*/
public interface RepositoryFeature<T extends RepositoryFeature<T>> {
+ /**
+ * Unique Identifier of this feature. Each feature implementation has its own unique identifier.
+ *
+ * @return the identifier string which should be unique for the implementation class.
+ */
default String getId() {
return this.getClass().getName();
}
+ /**
+ * Tells, if this instance is a feature of the given identifier.
+ *
+ * @param featureId the feature identifier string to check
+ * @return <code>true</code>, if this instance is an instance with the feature id, otherwise <code>false</code>
+ */
default boolean isFeature(String featureId) {
return this.getClass().getName().equals(featureId);
}
+ /**
+ * Tells, if this instance is a feature of the given feature class.
+ *
+ * @param clazz The class to check against.
+ * @param <K> the concrete feature implementation.
+ * @return <code>true</code>, if this instance is of the given feature class, otherwise <code>false</code>
+ */
default <K extends RepositoryFeature<K>> boolean isFeature(Class<K> clazz) {
return this.getClass().equals(clazz);
}
+ /**
+ * Returns the concrete feature instance.
+ * @return the feature instance.
+ */
T get();
}
import java.util.Locale;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
/**
Logger log = LoggerFactory.getLogger(AbstractRepository.class);
+ private final AtomicBoolean openStatus = new AtomicBoolean(false);
+
+
private final RepositoryType type;
private final String id;
private Map<Locale, String> names = new HashMap<>( );
this.type = type;
this.storage = repositoryStorage;
this.location = repositoryStorage.getLocation();
+ this.openStatus.compareAndSet(false, true);
}
public AbstractRepository(Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage repositoryStorage) {
this.type = type;
this.storage = repositoryStorage;
this.location = repositoryStorage.getLocation();
+ this.openStatus.compareAndSet(false, true);
}
protected void setPrimaryLocale(Locale locale) {
@Override
public void setIndexingContext(ArchivaIndexingContext context) {
+ if (this.indexingContext!=null) {
+
+ }
this.indexingContext = context;
}
@Override
public void close() {
- ArchivaIndexingContext ctx = getIndexingContext();
- if (ctx!=null) {
- try {
- ctx.close();
- } catch (IOException e) {
- log.warn("Error during index context close.",e);
+ if (this.openStatus.compareAndSet(true, false)) {
+ ArchivaIndexingContext ctx = getIndexingContext();
+ if (ctx != null) {
+ try {
+ ctx.close();
+ } catch (IOException e) {
+ log.warn("Error during index context close.", e);
+ }
+ this.indexingContext = null;
+
}
- }
- if (supportsFeature(StagingRepositoryFeature.class)) {
- StagingRepositoryFeature sf = getFeature(StagingRepositoryFeature.class).get();
- if (sf.getStagingRepository()!=null) {
- sf.getStagingRepository().close();
+ if (supportsFeature(StagingRepositoryFeature.class)) {
+ StagingRepositoryFeature sf = getFeature(StagingRepositoryFeature.class).get();
+ if (sf.getStagingRepository() != null) {
+ sf.getStagingRepository().close();
+ }
}
+ clearListeners();
}
- clearListeners();
+ }
+
+ @Override
+ public boolean isOpen() {
+ return openStatus.get();
}
@Override
private void callListeners(Event event, List<RepositoryEventListener> evtListeners) {
for(RepositoryEventListener listener : evtListeners) {
try {
- listener.raise(event);
+ listener.raise(event.recreate(this));
} catch (Throwable e) {
log.error("Could not raise event {} on listener {}: {}", event, listener, e.getMessage());
}
import org.apache.archiva.indexer.*;
import org.apache.archiva.redback.components.registry.RegistryException;
import org.apache.archiva.repository.events.*;
-import org.apache.archiva.repository.features.IndexCreationEvent;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.StagingRepositoryFeature;
import org.apache.commons.lang3.StringUtils;
* configuration save fails the changes are rolled back.
* <p>
* TODO: Audit events
+ *
+ * @since 3.0
*/
@Service("repositoryRegistry")
public class RepositoryRegistry implements ConfigurationListener, RepositoryEventHandler, RepositoryEventListener {
rwLock.writeLock().lock();
try {
log.debug("Initializing repository registry");
- for (ManagedRepository rep : managedRepositories.values()) {
- rep.close();
- }
- managedRepositories.clear();
updateManagedRepositoriesFromConfig();
- for (RemoteRepository repo : remoteRepositories.values()) {
- repo.close();
- }
- remoteRepositories.clear();
updateRemoteRepositoriesFromConfig();
repositoryGroups.clear();
} finally {
rwLock.writeLock().unlock();
}
- pushEvent(new RepositoryRegistryEvent(RepositoryRegistryEvent.RegistryEventType.RELOADED, this));
+ pushEvent(new RepositoryRegistryEvent<>(RepositoryRegistryEvent.RegistryEventType.RELOADED, this));
}
@PreDestroy
repo.close();
}
remoteRepositories.clear();
- pushEvent(new RepositoryRegistryEvent(RepositoryRegistryEvent.RegistryEventType.DESTROYED, this));
+ pushEvent(new RepositoryRegistryEvent<>(RepositoryRegistryEvent.RegistryEventType.DESTROYED, this));
}
return repositoryProviders.stream().filter(repositoryProvider -> repositoryProvider.provides().contains(type)).findFirst().orElseThrow(() -> new RepositoryException("Repository type cannot be handled: " + type));
}
+ /*
+ * Updates the repositories
+ */
private void updateManagedRepositoriesFromConfig() {
try {
+
+ Set<String> configRepoIds = new HashSet<>();
List<ManagedRepositoryConfiguration> managedRepoConfigs =
getArchivaConfiguration().getConfiguration().getManagedRepositories();
return;
}
- Map<RepositoryType, RepositoryProvider> providerMap = createProviderMap();
for (ManagedRepositoryConfiguration repoConfig : managedRepoConfigs) {
- if (managedRepositories.containsKey(repoConfig.getId())) {
- log.warn("Duplicate repository definitions for {} in config found.", repoConfig.getId());
- continue;
- }
- RepositoryType repositoryType = RepositoryType.valueOf(repoConfig.getType());
- if (providerMap.containsKey(repositoryType)) {
- try {
- ManagedRepository repo = createNewManagedRepository(providerMap.get(repositoryType), repoConfig);
- managedRepositories.put(repo.getId(), repo);
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.REGISTERED, this, repo));
- } catch (Exception e) {
- log.error("Could not create managed repository {}: {}", repoConfig.getId(), e.getMessage(), e);
+ ManagedRepository repo = putRepository(repoConfig, null);
+ configRepoIds.add(repoConfig.getId());
+ if (repo.supportsFeature(StagingRepositoryFeature.class)) {
+ StagingRepositoryFeature stagF = repo.getFeature(StagingRepositoryFeature.class).get();
+ if (stagF.getStagingRepository() != null) {
+ configRepoIds.add(stagF.getStagingRepository().getId());
}
}
}
- return;
+ List<String> toRemove = managedRepositories.keySet().stream().filter(id -> !configRepoIds.contains(id)).collect(Collectors.toList());
+ for (String id : toRemove) {
+ ManagedRepository removed = managedRepositories.remove(id);
+ removed.close();
+ }
} catch (Throwable e) {
log.error("Could not initialize repositories from config: {}", e.getMessage(), e);
- //noinspection unchecked
return;
}
}
StagingRepositoryFeature feature = repo.getFeature(StagingRepositoryFeature.class).get();
if (feature.isStageRepoNeeded() && feature.getStagingRepository() == null) {
ManagedRepository stageRepo = getManagedRepository(getStagingId(repo.getId()));
- if (stageRepo==null) {
+ if (stageRepo == null) {
stageRepo = getStagingRepository(provider, cfg, configuration);
managedRepositories.put(stageRepo.getId(), stageRepo);
if (configuration != null) {
replaceOrAddRepositoryConfig(provider.getManagedConfiguration(stageRepo), configuration);
}
+ pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.REGISTERED, this, stageRepo));
}
feature.setStagingRepository(stageRepo);
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.REGISTERED, this, stageRepo));
}
}
if (repo instanceof EditableManagedRepository) {
editableRepo.getContent().setRepository(editableRepo);
}
log.debug("Index repo: " + repo.hasIndex());
- if (repo.hasIndex() && repo.getIndexingContext() == null) {
+ if (repo.hasIndex() && ( repo.getIndexingContext() == null || !repo.getIndexingContext().isOpen() )) {
log.debug("Creating indexing context for {}", repo.getId());
createIndexingContext(editableRepo);
}
}
-
+ repo.register(this);
}
public ArchivaIndexManager getIndexManager(RepositoryType type) {
getArchivaConfiguration().getConfiguration().getRemoteRepositories();
if (remoteRepoConfigs == null) {
- //noinspection unchecked
return;
}
-
- Map<RepositoryType, RepositoryProvider> providerMap = createProviderMap();
+ Set<String> repoIds = new HashSet<>();
for (RemoteRepositoryConfiguration repoConfig : remoteRepoConfigs) {
- RepositoryType repositoryType = RepositoryType.valueOf(repoConfig.getType());
- if (providerMap.containsKey(repositoryType)) {
- RepositoryProvider provider = getProvider(repositoryType);
- try {
+ putRepository(repoConfig, null);
+ repoIds.add(repoConfig.getId());
+ }
- RemoteRepository remoteRepository = createNewRemoteRepository(provider, repoConfig);
- remoteRepositories.put(repoConfig.getId(), remoteRepository);
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.REGISTERED, this, remoteRepository));
- } catch (Exception e) {
- log.error("Could not create repository {} from config: {}", repoConfig.getId(), e.getMessage(), e);
- }
- }
+ List<String> toRemove = remoteRepositories.keySet().stream().filter(id -> !repoIds.contains(id)).collect(Collectors.toList());
+ for (String id : toRemove) {
+ RemoteRepository removed = remoteRepositories.remove(id);
+ removed.close();
}
- return;
} catch (Throwable e) {
log.error("Could not initialize remote repositories from config: {}", e.getMessage(), e);
- //noinspection unchecked
return;
}
}
private RemoteRepository createNewRemoteRepository(RepositoryProvider provider, RemoteRepositoryConfiguration cfg) throws RepositoryException {
log.debug("Creating remote repo {}", cfg.getId());
RemoteRepository repo = provider.createRemoteInstance(cfg);
- repo.register(this);
updateRepositoryReferences(provider, repo, cfg, null);
return repo;
}
- @SuppressWarnings("unchecked")
private void updateRepositoryReferences(RepositoryProvider provider, RemoteRepository repo, RemoteRepositoryConfiguration cfg, Configuration configuration) throws RepositoryException {
if (repo instanceof EditableRemoteRepository && repo.getContent() == null) {
EditableRemoteRepository editableRepo = (EditableRemoteRepository) repo;
createIndexingContext(editableRepo);
}
}
+ repo.register(this);
}
private Map<String, RepositoryGroup> getRepositorGroupsFromConfig() {
return repositoryGroupMap;
} catch (Throwable e) {
log.error("Could not initialize repositories from config: {}", e.getMessage(), e);
- //noinspection unchecked
return Collections.emptyMap();
}
}
- RepositoryGroup createNewRepositoryGroup(RepositoryProvider provider, RepositoryGroupConfiguration config) throws RepositoryException {
+ private RepositoryGroup createNewRepositoryGroup(RepositoryProvider provider, RepositoryGroupConfiguration config) throws RepositoryException {
RepositoryGroup repositoryGroup = provider.createRepositoryGroup(config);
repositoryGroup.register(this);
updateRepositoryReferences(provider, repositoryGroup, config);
if (remoteRepositories.containsKey(id)) {
throw new RepositoryException("There exists a remote repository with id " + id + ". Could not update with managed repository.");
}
-
ManagedRepository originRepo = managedRepositories.put(id, managedRepository);
try {
- if (originRepo != null) {
+ if (originRepo != null && originRepo != managedRepository) {
originRepo.close();
}
RepositoryProvider provider = getProvider(managedRepository.getType());
}
configuration.addManagedRepository(newCfg);
saveConfiguration(configuration);
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.REGISTERED, this, managedRepository));
+ if (originRepo != managedRepository) {
+ pushEvent(new LifecycleEvent<>(LifecycleEvent.LifecycleEventType.REGISTERED, this, managedRepository));
+ } else {
+ pushEvent(new LifecycleEvent<>(LifecycleEvent.LifecycleEventType.UPDATED, this, managedRepository));
+ }
return managedRepository;
} catch (Exception e) {
- // Rollback
+ // Rollback only partly, because repository is closed already
if (originRepo != null) {
managedRepositories.put(id, originRepo);
} else {
* Adds a new repository or updates the repository with the same id. The given configuration object is updated, but
* the configuration is not saved.
*
- * @param managedRepositoryConfiguration the new or changed repository configuration
- * @param configuration the configuration object
+ * @param managedRepositoryConfiguration the new or changed managed repository configuration
+ * @param configuration the configuration object (may be <code>null</code>)
* @return the new or updated repository
* @throws RepositoryException if the configuration cannot be saved or updated
*/
- @SuppressWarnings("unchecked")
public ManagedRepository putRepository(ManagedRepositoryConfiguration managedRepositoryConfiguration, Configuration configuration) throws RepositoryException {
rwLock.writeLock().lock();
try {
final String id = managedRepositoryConfiguration.getId();
final RepositoryType repoType = RepositoryType.valueOf(managedRepositoryConfiguration.getType());
ManagedRepository repo;
- if (managedRepositories.containsKey(id)) {
- repo = managedRepositories.get(id);
+ boolean registeredNew = false;
+ repo = managedRepositories.get(id);
+ if (repo != null && repo.isOpen()) {
if (repo instanceof EditableManagedRepository) {
getProvider(repoType).updateManagedInstance((EditableManagedRepository) repo, managedRepositoryConfiguration);
} else {
}
} else {
repo = getProvider(repoType).createManagedInstance(managedRepositoryConfiguration);
- repo.register(this);
managedRepositories.put(id, repo);
+ registeredNew = true;
}
updateRepositoryReferences(getProvider(repoType), repo, managedRepositoryConfiguration, configuration);
replaceOrAddRepositoryConfig(managedRepositoryConfiguration, configuration);
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.REGISTERED, this, repo));
+ if (registeredNew) {
+ pushEvent(new LifecycleEvent<>(LifecycleEvent.LifecycleEventType.REGISTERED, this, repo));
+ } else {
+ pushEvent(new LifecycleEvent<>(LifecycleEvent.LifecycleEventType.UPDATED, this, repo));
+ }
return repo;
} finally {
rwLock.writeLock().unlock();
rwLock.writeLock().lock();
try {
final String id = repositoryGroup.getId();
- RepositoryGroup originRepo = repositoryGroups.put(id, repositoryGroup);
+ RepositoryGroup originRepoGroup = repositoryGroups.put(id, repositoryGroup);
try {
- if (originRepo != null) {
- originRepo.close();
+ if (originRepoGroup != null && originRepoGroup != repositoryGroup) {
+ originRepoGroup.close();
}
RepositoryProvider provider = getProvider(repositoryGroup.getType());
RepositoryGroupConfiguration newCfg = provider.getRepositoryGroupConfiguration(repositoryGroup);
return repositoryGroup;
} catch (Exception e) {
// Rollback
- if (originRepo != null) {
- repositoryGroups.put(id, originRepo);
+ if (originRepoGroup != null) {
+ repositoryGroups.put(id, originRepoGroup);
} else {
repositoryGroups.remove(id);
}
* Adds a new repository group or updates the repository group with the same id. The given configuration object is updated, but
* the configuration is not saved.
*
- * @param repositoryGroupConfiguration the new or changed repository configuration
- * @param configuration the configuration object
- * @return the new or updated repository
+ * @param repositoryGroupConfiguration The configuration of the new or changed repository group.
+ * @param configuration The configuration object. If it is <code>null</code>, the configuration is not saved.
+ * @return The new or updated repository group
* @throws RepositoryException if the configuration cannot be saved or updated
*/
- @SuppressWarnings("unchecked")
public RepositoryGroup putRepositoryGroup(RepositoryGroupConfiguration repositoryGroupConfiguration, Configuration configuration) throws RepositoryException {
rwLock.writeLock().lock();
try {
}
} else {
repo = getProvider(repoType).createRepositoryGroup(repositoryGroupConfiguration);
- repo.register(this);
repositoryGroups.put(id, repo);
}
updateRepositoryReferences(getProvider(repoType), repo, repositoryGroupConfiguration);
}
private void replaceOrAddRepositoryConfig(ManagedRepositoryConfiguration managedRepositoryConfiguration, Configuration configuration) {
- ManagedRepositoryConfiguration oldCfg = configuration.findManagedRepositoryById(managedRepositoryConfiguration.getId());
- if (oldCfg != null) {
- configuration.removeManagedRepository(oldCfg);
+ if (configuration != null) {
+ ManagedRepositoryConfiguration oldCfg = configuration.findManagedRepositoryById(managedRepositoryConfiguration.getId());
+ if (oldCfg != null) {
+ configuration.removeManagedRepository(oldCfg);
+ }
+ configuration.addManagedRepository(managedRepositoryConfiguration);
}
- configuration.addManagedRepository(managedRepositoryConfiguration);
}
private void replaceOrAddRepositoryConfig(RemoteRepositoryConfiguration remoteRepositoryConfiguration, Configuration configuration) {
- RemoteRepositoryConfiguration oldCfg = configuration.findRemoteRepositoryById(remoteRepositoryConfiguration.getId());
- if (oldCfg != null) {
- configuration.removeRemoteRepository(oldCfg);
+ if (configuration != null) {
+ RemoteRepositoryConfiguration oldCfg = configuration.findRemoteRepositoryById(remoteRepositoryConfiguration.getId());
+ if (oldCfg != null) {
+ configuration.removeRemoteRepository(oldCfg);
+ }
+ configuration.addRemoteRepository(remoteRepositoryConfiguration);
}
- configuration.addRemoteRepository(remoteRepositoryConfiguration);
}
private void replaceOrAddRepositoryConfig(RepositoryGroupConfiguration repositoryGroupConfiguration, Configuration configuration) {
RemoteRepositoryConfiguration oldCfg = null;
RemoteRepositoryConfiguration newCfg;
try {
- if (originRepo != null) {
+ if (originRepo != null && originRepo != remoteRepository) {
originRepo.close();
}
final RepositoryProvider provider = getProvider(remoteRepository.getType());
configuration.removeRemoteRepository(oldCfg);
}
configuration.addRemoteRepository(newCfg);
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.REGISTERED, this, remoteRepository));
+ if (remoteRepository != originRepo) {
+ pushEvent(new LifecycleEvent<>(LifecycleEvent.LifecycleEventType.REGISTERED, this, remoteRepository));
+ } else {
+ pushEvent(new LifecycleEvent<>(LifecycleEvent.LifecycleEventType.UPDATED, this, remoteRepository));
+ }
return remoteRepository;
} catch (Exception e) {
// Rollback
final String id = remoteRepositoryConfiguration.getId();
final RepositoryType repoType = RepositoryType.valueOf(remoteRepositoryConfiguration.getType());
RemoteRepository repo;
- if (remoteRepositories.containsKey(id)) {
- repo = remoteRepositories.get(id);
+ boolean registeredNew = false;
+ repo = remoteRepositories.get(id);
+ if (repo != null && repo.isOpen()) {
if (repo instanceof EditableRemoteRepository) {
getProvider(repoType).updateRemoteInstance((EditableRemoteRepository) repo, remoteRepositoryConfiguration);
} else {
}
} else {
repo = getProvider(repoType).createRemoteInstance(remoteRepositoryConfiguration);
- repo.register(this);
remoteRepositories.put(id, repo);
+ registeredNew = true;
}
updateRepositoryReferences(getProvider(repoType), repo, remoteRepositoryConfiguration, configuration);
replaceOrAddRepositoryConfig(remoteRepositoryConfiguration, configuration);
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.REGISTERED, this, repo));
+ if (registeredNew) {
+ pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.REGISTERED, this, repo));
+ } else {
+ pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.UPDATED, this, repo));
+ }
return repo;
} finally {
rwLock.writeLock().unlock();
}
}
- @SuppressWarnings("unchecked")
public void removeRepository(Repository repo) throws RepositoryException {
if (repo == null) {
log.warn("Trying to remove null repository");
* @throws RepositoryException if a error occurs during configuration save
*/
public void removeRepository(ManagedRepository managedRepository) throws RepositoryException {
+ if (managedRepository == null) {
+ return;
+ }
final String id = managedRepository.getId();
ManagedRepository repo = getManagedRepository(id);
if (repo != null) {
}
saveConfiguration(configuration);
}
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.UNREGISTERED, this, repo));
+ pushEvent(new LifecycleEvent<>(LifecycleEvent.LifecycleEventType.UNREGISTERED, this, repo));
} catch (RegistryException | IndeterminateConfigurationException e) {
// Rollback
log.error("Could not save config after repository removal: {}", e.getMessage(), e);
}
public void removeRepository(ManagedRepository managedRepository, Configuration configuration) throws RepositoryException {
+ if (managedRepository == null) {
+ return;
+ }
final String id = managedRepository.getId();
ManagedRepository repo = getManagedRepository(id);
if (repo != null) {
configuration.removeManagedRepository(cfg);
}
}
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.UNREGISTERED, this, repo));
+ pushEvent(new LifecycleEvent<>(LifecycleEvent.LifecycleEventType.UNREGISTERED, this, repo));
} finally {
rwLock.writeLock().unlock();
}
 * @throws RepositoryException if an error occurs during configuration save
*/
public void removeRepositoryGroup(RepositoryGroup repositoryGroup) throws RepositoryException {
+ if (repositoryGroup == null) {
+ return;
+ }
final String id = repositoryGroup.getId();
RepositoryGroup repo = getRepositoryGroup(id);
if (repo != null) {
}
public void removeRepositoryGroup(RepositoryGroup repositoryGroup, Configuration configuration) throws RepositoryException {
+ if (repositoryGroup == null) {
+ return;
+ }
final String id = repositoryGroup.getId();
RepositoryGroup repo = getRepositoryGroup(id);
if (repo != null) {
 * @throws RepositoryException if an error occurs during configuration save
*/
public void removeRepository(RemoteRepository remoteRepository) throws RepositoryException {
-
+ if (remoteRepository == null) {
+ return;
+ }
final String id = remoteRepository.getId();
RemoteRepository repo = getRemoteRepository(id);
if (repo != null) {
doRemoveRepo(repo, configuration);
saveConfiguration(configuration);
}
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.UNREGISTERED, this, repo));
+ pushEvent(new LifecycleEvent<>(LifecycleEvent.LifecycleEventType.UNREGISTERED, this, repo));
} catch (RegistryException | IndeterminateConfigurationException e) {
// Rollback
log.error("Could not save config after repository removal: {}", e.getMessage(), e);
}
public void removeRepository(RemoteRepository remoteRepository, Configuration configuration) throws RepositoryException {
+ if (remoteRepository == null) {
+ return;
+ }
final String id = remoteRepository.getId();
RemoteRepository repo = getRemoteRepository(id);
if (repo != null) {
if (repo != null) {
doRemoveRepo(repo, configuration);
}
- pushEvent(new LifecycleEvent(LifecycleEvent.LifecycleEventType.UNREGISTERED, this, repo));
+ pushEvent(new LifecycleEvent<>(LifecycleEvent.LifecycleEventType.UNREGISTERED, this, repo));
} finally {
rwLock.writeLock().unlock();
}
/**
* Resets the indexing context of a given repository.
*
- * @param repo
- * @throws IndexUpdateFailedException
+ * @param repository The repository
+ * @throws IndexUpdateFailedException If the index could not be reset.
*/
- @SuppressWarnings("unchecked")
- public void resetIndexingContext(Repository repo) throws IndexUpdateFailedException {
- if (repo.hasIndex() && repo instanceof EditableRepository) {
- EditableRepository eRepo = (EditableRepository) repo;
- ArchivaIndexingContext newCtx = getIndexManager(repo.getType()).reset(repo.getIndexingContext());
+ public void resetIndexingContext(Repository repository) throws IndexUpdateFailedException {
+ if (repository.hasIndex() && repository instanceof EditableRepository) {
+ EditableRepository eRepo = (EditableRepository) repository;
+ ArchivaIndexingContext newCtx = getIndexManager(repository.getType()).reset(repository.getIndexingContext());
eRepo.setIndexingContext(newCtx);
}
}
return cloned;
}
- @SuppressWarnings("unchecked")
public <T extends Repository> Repository clone(T repo, String newId) throws RepositoryException {
if (repo instanceof RemoteRepository) {
return this.clone((RemoteRepository) repo, newId);
return;
}
if (event instanceof IndexCreationEvent) {
- IndexCreationEvent idxEvent = (IndexCreationEvent) event;
- if (managedRepositories.containsKey(idxEvent.getRepository().getId()) ||
- remoteRepositories.containsKey(idxEvent.getRepository().getId())) {
- EditableRepository repo = (EditableRepository) idxEvent.getRepository();
- if (repo != null && repo.getIndexingContext() != null) {
- try {
- ArchivaIndexManager idxmgr = getIndexManager(repo.getType());
- if (idxmgr != null) {
- ArchivaIndexingContext newCtx = idxmgr.move(repo.getIndexingContext(), repo);
- repo.setIndexingContext(newCtx);
- idxmgr.updateLocalIndexPath(repo);
- }
-
- } catch (IndexCreationFailedException e) {
- log.error("Could not move index to new directory {}", e.getMessage(), e);
+ handleIndexCreationEvent((IndexCreationEvent) event);
+ }
+ // We propagate all events to our listeners, but with the context of the repository registry
+ pushEvent(event.recreate(this));
+ }
+
+ private void handleIndexCreationEvent(IndexCreationEvent event) {
+ IndexCreationEvent idxEvent = event;
+ if (managedRepositories.containsKey(idxEvent.getRepository().getId()) ||
+ remoteRepositories.containsKey(idxEvent.getRepository().getId())) {
+ EditableRepository repo = (EditableRepository) idxEvent.getRepository();
+ if (repo != null && repo.getIndexingContext() != null) {
+ try {
+ ArchivaIndexManager idxmgr = getIndexManager(repo.getType());
+ if (idxmgr != null) {
+ ArchivaIndexingContext newCtx = idxmgr.move(repo.getIndexingContext(), repo);
+ repo.setIndexingContext(newCtx);
+ idxmgr.updateLocalIndexPath(repo);
}
+
+ } catch (IndexCreationFailedException e) {
+ log.error("Could not move index to new directory {}", e.getMessage(), e);
}
}
}
- // We propagate all events to our listeners
- pushEvent(event.recreate(this));
}
private boolean sameOriginator(Event event) {
- if (event.getOriginator()==this) {
+ if (event.getOriginator() == this) {
return true;
} else if (event.hasPreviousEvent()) {
return sameOriginator(event.getPreviousEvent());
import org.junit.runners.model.InitializationError;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
import java.util.List;
/**
extends SpringJUnit4ClassRunner
{
+ static {
+
+ if (System.getProperty("archiva.user.configFileName")!=null && !"".equals(System.getProperty("archiva.user.configFileName").trim())) {
+ try {
+ Path file = Files.createTempFile("archiva-test-conf", ".xml");
+ System.setProperty("archiva.user.configFileName", file.toAbsolutePath().toString());
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
public ArchivaSpringJUnit4ClassRunner( Class<?> clazz )
throws InitializationError
{
import java.sql.Date;
import java.time.ZonedDateTime;
import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
/**
* Maven implementation of index context
private static final Logger log = LoggerFactory.getLogger(ArchivaIndexingContext.class);
+
+ private AtomicBoolean openStatus = new AtomicBoolean(false);
private IndexingContext delegate;
private Repository repository;
private StorageAsset dir = null;
protected MavenIndexContext(Repository repository, IndexingContext delegate) {
this.delegate = delegate;
this.repository = repository;
+ this.openStatus.set(true);
}
@Override
public void close(boolean deleteFiles) throws IOException {
- try {
- delegate.close(deleteFiles);
- } catch (NoSuchFileException e) {
- // Ignore missing directory
+ if (openStatus.compareAndSet(true,false)) {
+ try {
+ delegate.close(deleteFiles);
+ } catch (NoSuchFileException e) {
+ // Ignore missing directory
+ }
}
}
@Override
public void close() throws IOException {
- try {
- delegate.close(false);
- } catch (NoSuchFileException e) {
- // Ignore missing directory
+ if (openStatus.compareAndSet(true,false)) {
+ try {
+ delegate.close(false);
+ } catch (NoSuchFileException e) {
+ // Ignore missing directory
+ }
}
}
+ @Override
+ public boolean isOpen() {
+ return openStatus.get();
+ }
+
@Override
public void purge() throws IOException {
delegate.purge();
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ConfigurationListener;
import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.indexer.search.SearchResultHit;
import org.apache.archiva.indexer.search.SearchResults;
import org.apache.archiva.repository.Repository;
EasyMock.expectLastCall().anyTimes();
archivaConfigControl.replay();
repositoryRegistry.reload();
- archivaConfigControl.reset();
+
}
@After
IndexCreationFeature icf = rRepo.getFeature(IndexCreationFeature.class).get();
- IndexingContext context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
+ ArchivaIndexingContext archivaCtx = rRepo.getIndexingContext();
+ IndexingContext context = archivaCtx.getBaseContext(IndexingContext.class);
- if ( context != null )
+ if ( archivaCtx != null )
{
- context.close(true);
+ archivaCtx.close(true);
}
Path repoDir = Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir()).resolve("target").resolve("repos").resolve(repository);
{
Files.delete(lockFile);
}
-
assertFalse( Files.exists(lockFile) );
-
- Path repo = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + repository );
- assertTrue( Files.exists(repo) );
- org.apache.commons.io.FileUtils.copyDirectory(repo.toFile(), repoDir.toFile());
-
if (indexDir==null) {
Path indexDirectory =
Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/index/test-" + Long.toString(System.currentTimeMillis()));
icf.setIndexPath(indexDir.toUri());
}
- context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
+ Path repo = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + repository );
+ assertTrue( Files.exists(repo) );
+ org.apache.commons.io.FileUtils.copyDirectory(repo.toFile(), repoDir.toFile());
+
+
+
+
+ archivaConfigControl.reset();
+ archivaConfig.addListener( EasyMock.anyObject( ConfigurationListener.class ) );
+ EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
+ archivaConfig.save(EasyMock.anyObject(Configuration.class));
+ EasyMock.expectLastCall().anyTimes();
+ archivaConfigControl.replay();
+ repositoryRegistry.reload();
+ archivaConfigControl.reset();
+
+ rRepo = repositoryRegistry.getRepository(repository);
+ icf = rRepo.getFeature(IndexCreationFeature.class).get();
+
+
+ archivaCtx = rRepo.getIndexingContext();
+ context = archivaCtx.getBaseContext(IndexingContext.class);
// minimize datas in memory
List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
// search artifactId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
archivaConfigControl.replay();
List<String> selectedRepos = new ArrayList<>();
selectedRepos.add( TEST_REPO_1 );
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
- archivaConfigControl.replay();
+ // archivaConfigControl.replay();
SearchResults results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
assertNotNull( results );
List<String> selectedRepos = new ArrayList<>();
selectedRepos.add( "non-existing-repo" );
- archivaConfigControl.replay();
+ // archivaConfigControl.replay();
SearchResults results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
assertNotNull( results );
try
{
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
- archivaConfigControl.replay();
+ // archivaConfigControl.replay();
search.search( "user", searchFields, null );
try
{
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
- archivaConfigControl.replay();
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+ // archivaConfigControl.replay();
search.search( "user", searchFields, null );
searchFields.setGroupId( "org.apache.archiva" );
searchFields.setRepositories( selectedRepos );
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ // EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
- archivaConfigControl.replay();
+ // archivaConfigControl.replay();
SearchResults results = search.search( "user", searchFields, null );
SearchResultLimits limits = new SearchResultLimits( SearchResultLimits.ALL_PAGES );
limits.setPageSize( 300 );
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ // EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
archivaConfigControl.replay();
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.utils.PathUtil;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.content.maven2.MavenRepositoryRequestInfo;
FilesystemStorage storage = new FilesystemStorage(basePath.resolve(id), lockManager);
return new MavenManagedRepository(id, name, storage);
}
+
+ @Override
+ public void setIndexingContext(ArchivaIndexingContext context) {
+ super.setIndexingContext(context);
+ }
+
}
import org.apache.archiva.configuration.*;
import org.apache.archiva.repository.*;
import org.apache.archiva.repository.events.Event;
-import org.apache.archiva.repository.events.RepositoryValueEvent;
-import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.archiva.repository.features.StagingRepositoryFeature;
+import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
repo.setScanned(cfg.isScanned());
if (cfg.isReleases()) {
repo.addActiveReleaseScheme(ReleaseScheme.RELEASE);
+ } else {
+ repo.removeActiveReleaseScheme(ReleaseScheme.RELEASE);
}
if (cfg.isSnapshots()) {
repo.addActiveReleaseScheme(ReleaseScheme.SNAPSHOT);
+ } else {
+ repo.removeActiveReleaseScheme(ReleaseScheme.SNAPSHOT);
}
StagingRepositoryFeature stagingRepositoryFeature = repo.getFeature(StagingRepositoryFeature.class).get();
cfg.setPackedIndexDir(convertUriToPath(indexCreationFeature.getPackedIndexPath()));
RemoteIndexFeature remoteIndexFeature = remoteRepository.getFeature(RemoteIndexFeature.class).get();
- if (remoteIndexFeature.getIndexUri()!=null) {
+ if (remoteIndexFeature.getIndexUri() != null) {
cfg.setRemoteIndexUrl(remoteIndexFeature.getIndexUri().toString());
}
cfg.setRemoteDownloadTimeout((int) remoteIndexFeature.getDownloadTimeout().get(ChronoUnit.SECONDS));
cfg.setDownloadRemoteIndexOnStartup(remoteIndexFeature.isDownloadRemoteIndexOnStartup());
cfg.setDownloadRemoteIndex(remoteIndexFeature.isDownloadRemoteIndex());
cfg.setRemoteDownloadNetworkProxyId(remoteIndexFeature.getProxyId());
+ if (!StringUtils.isEmpty(remoteIndexFeature.getProxyId())) {
+ cfg.setRemoteDownloadNetworkProxyId(remoteIndexFeature.getProxyId());
+ } else {
+ cfg.setRemoteDownloadNetworkProxyId("");
+ }
+
+
return cfg;
import org.apache.archiva.metadata.repository.storage.RepositoryStorageRuntimeException;
import org.apache.archiva.proxy.maven.WagonFactory;
import org.apache.archiva.proxy.maven.WagonFactoryRequest;
+import org.apache.archiva.repository.ReleaseScheme;
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils;
testRepo.setSnapshots( false );
configuration.save( c );
repositoryRegistry.reload();
+ assertFalse(repositoryRegistry.getManagedRepository(testRepo.getId()).getActiveReleaseSchemes().contains(ReleaseScheme.SNAPSHOT));
assertFalse( c.getManagedRepositories().get( 0 ).isSnapshots() );
copyTestArtifactWithParent( "target/test-classes/com/example/test/test-snapshot-artifact-module-a",
"target/test-repository/com/example/test/test-snapshot-artifact-module-a" );
*/
public class MavenIndexContextMock implements ArchivaIndexingContext {
+ private boolean open = true;
private IndexingContext delegate;
private Repository repository;
private FilesystemStorage indexStorage;
@Override
public void close(boolean deleteFiles) throws IOException {
+ open = false;
try {
delegate.close(deleteFiles);
} catch (NoSuchFileException e) {
@Override
public void close() throws IOException {
+ open = false;
try {
delegate.close(false);
} catch (NoSuchFileException e) {
}
}
+ @Override
+ public boolean isOpen() {
+ return open;
+ }
+
@Override
public void purge() throws IOException {
delegate.purge();
*/
public class MavenIndexContextMock implements ArchivaIndexingContext {
+ private boolean open = true;
+
private IndexingContext delegate;
private Repository repository;
private FilesystemStorage filesystemStorage;
@Override
public void close(boolean deleteFiles) throws IOException {
+ this.open = false;
try {
delegate.close(deleteFiles);
} catch (NoSuchFileException e) {
@Override
public void close() throws IOException {
+ this.open = false;
try {
delegate.close(false);
} catch (NoSuchFileException e) {
}
}
+ @Override
+ public boolean isOpen() {
+ return open;
+ }
+
@Override
public void purge() throws IOException {
delegate.purge();
<openjpa.Log>${openjpa.Log}</openjpa.Log>
<org.apache.jackrabbit.core.state.validatehierarchy>true</org.apache.jackrabbit.core.state.validatehierarchy>
</systemPropertyVariables>
+ <trimStackTrace>false</trimStackTrace>
</configuration>
</plugin>
</plugins>
ArchivaIndexingContext ctx = repositoryRegistry.getManagedRepository( REPOID_INTERNAL ).getIndexingContext( );
try
{
- repositoryRegistry.getIndexManager( RepositoryType.MAVEN ).pack( ctx );
+ if (repositoryRegistry.getIndexManager(RepositoryType.MAVEN)!=null) {
+ repositoryRegistry.getIndexManager(RepositoryType.MAVEN).pack(ctx);
+ }
} finally
{
ctx.close( );
repo.setLocation( location.toAbsolutePath().toString() );
repo.setBlockRedeployments( blockRedeployments );
repo.setType( "MAVEN" );
+ repo.setIndexDir(".indexer");
+ repo.setPackedIndexDir(".index");
return repo;
}
throws Exception
{
repositoryRegistry.setArchivaConfiguration(archivaConfiguration);
- repositoryRegistry.reload();
+ // repositoryRegistry.reload();
archivaConfiguration.save( archivaConfiguration.getConfiguration() );
}