* under the License.
*/
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
-import static org.junit.Assert.*;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.*;
/**
* @author Martin Stockhammer <martin_s@apache.org>
public class EventManagerTest
{
+ private class TestHandler implements EventHandler<Event> {
+
+ private List<Event> eventList = new ArrayList<>( );
+ @Override
+ public void handle( Event event )
+ {
+ eventList.add( event );
+ }
+
+ public List<Event> getEventList() {
+ return eventList;
+ }
+ }
+
+ private EventType<Event> testType = new EventType<>( "TEST" );
+ private EventType<Event> testTestType = new EventType<>( testType,"TEST.TEST" );
+ private EventType<Event> otherType = new EventType( "OTHER" );
+
@Test
public void registerEventHandler( )
{
+ EventManager eventManager = new EventManager( this );
+ TestHandler handler1 = new TestHandler( );
+ TestHandler handler2 = new TestHandler( );
+ TestHandler handler3 = new TestHandler( );
+ TestHandler handler4 = new TestHandler( );
+
+ eventManager.registerEventHandler( Event.ANY, handler1 );
+ eventManager.registerEventHandler( testType, handler2 );
+ eventManager.registerEventHandler( testTestType, handler3 );
+ eventManager.registerEventHandler( otherType, handler4 );
+
+ Event event1 = new Event( testType, this );
+ eventManager.fireEvent( event1 );
+ assertEquals( 1, handler1.eventList.size( ) );
+ assertEquals( 1, handler2.eventList.size( ) );
+ assertEquals( 0, handler3.eventList.size( ) );
+ assertEquals( 0, handler4.eventList.size( ) );
+
+ Event event2 = new Event( testTestType, event1 );
+ eventManager.fireEvent( event2 );
+ assertEquals( 2, handler1.eventList.size( ) );
+ assertEquals( 2, handler2.eventList.size( ) );
+ assertEquals( 1, handler3.eventList.size( ) );
+ assertEquals( 0, handler4.eventList.size( ) );
+
+ Event event3 = new Event( otherType, event1 );
+ eventManager.fireEvent( event3 );
+ assertEquals( 3, handler1.eventList.size( ) );
+ assertEquals( 2, handler2.eventList.size( ) );
+ assertEquals( 1, handler3.eventList.size( ) );
+ assertEquals( 1, handler4.eventList.size( ) );
+
+
+
}
@Test
public void unregisterEventHandler( )
{
+ EventManager eventManager = new EventManager( this );
+ TestHandler handler1 = new TestHandler( );
+ TestHandler handler2 = new TestHandler( );
+ TestHandler handler3 = new TestHandler( );
+ TestHandler handler4 = new TestHandler( );
+
+ eventManager.registerEventHandler( Event.ANY, handler1 );
+ eventManager.registerEventHandler( testType, handler2 );
+ eventManager.registerEventHandler( testTestType, handler3 );
+ eventManager.registerEventHandler( otherType, handler4 );
+
+ eventManager.unregisterEventHandler( Event.ANY, handler1 );
+ Event event1 = new Event( testType, this );
+ eventManager.fireEvent( event1 );
+ assertEquals( 0, handler1.eventList.size( ) );
+ assertEquals( 1, handler2.eventList.size( ) );
+ assertEquals( 0, handler3.eventList.size( ) );
+ assertEquals( 0, handler4.eventList.size( ) );
+
+ eventManager.unregisterEventHandler( otherType, handler2 );
+ Event event2 = new Event( testType, this );
+ eventManager.fireEvent( event2 );
+ assertEquals( 0, handler1.eventList.size( ) );
+ assertEquals( 2, handler2.eventList.size( ) );
+ assertEquals( 0, handler3.eventList.size( ) );
+ assertEquals( 0, handler4.eventList.size( ) );
}
@Test
public void fireEvent( )
{
+ Object other = new Object( );
+ EventManager eventManager = new EventManager( this );
+ assertThrows( NullPointerException.class, ( ) -> eventManager.fireEvent( null ) );
+ Event event = new Event( EventType.ROOT, other );
+ assertEquals( other, event.getSource( ) );
+ TestHandler handler = new TestHandler( );
+ eventManager.registerEventHandler( EventType.ROOT, handler );
+ eventManager.fireEvent( event );
+ assertEquals( 1, handler.getEventList( ).size( ) );
+ Event newEvent = handler.getEventList( ).get( 0 );
+ assertNotEquals( event, newEvent );
+ assertEquals( this, newEvent.getSource( ) );
+
}
}
\ No newline at end of file
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.indexer.ArchivaIndexManager;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.indexer.IndexCreationFailedException;
-import org.apache.archiva.indexer.merger.IndexMerger;
-import org.apache.archiva.indexer.merger.IndexMergerException;
-import org.apache.archiva.indexer.merger.IndexMergerRequest;
-import org.apache.archiva.indexer.merger.TemporaryGroupIndex;
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.archiva.repository.storage.StorageUtil;
-import org.apache.commons.lang3.time.StopWatch;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.scheduling.annotation.Async;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Collection;
-import java.util.List;
-import java.util.Optional;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.stream.Collectors;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M2
- */
-@Service("indexMerger#default")
-public class DefaultIndexMerger
- implements IndexMerger
-{
-
- @Inject
- RepositoryRegistry repositoryRegistry;
-
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- private List<TemporaryGroupIndex> temporaryGroupIndexes = new CopyOnWriteArrayList<>();
-
- private List<ArchivaIndexingContext> temporaryContextes = new CopyOnWriteArrayList<>( );
-
- private List<String> runningGroups = new CopyOnWriteArrayList<>();
-
- @Inject
- public DefaultIndexMerger( )
- {
- }
-
- @Override
- public ArchivaIndexingContext buildMergedIndex(IndexMergerRequest indexMergerRequest )
- throws IndexMergerException
- {
- String groupId = indexMergerRequest.getGroupId();
-
- if ( runningGroups.contains( groupId ) )
- {
- log.info( "skip build merge remote indexes for id: '{}' as already running", groupId );
- return null;
- }
-
- runningGroups.add( groupId );
- StopWatch stopWatch = new StopWatch();
- try {
- stopWatch.reset();
- stopWatch.start();
-
- StorageAsset mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();
- Repository destinationRepository = repositoryRegistry.getRepository(indexMergerRequest.getGroupId());
-
- ArchivaIndexManager idxManager = repositoryRegistry.getIndexManager(destinationRepository.getType());
- List<ArchivaIndexingContext> sourceContexts = indexMergerRequest.getRepositoriesIds().stream().map(id -> repositoryRegistry.getRepository(id).getIndexingContext()).collect(Collectors.toList());
- try {
- ArchivaIndexingContext result = idxManager.mergeContexts(destinationRepository, sourceContexts, indexMergerRequest.isPackIndex());
- if ( indexMergerRequest.isTemporary() )
- {
- String tempRepoId = destinationRepository.getId()+System.currentTimeMillis();
- temporaryGroupIndexes.add( new TemporaryGroupIndex( mergedIndexDirectory, tempRepoId, groupId,
- indexMergerRequest.getMergedIndexTtl() ) );
- temporaryContextes.add(result);
- }
- return result;
- } catch (IndexCreationFailedException e) {
- throw new IndexMergerException("Index merging failed " + e.getMessage(), e);
- }
-
- } finally {
- stopWatch.stop();
- log.info( "merged index for repos {} in {} s", indexMergerRequest.getRepositoriesIds(),
- stopWatch.getTime() );
- runningGroups.remove(groupId);
- }
- }
-
- @Async
- @Override
- public void cleanTemporaryGroupIndex( TemporaryGroupIndex temporaryGroupIndex )
- {
- if ( temporaryGroupIndex == null )
- {
- return;
- }
-
- try
- {
- Optional<ArchivaIndexingContext> ctxOpt = temporaryContextes.stream( ).filter( ctx -> ctx.getId( ).equals( temporaryGroupIndex.getIndexId( ) ) ).findFirst( );
- if (ctxOpt.isPresent()) {
- ArchivaIndexingContext ctx = ctxOpt.get();
- ctx.close(true);
- temporaryGroupIndexes.remove( temporaryGroupIndex );
- temporaryContextes.remove( ctx );
- StorageAsset directory = temporaryGroupIndex.getDirectory();
- if ( directory != null && directory.exists() )
- {
- StorageUtil.deleteRecursively( directory );
- }
- }
- }
- catch ( IOException e )
- {
- log.warn( "fail to delete temporary group index {}", temporaryGroupIndex.getIndexId(), e );
- }
- }
-
- @Override
- public Collection<TemporaryGroupIndex> getTemporaryGroupIndexes()
- {
- return this.temporaryGroupIndexes;
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.RepositoryGroup;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.scheduling.TaskScheduler;
-import org.springframework.scheduling.support.CronTrigger;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.nio.file.Path;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ScheduledFuture;
-import java.util.stream.Collectors;
-
-/**
- * @author Olivier Lamy
- * @since 2.0.0
- */
-@Service( "mergedRemoteIndexesScheduler#default" )
-public class DefaultMergedRemoteIndexesScheduler
- implements MergedRemoteIndexesScheduler
-{
-
- private Logger logger = LoggerFactory.getLogger( getClass() );
-
- @Inject
- @Named( value = "taskScheduler#mergeRemoteIndexes" )
- private TaskScheduler taskScheduler;
-
- @Inject
- private IndexMerger indexMerger;
-
- private Map<String, ScheduledFuture> scheduledFutureMap = new ConcurrentHashMap<>();
-
- @Override
- public void schedule(RepositoryGroup repositoryGroup, StorageAsset directory )
- {
- if ( StringUtils.isEmpty( repositoryGroup.getSchedulingDefinition() ) )
- {
- return;
- }
- CronTrigger cronTrigger = new CronTrigger( repositoryGroup.getSchedulingDefinition() );
-
- List<ManagedRepository> repositories = repositoryGroup.getRepositories();
-
- if (repositoryGroup.supportsFeature( IndexCreationFeature.class ))
- {
-
- IndexCreationFeature indexCreationFeature = repositoryGroup.getFeature( IndexCreationFeature.class ).get();
- Path indexPath = indexCreationFeature.getLocalIndexPath().getFilePath();
- if (indexPath!=null)
- {
- IndexMergerRequest indexMergerRequest =
- new IndexMergerRequest( repositories.stream( ).map( r -> r.getId( ) ).collect( Collectors.toList( ) ), true, repositoryGroup.getId( ),
- indexPath.toString( ),
- repositoryGroup.getMergedIndexTTL( ) ).mergedIndexDirectory( directory );
-
- MergedRemoteIndexesTaskRequest taskRequest =
- new MergedRemoteIndexesTaskRequest( indexMergerRequest, indexMerger );
-
- logger.info( "schedule merge remote index for group {} with cron {}", repositoryGroup.getId( ),
- repositoryGroup.getSchedulingDefinition( ) );
-
- ScheduledFuture scheduledFuture =
- taskScheduler.schedule( new MergedRemoteIndexesTask( taskRequest ), cronTrigger );
- scheduledFutureMap.put( repositoryGroup.getId( ), scheduledFuture );
- } else {
- logger.error("Requested index merger for repository group {} with non local index path {}", repositoryGroup.getId(), indexCreationFeature.getLocalIndexPath());
- }
- } else {
- logger.error("Scheduling merged index for repository group {}, but it does not support IndexCreationFeature.", repositoryGroup.getId());
- }
- }
-
- @Override
- public void unschedule( RepositoryGroup repositoryGroup )
- {
- ScheduledFuture scheduledFuture = scheduledFutureMap.remove( repositoryGroup.getId() );
- if ( scheduledFuture != null )
- {
- scheduledFuture.cancel( true );
- }
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author Olivier Lamy
- * @since 2.0.0
- */
-public class MergedRemoteIndexesTask
- implements Runnable
-{
-
- private Logger logger = LoggerFactory.getLogger( getClass() );
-
- private MergedRemoteIndexesTaskRequest mergedRemoteIndexesTaskRequest;
-
- public MergedRemoteIndexesTask( MergedRemoteIndexesTaskRequest mergedRemoteIndexesTaskRequest )
- {
- this.mergedRemoteIndexesTaskRequest = mergedRemoteIndexesTaskRequest;
- }
-
- @Override
- public void run()
- {
- try
- {
- this.execute();
- }
- catch ( IndexMergerException e )
- {
- logger.error( e.getMessage(), e );
- }
- }
-
- public MergedRemoteIndexesTaskResult execute()
- throws IndexMergerException
- {
- IndexMerger indexMerger = mergedRemoteIndexesTaskRequest.getIndexMerger();
-
- ArchivaIndexingContext indexingContext =
- indexMerger.buildMergedIndex( mergedRemoteIndexesTaskRequest.getIndexMergerRequest() );
-
- return new MergedRemoteIndexesTaskResult( indexingContext );
- }
-
- @Override
- public boolean equals( Object o )
- {
- if ( this == o )
- {
- return true;
- }
- if ( !( o instanceof MergedRemoteIndexesTask ) )
- {
- return false;
- }
-
- MergedRemoteIndexesTask that = (MergedRemoteIndexesTask) o;
-
- return mergedRemoteIndexesTaskRequest.equals( that.mergedRemoteIndexesTaskRequest );
- }
-
- @Override
- public int hashCode()
- {
- return mergedRemoteIndexesTaskRequest.hashCode();
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Olivier Lamy
- * @since 2.0.0
- */
-public class MergedRemoteIndexesTaskRequest
-{
- private IndexMergerRequest indexMergerRequest;
-
- private IndexMerger indexMerger;
-
- public MergedRemoteIndexesTaskRequest( IndexMergerRequest indexMergerRequest, IndexMerger indexMerger )
- {
- this.indexMergerRequest = indexMergerRequest;
- this.indexMerger = indexMerger;
- }
-
- public IndexMergerRequest getIndexMergerRequest()
- {
- return indexMergerRequest;
- }
-
- public void setIndexMergerRequest( IndexMergerRequest indexMergerRequest )
- {
- this.indexMergerRequest = indexMergerRequest;
- }
-
- public IndexMerger getIndexMerger()
- {
- return indexMerger;
- }
-
- public void setIndexMerger( IndexMerger indexMerger )
- {
- this.indexMerger = indexMerger;
- }
-
- @Override
- public boolean equals( Object o )
- {
- if ( this == o )
- {
- return true;
- }
- if ( o == null || getClass() != o.getClass() )
- {
- return false;
- }
-
- MergedRemoteIndexesTaskRequest that = (MergedRemoteIndexesTaskRequest) o;
-
- return indexMergerRequest.equals( that.indexMergerRequest );
- }
-
- @Override
- public int hashCode()
- {
- return indexMergerRequest.hashCode();
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-
-/**
- * @author Olivier Lamy
- * @since 2.0.0
- */
-public class MergedRemoteIndexesTaskResult
-{
- private ArchivaIndexingContext indexingContext;
-
- public MergedRemoteIndexesTaskResult( ArchivaIndexingContext indexingContext )
- {
- this.indexingContext = indexingContext;
- }
-
- public ArchivaIndexingContext getIndexingContext()
- {
- return indexingContext;
- }
-
- public void setIndexingContext( ArchivaIndexingContext indexingContext )
- {
- this.indexingContext = indexingContext;
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.scheduling.annotation.Scheduled;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.util.Date;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M2
- */
-@Service
-public class TemporaryGroupIndexCleaner
-{
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- @Inject
- private IndexMerger indexMerger;
-
-
- public TemporaryGroupIndexCleaner( )
- {
-
- }
-
- // 900000
- @Scheduled(fixedDelay = 900000)
- public void cleanTemporaryIndex()
- {
-
- indexMerger.getTemporaryGroupIndexes()
- .stream()
- .forEach( temporaryGroupIndex ->
- {
- // cleanup files older than the ttl
- if ( new Date().getTime() - temporaryGroupIndex.getCreationTime() >
- temporaryGroupIndex.getMergedIndexTtl() )
- {
- log.info( "cleanTemporaryIndex for groupId {}", temporaryGroupIndex.getGroupId() );
- indexMerger.cleanTemporaryGroupIndex( temporaryGroupIndex );
-
- }
- }
- );
- }
-}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.indexer.ArchivaIndexManager;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.indexer.IndexCreationFailedException;
+import org.apache.archiva.indexer.merger.IndexMerger;
+import org.apache.archiva.indexer.merger.IndexMergerException;
+import org.apache.archiva.indexer.merger.IndexMergerRequest;
+import org.apache.archiva.indexer.merger.TemporaryGroupIndex;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.storage.StorageUtil;
+import org.apache.commons.lang3.time.StopWatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collection;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.stream.Collectors;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M2
+ */
+@Service("indexMerger#default")
+public class DefaultIndexMerger
+ implements IndexMerger
+{
+
+ @Inject
+ RepositoryRegistry repositoryRegistry;
+
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+ private List<TemporaryGroupIndex> temporaryGroupIndexes = new CopyOnWriteArrayList<>();
+
+ private List<ArchivaIndexingContext> temporaryContextes = new CopyOnWriteArrayList<>( );
+
+ private List<String> runningGroups = new CopyOnWriteArrayList<>();
+
+ @Inject
+ public DefaultIndexMerger( )
+ {
+ }
+
+ @Override
+ public ArchivaIndexingContext buildMergedIndex(IndexMergerRequest indexMergerRequest )
+ throws IndexMergerException
+ {
+ String groupId = indexMergerRequest.getGroupId();
+
+ if ( runningGroups.contains( groupId ) )
+ {
+ log.info( "skip build merge remote indexes for id: '{}' as already running", groupId );
+ return null;
+ }
+
+ runningGroups.add( groupId );
+ StopWatch stopWatch = new StopWatch();
+ try {
+ stopWatch.reset();
+ stopWatch.start();
+
+ StorageAsset mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();
+ Repository destinationRepository = repositoryRegistry.getRepository(indexMergerRequest.getGroupId());
+
+ ArchivaIndexManager idxManager = repositoryRegistry.getIndexManager(destinationRepository.getType());
+ List<ArchivaIndexingContext> sourceContexts = indexMergerRequest.getRepositoriesIds().stream().map(id -> repositoryRegistry.getRepository(id).getIndexingContext()).collect(Collectors.toList());
+ try {
+ ArchivaIndexingContext result = idxManager.mergeContexts(destinationRepository, sourceContexts, indexMergerRequest.isPackIndex());
+ if ( indexMergerRequest.isTemporary() )
+ {
+ String tempRepoId = destinationRepository.getId()+System.currentTimeMillis();
+ temporaryGroupIndexes.add( new TemporaryGroupIndex( mergedIndexDirectory, tempRepoId, groupId,
+ indexMergerRequest.getMergedIndexTtl() ) );
+ temporaryContextes.add(result);
+ }
+ return result;
+ } catch (IndexCreationFailedException e) {
+ throw new IndexMergerException("Index merging failed " + e.getMessage(), e);
+ }
+
+ } finally {
+ stopWatch.stop();
+ log.info( "merged index for repos {} in {} s", indexMergerRequest.getRepositoriesIds(),
+ stopWatch.getTime() );
+ runningGroups.remove(groupId);
+ }
+ }
+
+ @Async
+ @Override
+ public void cleanTemporaryGroupIndex( TemporaryGroupIndex temporaryGroupIndex )
+ {
+ if ( temporaryGroupIndex == null )
+ {
+ return;
+ }
+
+ try
+ {
+ Optional<ArchivaIndexingContext> ctxOpt = temporaryContextes.stream( ).filter( ctx -> ctx.getId( ).equals( temporaryGroupIndex.getIndexId( ) ) ).findFirst( );
+ if (ctxOpt.isPresent()) {
+ ArchivaIndexingContext ctx = ctxOpt.get();
+ ctx.close(true);
+ temporaryGroupIndexes.remove( temporaryGroupIndex );
+ temporaryContextes.remove( ctx );
+ StorageAsset directory = temporaryGroupIndex.getDirectory();
+ if ( directory != null && directory.exists() )
+ {
+ StorageUtil.deleteRecursively( directory );
+ }
+ }
+ }
+ catch ( IOException e )
+ {
+ log.warn( "fail to delete temporary group index {}", temporaryGroupIndex.getIndexId(), e );
+ }
+ }
+
+ @Override
+ public Collection<TemporaryGroupIndex> getTemporaryGroupIndexes()
+ {
+ return this.temporaryGroupIndexes;
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.repository.ManagedRepository;
+import org.apache.archiva.repository.RepositoryGroup;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.scheduling.TaskScheduler;
+import org.springframework.scheduling.support.CronTrigger;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ScheduledFuture;
+import java.util.stream.Collectors;
+
+/**
+ * @author Olivier Lamy
+ * @since 2.0.0
+ */
+@Service( "mergedRemoteIndexesScheduler#default" )
+public class DefaultMergedRemoteIndexesScheduler
+ implements MergedRemoteIndexesScheduler
+{
+
+ private Logger logger = LoggerFactory.getLogger( getClass() );
+
+ @Inject
+ @Named( value = "taskScheduler#mergeRemoteIndexes" )
+ private TaskScheduler taskScheduler;
+
+ @Inject
+ private IndexMerger indexMerger;
+
+ private Map<String, ScheduledFuture> scheduledFutureMap = new ConcurrentHashMap<>();
+
+ @Override
+ public void schedule(RepositoryGroup repositoryGroup, StorageAsset directory )
+ {
+ if ( StringUtils.isEmpty( repositoryGroup.getSchedulingDefinition() ) )
+ {
+ return;
+ }
+ CronTrigger cronTrigger = new CronTrigger( repositoryGroup.getSchedulingDefinition() );
+
+ List<ManagedRepository> repositories = repositoryGroup.getRepositories();
+
+ if (repositoryGroup.supportsFeature( IndexCreationFeature.class ))
+ {
+
+ IndexCreationFeature indexCreationFeature = repositoryGroup.getFeature( IndexCreationFeature.class ).get();
+ Path indexPath = indexCreationFeature.getLocalIndexPath().getFilePath();
+ if (indexPath!=null)
+ {
+ IndexMergerRequest indexMergerRequest =
+ new IndexMergerRequest( repositories.stream( ).map( r -> r.getId( ) ).collect( Collectors.toList( ) ), true, repositoryGroup.getId( ),
+ indexPath.toString( ),
+ repositoryGroup.getMergedIndexTTL( ) ).mergedIndexDirectory( directory );
+
+ MergedRemoteIndexesTaskRequest taskRequest =
+ new MergedRemoteIndexesTaskRequest( indexMergerRequest, indexMerger );
+
+ logger.info( "schedule merge remote index for group {} with cron {}", repositoryGroup.getId( ),
+ repositoryGroup.getSchedulingDefinition( ) );
+
+ ScheduledFuture scheduledFuture =
+ taskScheduler.schedule( new MergedRemoteIndexesTask( taskRequest ), cronTrigger );
+ scheduledFutureMap.put( repositoryGroup.getId( ), scheduledFuture );
+ } else {
+ logger.error("Requested index merger for repository group {} with non local index path {}", repositoryGroup.getId(), indexCreationFeature.getLocalIndexPath());
+ }
+ } else {
+ logger.error("Scheduling merged index for repository group {}, but it does not support IndexCreationFeature.", repositoryGroup.getId());
+ }
+ }
+
+ @Override
+ public void unschedule( RepositoryGroup repositoryGroup )
+ {
+ ScheduledFuture scheduledFuture = scheduledFutureMap.remove( repositoryGroup.getId() );
+ if ( scheduledFuture != null )
+ {
+ scheduledFuture.cancel( true );
+ }
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Runnable wrapper that executes a merged remote index build via the
+ * {@link IndexMerger} carried in the task request. Intended to be handed to a
+ * scheduler: {@link #run()} logs any {@link IndexMergerException} instead of
+ * propagating it, so a failing merge never kills the scheduler thread.
+ *
+ * Equality is delegated to the wrapped {@link MergedRemoteIndexesTaskRequest},
+ * so two tasks for the same request compare equal.
+ *
+ * @author Olivier Lamy
+ * @since 2.0.0
+ */
+public class MergedRemoteIndexesTask
+    implements Runnable
+{
+
+    private final Logger logger = LoggerFactory.getLogger( getClass() );
+
+    // Final: this is the task's identity (basis of equals/hashCode) and must not change.
+    private final MergedRemoteIndexesTaskRequest mergedRemoteIndexesTaskRequest;
+
+    public MergedRemoteIndexesTask( MergedRemoteIndexesTaskRequest mergedRemoteIndexesTaskRequest )
+    {
+        this.mergedRemoteIndexesTaskRequest = mergedRemoteIndexesTaskRequest;
+    }
+
+    /**
+     * Runs {@link #execute()}, logging failures. Runnable cannot throw checked
+     * exceptions, hence the catch-and-log here; callers needing the result or
+     * the exception should call {@link #execute()} directly.
+     */
+    @Override
+    public void run()
+    {
+        try
+        {
+            this.execute();
+        }
+        catch ( IndexMergerException e )
+        {
+            logger.error( e.getMessage(), e );
+        }
+    }
+
+    /**
+     * Performs the merge synchronously.
+     *
+     * @return the result holding the merged indexing context
+     * @throws IndexMergerException if the underlying merger fails
+     */
+    public MergedRemoteIndexesTaskResult execute()
+        throws IndexMergerException
+    {
+        IndexMerger indexMerger = mergedRemoteIndexesTaskRequest.getIndexMerger();
+
+        ArchivaIndexingContext indexingContext =
+            indexMerger.buildMergedIndex( mergedRemoteIndexesTaskRequest.getIndexMergerRequest() );
+
+        return new MergedRemoteIndexesTaskResult( indexingContext );
+    }
+
+    @Override
+    public boolean equals( Object o )
+    {
+        if ( this == o )
+        {
+            return true;
+        }
+        if ( !( o instanceof MergedRemoteIndexesTask ) )
+        {
+            return false;
+        }
+
+        MergedRemoteIndexesTask that = (MergedRemoteIndexesTask) o;
+
+        return mergedRemoteIndexesTaskRequest.equals( that.mergedRemoteIndexesTaskRequest );
+    }
+
+    @Override
+    public int hashCode()
+    {
+        return mergedRemoteIndexesTaskRequest.hashCode();
+    }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Parameter object pairing an {@link IndexMergerRequest} with the
+ * {@link IndexMerger} that should execute it; consumed by
+ * {@code MergedRemoteIndexesTask}.
+ *
+ * NOTE(review): equals/hashCode are based on {@code indexMergerRequest} only —
+ * the {@code indexMerger} field is deliberately (?) excluded. Also, the class is
+ * mutable via setters although it serves as a task identity; confirm both are
+ * intended.
+ *
+ * @author Olivier Lamy
+ * @since 2.0.0
+ */
+public class MergedRemoteIndexesTaskRequest
+{
+ // What to merge; the sole basis for equals/hashCode.
+ private IndexMergerRequest indexMergerRequest;
+
+ // Which merger executes the request; not part of equality.
+ private IndexMerger indexMerger;
+
+ public MergedRemoteIndexesTaskRequest( IndexMergerRequest indexMergerRequest, IndexMerger indexMerger )
+ {
+ this.indexMergerRequest = indexMergerRequest;
+ this.indexMerger = indexMerger;
+ }
+
+ public IndexMergerRequest getIndexMergerRequest()
+ {
+ return indexMergerRequest;
+ }
+
+ public void setIndexMergerRequest( IndexMergerRequest indexMergerRequest )
+ {
+ this.indexMergerRequest = indexMergerRequest;
+ }
+
+ public IndexMerger getIndexMerger()
+ {
+ return indexMerger;
+ }
+
+ public void setIndexMerger( IndexMerger indexMerger )
+ {
+ this.indexMerger = indexMerger;
+ }
+
+ /**
+ * Equality considers only the merge request, not the merger instance.
+ */
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( o == null || getClass() != o.getClass() )
+ {
+ return false;
+ }
+
+ MergedRemoteIndexesTaskRequest that = (MergedRemoteIndexesTaskRequest) o;
+
+ return indexMergerRequest.equals( that.indexMergerRequest );
+ }
+
+ @Override
+ public int hashCode()
+ {
+ return indexMergerRequest.hashCode();
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+
+/**
+ * Simple result holder returned by {@code MergedRemoteIndexesTask#execute()},
+ * carrying the {@link ArchivaIndexingContext} produced by the merge.
+ *
+ * @author Olivier Lamy
+ * @since 2.0.0
+ */
+public class MergedRemoteIndexesTaskResult
+{
+ // The merged index context produced by IndexMerger.buildMergedIndex().
+ private ArchivaIndexingContext indexingContext;
+
+ public MergedRemoteIndexesTaskResult( ArchivaIndexingContext indexingContext )
+ {
+ this.indexingContext = indexingContext;
+ }
+
+ public ArchivaIndexingContext getIndexingContext()
+ {
+ return indexingContext;
+ }
+
+ public void setIndexingContext( ArchivaIndexingContext indexingContext )
+ {
+ this.indexingContext = indexingContext;
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.util.Date;
+
+/**
+ * Spring-scheduled job that removes temporary merged group indexes whose
+ * time-to-live has expired, by delegating cleanup to the injected
+ * {@link IndexMerger}.
+ *
+ * @author Olivier Lamy
+ * @since 1.4-M2
+ */
+@Service
+public class TemporaryGroupIndexCleaner
+{
+    private Logger log = LoggerFactory.getLogger( getClass() );
+
+    @Inject
+    private IndexMerger indexMerger;
+
+
+    public TemporaryGroupIndexCleaner( )
+    {
+
+    }
+
+    // Runs every 15 minutes (900000 ms) after the previous invocation completes.
+    @Scheduled(fixedDelay = 900000)
+    public void cleanTemporaryIndex()
+    {
+        // Read the clock once so every TTL comparison in this sweep uses the same "now"
+        // (the previous per-element new Date().getTime() re-read the clock each time).
+        final long now = System.currentTimeMillis();
+        indexMerger.getTemporaryGroupIndexes()
+            .forEach( temporaryGroupIndex ->
+                      {
+                          // cleanup files older than the ttl
+                          if ( now - temporaryGroupIndex.getCreationTime() >
+                              temporaryGroupIndex.getMergedIndexTtl() )
+                          {
+                              log.info( "cleanTemporaryIndex for groupId {}", temporaryGroupIndex.getGroupId() );
+                              indexMerger.cleanTemporaryGroupIndex( temporaryGroupIndex );
+                          }
+                      }
+            );
+    }
+}
+++ /dev/null
-package org.apache.archiva.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import org.apache.archiva.repository.storage.RepositoryStorage;
-
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Locale;
-import java.util.Set;
-
-/**
- * Simple implementation of a managed repository.
- */
-public abstract class AbstractManagedRepository extends AbstractRepository implements EditableManagedRepository
-{
- private boolean blocksRedeployment = false;
- private ManagedRepositoryContent content;
- private Set<ReleaseScheme> activeReleaseSchemes = new HashSet<>( );
- private Set<ReleaseScheme> uActiveReleaseSchemes = Collections.unmodifiableSet( activeReleaseSchemes );
-
- public AbstractManagedRepository(RepositoryType type, String id, String name, RepositoryStorage storage)
- {
- super( type, id, name, storage );
- }
-
- public AbstractManagedRepository( Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage storage )
- {
- super( primaryLocale, type, id, name, storage );
- }
-
- @Override
- public ManagedRepositoryContent getContent( )
- {
- return content;
- }
-
- @Override
- public void setContent(ManagedRepositoryContent content) {
- this.content = content;
- }
-
- @Override
- public void setBlocksRedeployment( boolean blocksRedeployment )
- {
- this.blocksRedeployment = blocksRedeployment;
- }
-
- @Override
- public boolean blocksRedeployments( )
- {
- return blocksRedeployment;
- }
-
- @Override
- public Set<ReleaseScheme> getActiveReleaseSchemes( )
- {
- return uActiveReleaseSchemes;
- }
-
- @Override
- public void addActiveReleaseScheme( ReleaseScheme scheme )
- {
- this.activeReleaseSchemes.add(scheme);
- }
-
- @Override
- public void removeActiveReleaseScheme( ReleaseScheme scheme )
- {
- this.activeReleaseSchemes.remove(scheme);
- }
-
- @Override
- public void clearActiveReleaseSchemes( )
- {
- this.activeReleaseSchemes.clear();
- }
-
-
-}
+++ /dev/null
-package org.apache.archiva.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import org.apache.archiva.repository.storage.RepositoryStorage;
-import org.apache.archiva.repository.storage.StorageAsset;
-
-import java.net.URI;
-import java.nio.file.Path;
-import java.time.Duration;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Locale;
-import java.util.Map;
-
-/**
- * Abstract implementation of a remote repository. Abstract classes must implement the
- * features and capabilities by themselves.
- */
-public abstract class AbstractRemoteRepository extends AbstractRepository implements EditableRemoteRepository
-{
-
- private RepositoryCredentials credentials;
- private String checkPath;
- private Map<String,String> extraParameters = new HashMap<>( );
- private Map<String,String> uExtraParameters = Collections.unmodifiableMap( extraParameters );
- private Map<String,String> extraHeaders = new HashMap<>( );
- private Map<String,String> uExtraHeaders = Collections.unmodifiableMap( extraHeaders );
- private Duration timeout = Duration.ofSeconds( 60 );
- private String proxyId;
- private RemoteRepositoryContent content;
-
- public AbstractRemoteRepository(RepositoryType type, String id, String name , RepositoryStorage storage)
- {
- super( type, id, name, storage );
- }
-
- public AbstractRemoteRepository( Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage storage )
- {
- super( primaryLocale, type, id, name, storage );
- }
-
- @Override
- public void setCredentials( RepositoryCredentials credentials )
- {
- this.credentials = credentials;
- }
-
- @Override
- public void setCheckPath( String path )
- {
- this.checkPath = path;
- }
-
- @Override
- public void setExtraParameters( Map<String, String> params )
- {
- this.extraParameters.clear();
- this.extraParameters.putAll(params);
- }
-
- @Override
- public void addExtraParameter( String key, String value )
- {
- this.extraParameters.put(key, value);
- }
-
- @Override
- public void setExtraHeaders( Map<String, String> headers )
- {
- this.extraHeaders.clear();
- this.extraHeaders.putAll(headers);
- }
-
- @Override
- public void addExtraHeader( String header, String value )
- {
- this.extraHeaders.put(header, value);
- }
-
- @Override
- public void setTimeout( Duration duration )
- {
- this.timeout = duration;
- }
-
- @Override
- public RemoteRepositoryContent getContent( )
- {
- return content;
- }
-
- @Override
- public void setContent(RemoteRepositoryContent content) {
- this.content = content;
- }
-
- @Override
- public RepositoryCredentials getLoginCredentials( )
- {
- return credentials;
- }
-
- @Override
- public String getCheckPath( )
- {
- return checkPath;
- }
-
- @Override
- public Map<String, String> getExtraParameters( )
- {
- return uExtraParameters;
- }
-
- @Override
- public Map<String, String> getExtraHeaders( )
- {
- return uExtraHeaders;
- }
-
- @Override
- public Duration getTimeout( )
- {
- return timeout;
- }
-
- /**
- * Remote repositories resolve always relative to the base directory.
- * @return
- */
- @Override
- public StorageAsset getLocalPath() {
- return getStorage().getAsset("");
- }
-
- @Override
- public String toString() {
- StringBuilder str = new StringBuilder();
- return str.append("checkPath=").append(checkPath)
- .append(",creds:").append(credentials).toString();
- }
-
- @Override
- public void setLocation(URI location) {
- // Location of remote repositories is not for the local filestore
- super.location = location;
- }
-}
+++ /dev/null
-package org.apache.archiva.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import com.cronutils.model.CronType;
-import com.cronutils.model.definition.CronDefinition;
-import com.cronutils.model.definition.CronDefinitionBuilder;
-import com.cronutils.parser.CronParser;
-import org.apache.archiva.event.Event;
-import org.apache.archiva.event.EventHandler;
-import org.apache.archiva.event.EventManager;
-import org.apache.archiva.event.EventType;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.repository.event.*;
-import org.apache.archiva.repository.storage.RepositoryStorage;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.archiva.repository.features.RepositoryFeature;
-import org.apache.archiva.repository.features.StagingRepositoryFeature;
-import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.net.URI;
-import java.nio.channels.ReadableByteChannel;
-import java.nio.channels.WritableByteChannel;
-import java.nio.file.CopyOption;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.function.Consumer;
-
-/**
- * Implementation of a repository with the necessary fields for a bare repository.
- * No features are provided. Capabilities and features must be implemented by concrete classes.
- *
- */
-public abstract class AbstractRepository implements EditableRepository, EventHandler<RepositoryEvent>
-{
-
-
- Logger log = LoggerFactory.getLogger(AbstractRepository.class);
-
- private final AtomicBoolean openStatus = new AtomicBoolean(false);
-
-
- private final RepositoryType type;
- private final String id;
- private Map<Locale, String> names = new HashMap<>( );
- private Map<Locale, String> descriptions = new HashMap<>( );
-
- private Locale primaryLocale = new Locale("en_US");
- protected URI location;
- private URI baseUri;
- private Set<URI> failoverLocations = new HashSet<>( );
- private Set<URI> uFailoverLocations = Collections.unmodifiableSet( failoverLocations );
- private boolean scanned = true;
- String schedulingDefinition = "0 0 02 * * ?";
- private String layout = "default";
- public static final CronDefinition CRON_DEFINITION = CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ);
-
- private final EventManager eventManager;
-
- Map<Class<? extends RepositoryFeature<?>>, RepositoryFeature<?>> featureMap = new HashMap<>( );
-
- private ArchivaIndexingContext indexingContext;
- private RepositoryStorage storage;
-
- public AbstractRepository(RepositoryType type, String id, String name, RepositoryStorage repositoryStorage) {
- this.id = id;
- this.names.put( primaryLocale, name);
- this.type = type;
- this.storage = repositoryStorage;
- this.location = repositoryStorage.getLocation();
- this.openStatus.compareAndSet(false, true);
- this.eventManager = new EventManager(this);
- }
-
- public AbstractRepository(Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage repositoryStorage) {
- setPrimaryLocale( primaryLocale );
- this.id = id;
- this.names.put( primaryLocale, name);
- this.type = type;
- this.storage = repositoryStorage;
- this.location = repositoryStorage.getLocation();
- this.openStatus.compareAndSet(false, true);
- this.eventManager = new EventManager(this);
- }
-
- protected void setPrimaryLocale(Locale locale) {
- this.primaryLocale = locale;
- }
-
- @Override
- public String getId( )
- {
- return id;
- }
-
- @Override
- public String getName( )
- {
- return getName( primaryLocale );
- }
-
- @Override
- public String getName( Locale locale )
- {
- return names.get(locale);
- }
-
- @Override
- public String getDescription( )
- {
- return getDescription( primaryLocale );
- }
-
- @Override
- public String getDescription( Locale locale )
- {
- return descriptions.get(primaryLocale);
- }
-
- @Override
- public RepositoryType getType( )
- {
- return type;
- }
-
- @Override
- public URI getLocation( )
- {
- return location;
- }
-
- @Override
- public StorageAsset getLocalPath() {
- return storage.getAsset("");
- }
-
- @Override
- public Set<URI> getFailoverLocations( )
- {
- return uFailoverLocations;
- }
-
- @Override
- public boolean isScanned( )
- {
- return scanned;
- }
-
- @Override
- public String getSchedulingDefinition( )
- {
- return schedulingDefinition;
- }
-
- @Override
- public abstract boolean hasIndex( );
-
- @Override
- public String getLayout( )
- {
- return layout;
- }
-
- @Override
- public abstract RepositoryCapabilities getCapabilities( );
-
- @SuppressWarnings( "unchecked" )
- @Override
- public <T extends RepositoryFeature<T>> RepositoryFeature<T> getFeature( Class<T> clazz ) throws UnsupportedFeatureException
- {
- if (featureMap.containsKey( clazz )) {
- return (RepositoryFeature<T>) featureMap.get(clazz);
- } else
- {
- throw new UnsupportedFeatureException( "Feature " + clazz + " not supported" );
- }
- }
-
- @Override
- public <T extends RepositoryFeature<T>> boolean supportsFeature( Class<T> clazz )
- {
- return featureMap.containsKey( clazz );
- }
-
- @Override
- public Locale getPrimaryLocale( )
- {
- return primaryLocale;
- }
-
- @Override
- public void setName( Locale locale, String name )
- {
- names.put(locale, name);
- }
-
- @Override
- public void setDescription( Locale locale, String description )
- {
- descriptions.put(locale, description);
- }
-
- @Override
- public void setLocation( final URI location )
- {
- if (location!=null && ( this.location == null || !this.location.equals(location))) {
- try {
- updateLocation(location);
- } catch (IOException e) {
- log.error("Could not update location of repository {} to {}", getId(), location, e);
- }
- }
- }
-
- @Override
- public void updateLocation(URI newLocation) throws IOException {
- storage.updateLocation(newLocation);
- this.location = newLocation;
- }
-
- @Override
- public void addFailoverLocation( URI location )
- {
- this.failoverLocations.add(location);
- }
-
- @Override
- public void removeFailoverLocation( URI location )
- {
- this.failoverLocations.remove( location );
- }
-
- @Override
- public void clearFailoverLocations( )
- {
- this.failoverLocations.clear();
- }
-
- @Override
- public void setScanned( boolean scanned )
- {
- this.scanned = scanned;
- }
-
- @Override
- public void setLayout( String layout )
- {
- this.layout = layout;
- }
-
- @Override
- public void setBaseUri(URI baseUri) {
- this.baseUri = baseUri;
- }
-
- @Override
- public void setSchedulingDefinition(String cronExpression) {
- if (StringUtils.isNotEmpty( cronExpression ))
- {
- CronParser parser = new CronParser( CRON_DEFINITION );
- parser.parse( cronExpression ).validate( );
- }
- this.schedulingDefinition = cronExpression;
- }
-
- @SuppressWarnings( "unchecked" )
- protected <T extends RepositoryFeature<T>> void addFeature(RepositoryFeature<T> feature) {
- featureMap.put( (Class<? extends RepositoryFeature<?>>) feature.getClass(), feature);
- }
-
- @Override
- public void setIndexingContext(ArchivaIndexingContext context) {
- if (this.indexingContext!=null) {
-
- }
- this.indexingContext = context;
- }
-
- @Override
- public ArchivaIndexingContext getIndexingContext() {
- return indexingContext;
- }
-
- @Override
- public void close() {
- if (this.openStatus.compareAndSet(true, false)) {
- ArchivaIndexingContext ctx = getIndexingContext();
- if (ctx != null) {
- try {
- ctx.close();
- } catch (IOException e) {
- log.warn("Error during index context close.", e);
- }
- this.indexingContext = null;
-
- }
- if (supportsFeature(StagingRepositoryFeature.class)) {
- StagingRepositoryFeature sf = getFeature(StagingRepositoryFeature.class).get();
- if (sf.getStagingRepository() != null) {
- sf.getStagingRepository().close();
- }
- }
- }
-
- }
-
- @Override
- public boolean isOpen() {
- return openStatus.get();
- }
-
- @Override
- public void handle(RepositoryEvent event) {
- // We just rethrow the events
- eventManager.fireEvent(event);
- }
-
- @Override
- public <T extends Event> void registerEventHandler( EventType<T> eventType, EventHandler<? super T> eventHandler) {
- if (!EventType.isInstanceOf(eventType, RepositoryEvent.ANY)) {
- throw new IllegalArgumentException("Can only register RepositoryEvent Handlers");
- }
- eventManager.registerEventHandler(eventType, eventHandler);
- }
-
- @Override
- public <T extends Event> void unregisterEventHandler(EventType<T> type, EventHandler<? super T> eventHandler) {
- eventManager.unregisterEventHandler(type, eventHandler);
- }
-
- @Override
- public StorageAsset getAsset(String path )
- {
- return storage.getAsset(path);
- }
-
- @Override
- public StorageAsset addAsset( String path, boolean container )
- {
- return storage.addAsset(path, container);
- }
-
- @Override
- public void removeAsset( StorageAsset asset ) throws IOException
- {
- storage.removeAsset(asset);
- }
-
- @Override
- public StorageAsset moveAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException
- {
- return storage.moveAsset(origin, destination);
- }
-
- @Override
- public void moveAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
- {
- storage.moveAsset( origin, destination, copyOptions );
- }
-
- @Override
- public StorageAsset copyAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException
- {
- return storage.copyAsset(origin, destination);
- }
-
- @Override
- public void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
- {
- storage.copyAsset( origin, destination, copyOptions);
- }
-
- @Override
- public void consumeData(StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock ) throws IOException
- {
- storage.consumeData(asset, consumerFunction, readLock);
- }
-
- @Override
- public void consumeDataFromChannel( StorageAsset asset, Consumer<ReadableByteChannel> consumerFunction, boolean readLock ) throws IOException
- {
- storage.consumeDataFromChannel( asset, consumerFunction, readLock );
- }
-
- @Override
- public void writeData( StorageAsset asset, Consumer<OutputStream> consumerFunction, boolean writeLock ) throws IOException
- {
- storage.writeData( asset, consumerFunction, writeLock );
- }
-
- @Override
- public void writeDataToChannel( StorageAsset asset, Consumer<WritableByteChannel> consumerFunction, boolean writeLock ) throws IOException
- {
- storage.writeDataToChannel( asset, consumerFunction, writeLock );
- }
-
- protected void setStorage( RepositoryStorage storage) {
- this.storage = storage;
- }
-
- protected RepositoryStorage getStorage() {
- return storage;
- }
-}
+++ /dev/null
-package org.apache.archiva.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.repository.storage.RepositoryStorage;
-import org.apache.commons.collections4.map.ListOrderedMap;
-
-import java.util.List;
-import java.util.Locale;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-/**
- * Abstract repository group implementation.
- *
- */
-public class AbstractRepositoryGroup extends AbstractRepository implements EditableRepositoryGroup {
-
- private ListOrderedMap<String, ManagedRepository> repositories = new ListOrderedMap<>();
-
- private int mergedIndexTTL;
-
- private final ReadWriteLock rwl = new ReentrantReadWriteLock();
-
- private RepositoryCapabilities capabilities;
-
- public AbstractRepositoryGroup(RepositoryType type, String id, String name, RepositoryStorage storage) {
- super(type, id, name, storage);
- }
-
- public AbstractRepositoryGroup(Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage storage) {
- super(primaryLocale, type, id, name, storage);
- }
-
- @Override
- public boolean hasIndex() {
- return true;
- }
-
- @Override
- public RepositoryCapabilities getCapabilities() {
- return capabilities;
- }
-
-
- @Override
- public void clearRepositories() {
- rwl.writeLock().lock();
- try {
- repositories.clear();
- } finally {
- rwl.writeLock().unlock();
- }
- }
-
- @Override
- public void setRepositories(List<ManagedRepository> newRepositories) {
- rwl.writeLock().lock();
- try {
- repositories.clear();
- for(ManagedRepository repo : newRepositories) {
- if (repo!=null)
- repositories.put(repo.getId(), repo);
- }
- } finally {
- rwl.writeLock().unlock();
- }
- }
-
- @Override
- public void addRepository(ManagedRepository repository) {
- rwl.writeLock().lock();
- try {
- if (repository!=null)
- repositories.put(repository.getId(), repository);
- } finally {
- rwl.writeLock().unlock();
- }
- }
-
- @Override
- public void addRepository(int index, ManagedRepository repository) {
- rwl.writeLock().lock();
- try {
- if (repository!=null)
- repositories.put(index, repository.getId(), repository);
- } finally {
- rwl.writeLock().unlock();
- }
- }
-
- @Override
- public boolean removeRepository(ManagedRepository repository) {
- rwl.writeLock().lock();
- try {
- return repositories.remove(repository.getId(), repository);
- } finally {
- rwl.writeLock().unlock();
- }
- }
-
- @Override
- public ManagedRepository removeRepository(String repoId) {
- rwl.writeLock().lock();
- try {
- return repositories.remove(repoId);
- } finally {
- rwl.writeLock().unlock();
- }
- }
-
- @Override
- public void setMergedIndexTTL(int timeInSeconds) {
- this.mergedIndexTTL = timeInSeconds;
- }
-
- @Override
- public List<ManagedRepository> getRepositories() {
- rwl.readLock().lock();
- try {
- return repositories.valueList();
- } finally {
- rwl.readLock().unlock();
- }
- }
-
- @Override
- public boolean contains(ManagedRepository repository) {
- rwl.readLock().lock();
- try {
- return repositories.containsValue(repository);
- } finally {
- rwl.readLock().unlock();
- }
- }
-
- @Override
- public boolean contains(String id) {
- rwl.readLock().lock();
- try {
- return repositories.containsKey(id);
- } finally {
- rwl.readLock().unlock();
- }
- }
-
- @Override
- public int getMergedIndexTTL() {
- return mergedIndexTTL;
- }
-
- protected void setCapabilities(RepositoryCapabilities capabilities) {
- this.capabilities = capabilities;
- }
-}
+++ /dev/null
-package org.apache.archiva.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.configuration.*;
-import org.apache.archiva.event.Event;
-import org.apache.archiva.event.EventManager;
-import org.apache.archiva.event.EventSource;
-import org.apache.archiva.event.EventType;
-import org.apache.archiva.indexer.*;
-import org.apache.archiva.redback.components.registry.RegistryException;
-import org.apache.archiva.repository.event.*;
-import org.apache.archiva.event.EventHandler;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.features.StagingRepositoryFeature;
-import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-import javax.annotation.PostConstruct;
-import javax.annotation.PreDestroy;
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.util.*;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
-
-/**
- * Registry for repositories. This is the central entry point for repositories. It provides methods for
- * retrieving, adding and removing repositories.
- * <p>
- * The modification methods addXX and removeXX persist the changes immediately to the configuration. If the
- * configuration save fails the changes are rolled back.
- * <p>
- * TODO: Audit events
- *
- * @since 3.0
- */
-@Service("repositoryRegistry")
-public class ArchivaRepositoryRegistry implements ConfigurationListener, EventHandler<Event>,
- RepositoryRegistry
-{
-
- private static final Logger log = LoggerFactory.getLogger(RepositoryRegistry.class);
-
- /**
- * We inject all repository providers
- */
- @Inject
- List<RepositoryProvider> repositoryProviders;
-
- @Inject
- IndexManagerFactory indexManagerFactory;
-
- @Inject
- ArchivaConfiguration archivaConfiguration;
-
- @Inject
- @Named("repositoryContentFactory#default")
- RepositoryContentFactory repositoryContentFactory;
-
- private final EventManager eventManager;
-
-
- private Map<String, ManagedRepository> managedRepositories = new HashMap<>();
- private Map<String, ManagedRepository> uManagedRepository = Collections.unmodifiableMap(managedRepositories);
-
- private Map<String, RemoteRepository> remoteRepositories = new HashMap<>();
- private Map<String, RemoteRepository> uRemoteRepositories = Collections.unmodifiableMap(remoteRepositories);
-
- private Map<String, RepositoryGroup> repositoryGroups = new HashMap<>();
- private Map<String, RepositoryGroup> uRepositoryGroups = Collections.unmodifiableMap(repositoryGroups);
-
- private ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock();
-
- private volatile boolean ignoreConfigEvents = false;
-
public ArchivaRepositoryRegistry() {
    // The registry itself is the source of all events it dispatches.
    this.eventManager = new EventManager(this);
}
-
/**
 * Sets the configuration instance used by this registry, replacing the injected one.
 *
 * @param archivaConfiguration the configuration to use
 */
@Override
public void setArchivaConfiguration( ArchivaConfiguration archivaConfiguration ) {
    this.archivaConfiguration = archivaConfiguration;
}
-
/**
 * Populates the registry from the current configuration (managed repositories,
 * remote repositories and repository groups), registers this instance as
 * configuration listener, and fires a RELOADED event afterwards.
 */
@PostConstruct
private void initialize() {
    rwLock.writeLock().lock();
    try {
        log.debug("Initializing repository registry");
        updateManagedRepositoriesFromConfig();
        updateRemoteRepositoriesFromConfig();

        repositoryGroups.clear();
        Map<String, RepositoryGroup> repositoryGroups = getRepositorGroupsFromConfig();
        this.repositoryGroups.putAll(repositoryGroups);

        // archivaConfiguration.addChangeListener(this);
        archivaConfiguration.addListener(this);
    } finally {
        rwLock.writeLock().unlock();
    }
    // The event is fired after the write lock is released.
    pushEvent(new RepositoryRegistryEvent(RepositoryRegistryEvent.RELOADED, this));
}
-
/**
 * Closes all managed and remote repositories, clears the registry state and
 * fires a DESTROYED event. Called by the container on shutdown.
 */
@PreDestroy
public void destroy() {
    for (ManagedRepository rep : managedRepositories.values()) {
        rep.close();
    }
    managedRepositories.clear();
    for (RemoteRepository repo : remoteRepositories.values()) {
        repo.close();
    }
    remoteRepositories.clear();
    pushEvent(new RepositoryRegistryEvent(RepositoryRegistryEvent.DESTROYED, this));
}
-
-
- private Map<RepositoryType, RepositoryProvider> createProviderMap() {
- Map<RepositoryType, RepositoryProvider> map = new HashMap<>();
- if (repositoryProviders != null) {
- for (RepositoryProvider provider : repositoryProviders) {
- for (RepositoryType type : provider.provides()) {
- map.put(type, provider);
- }
- }
- }
- return map;
- }
-
- private RepositoryProvider getProvider(RepositoryType type) throws RepositoryException {
- return repositoryProviders.stream().filter(repositoryProvider -> repositoryProvider.provides().contains(type)).findFirst().orElseThrow(() -> new RepositoryException("Repository type cannot be handled: " + type));
- }
-
/*
 * Updates the managed repositories from the configuration: registers every configured
 * repository and closes/removes registered repositories that are no longer configured.
 */
private void updateManagedRepositoriesFromConfig() {
    try {

        Set<String> configRepoIds = new HashSet<>();
        List<ManagedRepositoryConfiguration> managedRepoConfigs =
            getArchivaConfiguration().getConfiguration().getManagedRepositories();

        if (managedRepoConfigs == null) {
            return;
        }

        for (ManagedRepositoryConfiguration repoConfig : managedRepoConfigs) {
            ManagedRepository repo = putRepository(repoConfig, null);
            configRepoIds.add(repoConfig.getId());
            // Also keep the id of an attached staging repository, so it is not
            // removed again in the cleanup step below.
            if (repo.supportsFeature(StagingRepositoryFeature.class)) {
                StagingRepositoryFeature stagF = repo.getFeature(StagingRepositoryFeature.class).get();
                if (stagF.getStagingRepository() != null) {
                    configRepoIds.add(stagF.getStagingRepository().getId());
                }
            }
        }
        // Close and drop repositories that are no longer present in the configuration.
        List<String> toRemove = managedRepositories.keySet().stream().filter(id -> !configRepoIds.contains(id)).collect(Collectors.toList());
        for (String id : toRemove) {
            ManagedRepository removed = managedRepositories.remove(id);
            removed.close();
        }
    } catch (Throwable e) {
        // Deliberately broad: initialization must not propagate errors; log and keep state.
        log.error("Could not initialize repositories from config: {}", e.getMessage(), e);
        return;
    }
}
-
/**
 * Creates a fresh managed repository instance from the given configuration via the
 * provider and wires up its references (staging repo, content, index context).
 *
 * @param provider the provider for the repository type
 * @param cfg the repository configuration
 * @return the new repository instance
 * @throws RepositoryException if the instance or its references cannot be created
 */
private ManagedRepository createNewManagedRepository(RepositoryProvider provider, ManagedRepositoryConfiguration cfg) throws RepositoryException {
    log.debug("Creating repo {}", cfg.getId());
    ManagedRepository repo = provider.createManagedInstance(cfg);
    // NOTE(review): updateRepositoryReferences() registers this handler again at its
    // end — appears redundant unless registration is idempotent; verify in EventManager.
    repo.registerEventHandler(RepositoryEvent.ANY, this);
    updateRepositoryReferences(provider, repo, cfg, null);
    return repo;

}
-
- private String getStagingId(String repoId) {
- return repoId + StagingRepositoryFeature.STAGING_REPO_POSTFIX;
- }
-
- @SuppressWarnings("unchecked")
- private void updateRepositoryReferences(RepositoryProvider provider, ManagedRepository repo, ManagedRepositoryConfiguration cfg, Configuration configuration) throws RepositoryException {
- log.debug("Updating references of repo {}", repo.getId());
- if (repo.supportsFeature(StagingRepositoryFeature.class)) {
- StagingRepositoryFeature feature = repo.getFeature(StagingRepositoryFeature.class).get();
- if (feature.isStageRepoNeeded() && feature.getStagingRepository() == null) {
- ManagedRepository stageRepo = getManagedRepository(getStagingId(repo.getId()));
- if (stageRepo == null) {
- stageRepo = getStagingRepository(provider, cfg, configuration);
- managedRepositories.put(stageRepo.getId(), stageRepo);
- if (configuration != null) {
- replaceOrAddRepositoryConfig(provider.getManagedConfiguration(stageRepo), configuration);
- }
- pushEvent(new LifecycleEvent(LifecycleEvent.REGISTERED, this, stageRepo));
- }
- feature.setStagingRepository(stageRepo);
- }
- }
- if (repo instanceof EditableManagedRepository) {
- EditableManagedRepository editableRepo = (EditableManagedRepository) repo;
- if (repo.getContent() == null) {
- editableRepo.setContent(repositoryContentFactory.getManagedRepositoryContent(repo));
- editableRepo.getContent().setRepository(editableRepo);
- }
- log.debug("Index repo: " + repo.hasIndex());
- if (repo.hasIndex() && ( repo.getIndexingContext() == null || !repo.getIndexingContext().isOpen() )) {
- log.debug("Creating indexing context for {}", repo.getId());
- createIndexingContext(editableRepo);
- }
- }
- repo.registerEventHandler(RepositoryEvent.ANY, this);
- }
-
/**
 * Returns the index manager responsible for the given repository type.
 *
 * @param type the repository type
 * @return the index manager from the injected factory
 */
@Override
public ArchivaIndexManager getIndexManager( RepositoryType type ) {
    return indexManagerFactory.getIndexManager(type);
}
-
/**
 * Creates and attaches an indexing context for the repository, if it supports
 * index creation. Also updates the local index path. No-op otherwise.
 *
 * @param editableRepo the repository to create the context for
 * @throws RepositoryException if index creation fails
 */
private void createIndexingContext(EditableRepository editableRepo) throws RepositoryException {
    if (editableRepo.supportsFeature(IndexCreationFeature.class)) {
        ArchivaIndexManager idxManager = getIndexManager(editableRepo.getType());
        try {
            editableRepo.setIndexingContext(idxManager.createContext(editableRepo));
            idxManager.updateLocalIndexPath(editableRepo);
        } catch (IndexCreationFailedException e) {
            // Wrap with repository context; original exception kept as cause.
            throw new RepositoryException("Could not create index for repository " + editableRepo.getId() + ": " + e.getMessage(), e);
        }
    }
}
-
/**
 * Returns the staging repository for the given base repository, creating it via the
 * provider if it is not registered yet. A created staging repository has its own
 * stage-repo flag disabled, so it does not recursively require another staging repo.
 *
 * @param provider the provider for the repository type
 * @param baseRepoCfg the configuration of the base repository
 * @param configuration the global configuration, passed through to reference updates (may be null)
 * @return the existing or newly created staging repository
 * @throws RepositoryException if creation or reference update fails
 */
private ManagedRepository getStagingRepository(RepositoryProvider provider, ManagedRepositoryConfiguration baseRepoCfg, Configuration configuration) throws RepositoryException {
    ManagedRepository stageRepo = getManagedRepository(getStagingId(baseRepoCfg.getId()));
    if (stageRepo == null) {
        stageRepo = provider.createStagingInstance(baseRepoCfg);
        if (stageRepo.supportsFeature(StagingRepositoryFeature.class)) {
            stageRepo.getFeature(StagingRepositoryFeature.class).get().setStageRepoNeeded(false);
        }
        ManagedRepositoryConfiguration stageCfg = provider.getManagedConfiguration(stageRepo);
        updateRepositoryReferences(provider, stageRepo, stageCfg, configuration);
    }
    return stageRepo;
}
-
-
/**
 * Updates the remote repositories from the configuration: registers every configured
 * remote repository and closes/removes registered ones that are no longer configured.
 */
private void updateRemoteRepositoriesFromConfig() {
    try {
        List<RemoteRepositoryConfiguration> remoteRepoConfigs =
            getArchivaConfiguration().getConfiguration().getRemoteRepositories();

        if (remoteRepoConfigs == null) {
            return;
        }
        Set<String> repoIds = new HashSet<>();
        for (RemoteRepositoryConfiguration repoConfig : remoteRepoConfigs) {
            putRepository(repoConfig, null);
            repoIds.add(repoConfig.getId());
        }

        // Close and drop repositories that are no longer present in the configuration.
        List<String> toRemove = remoteRepositories.keySet().stream().filter(id -> !repoIds.contains(id)).collect(Collectors.toList());
        for (String id : toRemove) {
            RemoteRepository removed = remoteRepositories.remove(id);
            removed.close();
        }

    } catch (Throwable e) {
        // Deliberately broad: initialization must not propagate errors; log and keep state.
        log.error("Could not initialize remote repositories from config: {}", e.getMessage(), e);
        return;
    }
}
-
/**
 * Creates a fresh remote repository instance from the given configuration via the
 * provider and wires up its references (content, index context, event handler).
 *
 * @param provider the provider for the repository type
 * @param cfg the remote repository configuration
 * @return the new repository instance
 * @throws RepositoryException if the instance or its references cannot be created
 */
private RemoteRepository createNewRemoteRepository(RepositoryProvider provider, RemoteRepositoryConfiguration cfg) throws RepositoryException {
    log.debug("Creating remote repo {}", cfg.getId());
    RemoteRepository repo = provider.createRemoteInstance(cfg);
    updateRepositoryReferences(provider, repo, cfg, null);
    return repo;

}
-
/**
 * Wires up a remote repository: sets the content (and indexing context, if the
 * repository supports index creation) on editable instances without content, and
 * registers this registry as event handler.
 *
 * @param provider the provider for the repository type (currently unused here)
 * @param repo the repository to update
 * @param cfg the remote repository configuration (currently unused here)
 * @param configuration the global configuration (currently unused here)
 * @throws RepositoryException if content or index context creation fails
 */
private void updateRepositoryReferences(RepositoryProvider provider, RemoteRepository repo, RemoteRepositoryConfiguration cfg, Configuration configuration) throws RepositoryException {
    if (repo instanceof EditableRemoteRepository && repo.getContent() == null) {
        EditableRemoteRepository editableRepo = (EditableRemoteRepository) repo;
        editableRepo.setContent(repositoryContentFactory.getRemoteRepositoryContent(repo));
        if (repo.supportsFeature(IndexCreationFeature.class) && repo.getIndexingContext() == null) {
            createIndexingContext(editableRepo);
        }
    }
    repo.registerEventHandler(RepositoryEvent.ANY, this);
}
-
/**
 * Builds the repository group map from the configuration. Groups whose type has no
 * registered provider are skipped; groups that fail to instantiate are logged and
 * skipped individually, so one bad group does not prevent the others from loading.
 *
 * @return map of group id to group, in configuration order; empty on error or no groups
 */
private Map<String, RepositoryGroup> getRepositorGroupsFromConfig() {
    try {
        List<RepositoryGroupConfiguration> repositoryGroupConfigurations =
            getArchivaConfiguration().getConfiguration().getRepositoryGroups();

        if (repositoryGroupConfigurations == null) {
            return Collections.emptyMap();
        }

        // LinkedHashMap keeps the configuration order of the groups.
        Map<String, RepositoryGroup> repositoryGroupMap = new LinkedHashMap<>(repositoryGroupConfigurations.size());

        Map<RepositoryType, RepositoryProvider> providerMap = createProviderMap();
        for (RepositoryGroupConfiguration repoConfig : repositoryGroupConfigurations) {
            RepositoryType repositoryType = RepositoryType.valueOf(repoConfig.getType());
            if (providerMap.containsKey(repositoryType)) {
                try {
                    RepositoryGroup repo = createNewRepositoryGroup(providerMap.get(repositoryType), repoConfig);
                    repositoryGroupMap.put(repo.getId(), repo);
                } catch (Exception e) {
                    log.error("Could not create repository group {}: {}", repoConfig.getId(), e.getMessage(), e);
                }
            }
        }
        return repositoryGroupMap;
    } catch (Throwable e) {
        // Deliberately broad: initialization must not propagate errors.
        log.error("Could not initialize repositories from config: {}", e.getMessage(), e);
        return Collections.emptyMap();
    }
}
-
/**
 * Creates a repository group instance from the configuration, registers this
 * registry as event handler and resolves the member repository references.
 *
 * @param provider the provider for the group's repository type
 * @param config the group configuration
 * @return the new repository group
 * @throws RepositoryException if the provider cannot create the group
 */
private RepositoryGroup createNewRepositoryGroup(RepositoryProvider provider, RepositoryGroupConfiguration config) throws RepositoryException {
    RepositoryGroup repositoryGroup = provider.createRepositoryGroup(config);
    repositoryGroup.registerEventHandler(RepositoryEvent.ANY, this);
    updateRepositoryReferences(provider, repositoryGroup, config);
    return repositoryGroup;
}
-
/**
 * Resolves the configured member repository ids of the group to registered
 * ManagedRepository instances. Only editable groups are updated.
 * NOTE(review): getManagedRepository may return null for unknown ids, so the
 * member list can contain null entries — verify downstream handling.
 *
 * @param provider the provider for the group's type (currently unused here)
 * @param group the group to update
 * @param configuration the group configuration holding the member ids
 */
private void updateRepositoryReferences(RepositoryProvider provider, RepositoryGroup group, RepositoryGroupConfiguration configuration) {
    if (group instanceof EditableRepositoryGroup) {
        EditableRepositoryGroup eGroup = (EditableRepositoryGroup) group;
        eGroup.setRepositories(configuration.getRepositories().stream().map(r -> getManagedRepository(r)).collect(Collectors.toList()));
    }
}
-
/** Returns the configuration instance used by this registry. */
private ArchivaConfiguration getArchivaConfiguration() {
    return this.archivaConfiguration;
}
-
- /**
- * Returns all repositories that are registered. There is no defined order of the returned repositories.
- *
- * @return a list of managed and remote repositories
- */
- @Override
- public Collection<Repository> getRepositories( ) {
- rwLock.readLock().lock();
- try {
- return Stream.concat(managedRepositories.values().stream(), remoteRepositories.values().stream()).collect(Collectors.toList());
- } finally {
- rwLock.readLock().unlock();
- }
- }
-
- /**
- * Returns only the managed repositories. There is no defined order of the returned repositories.
- *
- * @return a list of managed repositories
- */
- @Override
- public Collection<ManagedRepository> getManagedRepositories( ) {
- rwLock.readLock().lock();
- try {
- return uManagedRepository.values();
- } finally {
- rwLock.readLock().unlock();
- }
- }
-
- /**
- * Returns only the remote repositories. There is no defined order of the returned repositories.
- *
- * @return a list of remote repositories
- */
- @Override
- public Collection<RemoteRepository> getRemoteRepositories( ) {
- rwLock.readLock().lock();
- try {
- return uRemoteRepositories.values();
- } finally {
- rwLock.readLock().unlock();
- }
- }
-
- @Override
- public Collection<RepositoryGroup> getRepositoryGroups( ) {
- rwLock.readLock().lock();
- try {
- return uRepositoryGroups.values();
- } finally {
- rwLock.readLock().unlock();
- }
- }
-
- /**
- * Returns the repository with the given id. The returned repository may be a managed or remote repository.
- * It returns null, if no repository is registered with the given id.
- *
- * @param repoId the repository id
- * @return the repository if found, otherwise null
- */
- @Override
- public Repository getRepository( String repoId ) {
- rwLock.readLock().lock();
- try {
- log.debug("getRepository {}", repoId);
- if (managedRepositories.containsKey(repoId)) {
- log.debug("Managed repo");
- return managedRepositories.get(repoId);
- } else if (remoteRepositories.containsKey(repoId)) {
- log.debug("Remote repo");
- return remoteRepositories.get(repoId);
- } else if (repositoryGroups.containsKey(repoId)) {
- return repositoryGroups.get(repoId);
- } else {
- return null;
- }
- } finally {
- rwLock.readLock().unlock();
- }
- }
-
- /**
- * Convenience method, that returns the managed repository with the given id.
- * It returns null, if no managed repository is registered with this id.
- *
- * @param repoId the repository id
- * @return the managed repository if found, otherwise null
- */
- @Override
- public ManagedRepository getManagedRepository( String repoId ) {
- rwLock.readLock().lock();
- try {
- return managedRepositories.get(repoId);
- } finally {
- rwLock.readLock().unlock();
- }
- }
-
- /**
- * Convenience method, that returns the remote repository with the given id.
- * It returns null, if no remote repository is registered with this id.
- *
- * @param repoId the repository id
- * @return the remote repository if found, otherwise null
- */
- @Override
- public RemoteRepository getRemoteRepository( String repoId ) {
- rwLock.readLock().lock();
- try {
- return remoteRepositories.get(repoId);
- } finally {
- rwLock.readLock().unlock();
- }
- }
-
- @Override
- public RepositoryGroup getRepositoryGroup( String groupId ) {
- rwLock.readLock().lock();
- try {
- return repositoryGroups.get(groupId);
- } finally {
- rwLock.readLock().unlock();
- }
- }
-
/*
 * The <code>ignoreConfigEvents</code> works only for synchronized configuration events.
 * If the configuration throws async events, we cannot know, if the event is caused by this instance or another thread.
 */
private void saveConfiguration(Configuration configuration) throws IndeterminateConfigurationException, RegistryException {
    // Flag must be set before save and reset afterwards, even on failure, so that
    // synchronous config events fired by our own save are recognized and skipped.
    ignoreConfigEvents = true;
    try {
        getArchivaConfiguration().save(configuration);
    } finally {
        ignoreConfigEvents = false;
    }
}
-
/**
 * Adds a new repository to the current list, or replaces the repository definition with
 * the same id, if it exists already.
 * The change is saved to the configuration immediately.
 *
 * @param managedRepository the new repository.
 * @throws RepositoryException if the new repository could not be saved to the configuration.
 */
@Override
public ManagedRepository putRepository( ManagedRepository managedRepository ) throws RepositoryException {
    rwLock.writeLock().lock();
    try {
        final String id = managedRepository.getId();
        // Ids are unique across repository kinds; reject a clash with a remote repo.
        if (remoteRepositories.containsKey(id)) {
            throw new RepositoryException("There exists a remote repository with id " + id + ". Could not update with managed repository.");
        }
        ManagedRepository originRepo = managedRepositories.put(id, managedRepository);
        try {
            if (originRepo != null && originRepo != managedRepository) {
                originRepo.close();
            }
            RepositoryProvider provider = getProvider(managedRepository.getType());
            ManagedRepositoryConfiguration newCfg = provider.getManagedConfiguration(managedRepository);
            Configuration configuration = getArchivaConfiguration().getConfiguration();
            updateRepositoryReferences(provider, managedRepository, newCfg, configuration);
            ManagedRepositoryConfiguration oldCfg = configuration.findManagedRepositoryById(id);
            if (oldCfg != null) {
                configuration.removeManagedRepository(oldCfg);
            }
            configuration.addManagedRepository(newCfg);
            saveConfiguration(configuration);
            // REGISTERED for a new instance, UPDATED when the same instance was put again.
            if (originRepo != managedRepository) {
                pushEvent(new LifecycleEvent(LifecycleEvent.REGISTERED, this, managedRepository));
            } else {
                pushEvent(new LifecycleEvent(LifecycleEvent.UPDATED, this, managedRepository));
            }
            return managedRepository;
        } catch (Exception e) {
            // Rollback only partly, because repository is closed already
            if (originRepo != null) {
                managedRepositories.put(id, originRepo);
            } else {
                managedRepositories.remove(id);
            }
            log.error("Exception during configuration update {}", e.getMessage(), e);
            throw new RepositoryException("Could not save the configuration" + (e.getMessage() == null ? "" : ": " + e.getMessage()));
        }
    } finally {
        rwLock.writeLock().unlock();
    }
}
-
/**
 * Adds a new repository or updates the repository with the same id, if it exists already.
 * The configuration is saved immediately.
 *
 * @param managedRepositoryConfiguration the repository configuration
 * @return the updated or created repository
 * @throws RepositoryException if an error occurs, or the configuration is not valid.
 */
@Override
public ManagedRepository putRepository( ManagedRepositoryConfiguration managedRepositoryConfiguration ) throws RepositoryException {
    rwLock.writeLock().lock();
    try {
        final String id = managedRepositoryConfiguration.getId();
        final RepositoryType repositoryType = RepositoryType.valueOf(managedRepositoryConfiguration.getType());
        Configuration configuration = getArchivaConfiguration().getConfiguration();
        // Keep the previous configuration so the instance can be restored if the save fails.
        ManagedRepository repo = managedRepositories.get(id);
        ManagedRepositoryConfiguration oldCfg = repo != null ? getProvider(repositoryType).getManagedConfiguration(repo) : null;
        repo = putRepository(managedRepositoryConfiguration, configuration);
        try {
            saveConfiguration(configuration);
        } catch (IndeterminateConfigurationException | RegistryException e) {
            // Roll the in-memory instance back to its previous configuration.
            if (oldCfg != null) {
                getProvider(repositoryType).updateManagedInstance((EditableManagedRepository) repo, oldCfg);
            }
            log.error("Could not save the configuration for repository {}: {}", id, e.getMessage(), e);
            throw new RepositoryException("Could not save the configuration for repository " + id + ": " + e.getMessage());
        }
        return repo;
    } finally {
        rwLock.writeLock().unlock();
    }

}
-
/**
 * Adds a new repository or updates the repository with the same id. The given configuration object is updated, but
 * the configuration is not saved.
 *
 * @param managedRepositoryConfiguration the new or changed managed repository configuration
 * @param configuration the configuration object (may be <code>null</code>)
 * @return the new or updated repository
 * @throws RepositoryException if the configuration cannot be saved or updated
 */
@Override
public ManagedRepository putRepository( ManagedRepositoryConfiguration managedRepositoryConfiguration, Configuration configuration ) throws RepositoryException {
    rwLock.writeLock().lock();
    try {
        final String id = managedRepositoryConfiguration.getId();
        final RepositoryType repoType = RepositoryType.valueOf(managedRepositoryConfiguration.getType());
        ManagedRepository repo;
        boolean registeredNew = false;
        repo = managedRepositories.get(id);
        // Update in place if a live (open) instance exists; otherwise create a new one.
        if (repo != null && repo.isOpen()) {
            if (repo instanceof EditableManagedRepository) {
                getProvider(repoType).updateManagedInstance((EditableManagedRepository) repo, managedRepositoryConfiguration);
            } else {
                throw new RepositoryException("The repository is not editable " + id);
            }
        } else {
            repo = getProvider(repoType).createManagedInstance(managedRepositoryConfiguration);
            managedRepositories.put(id, repo);
            registeredNew = true;
        }
        updateRepositoryReferences(getProvider(repoType), repo, managedRepositoryConfiguration, configuration);
        replaceOrAddRepositoryConfig(managedRepositoryConfiguration, configuration);
        if (registeredNew) {
            pushEvent(new LifecycleEvent(LifecycleEvent.REGISTERED, this, repo));
        } else {
            pushEvent(new LifecycleEvent(LifecycleEvent.UPDATED, this, repo));
        }
        return repo;
    } finally {
        rwLock.writeLock().unlock();
    }
}
-
-
/**
 * Adds a new repository group to the current list, or replaces the repository group definition with
 * the same id, if it exists already.
 * The change is saved to the configuration immediately.
 *
 * @param repositoryGroup the new repository group.
 * @throws RepositoryException if the new repository group could not be saved to the configuration.
 */
@Override
public RepositoryGroup putRepositoryGroup( RepositoryGroup repositoryGroup ) throws RepositoryException {
    rwLock.writeLock().lock();
    try {
        final String id = repositoryGroup.getId();
        RepositoryGroup originRepoGroup = repositoryGroups.put(id, repositoryGroup);
        try {
            if (originRepoGroup != null && originRepoGroup != repositoryGroup) {
                originRepoGroup.close();
            }
            RepositoryProvider provider = getProvider(repositoryGroup.getType());
            RepositoryGroupConfiguration newCfg = provider.getRepositoryGroupConfiguration(repositoryGroup);
            Configuration configuration = getArchivaConfiguration().getConfiguration();
            updateRepositoryReferences(provider, repositoryGroup, newCfg);
            RepositoryGroupConfiguration oldCfg = configuration.findRepositoryGroupById(id);
            if (oldCfg != null) {
                configuration.removeRepositoryGroup(oldCfg);
            }
            configuration.addRepositoryGroup(newCfg);
            saveConfiguration(configuration);
            return repositoryGroup;
        } catch (Exception e) {
            // Rollback: restore the previous group, or remove the new entry entirely.
            // NOTE(review): the previous group was already closed above — like the
            // managed-repository variant this rollback is only partial.
            if (originRepoGroup != null) {
                repositoryGroups.put(id, originRepoGroup);
            } else {
                repositoryGroups.remove(id);
            }
            log.error("Exception during configuration update {}", e.getMessage(), e);
            throw new RepositoryException("Could not save the configuration" + (e.getMessage() == null ? "" : ": " + e.getMessage()));
        }
    } finally {
        rwLock.writeLock().unlock();
    }
}
-
/**
 * Adds a new repository group or updates the repository with the same id, if it exists already.
 * The configuration is saved immediately.
 *
 * @param repositoryGroupConfiguration the repository configuration
 * @return the updated or created repository
 * @throws RepositoryException if an error occurs, or the configuration is not valid.
 */
@Override
public RepositoryGroup putRepositoryGroup( RepositoryGroupConfiguration repositoryGroupConfiguration ) throws RepositoryException {
    rwLock.writeLock().lock();
    try {
        final String id = repositoryGroupConfiguration.getId();
        final RepositoryType repositoryType = RepositoryType.valueOf(repositoryGroupConfiguration.getType());
        Configuration configuration = getArchivaConfiguration().getConfiguration();
        // Keep the previous configuration so the instance can be restored if the save fails.
        RepositoryGroup repo = repositoryGroups.get(id);
        RepositoryGroupConfiguration oldCfg = repo != null ? getProvider(repositoryType).getRepositoryGroupConfiguration(repo) : null;
        repo = putRepositoryGroup(repositoryGroupConfiguration, configuration);
        try {
            saveConfiguration(configuration);
        } catch (IndeterminateConfigurationException | RegistryException e) {
            // Roll the in-memory instance back to its previous configuration.
            if (oldCfg != null) {
                getProvider(repositoryType).updateRepositoryGroupInstance((EditableRepositoryGroup) repo, oldCfg);
            }
            log.error("Could not save the configuration for repository group {}: {}", id, e.getMessage(), e);
            throw new RepositoryException("Could not save the configuration for repository group " + id + ": " + e.getMessage());
        }
        return repo;
    } finally {
        rwLock.writeLock().unlock();
    }

}
-
/**
 * Adds a new repository group or updates the repository group with the same id. The given configuration object is updated, but
 * the configuration is not saved.
 *
 * @param repositoryGroupConfiguration The configuration of the new or changed repository group.
 * @param configuration The configuration object. If it is <code>null</code>, the configuration is not saved.
 * @return The new or updated repository group
 * @throws RepositoryException if the configuration cannot be saved or updated
 */
@Override
public RepositoryGroup putRepositoryGroup( RepositoryGroupConfiguration repositoryGroupConfiguration, Configuration configuration ) throws RepositoryException {
    rwLock.writeLock().lock();
    try {
        final String id = repositoryGroupConfiguration.getId();
        final RepositoryType repoType = RepositoryType.valueOf(repositoryGroupConfiguration.getType());
        RepositoryGroup repo;
        // Fill in index path, TTL and cron defaults before creating/updating the instance.
        setRepositoryGroupDefaults(repositoryGroupConfiguration);
        if (repositoryGroups.containsKey(id)) {
            repo = repositoryGroups.get(id);
            if (repo instanceof EditableRepositoryGroup) {
                getProvider(repoType).updateRepositoryGroupInstance((EditableRepositoryGroup) repo, repositoryGroupConfiguration);
            } else {
                throw new RepositoryException("The repository is not editable " + id);
            }
        } else {
            repo = getProvider(repoType).createRepositoryGroup(repositoryGroupConfiguration);
            repositoryGroups.put(id, repo);
        }
        updateRepositoryReferences(getProvider(repoType), repo, repositoryGroupConfiguration);
        replaceOrAddRepositoryConfig(repositoryGroupConfiguration, configuration);
        return repo;
    } finally {
        rwLock.writeLock().unlock();
    }
}
-
/**
 * Fills missing or invalid repository group settings with defaults:
 * merged index path, merged index TTL (300) and the merge cron schedule.
 *
 * @param repositoryGroupConfiguration the configuration to normalize in place
 */
private void setRepositoryGroupDefaults(RepositoryGroupConfiguration repositoryGroupConfiguration) {
    if (StringUtils.isEmpty(repositoryGroupConfiguration.getMergedIndexPath())) {
        repositoryGroupConfiguration.setMergedIndexPath(DEFAULT_INDEX_PATH);
    }
    if (repositoryGroupConfiguration.getMergedIndexTtl() <= 0) {
        repositoryGroupConfiguration.setMergedIndexTtl(300);
    }
    if (StringUtils.isEmpty(repositoryGroupConfiguration.getCronExpression())) {
        // Every Monday at 03:00.
        repositoryGroupConfiguration.setCronExpression("0 0 03 ? * MON");
    }
}
-
- private void replaceOrAddRepositoryConfig(ManagedRepositoryConfiguration managedRepositoryConfiguration, Configuration configuration) {
- if (configuration != null) {
- ManagedRepositoryConfiguration oldCfg = configuration.findManagedRepositoryById(managedRepositoryConfiguration.getId());
- if (oldCfg != null) {
- configuration.removeManagedRepository(oldCfg);
- }
- configuration.addManagedRepository(managedRepositoryConfiguration);
- }
- }
-
- private void replaceOrAddRepositoryConfig(RemoteRepositoryConfiguration remoteRepositoryConfiguration, Configuration configuration) {
- if (configuration != null) {
- RemoteRepositoryConfiguration oldCfg = configuration.findRemoteRepositoryById(remoteRepositoryConfiguration.getId());
- if (oldCfg != null) {
- configuration.removeRemoteRepository(oldCfg);
- }
- configuration.addRemoteRepository(remoteRepositoryConfiguration);
- }
- }
-
- private void replaceOrAddRepositoryConfig(RepositoryGroupConfiguration repositoryGroupConfiguration, Configuration configuration) {
- RepositoryGroupConfiguration oldCfg = configuration.findRepositoryGroupById(repositoryGroupConfiguration.getId());
- if (oldCfg != null) {
- configuration.removeRepositoryGroup(oldCfg);
- }
- configuration.addRepositoryGroup(repositoryGroupConfiguration);
- }
-
/**
 * Adds a remote repository, or replaces the repository with the same id. The given
 * configuration object is updated, but not saved. Fires a REGISTERED event for a new
 * instance, UPDATED when the same instance was put again. On failure the in-memory
 * map and the configuration entry are rolled back.
 *
 * @param remoteRepository the repository to add or replace
 * @param configuration the configuration object to update
 * @return the given repository
 * @throws RepositoryException if the id clashes with a managed repository, or the update fails
 */
@Override
public RemoteRepository putRepository( RemoteRepository remoteRepository, Configuration configuration ) throws RepositoryException {
    rwLock.writeLock().lock();
    try {
        final String id = remoteRepository.getId();
        // Ids are unique across repository kinds; reject a clash with a managed repo.
        if (managedRepositories.containsKey(id)) {
            throw new RepositoryException("There exists a managed repository with id " + id + ". Could not update with remote repository.");
        }
        RemoteRepository originRepo = remoteRepositories.put(id, remoteRepository);
        RemoteRepositoryConfiguration oldCfg = null;
        RemoteRepositoryConfiguration newCfg;
        try {
            if (originRepo != null && originRepo != remoteRepository) {
                originRepo.close();
            }
            final RepositoryProvider provider = getProvider(remoteRepository.getType());
            newCfg = provider.getRemoteConfiguration(remoteRepository);
            updateRepositoryReferences(provider, remoteRepository, newCfg, configuration);
            oldCfg = configuration.findRemoteRepositoryById(id);
            if (oldCfg != null) {
                configuration.removeRemoteRepository(oldCfg);
            }
            configuration.addRemoteRepository(newCfg);
            if (remoteRepository != originRepo) {
                pushEvent(new LifecycleEvent(LifecycleEvent.REGISTERED, this, remoteRepository));
            } else {
                pushEvent(new LifecycleEvent(LifecycleEvent.UPDATED, this, remoteRepository));
            }
            return remoteRepository;
        } catch (Exception e) {
            // Rollback: restore the previous instance (or remove the new one) ...
            if (originRepo != null) {
                remoteRepositories.put(id, originRepo);
            } else {
                remoteRepositories.remove(id);
            }
            // ... and restore the previous configuration entry, if it was replaced.
            if (oldCfg != null) {
                RemoteRepositoryConfiguration cfg = configuration.findRemoteRepositoryById(id);
                if (cfg != null) {
                    configuration.removeRemoteRepository(cfg);
                    configuration.addRemoteRepository(oldCfg);
                }
            }
            log.error("Error while adding remote repository {}", e.getMessage(), e);
            throw new RepositoryException("Could not save the configuration" + (e.getMessage() == null ? "" : ": " + e.getMessage()));
        }
    } finally {
        rwLock.writeLock().unlock();
    }
}
-
/**
 * Adds a remote repository, or overwrites the repository definition with the same id, if it exists already.
 * The modification is saved to the configuration immediately.
 *
 * @param remoteRepository the remote repository to add
 * @throws RepositoryException if an error occurs during configuration save
 */
@Override
public RemoteRepository putRepository( RemoteRepository remoteRepository ) throws RepositoryException {
    rwLock.writeLock().lock();
    try {
        Configuration configuration = getArchivaConfiguration().getConfiguration();
        try {
            // Delegate the registry/config update, then persist the configuration.
            RemoteRepository repo = putRepository(remoteRepository, configuration);
            saveConfiguration(configuration);
            return repo;
        } catch (RegistryException | IndeterminateConfigurationException e) {
            log.error("Error while saving remote repository {}", e.getMessage(), e);
            throw new RepositoryException("Could not save the configuration" + (e.getMessage() == null ? "" : ": " + e.getMessage()));
        }
    } finally {
        rwLock.writeLock().unlock();
    }
}
-
/**
 * Adds a new repository or updates the repository with the same id, if it exists already.
 * The configuration is saved immediately.
 *
 * @param remoteRepositoryConfiguration the repository configuration
 * @return the updated or created repository
 * @throws RepositoryException if an error occurs, or the configuration is not valid.
 */
@Override
public RemoteRepository putRepository( RemoteRepositoryConfiguration remoteRepositoryConfiguration ) throws RepositoryException {
    rwLock.writeLock().lock();
    try {
        final String id = remoteRepositoryConfiguration.getId();
        final RepositoryType repositoryType = RepositoryType.valueOf(remoteRepositoryConfiguration.getType());
        Configuration configuration = getArchivaConfiguration().getConfiguration();
        // Keep the previous configuration so the instance can be restored if the save fails.
        RemoteRepository repo = remoteRepositories.get(id);
        RemoteRepositoryConfiguration oldCfg = repo != null ? getProvider(repositoryType).getRemoteConfiguration(repo) : null;
        repo = putRepository(remoteRepositoryConfiguration, configuration);
        try {
            saveConfiguration(configuration);
        } catch (IndeterminateConfigurationException | RegistryException e) {
            // Roll the in-memory instance back to its previous configuration.
            if (oldCfg != null) {
                getProvider(repositoryType).updateRemoteInstance((EditableRemoteRepository) repo, oldCfg);
            }
            log.error("Could not save the configuration for repository {}: {}", id, e.getMessage(), e);
            throw new RepositoryException("Could not save the configuration for repository " + id + ": " + e.getMessage());
        }
        return repo;
    } finally {
        rwLock.writeLock().unlock();
    }

}
-
- /**
- * Adds a new repository or updates the repository with the same id. The given configuration object is updated, but
- * the configuration is not saved.
- *
- * @param remoteRepositoryConfiguration the new or changed repository configuration
- * @param configuration the configuration object
- * @return the new or updated repository
- * @throws RepositoryException if the configuration cannot be saved or updated
- */
- @Override
- @SuppressWarnings("unchecked")
- public RemoteRepository putRepository( RemoteRepositoryConfiguration remoteRepositoryConfiguration, Configuration configuration ) throws RepositoryException {
- rwLock.writeLock().lock();
- try {
- final String id = remoteRepositoryConfiguration.getId();
- final RepositoryType repoType = RepositoryType.valueOf(remoteRepositoryConfiguration.getType());
- RemoteRepository repo;
- boolean registeredNew = false;
- repo = remoteRepositories.get(id);
- if (repo != null && repo.isOpen()) {
- if (repo instanceof EditableRemoteRepository) {
- getProvider(repoType).updateRemoteInstance((EditableRemoteRepository) repo, remoteRepositoryConfiguration);
- } else {
- throw new RepositoryException("The repository is not editable " + id);
- }
- } else {
- repo = getProvider(repoType).createRemoteInstance(remoteRepositoryConfiguration);
- remoteRepositories.put(id, repo);
- registeredNew = true;
- }
- updateRepositoryReferences(getProvider(repoType), repo, remoteRepositoryConfiguration, configuration);
- replaceOrAddRepositoryConfig(remoteRepositoryConfiguration, configuration);
- if (registeredNew) {
- pushEvent(new LifecycleEvent(LifecycleEvent.REGISTERED, this, repo));
- } else {
- pushEvent(new LifecycleEvent(LifecycleEvent.UPDATED, this, repo));
- }
- return repo;
- } finally {
- rwLock.writeLock().unlock();
- }
-
-
- }
-
- @Override
- public void removeRepository( String repoId ) throws RepositoryException {
- Repository repo = getRepository(repoId);
- if (repo != null) {
- removeRepository(repo);
- }
- }
-
- @Override
- public void removeRepository( Repository repo ) throws RepositoryException {
- if (repo == null) {
- log.warn("Trying to remove null repository");
- return;
- }
- if (repo instanceof RemoteRepository) {
- removeRepository((RemoteRepository) repo);
- } else if (repo instanceof ManagedRepository) {
- removeRepository((ManagedRepository) repo);
- } else if (repo instanceof RepositoryGroup) {
- removeRepositoryGroup((RepositoryGroup) repo);
- } else {
- throw new RepositoryException("Repository type not known: " + repo.getClass());
- }
- }
-
- /**
- * Removes a managed repository from the registry and configuration, if it exists.
- * The change is saved to the configuration immediately.
- *
- * @param managedRepository the managed repository to remove
- * @throws RepositoryException if a error occurs during configuration save
- */
- @Override
- public void removeRepository( ManagedRepository managedRepository ) throws RepositoryException {
- if (managedRepository == null) {
- return;
- }
- final String id = managedRepository.getId();
- ManagedRepository repo = getManagedRepository(id);
- if (repo != null) {
- rwLock.writeLock().lock();
- try {
- repo = managedRepositories.remove(id);
- if (repo != null) {
- repo.close();
- removeRepositoryFromGroups(repo);
- Configuration configuration = getArchivaConfiguration().getConfiguration();
- ManagedRepositoryConfiguration cfg = configuration.findManagedRepositoryById(id);
- if (cfg != null) {
- configuration.removeManagedRepository(cfg);
- }
- saveConfiguration(configuration);
- }
- pushEvent(new LifecycleEvent(LifecycleEvent.UNREGISTERED, this, repo));
- } catch (RegistryException | IndeterminateConfigurationException e) {
- // Rollback
- log.error("Could not save config after repository removal: {}", e.getMessage(), e);
- managedRepositories.put(repo.getId(), repo);
- throw new RepositoryException("Could not save configuration after repository removal: " + e.getMessage());
- } finally {
- rwLock.writeLock().unlock();
- }
- }
- }
-
- private void removeRepositoryFromGroups(ManagedRepository repo) {
- if (repo != null) {
- repositoryGroups.values().stream().filter(repoGroup -> repoGroup instanceof EditableRepository).
- map(repoGroup -> (EditableRepositoryGroup) repoGroup).forEach(repoGroup -> repoGroup.removeRepository(repo));
- }
- }
-
- @Override
- public void removeRepository( ManagedRepository managedRepository, Configuration configuration ) throws RepositoryException {
- if (managedRepository == null) {
- return;
- }
- final String id = managedRepository.getId();
- ManagedRepository repo = getManagedRepository(id);
- if (repo != null) {
- rwLock.writeLock().lock();
- try {
- repo = managedRepositories.remove(id);
- if (repo != null) {
- repo.close();
- removeRepositoryFromGroups(repo);
- ManagedRepositoryConfiguration cfg = configuration.findManagedRepositoryById(id);
- if (cfg != null) {
- configuration.removeManagedRepository(cfg);
- }
- }
- pushEvent(new LifecycleEvent(LifecycleEvent.UNREGISTERED, this, repo));
- } finally {
- rwLock.writeLock().unlock();
- }
- }
-
- }
-
-
- /**
- * Removes a repository group from the registry and configuration, if it exists.
- * The change is saved to the configuration immediately.
- *
- * @param repositoryGroup the repository group to remove
- * @throws RepositoryException if a error occurs during configuration save
- */
- @Override
- public void removeRepositoryGroup( RepositoryGroup repositoryGroup ) throws RepositoryException {
- if (repositoryGroup == null) {
- return;
- }
- final String id = repositoryGroup.getId();
- RepositoryGroup repo = getRepositoryGroup(id);
- if (repo != null) {
- rwLock.writeLock().lock();
- try {
- repo = repositoryGroups.remove(id);
- if (repo != null) {
- repo.close();
- Configuration configuration = getArchivaConfiguration().getConfiguration();
- RepositoryGroupConfiguration cfg = configuration.findRepositoryGroupById(id);
- if (cfg != null) {
- configuration.removeRepositoryGroup(cfg);
- }
- saveConfiguration(configuration);
- }
-
- } catch (RegistryException | IndeterminateConfigurationException e) {
- // Rollback
- log.error("Could not save config after repository removal: {}", e.getMessage(), e);
- repositoryGroups.put(repo.getId(), repo);
- throw new RepositoryException("Could not save configuration after repository removal: " + e.getMessage());
- } finally {
- rwLock.writeLock().unlock();
- }
- }
- }
-
- @Override
- public void removeRepositoryGroup( RepositoryGroup repositoryGroup, Configuration configuration ) throws RepositoryException {
- if (repositoryGroup == null) {
- return;
- }
- final String id = repositoryGroup.getId();
- RepositoryGroup repo = getRepositoryGroup(id);
- if (repo != null) {
- rwLock.writeLock().lock();
- try {
- repo = repositoryGroups.remove(id);
- if (repo != null) {
- repo.close();
- RepositoryGroupConfiguration cfg = configuration.findRepositoryGroupById(id);
- if (cfg != null) {
- configuration.removeRepositoryGroup(cfg);
- }
- }
- } finally {
- rwLock.writeLock().unlock();
- }
- }
-
- }
-
- private void doRemoveRepo(RemoteRepository repo, Configuration configuration) {
- repo.close();
- RemoteRepositoryConfiguration cfg = configuration.findRemoteRepositoryById(repo.getId());
- if (cfg != null) {
- configuration.removeRemoteRepository(cfg);
- }
- List<ProxyConnectorConfiguration> proxyConnectors = new ArrayList<>(configuration.getProxyConnectors());
- for (ProxyConnectorConfiguration proxyConnector : proxyConnectors) {
- if (StringUtils.equals(proxyConnector.getTargetRepoId(), repo.getId())) {
- configuration.removeProxyConnector(proxyConnector);
- }
- }
- }
-
- /**
- * Removes the remote repository from the registry and configuration.
- * The change is saved to the configuration immediately.
- *
- * @param remoteRepository the remote repository to remove
- * @throws RepositoryException if a error occurs during configuration save
- */
- @Override
- public void removeRepository( RemoteRepository remoteRepository ) throws RepositoryException {
- if (remoteRepository == null) {
- return;
- }
- final String id = remoteRepository.getId();
- RemoteRepository repo = getRemoteRepository(id);
- if (repo != null) {
- rwLock.writeLock().lock();
- try {
- repo = remoteRepositories.remove(id);
- if (repo != null) {
- Configuration configuration = getArchivaConfiguration().getConfiguration();
- doRemoveRepo(repo, configuration);
- saveConfiguration(configuration);
- }
- pushEvent(new LifecycleEvent(LifecycleEvent.UNREGISTERED, this, repo));
- } catch (RegistryException | IndeterminateConfigurationException e) {
- // Rollback
- log.error("Could not save config after repository removal: {}", e.getMessage(), e);
- remoteRepositories.put(repo.getId(), repo);
- throw new RepositoryException("Could not save configuration after repository removal: " + e.getMessage());
- } finally {
- rwLock.writeLock().unlock();
- }
- }
- }
-
- @Override
- public void removeRepository( RemoteRepository remoteRepository, Configuration configuration ) throws RepositoryException {
- if (remoteRepository == null) {
- return;
- }
- final String id = remoteRepository.getId();
- RemoteRepository repo = getRemoteRepository(id);
- if (repo != null) {
- rwLock.writeLock().lock();
- try {
- repo = remoteRepositories.remove(id);
- if (repo != null) {
- doRemoveRepo(repo, configuration);
- }
- pushEvent(new LifecycleEvent(LifecycleEvent.UNREGISTERED, this, repo));
- } finally {
- rwLock.writeLock().unlock();
- }
- }
-
- }
-
- /**
- * Reloads the registry from the configuration.
- */
- @Override
- public void reload( ) {
- initialize();
- }
-
- /**
- * Resets the indexing context of a given repository.
- *
- * @param repository The repository
- * @throws IndexUpdateFailedException If the index could not be resetted.
- */
- @Override
- public void resetIndexingContext( Repository repository ) throws IndexUpdateFailedException {
- if (repository.hasIndex() && repository instanceof EditableRepository) {
- EditableRepository eRepo = (EditableRepository) repository;
- ArchivaIndexingContext newCtx = getIndexManager(repository.getType()).reset(repository.getIndexingContext());
- eRepo.setIndexingContext(newCtx);
- }
- }
-
-
- /**
- * Creates a new repository instance with the same settings as this one. The cloned repository is not
- * registered or saved to the configuration.
- *
- * @param repo The origin repository
- * @return The cloned repository.
- */
- @Override
- public ManagedRepository clone( ManagedRepository repo, String newId ) throws RepositoryException {
- if (managedRepositories.containsKey(newId) || remoteRepositories.containsKey(newId)) {
- throw new RepositoryException("The given id exists already " + newId);
- }
- RepositoryProvider provider = getProvider(repo.getType());
- ManagedRepositoryConfiguration cfg = provider.getManagedConfiguration(repo);
- cfg.setId(newId);
- ManagedRepository cloned = provider.createManagedInstance(cfg);
- cloned.registerEventHandler(RepositoryEvent.ANY, this);
- return cloned;
- }
-
- @Override
- public <T extends Repository> Repository clone( T repo, String newId ) throws RepositoryException {
- if (repo instanceof RemoteRepository) {
- return this.clone((RemoteRepository) repo, newId);
- } else if (repo instanceof ManagedRepository) {
- return this.clone((ManagedRepository) repo, newId);
- } else {
- throw new RepositoryException("This repository class is not supported " + repo.getClass().getName());
- }
- }
-
- /**
- * Creates a new repository instance with the same settings as this one. The cloned repository is not
- * registered or saved to the configuration.
- *
- * @param repo The origin repository
- * @return The cloned repository.
- */
- @Override
- public RemoteRepository clone( RemoteRepository repo, String newId ) throws RepositoryException {
- if (managedRepositories.containsKey(newId) || remoteRepositories.containsKey(newId)) {
- throw new RepositoryException("The given id exists already " + newId);
- }
- RepositoryProvider provider = getProvider(repo.getType());
- RemoteRepositoryConfiguration cfg = provider.getRemoteConfiguration(repo);
- cfg.setId(newId);
- RemoteRepository cloned = provider.createRemoteInstance(cfg);
- cloned.registerEventHandler(RepositoryEvent.ANY, this);
- return cloned;
- }
-
-
- @Override
- public void configurationEvent(ConfigurationEvent event) {
- // Note: the ignoreConfigEvents flag does not work, if the config events are asynchronous.
- if (!ignoreConfigEvents) {
- reload();
- }
- }
-
-
- @Override
- public <T extends Event> void registerEventHandler( EventType<T> type, EventHandler<? super T> eventHandler) {
- eventManager.registerEventHandler(type, eventHandler);
- }
-
-
- @Override
- public <T extends Event> void unregisterEventHandler(EventType<T> type, EventHandler<? super T> eventHandler) {
- eventManager.unregisterEventHandler(type, eventHandler);
- }
-
-
- @Override
- public void handle(Event event) {
- // To avoid event cycles:
- if (sameOriginator(event)) {
- return;
- }
- if (event instanceof RepositoryIndexEvent) {
- handleIndexCreationEvent((RepositoryIndexEvent) event);
- }
- // We propagate all events to our listeners, but with context of repository registry
- pushEvent(event);
- }
-
- private void handleIndexCreationEvent(RepositoryIndexEvent event) {
- RepositoryIndexEvent idxEvent = event;
- if (managedRepositories.containsKey(idxEvent.getRepository().getId()) ||
- remoteRepositories.containsKey(idxEvent.getRepository().getId())) {
- EditableRepository repo = (EditableRepository) idxEvent.getRepository();
- if (repo != null && repo.getIndexingContext() != null) {
- try {
- ArchivaIndexManager idxmgr = getIndexManager(repo.getType());
- if (idxmgr != null) {
- ArchivaIndexingContext newCtx = idxmgr.move(repo.getIndexingContext(), repo);
- repo.setIndexingContext(newCtx);
- idxmgr.updateLocalIndexPath(repo);
- }
-
- } catch (IndexCreationFailedException e) {
- log.error("Could not move index to new directory {}", e.getMessage(), e);
- }
- }
- }
- }
-
- private boolean sameOriginator(Event event) {
- if (event.getSource() == this) {
- return true;
- } else if (event.hasPreviousEvent()) {
- return sameOriginator(event.getPreviousEvent());
- } else {
- return false;
- }
- }
-
- private void pushEvent(Event event) {
- eventManager.fireEvent(event);
- }
-
-
-
-}
+++ /dev/null
-package org.apache.archiva.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.filelock.DefaultFileLockManager;
-import org.apache.archiva.common.filelock.FileLockManager;
-import org.apache.archiva.repository.storage.FilesystemStorage;
-import org.apache.archiva.repository.storage.RepositoryStorage;
-import org.apache.archiva.repository.features.ArtifactCleanupFeature;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.features.StagingRepositoryFeature;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.nio.file.Path;
-import java.util.Locale;
-
-/**
- *
- * Just a helper class, mainly used for unit tests.
- *
- *
- */
-public class BasicManagedRepository extends AbstractManagedRepository
-
-{
- Logger log = LoggerFactory.getLogger(BasicManagedRepository.class);
- ArtifactCleanupFeature artifactCleanupFeature = new ArtifactCleanupFeature( );
- StagingRepositoryFeature stagingRepositoryFeature = new StagingRepositoryFeature( );
-
- static final StandardCapabilities CAPABILITIES = new StandardCapabilities( new ReleaseScheme[] {
- ReleaseScheme.RELEASE, ReleaseScheme.SNAPSHOT
- }, new String[] {"default"}, new String[0], new String[] {
- ArtifactCleanupFeature.class.toString(), IndexCreationFeature.class.toString(),
- StagingRepositoryFeature.class.toString()
- }, true, true, true, true, true );
-
- public BasicManagedRepository( String id, String name, RepositoryStorage repositoryStorage )
- {
- super( RepositoryType.MAVEN, id, name, repositoryStorage );
- initFeatures();
- }
-
- public BasicManagedRepository( Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage repositoryStorage )
- {
- super( primaryLocale, type, id, name, repositoryStorage);
- initFeatures();
- }
-
- private void initFeatures() {
- IndexCreationFeature indexCreationFeature = new IndexCreationFeature(this, this);
- addFeature( artifactCleanupFeature );
- addFeature( indexCreationFeature );
- addFeature( stagingRepositoryFeature );
- }
-
- @Override
- public boolean hasIndex( )
- {
- return true;
- }
-
- @Override
- public RepositoryCapabilities getCapabilities( )
- {
- return CAPABILITIES;
- }
-
-
- @Override
- public RepositoryRequestInfo getRequestInfo() {
- return null;
- }
-
- /**
- * Creates a filesystem based repository instance. The path is built by basePath/repository-id
- *
- * @param id The repository id
- * @param name The name of the repository
- * @param repositoryPath The path to the repository
- * @return The repository instance
- * @throws IOException
- */
- public static BasicManagedRepository newFilesystemInstance(String id, String name, Path repositoryPath) throws IOException {
- FileLockManager lockManager = new DefaultFileLockManager();
- FilesystemStorage storage = new FilesystemStorage(repositoryPath, lockManager);
- return new BasicManagedRepository(id, name, storage);
- }
-
-}
+++ /dev/null
-package org.apache.archiva.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.filelock.DefaultFileLockManager;
-import org.apache.archiva.common.filelock.FileLockManager;
-import org.apache.archiva.repository.storage.FilesystemStorage;
-import org.apache.archiva.repository.storage.RepositoryStorage;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.features.RemoteIndexFeature;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.nio.file.Path;
-import java.util.Locale;
-
-/**
- *
- * Just a helper class, mainly used for unit tests.
- *
- *
- */
-public class BasicRemoteRepository extends AbstractRemoteRepository
-
-{
- Logger log = LoggerFactory.getLogger(BasicRemoteRepository.class);
-
- RemoteIndexFeature remoteIndexFeature = new RemoteIndexFeature();
- IndexCreationFeature indexCreationFeature = new IndexCreationFeature(true);
-
-
- static final StandardCapabilities CAPABILITIES = new StandardCapabilities( new ReleaseScheme[] {
- ReleaseScheme.RELEASE, ReleaseScheme.SNAPSHOT
- }, new String[] {"default"}, new String[0], new String[] {
- RemoteIndexFeature.class.toString(),
- IndexCreationFeature.class.toString()
- }, true, true, true, true, true );
-
- public BasicRemoteRepository( String id, String name, RepositoryStorage storage)
- {
- super( RepositoryType.MAVEN, id, name, storage);
- initFeatures();
- }
-
- public BasicRemoteRepository( Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage storage )
- {
- super( primaryLocale, type, id, name, storage );
- initFeatures();
- }
-
- private void initFeatures() {
- addFeature( remoteIndexFeature );
- addFeature( indexCreationFeature );
- }
-
- @Override
- public boolean hasIndex( )
- {
- return true;
- }
-
- @Override
- public RepositoryCapabilities getCapabilities( )
- {
- return CAPABILITIES;
- }
-
-
- public static BasicRemoteRepository newFilesystemInstance(String id, String name, Path basePath) throws IOException {
- FileLockManager lockManager = new DefaultFileLockManager();
- FilesystemStorage storage = new FilesystemStorage(basePath.resolve(id), lockManager);
- return new BasicRemoteRepository(id, name, storage);
- }
-}
+++ /dev/null
-package org.apache.archiva.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Simple credentials that hold username and password
- */
-public class PasswordCredentials implements RepositoryCredentials
-{
- String username;
- char[] password;
-
- public PasswordCredentials(String username, char[] password) {
- this.username = username;
- this.password = password;
- }
-
- public String getUsername( )
- {
- return username;
- }
-
- public void setUsername( String username )
- {
- this.username = username;
- }
-
- public char[] getPassword( )
- {
- return password;
- }
-
- public void setPassword( char[] password )
- {
- this.password = password;
- }
-
- @Override
- public String toString() {
- return "name="+username+",pwd=*****";
- }
-}
+++ /dev/null
-package org.apache.archiva.repository.content;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Maven 1.x request type to classifier mapping for translating to a Maven 2.x storage
- *
- * TODO reuse mappings for other repositories
- *
- * @since 1.1
- */
-public class ArtifactClassifierMapping
-{
- private static final Map<String, String> typeToClassifierMap;
-
- static
- {
- // note additional 's' on type as these are maven 1.x directory components
- typeToClassifierMap = new HashMap<>( 3 );
- typeToClassifierMap.put( "java-sources", "sources" );
- typeToClassifierMap.put( "javadoc.jars", "javadoc" );
- typeToClassifierMap.put( "javadocs", "javadoc" );
- }
-
- public static String getClassifier( String type )
- {
- // Try specialized types first.
- if ( typeToClassifierMap.containsKey( type ) )
- {
- return typeToClassifierMap.get( type );
- }
-
- // No classifier
- return null;
- }
-}
-
+++ /dev/null
-package org.apache.archiva.repository.content;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.model.ArtifactReference;
-import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.ManagedRepositoryContent;
-import org.apache.archiva.repository.RepositoryContentFactory;
-import org.apache.archiva.repository.RepositoryException;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-
-/**
- * Utility class that gives information about the physical location of artifacts.
- */
-@Service( "ArtifactUtil#default" )
-public class ArtifactUtil {
-
- @Inject
- RepositoryContentFactory repositoryContentFactory;
-
- /**
- * Returns the physical location of a given artifact in the repository. There is no check for the
- * existence of the returned file.
- *
- * @param repository The repository, where the artifact is stored.
- * @param artifactReference The artifact reference.
- * @return The absolute path to the artifact.
- * @throws RepositoryException
- */
- public Path getArtifactPath(ManagedRepository repository, ArtifactReference artifactReference) throws RepositoryException {
- final ManagedRepositoryContent content = repositoryContentFactory.getManagedRepositoryContent(repository);
- final String artifactPath = content.toPath( artifactReference );
- return Paths.get(repository.getLocation()).resolve(artifactPath);
- }
-
- /**
- * Returns the physical location of a given artifact in the repository. There is no check for the
- * existence of the returned file.
- *
- * @param repository The repository, where the artifact is stored.
- * @param artifactReference The artifact reference.
- * @return The asset representation of the artifact.
- * @throws RepositoryException
- */
- public StorageAsset getArtifactAsset(ManagedRepository repository, ArtifactReference artifactReference) throws RepositoryException {
- final ManagedRepositoryContent content = repositoryContentFactory.getManagedRepositoryContent(repository);
- final String artifactPath = content.toPath( artifactReference );
- return repository.getAsset(artifactPath);
- }
-
-}
--- /dev/null
+package org.apache.archiva.repository.internal.content;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Maven 1.x request type to classifier mapping for translating to a Maven 2.x storage
+ *
+ * TODO reuse mappings for other repositories
+ *
+ * @since 1.1
+ */
+public class ArtifactClassifierMapping
+{
+ private static final Map<String, String> typeToClassifierMap;
+
+ static
+ {
+ // note additional 's' on type as these are maven 1.x directory components
+ typeToClassifierMap = new HashMap<>( 3 );
+ typeToClassifierMap.put( "java-sources", "sources" );
+ typeToClassifierMap.put( "javadoc.jars", "javadoc" );
+ typeToClassifierMap.put( "javadocs", "javadoc" );
+ }
+
+ public static String getClassifier( String type )
+ {
+ // Try specialized types first.
+ if ( typeToClassifierMap.containsKey( type ) )
+ {
+ return typeToClassifierMap.get( type );
+ }
+
+ // No classifier
+ return null;
+ }
+}
+
--- /dev/null
+package org.apache.archiva.repository.internal.content;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.model.ArtifactReference;
+import org.apache.archiva.repository.ManagedRepository;
+import org.apache.archiva.repository.ManagedRepositoryContent;
+import org.apache.archiva.repository.RepositoryContentFactory;
+import org.apache.archiva.repository.RepositoryException;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/**
+ * Utility class that gives information about the physical location of artifacts.
+ */
+@Service( "ArtifactUtil#default" )
+public class ArtifactUtil {
+
+ @Inject
+ RepositoryContentFactory repositoryContentFactory;
+
+ /**
+ * Returns the physical location of a given artifact in the repository. There is no check for the
+ * existence of the returned file.
+ *
+ * @param repository The repository, where the artifact is stored.
+ * @param artifactReference The artifact reference.
+ * @return The absolute path to the artifact.
+ * @throws RepositoryException if the repository content could not be obtained for the given repository
+ */
+ public Path getArtifactPath(ManagedRepository repository, ArtifactReference artifactReference) throws RepositoryException {
+ final ManagedRepositoryContent content = repositoryContentFactory.getManagedRepositoryContent(repository);
+ final String artifactPath = content.toPath( artifactReference );
+ return Paths.get(repository.getLocation()).resolve(artifactPath);
+ }
+
+ /**
+ * Returns the physical location of a given artifact in the repository. There is no check for the
+ * existence of the returned file.
+ *
+ * @param repository The repository, where the artifact is stored.
+ * @param artifactReference The artifact reference.
+ * @return The asset representation of the artifact.
+ * @throws RepositoryException
+ */
+ public StorageAsset getArtifactAsset(ManagedRepository repository, ArtifactReference artifactReference) throws RepositoryException {
+ final ManagedRepositoryContent content = repositoryContentFactory.getManagedRepositoryContent(repository);
+ final String artifactPath = content.toPath( artifactReference );
+ return repository.getAsset(artifactPath);
+ }
+
+}
--- /dev/null
+package org.apache.archiva.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+import org.apache.archiva.repository.storage.RepositoryStorage;
+
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Locale;
+import java.util.Set;
+
+/**
+ * Base implementation holding the common state of an editable managed repository:
+ * the redeployment-blocking flag, the content instance and the set of active
+ * release schemes.
+ */
+public abstract class AbstractManagedRepository extends AbstractRepository implements EditableManagedRepository
+{
+    private boolean redeploymentBlocked = false;
+    private ManagedRepositoryContent repositoryContent;
+    private final Set<ReleaseScheme> releaseSchemes = new HashSet<>( );
+    // Read-only view handed out by getActiveReleaseSchemes(); tracks releaseSchemes live.
+    private final Set<ReleaseScheme> releaseSchemesView = Collections.unmodifiableSet( releaseSchemes );
+
+    public AbstractManagedRepository(RepositoryType type, String id, String name, RepositoryStorage storage)
+    {
+        super( type, id, name, storage );
+    }
+
+    public AbstractManagedRepository( Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage storage )
+    {
+        super( primaryLocale, type, id, name, storage );
+    }
+
+    @Override
+    public ManagedRepositoryContent getContent( )
+    {
+        return repositoryContent;
+    }
+
+    @Override
+    public void setContent(ManagedRepositoryContent content) {
+        this.repositoryContent = content;
+    }
+
+    @Override
+    public void setBlocksRedeployment( boolean blocksRedeployment )
+    {
+        this.redeploymentBlocked = blocksRedeployment;
+    }
+
+    @Override
+    public boolean blocksRedeployments( )
+    {
+        return redeploymentBlocked;
+    }
+
+    @Override
+    public Set<ReleaseScheme> getActiveReleaseSchemes( )
+    {
+        return releaseSchemesView;
+    }
+
+    @Override
+    public void addActiveReleaseScheme( ReleaseScheme scheme )
+    {
+        releaseSchemes.add( scheme );
+    }
+
+    @Override
+    public void removeActiveReleaseScheme( ReleaseScheme scheme )
+    {
+        releaseSchemes.remove( scheme );
+    }
+
+    @Override
+    public void clearActiveReleaseSchemes( )
+    {
+        releaseSchemes.clear( );
+    }
+
+}
--- /dev/null
+package org.apache.archiva.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+import org.apache.archiva.repository.storage.RepositoryStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
+
+import java.net.URI;
+import java.nio.file.Path;
+import java.time.Duration;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+/**
+ * Abstract implementation of a remote repository. Abstract classes must implement the
+ * features and capabilities by themselves.
+ */
+public abstract class AbstractRemoteRepository extends AbstractRepository implements EditableRemoteRepository
+{
+
+ private RepositoryCredentials credentials;
+ private String checkPath;
+ private Map<String,String> extraParameters = new HashMap<>( );
+ private Map<String,String> uExtraParameters = Collections.unmodifiableMap( extraParameters );
+ private Map<String,String> extraHeaders = new HashMap<>( );
+ private Map<String,String> uExtraHeaders = Collections.unmodifiableMap( extraHeaders );
+ private Duration timeout = Duration.ofSeconds( 60 );
+ private String proxyId;
+ private RemoteRepositoryContent content;
+
+ public AbstractRemoteRepository(RepositoryType type, String id, String name , RepositoryStorage storage)
+ {
+ super( type, id, name, storage );
+ }
+
+ public AbstractRemoteRepository( Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage storage )
+ {
+ super( primaryLocale, type, id, name, storage );
+ }
+
+ @Override
+ public void setCredentials( RepositoryCredentials credentials )
+ {
+ this.credentials = credentials;
+ }
+
+ @Override
+ public void setCheckPath( String path )
+ {
+ this.checkPath = path;
+ }
+
+ @Override
+ public void setExtraParameters( Map<String, String> params )
+ {
+ this.extraParameters.clear();
+ this.extraParameters.putAll(params);
+ }
+
+ @Override
+ public void addExtraParameter( String key, String value )
+ {
+ this.extraParameters.put(key, value);
+ }
+
+ @Override
+ public void setExtraHeaders( Map<String, String> headers )
+ {
+ this.extraHeaders.clear();
+ this.extraHeaders.putAll(headers);
+ }
+
+ @Override
+ public void addExtraHeader( String header, String value )
+ {
+ this.extraHeaders.put(header, value);
+ }
+
+ @Override
+ public void setTimeout( Duration duration )
+ {
+ this.timeout = duration;
+ }
+
+ @Override
+ public RemoteRepositoryContent getContent( )
+ {
+ return content;
+ }
+
+ @Override
+ public void setContent(RemoteRepositoryContent content) {
+ this.content = content;
+ }
+
+ @Override
+ public RepositoryCredentials getLoginCredentials( )
+ {
+ return credentials;
+ }
+
+ @Override
+ public String getCheckPath( )
+ {
+ return checkPath;
+ }
+
+ @Override
+ public Map<String, String> getExtraParameters( )
+ {
+ return uExtraParameters;
+ }
+
+ @Override
+ public Map<String, String> getExtraHeaders( )
+ {
+ return uExtraHeaders;
+ }
+
+ @Override
+ public Duration getTimeout( )
+ {
+ return timeout;
+ }
+
+ /**
+ * Remote repositories resolve always relative to the base directory.
+ * @return
+ */
+ @Override
+ public StorageAsset getLocalPath() {
+ return getStorage().getAsset("");
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder str = new StringBuilder();
+ return str.append("checkPath=").append(checkPath)
+ .append(",creds:").append(credentials).toString();
+ }
+
+ @Override
+ public void setLocation(URI location) {
+ // Location of remote repositories is not for the local filestore
+ super.location = location;
+ }
+}
--- /dev/null
+package org.apache.archiva.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import com.cronutils.model.CronType;
+import com.cronutils.model.definition.CronDefinition;
+import com.cronutils.model.definition.CronDefinitionBuilder;
+import com.cronutils.parser.CronParser;
+import org.apache.archiva.event.Event;
+import org.apache.archiva.event.EventHandler;
+import org.apache.archiva.event.EventManager;
+import org.apache.archiva.event.EventType;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.repository.event.*;
+import org.apache.archiva.repository.storage.RepositoryStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.features.RepositoryFeature;
+import org.apache.archiva.repository.features.StagingRepositoryFeature;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.WritableByteChannel;
+import java.nio.file.CopyOption;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Consumer;
+
+/**
+ * Implementation of a repository with the necessary fields for a bare repository.
+ * No features are provided. Capabilities and features must be implemented by concrete classes.
+ *
+ */
+public abstract class AbstractRepository implements EditableRepository, EventHandler<RepositoryEvent>
+{
+
+    // Package-private so subclasses in the same package can reuse it (kept as-is for compatibility).
+    Logger log = LoggerFactory.getLogger(AbstractRepository.class);
+
+    // Open/closed state; flipped to false exactly once in close().
+    private final AtomicBoolean openStatus = new AtomicBoolean(false);
+
+
+    private final RepositoryType type;
+    private final String id;
+    private Map<Locale, String> names = new HashMap<>( );
+    private Map<Locale, String> descriptions = new HashMap<>( );
+
+    // NOTE(review): new Locale("en_US") treats the whole string as the language code;
+    // new Locale("en", "US") is probably intended. Kept as-is because getPrimaryLocale()
+    // exposes this value to callers -- TODO confirm before changing.
+    private Locale primaryLocale = new Locale("en_US");
+    protected URI location;
+    private URI baseUri;
+    private Set<URI> failoverLocations = new HashSet<>( );
+    // Read-only view returned by getFailoverLocations(); tracks failoverLocations live.
+    private Set<URI> uFailoverLocations = Collections.unmodifiableSet( failoverLocations );
+    private boolean scanned = true;
+    // Default scheduling: every day at 02:00 (Quartz cron syntax).
+    String schedulingDefinition = "0 0 02 * * ?";
+    private String layout = "default";
+    public static final CronDefinition CRON_DEFINITION = CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ);
+
+    private final EventManager eventManager;
+
+    Map<Class<? extends RepositoryFeature<?>>, RepositoryFeature<?>> featureMap = new HashMap<>( );
+
+    private ArchivaIndexingContext indexingContext;
+    private RepositoryStorage storage;
+
+    public AbstractRepository(RepositoryType type, String id, String name, RepositoryStorage repositoryStorage) {
+        this.id = id;
+        this.names.put( primaryLocale, name);
+        this.type = type;
+        this.storage = repositoryStorage;
+        this.location = repositoryStorage.getLocation();
+        this.openStatus.compareAndSet(false, true);
+        this.eventManager = new EventManager(this);
+    }
+
+    public AbstractRepository(Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage repositoryStorage) {
+        setPrimaryLocale( primaryLocale );
+        this.id = id;
+        this.names.put( primaryLocale, name);
+        this.type = type;
+        this.storage = repositoryStorage;
+        this.location = repositoryStorage.getLocation();
+        this.openStatus.compareAndSet(false, true);
+        this.eventManager = new EventManager(this);
+    }
+
+    protected void setPrimaryLocale(Locale locale) {
+        this.primaryLocale = locale;
+    }
+
+    @Override
+    public String getId( )
+    {
+        return id;
+    }
+
+    @Override
+    public String getName( )
+    {
+        return getName( primaryLocale );
+    }
+
+    @Override
+    public String getName( Locale locale )
+    {
+        return names.get(locale);
+    }
+
+    @Override
+    public String getDescription( )
+    {
+        return getDescription( primaryLocale );
+    }
+
+    @Override
+    public String getDescription( Locale locale )
+    {
+        // Fixed: previously looked up the primary locale unconditionally,
+        // ignoring the locale argument.
+        return descriptions.get(locale);
+    }
+
+    @Override
+    public RepositoryType getType( )
+    {
+        return type;
+    }
+
+    @Override
+    public URI getLocation( )
+    {
+        return location;
+    }
+
+    @Override
+    public StorageAsset getLocalPath() {
+        return storage.getAsset("");
+    }
+
+    @Override
+    public Set<URI> getFailoverLocations( )
+    {
+        return uFailoverLocations;
+    }
+
+    @Override
+    public boolean isScanned( )
+    {
+        return scanned;
+    }
+
+    @Override
+    public String getSchedulingDefinition( )
+    {
+        return schedulingDefinition;
+    }
+
+    @Override
+    public abstract boolean hasIndex( );
+
+    @Override
+    public String getLayout( )
+    {
+        return layout;
+    }
+
+    @Override
+    public abstract RepositoryCapabilities getCapabilities( );
+
+    @SuppressWarnings( "unchecked" )
+    @Override
+    public <T extends RepositoryFeature<T>> RepositoryFeature<T> getFeature( Class<T> clazz ) throws UnsupportedFeatureException
+    {
+        if (featureMap.containsKey( clazz )) {
+            return (RepositoryFeature<T>) featureMap.get(clazz);
+        } else
+        {
+            throw new UnsupportedFeatureException( "Feature " + clazz + " not supported" );
+        }
+    }
+
+    @Override
+    public <T extends RepositoryFeature<T>> boolean supportsFeature( Class<T> clazz )
+    {
+        return featureMap.containsKey( clazz );
+    }
+
+    @Override
+    public Locale getPrimaryLocale( )
+    {
+        return primaryLocale;
+    }
+
+    @Override
+    public void setName( Locale locale, String name )
+    {
+        names.put(locale, name);
+    }
+
+    @Override
+    public void setDescription( Locale locale, String description )
+    {
+        descriptions.put(locale, description);
+    }
+
+    @Override
+    public void setLocation( final URI location )
+    {
+        // Only update when the location actually changes; update failures are
+        // logged and leave the old location in place.
+        if (location!=null && ( this.location == null || !this.location.equals(location))) {
+            try {
+                updateLocation(location);
+            } catch (IOException e) {
+                log.error("Could not update location of repository {} to {}", getId(), location, e);
+            }
+        }
+    }
+
+    @Override
+    public void updateLocation(URI newLocation) throws IOException {
+        storage.updateLocation(newLocation);
+        this.location = newLocation;
+    }
+
+    @Override
+    public void addFailoverLocation( URI location )
+    {
+        this.failoverLocations.add(location);
+    }
+
+    @Override
+    public void removeFailoverLocation( URI location )
+    {
+        this.failoverLocations.remove( location );
+    }
+
+    @Override
+    public void clearFailoverLocations( )
+    {
+        this.failoverLocations.clear();
+    }
+
+    @Override
+    public void setScanned( boolean scanned )
+    {
+        this.scanned = scanned;
+    }
+
+    @Override
+    public void setLayout( String layout )
+    {
+        this.layout = layout;
+    }
+
+    @Override
+    public void setBaseUri(URI baseUri) {
+        this.baseUri = baseUri;
+    }
+
+    @Override
+    public void setSchedulingDefinition(String cronExpression) {
+        // Validate non-empty expressions against the Quartz cron definition;
+        // an invalid expression throws before the field is changed.
+        if (StringUtils.isNotEmpty( cronExpression ))
+        {
+            CronParser parser = new CronParser( CRON_DEFINITION );
+            parser.parse( cronExpression ).validate( );
+        }
+        this.schedulingDefinition = cronExpression;
+    }
+
+    @SuppressWarnings( "unchecked" )
+    protected <T extends RepositoryFeature<T>> void addFeature(RepositoryFeature<T> feature) {
+        featureMap.put( (Class<? extends RepositoryFeature<?>>) feature.getClass(), feature);
+    }
+
+    @Override
+    public void setIndexingContext(ArchivaIndexingContext context) {
+        // NOTE(review): a previously set context is replaced without being closed --
+        // verify that callers close the old context, or close it here.
+        this.indexingContext = context;
+    }
+
+    @Override
+    public ArchivaIndexingContext getIndexingContext() {
+        return indexingContext;
+    }
+
+    @Override
+    public void close() {
+        // compareAndSet guarantees the shutdown work runs at most once.
+        if (this.openStatus.compareAndSet(true, false)) {
+            ArchivaIndexingContext ctx = getIndexingContext();
+            if (ctx != null) {
+                try {
+                    ctx.close();
+                } catch (IOException e) {
+                    log.warn("Error during index context close.", e);
+                }
+                this.indexingContext = null;
+
+            }
+            // Cascade the close to an attached staging repository, if any.
+            if (supportsFeature(StagingRepositoryFeature.class)) {
+                StagingRepositoryFeature sf = getFeature(StagingRepositoryFeature.class).get();
+                if (sf.getStagingRepository() != null) {
+                    sf.getStagingRepository().close();
+                }
+            }
+        }
+
+    }
+
+    @Override
+    public boolean isOpen() {
+        return openStatus.get();
+    }
+
+    @Override
+    public void handle(RepositoryEvent event) {
+        // We just rethrow the events
+        eventManager.fireEvent(event);
+    }
+
+    @Override
+    public <T extends Event> void registerEventHandler( EventType<T> eventType, EventHandler<? super T> eventHandler) {
+        if (!EventType.isInstanceOf(eventType, RepositoryEvent.ANY)) {
+            throw new IllegalArgumentException("Can only register RepositoryEvent Handlers");
+        }
+        eventManager.registerEventHandler(eventType, eventHandler);
+    }
+
+    @Override
+    public <T extends Event> void unregisterEventHandler(EventType<T> type, EventHandler<? super T> eventHandler) {
+        eventManager.unregisterEventHandler(type, eventHandler);
+    }
+
+    @Override
+    public StorageAsset getAsset(String path )
+    {
+        return storage.getAsset(path);
+    }
+
+    @Override
+    public StorageAsset addAsset( String path, boolean container )
+    {
+        return storage.addAsset(path, container);
+    }
+
+    @Override
+    public void removeAsset( StorageAsset asset ) throws IOException
+    {
+        storage.removeAsset(asset);
+    }
+
+    @Override
+    public StorageAsset moveAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException
+    {
+        // Fixed: copyOptions were silently dropped before being delegated.
+        return storage.moveAsset(origin, destination, copyOptions);
+    }
+
+    @Override
+    public void moveAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
+    {
+        storage.moveAsset( origin, destination, copyOptions );
+    }
+
+    @Override
+    public StorageAsset copyAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException
+    {
+        // Fixed: copyOptions were silently dropped before being delegated.
+        return storage.copyAsset(origin, destination, copyOptions);
+    }
+
+    @Override
+    public void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
+    {
+        storage.copyAsset( origin, destination, copyOptions);
+    }
+
+    @Override
+    public void consumeData(StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock ) throws IOException
+    {
+        storage.consumeData(asset, consumerFunction, readLock);
+    }
+
+    @Override
+    public void consumeDataFromChannel( StorageAsset asset, Consumer<ReadableByteChannel> consumerFunction, boolean readLock ) throws IOException
+    {
+        storage.consumeDataFromChannel( asset, consumerFunction, readLock );
+    }
+
+    @Override
+    public void writeData( StorageAsset asset, Consumer<OutputStream> consumerFunction, boolean writeLock ) throws IOException
+    {
+        storage.writeData( asset, consumerFunction, writeLock );
+    }
+
+    @Override
+    public void writeDataToChannel( StorageAsset asset, Consumer<WritableByteChannel> consumerFunction, boolean writeLock ) throws IOException
+    {
+        storage.writeDataToChannel( asset, consumerFunction, writeLock );
+    }
+
+    protected void setStorage( RepositoryStorage storage) {
+        this.storage = storage;
+    }
+
+    protected RepositoryStorage getStorage() {
+        return storage;
+    }
+}
--- /dev/null
+package org.apache.archiva.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.repository.storage.RepositoryStorage;
+import org.apache.commons.collections4.map.ListOrderedMap;
+
+import java.util.List;
+import java.util.Locale;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+/**
+ * Abstract repository group implementation. Holds an ordered map of member
+ * repositories; all access is guarded by a read/write lock.
+ */
+public class AbstractRepositoryGroup extends AbstractRepository implements EditableRepositoryGroup {
+
+    // Insertion-ordered member map, keyed by repository id.
+    private final ListOrderedMap<String, ManagedRepository> members = new ListOrderedMap<>();
+
+    private int mergedIndexTTL;
+
+    private final ReadWriteLock membersLock = new ReentrantReadWriteLock();
+
+    private RepositoryCapabilities capabilities;
+
+    public AbstractRepositoryGroup(RepositoryType type, String id, String name, RepositoryStorage storage) {
+        super(type, id, name, storage);
+    }
+
+    public AbstractRepositoryGroup(Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage storage) {
+        super(primaryLocale, type, id, name, storage);
+    }
+
+    @Override
+    public boolean hasIndex() {
+        return true;
+    }
+
+    @Override
+    public RepositoryCapabilities getCapabilities() {
+        return capabilities;
+    }
+
+    @Override
+    public void clearRepositories() {
+        membersLock.writeLock().lock();
+        try {
+            members.clear();
+        } finally {
+            membersLock.writeLock().unlock();
+        }
+    }
+
+    @Override
+    public void setRepositories(List<ManagedRepository> newRepositories) {
+        membersLock.writeLock().lock();
+        try {
+            // Replace the whole membership; null entries are skipped as before.
+            members.clear();
+            for (ManagedRepository member : newRepositories) {
+                if (member != null) {
+                    members.put(member.getId(), member);
+                }
+            }
+        } finally {
+            membersLock.writeLock().unlock();
+        }
+    }
+
+    @Override
+    public void addRepository(ManagedRepository repository) {
+        membersLock.writeLock().lock();
+        try {
+            if (repository != null) {
+                members.put(repository.getId(), repository);
+            }
+        } finally {
+            membersLock.writeLock().unlock();
+        }
+    }
+
+    @Override
+    public void addRepository(int index, ManagedRepository repository) {
+        membersLock.writeLock().lock();
+        try {
+            if (repository != null) {
+                members.put(index, repository.getId(), repository);
+            }
+        } finally {
+            membersLock.writeLock().unlock();
+        }
+    }
+
+    @Override
+    public boolean removeRepository(ManagedRepository repository) {
+        membersLock.writeLock().lock();
+        try {
+            // Removes only if the mapping for the id is exactly this instance.
+            return members.remove(repository.getId(), repository);
+        } finally {
+            membersLock.writeLock().unlock();
+        }
+    }
+
+    @Override
+    public ManagedRepository removeRepository(String repoId) {
+        membersLock.writeLock().lock();
+        try {
+            return members.remove(repoId);
+        } finally {
+            membersLock.writeLock().unlock();
+        }
+    }
+
+    @Override
+    public void setMergedIndexTTL(int timeInSeconds) {
+        this.mergedIndexTTL = timeInSeconds;
+    }
+
+    @Override
+    public List<ManagedRepository> getRepositories() {
+        membersLock.readLock().lock();
+        try {
+            return members.valueList();
+        } finally {
+            membersLock.readLock().unlock();
+        }
+    }
+
+    @Override
+    public boolean contains(ManagedRepository repository) {
+        membersLock.readLock().lock();
+        try {
+            return members.containsValue(repository);
+        } finally {
+            membersLock.readLock().unlock();
+        }
+    }
+
+    @Override
+    public boolean contains(String id) {
+        membersLock.readLock().lock();
+        try {
+            return members.containsKey(id);
+        } finally {
+            membersLock.readLock().unlock();
+        }
+    }
+
+    @Override
+    public int getMergedIndexTTL() {
+        return mergedIndexTTL;
+    }
+
+    protected void setCapabilities(RepositoryCapabilities capabilities) {
+        this.capabilities = capabilities;
+    }
+}
--- /dev/null
+package org.apache.archiva.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.configuration.*;
+import org.apache.archiva.event.Event;
+import org.apache.archiva.event.EventManager;
+import org.apache.archiva.event.EventSource;
+import org.apache.archiva.event.EventType;
+import org.apache.archiva.indexer.*;
+import org.apache.archiva.redback.components.registry.RegistryException;
+import org.apache.archiva.repository.event.*;
+import org.apache.archiva.event.EventHandler;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.features.StagingRepositoryFeature;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import javax.inject.Inject;
+import javax.inject.Named;
+import java.util.*;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
+
+/**
+ * Registry for repositories. This is the central entry point for repositories. It provides methods for
+ * retrieving, adding and removing repositories.
+ * <p>
+ * The modification methods addXX and removeXX persist the changes immediately to the configuration. If the
+ * configuration save fails the changes are rolled back.
+ * <p>
+ * TODO: Audit events
+ *
+ * @since 3.0
+ */
+@Service("repositoryRegistry")
+public class ArchivaRepositoryRegistry implements ConfigurationListener, EventHandler<Event>,
+ RepositoryRegistry
+{
+
+    // NOTE(review): logger is named after the RepositoryRegistry interface, not this
+    // implementation class -- confirm this is intentional.
+    private static final Logger log = LoggerFactory.getLogger(RepositoryRegistry.class);
+
+    /**
+     * We inject all repository providers
+     */
+    @Inject
+    List<RepositoryProvider> repositoryProviders;
+
+    @Inject
+    IndexManagerFactory indexManagerFactory;
+
+    @Inject
+    ArchivaConfiguration archivaConfiguration;
+
+    @Inject
+    @Named("repositoryContentFactory#default")
+    RepositoryContentFactory repositoryContentFactory;
+
+    // Created in the constructor; used to publish registry lifecycle events.
+    private final EventManager eventManager;
+
+
+    // Backing maps plus unmodifiable views that are handed out to callers;
+    // the views track the backing maps live.
+    private Map<String, ManagedRepository> managedRepositories = new HashMap<>();
+    private Map<String, ManagedRepository> uManagedRepository = Collections.unmodifiableMap(managedRepositories);
+
+    private Map<String, RemoteRepository> remoteRepositories = new HashMap<>();
+    private Map<String, RemoteRepository> uRemoteRepositories = Collections.unmodifiableMap(remoteRepositories);
+
+    private Map<String, RepositoryGroup> repositoryGroups = new HashMap<>();
+    private Map<String, RepositoryGroup> uRepositoryGroups = Collections.unmodifiableMap(repositoryGroups);
+
+    // Guards all mutations of the repository maps above.
+    private ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock();
+
+    // When true, configuration change events are ignored (set during self-initiated saves).
+    private volatile boolean ignoreConfigEvents = false;
+
+    public ArchivaRepositoryRegistry() {
+        this.eventManager = new EventManager(this);
+    }
+
+    @Override
+    public void setArchivaConfiguration( ArchivaConfiguration archivaConfiguration ) {
+        this.archivaConfiguration = archivaConfiguration;
+    }
+
+    /**
+     * Loads all repositories and groups from the configuration under the write lock,
+     * then registers this registry as configuration listener and publishes a
+     * RELOADED event (outside the lock).
+     */
+    @PostConstruct
+    private void initialize() {
+        rwLock.writeLock().lock();
+        try {
+            log.debug("Initializing repository registry");
+            updateManagedRepositoriesFromConfig();
+            updateRemoteRepositoriesFromConfig();
+
+            repositoryGroups.clear();
+            // NOTE(review): this local deliberately(?) shadows the repositoryGroups field --
+            // renaming it would improve readability.
+            Map<String, RepositoryGroup> repositoryGroups = getRepositorGroupsFromConfig();
+            this.repositoryGroups.putAll(repositoryGroups);
+
+            // archivaConfiguration.addChangeListener(this);
+            archivaConfiguration.addListener(this);
+        } finally {
+            rwLock.writeLock().unlock();
+        }
+        // Fired after the lock is released so handlers can safely call back into the registry.
+        pushEvent(new RepositoryRegistryEvent(RepositoryRegistryEvent.RELOADED, this));
+    }
+
+    /**
+     * Closes all managed and remote repositories, clears the registry maps and
+     * publishes a DESTROYED event.
+     * NOTE(review): runs without taking rwLock -- acceptable during container
+     * shutdown, but confirm no concurrent readers at this point.
+     */
+    @PreDestroy
+    public void destroy() {
+        for (ManagedRepository rep : managedRepositories.values()) {
+            rep.close();
+        }
+        managedRepositories.clear();
+        for (RemoteRepository repo : remoteRepositories.values()) {
+            repo.close();
+        }
+        remoteRepositories.clear();
+        pushEvent(new RepositoryRegistryEvent(RepositoryRegistryEvent.DESTROYED, this));
+    }
+
+
+ private Map<RepositoryType, RepositoryProvider> createProviderMap() {
+ Map<RepositoryType, RepositoryProvider> map = new HashMap<>();
+ if (repositoryProviders != null) {
+ for (RepositoryProvider provider : repositoryProviders) {
+ for (RepositoryType type : provider.provides()) {
+ map.put(type, provider);
+ }
+ }
+ }
+ return map;
+ }
+
+    /**
+     * Returns the first registered provider that can handle the given repository type.
+     *
+     * @throws RepositoryException if no provider supports the type.
+     */
+    private RepositoryProvider getProvider(RepositoryType type) throws RepositoryException {
+        for (RepositoryProvider provider : repositoryProviders) {
+            if (provider.provides().contains(type)) {
+                return provider;
+            }
+        }
+        throw new RepositoryException("Repository type cannot be handled: " + type);
+    }
+
+    /*
+     * Updates the repositories
+     */
+    private void updateManagedRepositoriesFromConfig() {
+        try {
+
+            // Ids present in the configuration (including staging repos of configured repos);
+            // anything in the registry but not in this set is removed and closed below.
+            Set<String> configRepoIds = new HashSet<>();
+            List<ManagedRepositoryConfiguration> managedRepoConfigs =
+                getArchivaConfiguration().getConfiguration().getManagedRepositories();
+
+            if (managedRepoConfigs == null) {
+                return;
+            }
+
+            for (ManagedRepositoryConfiguration repoConfig : managedRepoConfigs) {
+                ManagedRepository repo = putRepository(repoConfig, null);
+                configRepoIds.add(repoConfig.getId());
+                // A configured staging repository counts as configured too.
+                if (repo.supportsFeature(StagingRepositoryFeature.class)) {
+                    StagingRepositoryFeature stagF = repo.getFeature(StagingRepositoryFeature.class).get();
+                    if (stagF.getStagingRepository() != null) {
+                        configRepoIds.add(stagF.getStagingRepository().getId());
+                    }
+                }
+            }
+            // Drop and close repositories that disappeared from the configuration.
+            List<String> toRemove = managedRepositories.keySet().stream().filter(id -> !configRepoIds.contains(id)).collect(Collectors.toList());
+            for (String id : toRemove) {
+                ManagedRepository removed = managedRepositories.remove(id);
+                removed.close();
+            }
+        } catch (Throwable e) {
+            // NOTE(review): catching Throwable swallows even Errors; consider narrowing.
+            log.error("Could not initialize repositories from config: {}", e.getMessage(), e);
+            return;
+        }
+    }
+
+    /**
+     * Creates a new managed repository instance from the given configuration and
+     * wires up its references (staging repo, content, index context).
+     * NOTE(review): the event handler is registered here AND again inside
+     * updateRepositoryReferences -- confirm duplicate registration is harmless.
+     */
+    private ManagedRepository createNewManagedRepository(RepositoryProvider provider, ManagedRepositoryConfiguration cfg) throws RepositoryException {
+        log.debug("Creating repo {}", cfg.getId());
+        ManagedRepository repo = provider.createManagedInstance(cfg);
+        repo.registerEventHandler(RepositoryEvent.ANY, this);
+        updateRepositoryReferences(provider, repo, cfg, null);
+        return repo;
+
+    }
+
+    /**
+     * Derives the id of the staging repository belonging to the given base repository id.
+     */
+    private String getStagingId(String repoId) {
+        return String.format("%s%s", repoId, StagingRepositoryFeature.STAGING_REPO_POSTFIX);
+    }
+
+    /**
+     * Wires up the references of a managed repository: creates/attaches the staging
+     * repository when the feature requires one, sets the content instance, and
+     * creates the indexing context when indexing is enabled.
+     * If {@code configuration} is non-null, a newly created staging repository is
+     * also persisted into it.
+     */
+    @SuppressWarnings("unchecked")
+    private void updateRepositoryReferences(RepositoryProvider provider, ManagedRepository repo, ManagedRepositoryConfiguration cfg, Configuration configuration) throws RepositoryException {
+        log.debug("Updating references of repo {}", repo.getId());
+        if (repo.supportsFeature(StagingRepositoryFeature.class)) {
+            StagingRepositoryFeature feature = repo.getFeature(StagingRepositoryFeature.class).get();
+            if (feature.isStageRepoNeeded() && feature.getStagingRepository() == null) {
+                // Reuse an already registered staging repo if present, else create one.
+                ManagedRepository stageRepo = getManagedRepository(getStagingId(repo.getId()));
+                if (stageRepo == null) {
+                    stageRepo = getStagingRepository(provider, cfg, configuration);
+                    managedRepositories.put(stageRepo.getId(), stageRepo);
+                    if (configuration != null) {
+                        replaceOrAddRepositoryConfig(provider.getManagedConfiguration(stageRepo), configuration);
+                    }
+                    pushEvent(new LifecycleEvent(LifecycleEvent.REGISTERED, this, stageRepo));
+                }
+                feature.setStagingRepository(stageRepo);
+            }
+        }
+        if (repo instanceof EditableManagedRepository) {
+            EditableManagedRepository editableRepo = (EditableManagedRepository) repo;
+            if (repo.getContent() == null) {
+                editableRepo.setContent(repositoryContentFactory.getManagedRepositoryContent(repo));
+                editableRepo.getContent().setRepository(editableRepo);
+            }
+            log.debug("Index repo: " + repo.hasIndex());
+            // (Re)create the index context when none exists or the old one was closed.
+            if (repo.hasIndex() && ( repo.getIndexingContext() == null || !repo.getIndexingContext().isOpen() )) {
+                log.debug("Creating indexing context for {}", repo.getId());
+                createIndexingContext(editableRepo);
+            }
+        }
+        // NOTE(review): also registered in createNewManagedRepository -- possible duplicate.
+        repo.registerEventHandler(RepositoryEvent.ANY, this);
+    }
+
+ @Override
+ public ArchivaIndexManager getIndexManager( RepositoryType type ) {
+ return indexManagerFactory.getIndexManager(type);
+ }
+
+ private void createIndexingContext(EditableRepository editableRepo) throws RepositoryException {
+ if (editableRepo.supportsFeature(IndexCreationFeature.class)) {
+ ArchivaIndexManager idxManager = getIndexManager(editableRepo.getType());
+ try {
+ editableRepo.setIndexingContext(idxManager.createContext(editableRepo));
+ idxManager.updateLocalIndexPath(editableRepo);
+ } catch (IndexCreationFailedException e) {
+ throw new RepositoryException("Could not create index for repository " + editableRepo.getId() + ": " + e.getMessage(), e);
+ }
+ }
+ }
+
    /**
     * Returns the staging repository for the given base repository configuration. If no staging
     * repository with the derived id is registered yet, a new instance is created from the base
     * configuration and its references are wired up.
     *
     * @param provider the provider responsible for the repository type
     * @param baseRepoCfg the configuration of the base (non-staging) repository
     * @param configuration the global configuration object, may be null
     * @return the existing or newly created staging repository
     * @throws RepositoryException if the staging instance could not be created
     */
    private ManagedRepository getStagingRepository(RepositoryProvider provider, ManagedRepositoryConfiguration baseRepoCfg, Configuration configuration) throws RepositoryException {
        ManagedRepository stageRepo = getManagedRepository(getStagingId(baseRepoCfg.getId()));
        if (stageRepo == null) {
            stageRepo = provider.createStagingInstance(baseRepoCfg);
            if (stageRepo.supportsFeature(StagingRepositoryFeature.class)) {
                // A staging repository must not demand a staging repository itself,
                // otherwise updateRepositoryReferences would recurse.
                stageRepo.getFeature(StagingRepositoryFeature.class).get().setStageRepoNeeded(false);
            }
            ManagedRepositoryConfiguration stageCfg = provider.getManagedConfiguration(stageRepo);
            updateRepositoryReferences(provider, stageRepo, stageCfg, configuration);
        }
        return stageRepo;
    }
+
+
    /**
     * Synchronizes the registered remote repositories with the current configuration:
     * adds or updates a repository for every configured entry, then closes and removes
     * repositories that are no longer configured. Errors are logged, not propagated,
     * so registry initialization never fails hard.
     */
    private void updateRemoteRepositoriesFromConfig() {
        try {
            List<RemoteRepositoryConfiguration> remoteRepoConfigs =
                getArchivaConfiguration().getConfiguration().getRemoteRepositories();

            if (remoteRepoConfigs == null) {
                return;
            }
            Set<String> repoIds = new HashSet<>();
            for (RemoteRepositoryConfiguration repoConfig : remoteRepoConfigs) {
                // putRepository with a null Configuration updates the registry only,
                // without touching the configuration object.
                putRepository(repoConfig, null);
                repoIds.add(repoConfig.getId());
            }

            // Close and drop every registered repository that is not configured anymore.
            List<String> toRemove = remoteRepositories.keySet().stream().filter(id -> !repoIds.contains(id)).collect(Collectors.toList());
            for (String id : toRemove) {
                RemoteRepository removed = remoteRepositories.remove(id);
                removed.close();
            }

        } catch (Throwable e) {
            // Deliberately broad: initialization must not be aborted by a single bad entry.
            log.error("Could not initialize remote repositories from config: {}", e.getMessage(), e);
            return;
        }
    }
+
+ private RemoteRepository createNewRemoteRepository(RepositoryProvider provider, RemoteRepositoryConfiguration cfg) throws RepositoryException {
+ log.debug("Creating remote repo {}", cfg.getId());
+ RemoteRepository repo = provider.createRemoteInstance(cfg);
+ updateRepositoryReferences(provider, repo, cfg, null);
+ return repo;
+
+ }
+
    /**
     * Wires up a remote repository: sets the content implementation on editable instances that
     * have none yet, creates the indexing context on first initialization, and registers this
     * registry as event handler.
     * NOTE(review): the indexing context is only created while the content is still null,
     * i.e. on first initialization — confirm this is intended for re-configured repositories.
     *
     * @param provider the provider responsible for the repository type (currently unused here)
     * @param repo the remote repository to wire up
     * @param cfg the configuration the repository was created from (currently unused here)
     * @param configuration the global configuration object (currently unused here)
     * @throws RepositoryException if content or index creation fails
     */
    private void updateRepositoryReferences(RepositoryProvider provider, RemoteRepository repo, RemoteRepositoryConfiguration cfg, Configuration configuration) throws RepositoryException {
        if (repo instanceof EditableRemoteRepository && repo.getContent() == null) {
            EditableRemoteRepository editableRepo = (EditableRemoteRepository) repo;
            editableRepo.setContent(repositoryContentFactory.getRemoteRepositoryContent(repo));
            if (repo.supportsFeature(IndexCreationFeature.class) && repo.getIndexingContext() == null) {
                createIndexingContext(editableRepo);
            }
        }
        repo.registerEventHandler(RepositoryEvent.ANY, this);
    }
+
    /**
     * Builds the map of repository groups from the current configuration. Groups whose type has
     * no registered provider are skipped silently; an error while creating a single group is
     * logged and does not abort the whole initialization.
     * NOTE(review): the method name is missing a 'y' (getRepositor[y]GroupsFromConfig);
     * renaming would require updating all callers.
     *
     * @return map of group id to repository group, in configuration order; empty map on error
     */
    private Map<String, RepositoryGroup> getRepositorGroupsFromConfig() {
        try {
            List<RepositoryGroupConfiguration> repositoryGroupConfigurations =
                getArchivaConfiguration().getConfiguration().getRepositoryGroups();

            if (repositoryGroupConfigurations == null) {
                return Collections.emptyMap();
            }

            // LinkedHashMap keeps the configuration order of the groups.
            Map<String, RepositoryGroup> repositoryGroupMap = new LinkedHashMap<>(repositoryGroupConfigurations.size());

            Map<RepositoryType, RepositoryProvider> providerMap = createProviderMap();
            for (RepositoryGroupConfiguration repoConfig : repositoryGroupConfigurations) {
                RepositoryType repositoryType = RepositoryType.valueOf(repoConfig.getType());
                if (providerMap.containsKey(repositoryType)) {
                    try {
                        RepositoryGroup repo = createNewRepositoryGroup(providerMap.get(repositoryType), repoConfig);
                        repositoryGroupMap.put(repo.getId(), repo);
                    } catch (Exception e) {
                        // Keep going: one broken group must not break the others.
                        log.error("Could not create repository group {}: {}", repoConfig.getId(), e.getMessage(), e);
                    }
                }
            }
            return repositoryGroupMap;
        } catch (Throwable e) {
            // Deliberately broad: initialization must never fail hard.
            log.error("Could not initialize repositories from config: {}", e.getMessage(), e);
            return Collections.emptyMap();
        }
    }
+
+ private RepositoryGroup createNewRepositoryGroup(RepositoryProvider provider, RepositoryGroupConfiguration config) throws RepositoryException {
+ RepositoryGroup repositoryGroup = provider.createRepositoryGroup(config);
+ repositoryGroup.registerEventHandler(RepositoryEvent.ANY, this);
+ updateRepositoryReferences(provider, repositoryGroup, config);
+ return repositoryGroup;
+ }
+
+ private void updateRepositoryReferences(RepositoryProvider provider, RepositoryGroup group, RepositoryGroupConfiguration configuration) {
+ if (group instanceof EditableRepositoryGroup) {
+ EditableRepositoryGroup eGroup = (EditableRepositoryGroup) group;
+ eGroup.setRepositories(configuration.getRepositories().stream().map(r -> getManagedRepository(r)).collect(Collectors.toList()));
+ }
+ }
+
+ private ArchivaConfiguration getArchivaConfiguration() {
+ return this.archivaConfiguration;
+ }
+
+ /**
+ * Returns all repositories that are registered. There is no defined order of the returned repositories.
+ *
+ * @return a list of managed and remote repositories
+ */
+ @Override
+ public Collection<Repository> getRepositories( ) {
+ rwLock.readLock().lock();
+ try {
+ return Stream.concat(managedRepositories.values().stream(), remoteRepositories.values().stream()).collect(Collectors.toList());
+ } finally {
+ rwLock.readLock().unlock();
+ }
+ }
+
+ /**
+ * Returns only the managed repositories. There is no defined order of the returned repositories.
+ *
+ * @return a list of managed repositories
+ */
+ @Override
+ public Collection<ManagedRepository> getManagedRepositories( ) {
+ rwLock.readLock().lock();
+ try {
+ return uManagedRepository.values();
+ } finally {
+ rwLock.readLock().unlock();
+ }
+ }
+
+ /**
+ * Returns only the remote repositories. There is no defined order of the returned repositories.
+ *
+ * @return a list of remote repositories
+ */
+ @Override
+ public Collection<RemoteRepository> getRemoteRepositories( ) {
+ rwLock.readLock().lock();
+ try {
+ return uRemoteRepositories.values();
+ } finally {
+ rwLock.readLock().unlock();
+ }
+ }
+
+ @Override
+ public Collection<RepositoryGroup> getRepositoryGroups( ) {
+ rwLock.readLock().lock();
+ try {
+ return uRepositoryGroups.values();
+ } finally {
+ rwLock.readLock().unlock();
+ }
+ }
+
+ /**
+ * Returns the repository with the given id. The returned repository may be a managed or remote repository.
+ * It returns null, if no repository is registered with the given id.
+ *
+ * @param repoId the repository id
+ * @return the repository if found, otherwise null
+ */
+ @Override
+ public Repository getRepository( String repoId ) {
+ rwLock.readLock().lock();
+ try {
+ log.debug("getRepository {}", repoId);
+ if (managedRepositories.containsKey(repoId)) {
+ log.debug("Managed repo");
+ return managedRepositories.get(repoId);
+ } else if (remoteRepositories.containsKey(repoId)) {
+ log.debug("Remote repo");
+ return remoteRepositories.get(repoId);
+ } else if (repositoryGroups.containsKey(repoId)) {
+ return repositoryGroups.get(repoId);
+ } else {
+ return null;
+ }
+ } finally {
+ rwLock.readLock().unlock();
+ }
+ }
+
+ /**
+ * Convenience method, that returns the managed repository with the given id.
+ * It returns null, if no managed repository is registered with this id.
+ *
+ * @param repoId the repository id
+ * @return the managed repository if found, otherwise null
+ */
+ @Override
+ public ManagedRepository getManagedRepository( String repoId ) {
+ rwLock.readLock().lock();
+ try {
+ return managedRepositories.get(repoId);
+ } finally {
+ rwLock.readLock().unlock();
+ }
+ }
+
+ /**
+ * Convenience method, that returns the remote repository with the given id.
+ * It returns null, if no remote repository is registered with this id.
+ *
+ * @param repoId the repository id
+ * @return the remote repository if found, otherwise null
+ */
+ @Override
+ public RemoteRepository getRemoteRepository( String repoId ) {
+ rwLock.readLock().lock();
+ try {
+ return remoteRepositories.get(repoId);
+ } finally {
+ rwLock.readLock().unlock();
+ }
+ }
+
+ @Override
+ public RepositoryGroup getRepositoryGroup( String groupId ) {
+ rwLock.readLock().lock();
+ try {
+ return repositoryGroups.get(groupId);
+ } finally {
+ rwLock.readLock().unlock();
+ }
+ }
+
    /*
     * Saves the given configuration while suppressing this instance's own configuration events.
     * The <code>ignoreConfigEvents</code> works only for synchronized configuration events.
     * If the configuration throws async events, we cannot know, if the event is caused by this instance or another thread.
     */
    private void saveConfiguration(Configuration configuration) throws IndeterminateConfigurationException, RegistryException {
        ignoreConfigEvents = true;
        try {
            getArchivaConfiguration().save(configuration);
        } finally {
            // Always re-enable event handling, even when the save fails.
            ignoreConfigEvents = false;
        }
    }
+
+ /**
+ * Adds a new repository to the current list, or replaces the repository definition with
+ * the same id, if it exists already.
+ * The change is saved to the configuration immediately.
+ *
+ * @param managedRepository the new repository.
+ * @throws RepositoryException if the new repository could not be saved to the configuration.
+ */
+ @Override
+ public ManagedRepository putRepository( ManagedRepository managedRepository ) throws RepositoryException {
+ rwLock.writeLock().lock();
+ try {
+ final String id = managedRepository.getId();
+ if (remoteRepositories.containsKey(id)) {
+ throw new RepositoryException("There exists a remote repository with id " + id + ". Could not update with managed repository.");
+ }
+ ManagedRepository originRepo = managedRepositories.put(id, managedRepository);
+ try {
+ if (originRepo != null && originRepo != managedRepository) {
+ originRepo.close();
+ }
+ RepositoryProvider provider = getProvider(managedRepository.getType());
+ ManagedRepositoryConfiguration newCfg = provider.getManagedConfiguration(managedRepository);
+ Configuration configuration = getArchivaConfiguration().getConfiguration();
+ updateRepositoryReferences(provider, managedRepository, newCfg, configuration);
+ ManagedRepositoryConfiguration oldCfg = configuration.findManagedRepositoryById(id);
+ if (oldCfg != null) {
+ configuration.removeManagedRepository(oldCfg);
+ }
+ configuration.addManagedRepository(newCfg);
+ saveConfiguration(configuration);
+ if (originRepo != managedRepository) {
+ pushEvent(new LifecycleEvent(LifecycleEvent.REGISTERED, this, managedRepository));
+ } else {
+ pushEvent(new LifecycleEvent(LifecycleEvent.UPDATED, this, managedRepository));
+ }
+ return managedRepository;
+ } catch (Exception e) {
+ // Rollback only partly, because repository is closed already
+ if (originRepo != null) {
+ managedRepositories.put(id, originRepo);
+ } else {
+ managedRepositories.remove(id);
+ }
+ log.error("Exception during configuration update {}", e.getMessage(), e);
+ throw new RepositoryException("Could not save the configuration" + (e.getMessage() == null ? "" : ": " + e.getMessage()));
+ }
+ } finally {
+ rwLock.writeLock().unlock();
+ }
+ }
+
    /**
     * Adds a new repository or updates the repository with the same id, if it exists already.
     * The configuration is saved immediately; on save failure the previous repository
     * configuration is restored on the instance.
     * NOTE(review): the thrown RepositoryException does not chain the cause `e`; the rollback
     * cast to EditableManagedRepository may fail for non-editable implementations — confirm.
     *
     * @param managedRepositoryConfiguration the repository configuration
     * @return the updated or created repository
     * @throws RepositoryException if an error occurs, or the configuration is not valid.
     */
    @Override
    public ManagedRepository putRepository( ManagedRepositoryConfiguration managedRepositoryConfiguration ) throws RepositoryException {
        rwLock.writeLock().lock();
        try {
            final String id = managedRepositoryConfiguration.getId();
            final RepositoryType repositoryType = RepositoryType.valueOf(managedRepositoryConfiguration.getType());
            Configuration configuration = getArchivaConfiguration().getConfiguration();
            ManagedRepository repo = managedRepositories.get(id);
            // Snapshot the old configuration to be able to roll the instance back on save failure.
            ManagedRepositoryConfiguration oldCfg = repo != null ? getProvider(repositoryType).getManagedConfiguration(repo) : null;
            repo = putRepository(managedRepositoryConfiguration, configuration);
            try {
                saveConfiguration(configuration);
            } catch (IndeterminateConfigurationException | RegistryException e) {
                if (oldCfg != null) {
                    getProvider(repositoryType).updateManagedInstance((EditableManagedRepository) repo, oldCfg);
                }
                log.error("Could not save the configuration for repository {}: {}", id, e.getMessage(), e);
                throw new RepositoryException("Could not save the configuration for repository " + id + ": " + e.getMessage());
            }
            return repo;
        } finally {
            rwLock.writeLock().unlock();
        }

    }
+
    /**
     * Adds a new repository or updates the repository with the same id. The given configuration
     * object is updated, but the configuration is not saved.
     *
     * @param managedRepositoryConfiguration the new or changed managed repository configuration
     * @param configuration the configuration object (may be <code>null</code>; then only the
     *                      registry is updated)
     * @return the new or updated repository
     * @throws RepositoryException if the configuration cannot be saved or updated
     */
    @Override
    public ManagedRepository putRepository( ManagedRepositoryConfiguration managedRepositoryConfiguration, Configuration configuration ) throws RepositoryException {
        rwLock.writeLock().lock();
        try {
            final String id = managedRepositoryConfiguration.getId();
            final RepositoryType repoType = RepositoryType.valueOf(managedRepositoryConfiguration.getType());
            ManagedRepository repo;
            boolean registeredNew = false;
            repo = managedRepositories.get(id);
            // Reuse and update an existing open instance; closed or missing instances are recreated.
            if (repo != null && repo.isOpen()) {
                if (repo instanceof EditableManagedRepository) {
                    getProvider(repoType).updateManagedInstance((EditableManagedRepository) repo, managedRepositoryConfiguration);
                } else {
                    throw new RepositoryException("The repository is not editable " + id);
                }
            } else {
                repo = getProvider(repoType).createManagedInstance(managedRepositoryConfiguration);
                managedRepositories.put(id, repo);
                registeredNew = true;
            }
            updateRepositoryReferences(getProvider(repoType), repo, managedRepositoryConfiguration, configuration);
            replaceOrAddRepositoryConfig(managedRepositoryConfiguration, configuration);
            if (registeredNew) {
                pushEvent(new LifecycleEvent(LifecycleEvent.REGISTERED, this, repo));
            } else {
                pushEvent(new LifecycleEvent(LifecycleEvent.UPDATED, this, repo));
            }
            return repo;
        } finally {
            rwLock.writeLock().unlock();
        }
    }
+
+
+ /**
+ * Adds a new repository group to the current list, or replaces the repository group definition with
+ * the same id, if it exists already.
+ * The change is saved to the configuration immediately.
+ *
+ * @param repositoryGroup the new repository group.
+ * @throws RepositoryException if the new repository group could not be saved to the configuration.
+ */
+ @Override
+ public RepositoryGroup putRepositoryGroup( RepositoryGroup repositoryGroup ) throws RepositoryException {
+ rwLock.writeLock().lock();
+ try {
+ final String id = repositoryGroup.getId();
+ RepositoryGroup originRepoGroup = repositoryGroups.put(id, repositoryGroup);
+ try {
+ if (originRepoGroup != null && originRepoGroup != repositoryGroup) {
+ originRepoGroup.close();
+ }
+ RepositoryProvider provider = getProvider(repositoryGroup.getType());
+ RepositoryGroupConfiguration newCfg = provider.getRepositoryGroupConfiguration(repositoryGroup);
+ Configuration configuration = getArchivaConfiguration().getConfiguration();
+ updateRepositoryReferences(provider, repositoryGroup, newCfg);
+ RepositoryGroupConfiguration oldCfg = configuration.findRepositoryGroupById(id);
+ if (oldCfg != null) {
+ configuration.removeRepositoryGroup(oldCfg);
+ }
+ configuration.addRepositoryGroup(newCfg);
+ saveConfiguration(configuration);
+ return repositoryGroup;
+ } catch (Exception e) {
+ // Rollback
+ if (originRepoGroup != null) {
+ repositoryGroups.put(id, originRepoGroup);
+ } else {
+ repositoryGroups.remove(id);
+ }
+ log.error("Exception during configuration update {}", e.getMessage(), e);
+ throw new RepositoryException("Could not save the configuration" + (e.getMessage() == null ? "" : ": " + e.getMessage()));
+ }
+ } finally {
+ rwLock.writeLock().unlock();
+ }
+ }
+
    /**
     * Adds a new repository group or updates the repository with the same id, if it exists
     * already. The configuration is saved immediately; on save failure the previous group
     * configuration is restored on the instance.
     * NOTE(review): the thrown RepositoryException does not chain the cause `e`; the rollback
     * cast to EditableRepositoryGroup may fail for non-editable implementations — confirm.
     *
     * @param repositoryGroupConfiguration the repository configuration
     * @return the updated or created repository
     * @throws RepositoryException if an error occurs, or the configuration is not valid.
     */
    @Override
    public RepositoryGroup putRepositoryGroup( RepositoryGroupConfiguration repositoryGroupConfiguration ) throws RepositoryException {
        rwLock.writeLock().lock();
        try {
            final String id = repositoryGroupConfiguration.getId();
            final RepositoryType repositoryType = RepositoryType.valueOf(repositoryGroupConfiguration.getType());
            Configuration configuration = getArchivaConfiguration().getConfiguration();
            RepositoryGroup repo = repositoryGroups.get(id);
            // Snapshot the old configuration to roll the instance back on save failure.
            RepositoryGroupConfiguration oldCfg = repo != null ? getProvider(repositoryType).getRepositoryGroupConfiguration(repo) : null;
            repo = putRepositoryGroup(repositoryGroupConfiguration, configuration);
            try {
                saveConfiguration(configuration);
            } catch (IndeterminateConfigurationException | RegistryException e) {
                if (oldCfg != null) {
                    getProvider(repositoryType).updateRepositoryGroupInstance((EditableRepositoryGroup) repo, oldCfg);
                }
                log.error("Could not save the configuration for repository group {}: {}", id, e.getMessage(), e);
                throw new RepositoryException("Could not save the configuration for repository group " + id + ": " + e.getMessage());
            }
            return repo;
        } finally {
            rwLock.writeLock().unlock();
        }

    }
+
    /**
     * Adds a new repository group or updates the repository group with the same id. The given
     * configuration object is updated, but the configuration is not saved.
     *
     * @param repositoryGroupConfiguration The configuration of the new or changed repository group.
     * @param configuration The configuration object. If it is <code>null</code>, the configuration is not saved.
     * @return The new or updated repository group
     * @throws RepositoryException if the configuration cannot be saved or updated
     */
    @Override
    public RepositoryGroup putRepositoryGroup( RepositoryGroupConfiguration repositoryGroupConfiguration, Configuration configuration ) throws RepositoryException {
        rwLock.writeLock().lock();
        try {
            final String id = repositoryGroupConfiguration.getId();
            final RepositoryType repoType = RepositoryType.valueOf(repositoryGroupConfiguration.getType());
            RepositoryGroup repo;
            // Fill in defaults (merged index path/TTL, cron) before creating or updating.
            setRepositoryGroupDefaults(repositoryGroupConfiguration);
            if (repositoryGroups.containsKey(id)) {
                repo = repositoryGroups.get(id);
                if (repo instanceof EditableRepositoryGroup) {
                    getProvider(repoType).updateRepositoryGroupInstance((EditableRepositoryGroup) repo, repositoryGroupConfiguration);
                } else {
                    throw new RepositoryException("The repository is not editable " + id);
                }
            } else {
                repo = getProvider(repoType).createRepositoryGroup(repositoryGroupConfiguration);
                repositoryGroups.put(id, repo);
            }
            updateRepositoryReferences(getProvider(repoType), repo, repositoryGroupConfiguration);
            replaceOrAddRepositoryConfig(repositoryGroupConfiguration, configuration);
            return repo;
        } finally {
            rwLock.writeLock().unlock();
        }
    }
+
+ private void setRepositoryGroupDefaults(RepositoryGroupConfiguration repositoryGroupConfiguration) {
+ if (StringUtils.isEmpty(repositoryGroupConfiguration.getMergedIndexPath())) {
+ repositoryGroupConfiguration.setMergedIndexPath(DEFAULT_INDEX_PATH);
+ }
+ if (repositoryGroupConfiguration.getMergedIndexTtl() <= 0) {
+ repositoryGroupConfiguration.setMergedIndexTtl(300);
+ }
+ if (StringUtils.isEmpty(repositoryGroupConfiguration.getCronExpression())) {
+ repositoryGroupConfiguration.setCronExpression("0 0 03 ? * MON");
+ }
+ }
+
+ private void replaceOrAddRepositoryConfig(ManagedRepositoryConfiguration managedRepositoryConfiguration, Configuration configuration) {
+ if (configuration != null) {
+ ManagedRepositoryConfiguration oldCfg = configuration.findManagedRepositoryById(managedRepositoryConfiguration.getId());
+ if (oldCfg != null) {
+ configuration.removeManagedRepository(oldCfg);
+ }
+ configuration.addManagedRepository(managedRepositoryConfiguration);
+ }
+ }
+
+ private void replaceOrAddRepositoryConfig(RemoteRepositoryConfiguration remoteRepositoryConfiguration, Configuration configuration) {
+ if (configuration != null) {
+ RemoteRepositoryConfiguration oldCfg = configuration.findRemoteRepositoryById(remoteRepositoryConfiguration.getId());
+ if (oldCfg != null) {
+ configuration.removeRemoteRepository(oldCfg);
+ }
+ configuration.addRemoteRepository(remoteRepositoryConfiguration);
+ }
+ }
+
+ private void replaceOrAddRepositoryConfig(RepositoryGroupConfiguration repositoryGroupConfiguration, Configuration configuration) {
+ RepositoryGroupConfiguration oldCfg = configuration.findRepositoryGroupById(repositoryGroupConfiguration.getId());
+ if (oldCfg != null) {
+ configuration.removeRepositoryGroup(oldCfg);
+ }
+ configuration.addRepositoryGroup(repositoryGroupConfiguration);
+ }
+
    /**
     * Adds a new remote repository or replaces the instance with the same id. The given
     * configuration object is updated, but the configuration is not saved. On failure the
     * registry map and the configuration entry are rolled back.
     *
     * @param remoteRepository the remote repository to register
     * @param configuration the configuration object to update
     * @return the registered remote repository
     * @throws RepositoryException if a managed repository with the same id exists, or the update fails
     */
    @Override
    public RemoteRepository putRepository( RemoteRepository remoteRepository, Configuration configuration ) throws RepositoryException {
        rwLock.writeLock().lock();
        try {
            final String id = remoteRepository.getId();
            if (managedRepositories.containsKey(id)) {
                throw new RepositoryException("There exists a managed repository with id " + id + ". Could not update with remote repository.");
            }
            RemoteRepository originRepo = remoteRepositories.put(id, remoteRepository);
            RemoteRepositoryConfiguration oldCfg = null;
            RemoteRepositoryConfiguration newCfg;
            try {
                if (originRepo != null && originRepo != remoteRepository) {
                    originRepo.close();
                }
                final RepositoryProvider provider = getProvider(remoteRepository.getType());
                newCfg = provider.getRemoteConfiguration(remoteRepository);
                updateRepositoryReferences(provider, remoteRepository, newCfg, configuration);
                oldCfg = configuration.findRemoteRepositoryById(id);
                if (oldCfg != null) {
                    configuration.removeRemoteRepository(oldCfg);
                }
                configuration.addRemoteRepository(newCfg);
                if (remoteRepository != originRepo) {
                    pushEvent(new LifecycleEvent(LifecycleEvent.REGISTERED, this, remoteRepository));
                } else {
                    pushEvent(new LifecycleEvent(LifecycleEvent.UPDATED, this, remoteRepository));
                }
                return remoteRepository;
            } catch (Exception e) {
                // Rollback: restore the previous registry entry and configuration entry.
                // NOTE(review): the thrown RepositoryException does not chain the cause `e`.
                if (originRepo != null) {
                    remoteRepositories.put(id, originRepo);
                } else {
                    remoteRepositories.remove(id);
                }
                if (oldCfg != null) {
                    RemoteRepositoryConfiguration cfg = configuration.findRemoteRepositoryById(id);
                    if (cfg != null) {
                        configuration.removeRemoteRepository(cfg);
                        configuration.addRemoteRepository(oldCfg);
                    }
                }
                log.error("Error while adding remote repository {}", e.getMessage(), e);
                throw new RepositoryException("Could not save the configuration" + (e.getMessage() == null ? "" : ": " + e.getMessage()));
            }
        } finally {
            rwLock.writeLock().unlock();
        }
    }
+
    /**
     * Adds a remote repository, or overwrites the repository definition with the same id,
     * if it exists already. The modification is saved to the configuration immediately.
     * NOTE(review): the thrown RepositoryException does not chain the cause `e`.
     *
     * @param remoteRepository the remote repository to add
     * @return the registered remote repository
     * @throws RepositoryException if an error occurs during configuration save
     */
    @Override
    public RemoteRepository putRepository( RemoteRepository remoteRepository ) throws RepositoryException {
        rwLock.writeLock().lock();
        try {
            Configuration configuration = getArchivaConfiguration().getConfiguration();
            try {
                RemoteRepository repo = putRepository(remoteRepository, configuration);
                saveConfiguration(configuration);
                return repo;
            } catch (RegistryException | IndeterminateConfigurationException e) {
                log.error("Error while saving remote repository {}", e.getMessage(), e);
                throw new RepositoryException("Could not save the configuration" + (e.getMessage() == null ? "" : ": " + e.getMessage()));
            }
        } finally {
            rwLock.writeLock().unlock();
        }
    }
+
    /**
     * Adds a new repository or updates the repository with the same id, if it exists already.
     * The configuration is saved immediately; on save failure the previous repository
     * configuration is restored on the instance.
     * NOTE(review): the thrown RepositoryException does not chain the cause `e`; the rollback
     * cast to EditableRemoteRepository may fail for non-editable implementations — confirm.
     *
     * @param remoteRepositoryConfiguration the repository configuration
     * @return the updated or created repository
     * @throws RepositoryException if an error occurs, or the configuration is not valid.
     */
    @Override
    public RemoteRepository putRepository( RemoteRepositoryConfiguration remoteRepositoryConfiguration ) throws RepositoryException {
        rwLock.writeLock().lock();
        try {
            final String id = remoteRepositoryConfiguration.getId();
            final RepositoryType repositoryType = RepositoryType.valueOf(remoteRepositoryConfiguration.getType());
            Configuration configuration = getArchivaConfiguration().getConfiguration();
            RemoteRepository repo = remoteRepositories.get(id);
            // Snapshot the old configuration to roll the instance back on save failure.
            RemoteRepositoryConfiguration oldCfg = repo != null ? getProvider(repositoryType).getRemoteConfiguration(repo) : null;
            repo = putRepository(remoteRepositoryConfiguration, configuration);
            try {
                saveConfiguration(configuration);
            } catch (IndeterminateConfigurationException | RegistryException e) {
                if (oldCfg != null) {
                    getProvider(repositoryType).updateRemoteInstance((EditableRemoteRepository) repo, oldCfg);
                }
                log.error("Could not save the configuration for repository {}: {}", id, e.getMessage(), e);
                throw new RepositoryException("Could not save the configuration for repository " + id + ": " + e.getMessage());
            }
            return repo;
        } finally {
            rwLock.writeLock().unlock();
        }

    }
+
    /**
     * Adds a new repository or updates the repository with the same id. The given configuration
     * object is updated, but the configuration is not saved.
     *
     * @param remoteRepositoryConfiguration the new or changed repository configuration
     * @param configuration the configuration object (may be <code>null</code>; then only the
     *                      registry is updated)
     * @return the new or updated repository
     * @throws RepositoryException if the configuration cannot be saved or updated
     */
    @Override
    @SuppressWarnings("unchecked")
    public RemoteRepository putRepository( RemoteRepositoryConfiguration remoteRepositoryConfiguration, Configuration configuration ) throws RepositoryException {
        rwLock.writeLock().lock();
        try {
            final String id = remoteRepositoryConfiguration.getId();
            final RepositoryType repoType = RepositoryType.valueOf(remoteRepositoryConfiguration.getType());
            RemoteRepository repo;
            boolean registeredNew = false;
            repo = remoteRepositories.get(id);
            // Reuse and update an existing open instance; closed or missing instances are recreated.
            if (repo != null && repo.isOpen()) {
                if (repo instanceof EditableRemoteRepository) {
                    getProvider(repoType).updateRemoteInstance((EditableRemoteRepository) repo, remoteRepositoryConfiguration);
                } else {
                    throw new RepositoryException("The repository is not editable " + id);
                }
            } else {
                repo = getProvider(repoType).createRemoteInstance(remoteRepositoryConfiguration);
                remoteRepositories.put(id, repo);
                registeredNew = true;
            }
            updateRepositoryReferences(getProvider(repoType), repo, remoteRepositoryConfiguration, configuration);
            replaceOrAddRepositoryConfig(remoteRepositoryConfiguration, configuration);
            if (registeredNew) {
                pushEvent(new LifecycleEvent(LifecycleEvent.REGISTERED, this, repo));
            } else {
                pushEvent(new LifecycleEvent(LifecycleEvent.UPDATED, this, repo));
            }
            return repo;
        } finally {
            rwLock.writeLock().unlock();
        }


    }
+
+ @Override
+ public void removeRepository( String repoId ) throws RepositoryException {
+ Repository repo = getRepository(repoId);
+ if (repo != null) {
+ removeRepository(repo);
+ }
+ }
+
+ @Override
+ public void removeRepository( Repository repo ) throws RepositoryException {
+ if (repo == null) {
+ log.warn("Trying to remove null repository");
+ return;
+ }
+ if (repo instanceof RemoteRepository) {
+ removeRepository((RemoteRepository) repo);
+ } else if (repo instanceof ManagedRepository) {
+ removeRepository((ManagedRepository) repo);
+ } else if (repo instanceof RepositoryGroup) {
+ removeRepositoryGroup((RepositoryGroup) repo);
+ } else {
+ throw new RepositoryException("Repository type not known: " + repo.getClass());
+ }
+ }
+
    /**
     * Removes a managed repository from the registry and configuration, if it exists.
     * The repository is closed, detached from all repository groups, and the change is
     * saved to the configuration immediately. On save failure the registry entry is restored.
     * NOTE(review): the id is re-checked under the write lock (the first lookup is only a
     * fast path); if the repository vanished in between, the UNREGISTERED event is still
     * pushed with a null repository — confirm listeners tolerate that. The thrown
     * RepositoryException does not chain the cause `e`.
     *
     * @param managedRepository the managed repository to remove
     * @throws RepositoryException if a error occurs during configuration save
     */
    @Override
    public void removeRepository( ManagedRepository managedRepository ) throws RepositoryException {
        if (managedRepository == null) {
            return;
        }
        final String id = managedRepository.getId();
        ManagedRepository repo = getManagedRepository(id);
        if (repo != null) {
            rwLock.writeLock().lock();
            try {
                repo = managedRepositories.remove(id);
                if (repo != null) {
                    repo.close();
                    removeRepositoryFromGroups(repo);
                    Configuration configuration = getArchivaConfiguration().getConfiguration();
                    ManagedRepositoryConfiguration cfg = configuration.findManagedRepositoryById(id);
                    if (cfg != null) {
                        configuration.removeManagedRepository(cfg);
                    }
                    saveConfiguration(configuration);
                }
                pushEvent(new LifecycleEvent(LifecycleEvent.UNREGISTERED, this, repo));
            } catch (RegistryException | IndeterminateConfigurationException e) {
                // Rollback
                log.error("Could not save config after repository removal: {}", e.getMessage(), e);
                managedRepositories.put(repo.getId(), repo);
                throw new RepositoryException("Could not save configuration after repository removal: " + e.getMessage());
            } finally {
                rwLock.writeLock().unlock();
            }
        }
    }
+
+ private void removeRepositoryFromGroups(ManagedRepository repo) {
+ if (repo != null) {
+ repositoryGroups.values().stream().filter(repoGroup -> repoGroup instanceof EditableRepository).
+ map(repoGroup -> (EditableRepositoryGroup) repoGroup).forEach(repoGroup -> repoGroup.removeRepository(repo));
+ }
+ }
+
    /**
     * Removes a managed repository from the registry and from the given configuration object.
     * The repository is closed and detached from all groups; the configuration is NOT saved.
     *
     * @param managedRepository the managed repository to remove; null is ignored
     * @param configuration the configuration object to remove the repository entry from
     * @throws RepositoryException declared for interface compatibility
     */
    @Override
    public void removeRepository( ManagedRepository managedRepository, Configuration configuration ) throws RepositoryException {
        if (managedRepository == null) {
            return;
        }
        final String id = managedRepository.getId();
        ManagedRepository repo = getManagedRepository(id);
        if (repo != null) {
            rwLock.writeLock().lock();
            try {
                // Re-check under the write lock; the first lookup was only a fast path.
                repo = managedRepositories.remove(id);
                if (repo != null) {
                    repo.close();
                    removeRepositoryFromGroups(repo);
                    ManagedRepositoryConfiguration cfg = configuration.findManagedRepositoryById(id);
                    if (cfg != null) {
                        configuration.removeManagedRepository(cfg);
                    }
                }
                pushEvent(new LifecycleEvent(LifecycleEvent.UNREGISTERED, this, repo));
            } finally {
                rwLock.writeLock().unlock();
            }
        }

    }
+
+
+ /**
+ * Removes a repository group from the registry and configuration, if it exists.
+ * The change is saved to the configuration immediately.
+ *
+ * @param repositoryGroup the repository group to remove
+ * @throws RepositoryException if an error occurs during configuration save
+ */
+ @Override
+ public void removeRepositoryGroup( RepositoryGroup repositoryGroup ) throws RepositoryException {
+ if (repositoryGroup == null) {
+ return;
+ }
+ final String id = repositoryGroup.getId();
+ RepositoryGroup repo = getRepositoryGroup(id);
+ if (repo != null) {
+ rwLock.writeLock().lock();
+ try {
+ repo = repositoryGroups.remove(id);
+ if (repo != null) {
+ repo.close();
+ Configuration configuration = getArchivaConfiguration().getConfiguration();
+ RepositoryGroupConfiguration cfg = configuration.findRepositoryGroupById(id);
+ if (cfg != null) {
+ configuration.removeRepositoryGroup(cfg);
+ }
+ saveConfiguration(configuration);
+ }
+
+ } catch (RegistryException | IndeterminateConfigurationException e) {
+ // Rollback
+ log.error("Could not save config after repository group removal: {}", e.getMessage(), e);
+ repositoryGroups.put(repo.getId(), repo);
+ throw new RepositoryException("Could not save configuration after repository group removal: " + e.getMessage());
+ } finally {
+ rwLock.writeLock().unlock();
+ }
+ }
+ }
+
+ @Override
+ public void removeRepositoryGroup( RepositoryGroup repositoryGroup, Configuration configuration ) throws RepositoryException {
+ if (repositoryGroup == null) {
+ return;
+ }
+ final String id = repositoryGroup.getId();
+ RepositoryGroup repo = getRepositoryGroup(id);
+ if (repo != null) {
+ rwLock.writeLock().lock();
+ try {
+ repo = repositoryGroups.remove(id);
+ if (repo != null) {
+ repo.close();
+ RepositoryGroupConfiguration cfg = configuration.findRepositoryGroupById(id);
+ if (cfg != null) {
+ configuration.removeRepositoryGroup(cfg);
+ }
+ }
+ } finally {
+ rwLock.writeLock().unlock();
+ }
+ }
+
+ }
+
+ private void doRemoveRepo(RemoteRepository repo, Configuration configuration) {
+ repo.close();
+ RemoteRepositoryConfiguration cfg = configuration.findRemoteRepositoryById(repo.getId());
+ if (cfg != null) {
+ configuration.removeRemoteRepository(cfg);
+ }
+ List<ProxyConnectorConfiguration> proxyConnectors = new ArrayList<>(configuration.getProxyConnectors());
+ for (ProxyConnectorConfiguration proxyConnector : proxyConnectors) {
+ if (StringUtils.equals(proxyConnector.getTargetRepoId(), repo.getId())) {
+ configuration.removeProxyConnector(proxyConnector);
+ }
+ }
+ }
+
+ /**
+ * Removes the remote repository from the registry and configuration.
+ * The change is saved to the configuration immediately.
+ *
+ * @param remoteRepository the remote repository to remove
+ * @throws RepositoryException if an error occurs during configuration save
+ */
+ @Override
+ public void removeRepository( RemoteRepository remoteRepository ) throws RepositoryException {
+ if (remoteRepository == null) {
+ return;
+ }
+ final String id = remoteRepository.getId();
+ RemoteRepository repo = getRemoteRepository(id);
+ if (repo != null) {
+ rwLock.writeLock().lock();
+ try {
+ repo = remoteRepositories.remove(id);
+ if (repo != null) {
+ Configuration configuration = getArchivaConfiguration().getConfiguration();
+ doRemoveRepo(repo, configuration);
+ saveConfiguration(configuration);
+ }
+ pushEvent(new LifecycleEvent(LifecycleEvent.UNREGISTERED, this, repo));
+ } catch (RegistryException | IndeterminateConfigurationException e) {
+ // Rollback
+ log.error("Could not save config after repository removal: {}", e.getMessage(), e);
+ remoteRepositories.put(repo.getId(), repo);
+ throw new RepositoryException("Could not save configuration after repository removal: " + e.getMessage());
+ } finally {
+ rwLock.writeLock().unlock();
+ }
+ }
+ }
+
+ @Override
+ public void removeRepository( RemoteRepository remoteRepository, Configuration configuration ) throws RepositoryException {
+ if (remoteRepository == null) {
+ return;
+ }
+ final String id = remoteRepository.getId();
+ RemoteRepository repo = getRemoteRepository(id);
+ if (repo != null) {
+ rwLock.writeLock().lock();
+ try {
+ repo = remoteRepositories.remove(id);
+ if (repo != null) {
+ doRemoveRepo(repo, configuration);
+ }
+ pushEvent(new LifecycleEvent(LifecycleEvent.UNREGISTERED, this, repo));
+ } finally {
+ rwLock.writeLock().unlock();
+ }
+ }
+
+ }
+
+ /**
+ * Reloads the registry from the configuration.
+ */
+ @Override
+ public void reload( ) {
+ initialize();
+ }
+
+ /**
+ * Resets the indexing context of a given repository.
+ *
+ * @param repository The repository
+ * @throws IndexUpdateFailedException If the index could not be reset.
+ */
+ @Override
+ public void resetIndexingContext( Repository repository ) throws IndexUpdateFailedException {
+ if (repository.hasIndex() && repository instanceof EditableRepository) {
+ EditableRepository eRepo = (EditableRepository) repository;
+ ArchivaIndexingContext newCtx = getIndexManager(repository.getType()).reset(repository.getIndexingContext());
+ eRepo.setIndexingContext(newCtx);
+ }
+ }
+
+
+ /**
+ * Creates a new repository instance with the same settings as the given one. The cloned repository is not
+ * registered or saved to the configuration.
+ *
+ * @param repo The origin repository
+ * @return The cloned repository.
+ */
+ @Override
+ public ManagedRepository clone( ManagedRepository repo, String newId ) throws RepositoryException {
+ if (managedRepositories.containsKey(newId) || remoteRepositories.containsKey(newId)) {
+ throw new RepositoryException("The given id already exists: " + newId);
+ }
+ RepositoryProvider provider = getProvider(repo.getType());
+ ManagedRepositoryConfiguration cfg = provider.getManagedConfiguration(repo);
+ cfg.setId(newId);
+ ManagedRepository cloned = provider.createManagedInstance(cfg);
+ cloned.registerEventHandler(RepositoryEvent.ANY, this);
+ return cloned;
+ }
+
+ @Override
+ public <T extends Repository> Repository clone( T repo, String newId ) throws RepositoryException {
+ if (repo instanceof RemoteRepository) {
+ return this.clone((RemoteRepository) repo, newId);
+ } else if (repo instanceof ManagedRepository) {
+ return this.clone((ManagedRepository) repo, newId);
+ } else {
+ throw new RepositoryException("This repository class is not supported " + repo.getClass().getName());
+ }
+ }
+
+ /**
+ * Creates a new repository instance with the same settings as the given one. The cloned repository is not
+ * registered or saved to the configuration.
+ *
+ * @param repo The origin repository
+ * @return The cloned repository.
+ */
+ @Override
+ public RemoteRepository clone( RemoteRepository repo, String newId ) throws RepositoryException {
+ if (managedRepositories.containsKey(newId) || remoteRepositories.containsKey(newId)) {
+ throw new RepositoryException("The given id already exists: " + newId);
+ }
+ RepositoryProvider provider = getProvider(repo.getType());
+ RemoteRepositoryConfiguration cfg = provider.getRemoteConfiguration(repo);
+ cfg.setId(newId);
+ RemoteRepository cloned = provider.createRemoteInstance(cfg);
+ cloned.registerEventHandler(RepositoryEvent.ANY, this);
+ return cloned;
+ }
+
+
+ @Override
+ public void configurationEvent(ConfigurationEvent event) {
+ // Note: the ignoreConfigEvents flag does not work, if the config events are asynchronous.
+ if (!ignoreConfigEvents) {
+ reload();
+ }
+ }
+
+
+ @Override
+ public <T extends Event> void registerEventHandler( EventType<T> type, EventHandler<? super T> eventHandler) {
+ eventManager.registerEventHandler(type, eventHandler);
+ }
+
+
+ @Override
+ public <T extends Event> void unregisterEventHandler(EventType<T> type, EventHandler<? super T> eventHandler) {
+ eventManager.unregisterEventHandler(type, eventHandler);
+ }
+
+
+ @Override
+ public void handle(Event event) {
+ // To avoid event cycles:
+ if (sameOriginator(event)) {
+ return;
+ }
+ if (event instanceof RepositoryIndexEvent) {
+ handleIndexCreationEvent((RepositoryIndexEvent) event);
+ }
+ // We propagate all events to our listeners, but with context of repository registry
+ pushEvent(event);
+ }
+
+ private void handleIndexCreationEvent(RepositoryIndexEvent event) {
+ RepositoryIndexEvent idxEvent = event;
+ if (managedRepositories.containsKey(idxEvent.getRepository().getId()) ||
+ remoteRepositories.containsKey(idxEvent.getRepository().getId())) {
+ EditableRepository repo = (EditableRepository) idxEvent.getRepository();
+ if (repo != null && repo.getIndexingContext() != null) {
+ try {
+ ArchivaIndexManager idxmgr = getIndexManager(repo.getType());
+ if (idxmgr != null) {
+ ArchivaIndexingContext newCtx = idxmgr.move(repo.getIndexingContext(), repo);
+ repo.setIndexingContext(newCtx);
+ idxmgr.updateLocalIndexPath(repo);
+ }
+
+ } catch (IndexCreationFailedException e) {
+ log.error("Could not move index to new directory {}", e.getMessage(), e);
+ }
+ }
+ }
+ }
+
+ private boolean sameOriginator(Event event) {
+ if (event.getSource() == this) {
+ return true;
+ } else if (event.hasPreviousEvent()) {
+ return sameOriginator(event.getPreviousEvent());
+ } else {
+ return false;
+ }
+ }
+
+ private void pushEvent(Event event) {
+ eventManager.fireEvent(event);
+ }
+
+
+
+}
--- /dev/null
+package org.apache.archiva.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.archiva.common.filelock.FileLockManager;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.RepositoryStorage;
+import org.apache.archiva.repository.features.ArtifactCleanupFeature;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.features.StagingRepositoryFeature;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.Locale;
+
+/**
+ *
+ * Just a helper class, mainly used for unit tests.
+ *
+ *
+ */
+public class BasicManagedRepository extends AbstractManagedRepository
+
+{
+ Logger log = LoggerFactory.getLogger(BasicManagedRepository.class);
+ ArtifactCleanupFeature artifactCleanupFeature = new ArtifactCleanupFeature( );
+ StagingRepositoryFeature stagingRepositoryFeature = new StagingRepositoryFeature( );
+
+ static final StandardCapabilities CAPABILITIES = new StandardCapabilities( new ReleaseScheme[] {
+ ReleaseScheme.RELEASE, ReleaseScheme.SNAPSHOT
+ }, new String[] {"default"}, new String[0], new String[] {
+ ArtifactCleanupFeature.class.toString(), IndexCreationFeature.class.toString(),
+ StagingRepositoryFeature.class.toString()
+ }, true, true, true, true, true );
+
+ public BasicManagedRepository( String id, String name, RepositoryStorage repositoryStorage )
+ {
+ super( RepositoryType.MAVEN, id, name, repositoryStorage );
+ initFeatures();
+ }
+
+ public BasicManagedRepository( Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage repositoryStorage )
+ {
+ super( primaryLocale, type, id, name, repositoryStorage);
+ initFeatures();
+ }
+
+ private void initFeatures() {
+ IndexCreationFeature indexCreationFeature = new IndexCreationFeature(this, this);
+ addFeature( artifactCleanupFeature );
+ addFeature( indexCreationFeature );
+ addFeature( stagingRepositoryFeature );
+ }
+
+ @Override
+ public boolean hasIndex( )
+ {
+ return true;
+ }
+
+ @Override
+ public RepositoryCapabilities getCapabilities( )
+ {
+ return CAPABILITIES;
+ }
+
+
+ @Override
+ public RepositoryRequestInfo getRequestInfo() {
+ return null;
+ }
+
+ /**
+ * Creates a filesystem based repository instance. The path is built by basePath/repository-id
+ *
+ * @param id The repository id
+ * @param name The name of the repository
+ * @param repositoryPath The path to the repository
+ * @return The repository instance
+ * @throws IOException if the filesystem storage could not be created for the given path
+ */
+ public static BasicManagedRepository newFilesystemInstance(String id, String name, Path repositoryPath) throws IOException {
+ FileLockManager lockManager = new DefaultFileLockManager();
+ FilesystemStorage storage = new FilesystemStorage(repositoryPath, lockManager);
+ return new BasicManagedRepository(id, name, storage);
+ }
+
+}
--- /dev/null
+package org.apache.archiva.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.archiva.common.filelock.FileLockManager;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.RepositoryStorage;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.Locale;
+
+/**
+ *
+ * Just a helper class, mainly used for unit tests.
+ *
+ *
+ */
+public class BasicRemoteRepository extends AbstractRemoteRepository
+
+{
+ Logger log = LoggerFactory.getLogger(BasicRemoteRepository.class);
+
+ RemoteIndexFeature remoteIndexFeature = new RemoteIndexFeature();
+ IndexCreationFeature indexCreationFeature = new IndexCreationFeature(true);
+
+
+ static final StandardCapabilities CAPABILITIES = new StandardCapabilities( new ReleaseScheme[] {
+ ReleaseScheme.RELEASE, ReleaseScheme.SNAPSHOT
+ }, new String[] {"default"}, new String[0], new String[] {
+ RemoteIndexFeature.class.toString(),
+ IndexCreationFeature.class.toString()
+ }, true, true, true, true, true );
+
+ public BasicRemoteRepository( String id, String name, RepositoryStorage storage)
+ {
+ super( RepositoryType.MAVEN, id, name, storage);
+ initFeatures();
+ }
+
+ public BasicRemoteRepository( Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage storage )
+ {
+ super( primaryLocale, type, id, name, storage );
+ initFeatures();
+ }
+
+ private void initFeatures() {
+ addFeature( remoteIndexFeature );
+ addFeature( indexCreationFeature );
+ }
+
+ @Override
+ public boolean hasIndex( )
+ {
+ return true;
+ }
+
+ @Override
+ public RepositoryCapabilities getCapabilities( )
+ {
+ return CAPABILITIES;
+ }
+
+
+ public static BasicRemoteRepository newFilesystemInstance(String id, String name, Path basePath) throws IOException {
+ FileLockManager lockManager = new DefaultFileLockManager();
+ FilesystemStorage storage = new FilesystemStorage(basePath.resolve(id), lockManager);
+ return new BasicRemoteRepository(id, name, storage);
+ }
+}
--- /dev/null
+package org.apache.archiva.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Simple credentials that hold username and password
+ */
+public class PasswordCredentials implements RepositoryCredentials
+{
+ String username;
+ char[] password;
+
+ public PasswordCredentials(String username, char[] password) {
+ this.username = username;
+ this.password = password;
+ }
+
+ public String getUsername( )
+ {
+ return username;
+ }
+
+ public void setUsername( String username )
+ {
+ this.username = username;
+ }
+
+ public char[] getPassword( )
+ {
+ return password;
+ }
+
+ public void setPassword( char[] password )
+ {
+ this.password = password;
+ }
+
+ @Override
+ public String toString() {
+ return "name="+username+",pwd=*****";
+ }
+}
+++ /dev/null
-package org.apache.archiva.repository.metadata;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.checksum.ChecksumAlgorithm;
-import org.apache.archiva.checksum.ChecksummedFile;
-import org.apache.archiva.common.utils.PathUtil;
-import org.apache.archiva.common.utils.VersionComparator;
-import org.apache.archiva.common.utils.VersionUtil;
-import org.apache.archiva.configuration.ArchivaConfiguration;
-import org.apache.archiva.configuration.ConfigurationNames;
-import org.apache.archiva.configuration.FileTypes;
-import org.apache.archiva.configuration.ProxyConnectorConfiguration;
-import org.apache.archiva.maven2.metadata.MavenMetadataReader;
-import org.apache.archiva.model.ArchivaRepositoryMetadata;
-import org.apache.archiva.model.ArtifactReference;
-import org.apache.archiva.model.Plugin;
-import org.apache.archiva.model.ProjectReference;
-import org.apache.archiva.model.SnapshotVersion;
-import org.apache.archiva.model.VersionedReference;
-import org.apache.archiva.redback.components.registry.Registry;
-import org.apache.archiva.redback.components.registry.RegistryListener;
-import org.apache.archiva.repository.ContentNotFoundException;
-import org.apache.archiva.repository.LayoutException;
-import org.apache.archiva.repository.ManagedRepositoryContent;
-import org.apache.archiva.repository.RemoteRepositoryContent;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.archiva.xml.XMLException;
-import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.math.NumberUtils;
-import org.apache.commons.lang3.time.DateUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-import javax.annotation.PostConstruct;
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.*;
-import java.util.regex.Matcher;
-import java.util.stream.Stream;
-
-/**
- * MetadataTools
- *
- *
- */
-@Service( "metadataTools#default" )
-public class MetadataTools
- implements RegistryListener
-{
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- public static final String MAVEN_METADATA = "maven-metadata.xml";
-
- public static final String MAVEN_ARCHETYPE_CATALOG ="archetype-catalog.xml";
-
- private static final char PATH_SEPARATOR = '/';
-
- private static final char GROUP_SEPARATOR = '.';
-
- /**
- *
- */
- @Inject
- @Named( value = "archivaConfiguration#default" )
- private ArchivaConfiguration configuration;
-
- /**
- *
- */
- @Inject
- @Named( value = "fileTypes" )
- private FileTypes filetypes;
-
- private List<ChecksumAlgorithm> algorithms = Arrays.asList(ChecksumAlgorithm.SHA256, ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 );
-
- private List<String> artifactPatterns;
-
- private Map<String, Set<String>> proxies;
-
- private static final char NUMS[] = new char[]{ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' };
-
- private SimpleDateFormat lastUpdatedFormat;
-
- public MetadataTools()
- {
- lastUpdatedFormat = new SimpleDateFormat( "yyyyMMddHHmmss" );
- lastUpdatedFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- }
-
- @Override
- public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
- {
- if ( ConfigurationNames.isProxyConnector( propertyName ) )
- {
- initConfigVariables();
- }
- }
-
- @Override
- public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
- {
- /* nothing to do */
- }
-
- /**
- * Gather the set of snapshot versions found in a particular versioned reference.
- *
- * @return the Set of snapshot artifact versions found.
- * @throws LayoutException
- * @throws ContentNotFoundException
- */
- public Set<String> gatherSnapshotVersions( ManagedRepositoryContent managedRepository,
- VersionedReference reference )
- throws LayoutException, IOException, ContentNotFoundException
- {
- Set<String> foundVersions = managedRepository.getVersions( reference );
-
- // Next gather up the referenced 'latest' versions found in any proxied repositories
- // maven-metadata-${proxyId}.xml files that may be present.
-
- // Does this repository have a set of remote proxied repositories?
- Set<String> proxiedRepoIds = this.proxies.get( managedRepository.getId() );
-
- if ( CollectionUtils.isNotEmpty( proxiedRepoIds ) )
- {
- String baseVersion = VersionUtil.getBaseVersion( reference.getVersion() );
- baseVersion = baseVersion.substring( 0, baseVersion.indexOf( VersionUtil.SNAPSHOT ) - 1 );
-
- // Add in the proxied repo version ids too.
- Iterator<String> it = proxiedRepoIds.iterator();
- while ( it.hasNext() )
- {
- String proxyId = it.next();
-
- ArchivaRepositoryMetadata proxyMetadata = readProxyMetadata( managedRepository, reference, proxyId );
- if ( proxyMetadata == null )
- {
- // There is no proxy metadata, skip it.
- continue;
- }
-
- // Is there some snapshot info?
- SnapshotVersion snapshot = proxyMetadata.getSnapshotVersion();
- if ( snapshot != null )
- {
- String timestamp = snapshot.getTimestamp();
- int buildNumber = snapshot.getBuildNumber();
-
- // Only interested in the timestamp + buildnumber.
- if ( StringUtils.isNotBlank( timestamp ) && ( buildNumber > 0 ) )
- {
- foundVersions.add( baseVersion + "-" + timestamp + "-" + buildNumber );
- }
- }
- }
- }
-
- return foundVersions;
- }
-
- /**
- * Take a path to a maven-metadata.xml, and attempt to translate it to a VersionedReference.
- *
- * @param path
- * @return
- */
- public VersionedReference toVersionedReference( String path )
- throws RepositoryMetadataException
- {
- if ( !path.endsWith( "/" + MAVEN_METADATA ) )
- {
- throw new RepositoryMetadataException( "Cannot convert to versioned reference, not a metadata file. " );
- }
-
- VersionedReference reference = new VersionedReference();
-
- String normalizedPath = StringUtils.replace( path, "\\", "/" );
- String pathParts[] = StringUtils.split( normalizedPath, '/' );
-
- int versionOffset = pathParts.length - 2;
- int artifactIdOffset = versionOffset - 1;
- int groupIdEnd = artifactIdOffset - 1;
-
- reference.setVersion( pathParts[versionOffset] );
-
- if ( !hasNumberAnywhere( reference.getVersion() ) )
- {
- // Scary check, but without it, all paths are version references;
- throw new RepositoryMetadataException(
- "Not a versioned reference, as version id on path has no number in it." );
- }
-
- reference.setArtifactId( pathParts[artifactIdOffset] );
-
- StringBuilder gid = new StringBuilder();
- for ( int i = 0; i <= groupIdEnd; i++ )
- {
- if ( i > 0 )
- {
- gid.append( "." );
- }
- gid.append( pathParts[i] );
- }
-
- reference.setGroupId( gid.toString() );
-
- return reference;
- }
-
- private boolean hasNumberAnywhere( String version )
- {
- return StringUtils.indexOfAny( version, NUMS ) != ( -1 );
- }
-
- public ProjectReference toProjectReference( String path )
- throws RepositoryMetadataException
- {
- if ( !path.endsWith( "/" + MAVEN_METADATA ) )
- {
- throw new RepositoryMetadataException( "Cannot convert to versioned reference, not a metadata file. " );
- }
-
- ProjectReference reference = new ProjectReference();
-
- String normalizedPath = StringUtils.replace( path, "\\", "/" );
- String pathParts[] = StringUtils.split( normalizedPath, '/' );
-
- // Assume last part of the path is the version.
-
- int artifactIdOffset = pathParts.length - 2;
- int groupIdEnd = artifactIdOffset - 1;
-
- reference.setArtifactId( pathParts[artifactIdOffset] );
-
- StringBuilder gid = new StringBuilder();
- for ( int i = 0; i <= groupIdEnd; i++ )
- {
- if ( i > 0 )
- {
- gid.append( "." );
- }
- gid.append( pathParts[i] );
- }
-
- reference.setGroupId( gid.toString() );
-
- return reference;
- }
-
-
-
- public String toPath( ProjectReference reference )
- {
- StringBuilder path = new StringBuilder();
-
- path.append( formatAsDirectory( reference.getGroupId() ) ).append( PATH_SEPARATOR );
- path.append( reference.getArtifactId() ).append( PATH_SEPARATOR );
- path.append( MAVEN_METADATA );
-
- return path.toString();
- }
-
- public String toPath( VersionedReference reference )
- {
- StringBuilder path = new StringBuilder();
-
- path.append( formatAsDirectory( reference.getGroupId() ) ).append( PATH_SEPARATOR );
- path.append( reference.getArtifactId() ).append( PATH_SEPARATOR );
- if ( reference.getVersion() != null )
- {
- // add the version only if it is present
- path.append( VersionUtil.getBaseVersion( reference.getVersion() ) ).append( PATH_SEPARATOR );
- }
- path.append( MAVEN_METADATA );
-
- return path.toString();
- }
-
- private String formatAsDirectory( String directory )
- {
- return directory.replace( GROUP_SEPARATOR, PATH_SEPARATOR );
- }
-
- /**
- * Adjusts a path for a metadata.xml file to its repository specific path.
- *
- * @param repository the repository to base new path off of.
- * @param path the path to the metadata.xml file to adjust the name of.
- * @return the newly adjusted path reference to the repository specific metadata path.
- */
- public String getRepositorySpecificName( RemoteRepositoryContent repository, String path )
- {
- return getRepositorySpecificName( repository.getId(), path );
- }
-
- /**
- * Adjusts a path for a metadata.xml file to its repository specific path.
- *
- * @param proxyId the repository id to base new path off of.
- * @param path the path to the metadata.xml file to adjust the name of.
- * @return the newly adjusted path reference to the repository specific metadata path.
- */
- public String getRepositorySpecificName( String proxyId, String path )
- {
- StringBuilder ret = new StringBuilder();
-
- int idx = path.lastIndexOf( '/' );
- if ( idx > 0 )
- {
- ret.append( path.substring( 0, idx + 1 ) );
- }
-
- // TODO: need to filter out 'bad' characters from the proxy id.
- ret.append( "maven-metadata-" ).append( proxyId ).append( ".xml" );
-
- return ret.toString();
- }
-
- @PostConstruct
- public void initialize()
- {
- assert(configuration != null);
- this.artifactPatterns = new ArrayList<>();
- this.proxies = new HashMap<>();
- initConfigVariables();
-
- configuration.addChangeListener( this );
- }
-
- public ArchivaRepositoryMetadata readProxyMetadata( ManagedRepositoryContent managedRepository,
- ProjectReference reference, String proxyId )
- {
- String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) );
- StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
-
- if ( !metadataFile.exists() || metadataFile.isContainer())
- {
- // Nothing to do. return null.
- return null;
- }
-
- try
- {
- return MavenMetadataReader.read( metadataFile );
- }
- catch (XMLException | IOException e )
- {
- // TODO: [monitor] consider a monitor for this event.
- // TODO: consider a read-redo on monitor return code?
- log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
- return null;
- }
- }
-
- public ArchivaRepositoryMetadata readProxyMetadata( ManagedRepositoryContent managedRepository,
- String logicalResource, String proxyId )
- {
- String metadataPath = getRepositorySpecificName( proxyId, logicalResource );
- StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
-
- if ( !metadataFile.exists() || metadataFile.isContainer())
- {
- // Nothing to do. return null.
- return null;
- }
-
- try
- {
- return MavenMetadataReader.read( metadataFile );
- }
- catch (XMLException | IOException e )
- {
- // TODO: [monitor] consider a monitor for this event.
- // TODO: consider a read-redo on monitor return code?
- log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
- return null;
- }
- }
-
- public ArchivaRepositoryMetadata readProxyMetadata( ManagedRepositoryContent managedRepository,
- VersionedReference reference, String proxyId )
- {
- String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) );
- StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
-
- if ( !metadataFile.exists() || metadataFile.isContainer())
- {
- // Nothing to do. return null.
- return null;
- }
-
- try
- {
- return MavenMetadataReader.read( metadataFile );
- }
- catch (XMLException | IOException e )
- {
- // TODO: [monitor] consider a monitor for this event.
- // TODO: consider a read-redo on monitor return code?
- log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
- return null;
- }
- }
-
- public void updateMetadata( ManagedRepositoryContent managedRepository, String logicalResource )
- throws RepositoryMetadataException
- {
- final StorageAsset metadataFile = managedRepository.getRepository().getAsset( logicalResource );
- ArchivaRepositoryMetadata metadata = null;
-
- //Gather and merge all metadata available
- List<ArchivaRepositoryMetadata> metadatas =
- getMetadatasForManagedRepository( managedRepository, logicalResource );
- for ( ArchivaRepositoryMetadata proxiedMetadata : metadatas )
- {
- if ( metadata == null )
- {
- metadata = proxiedMetadata;
- continue;
- }
- metadata = RepositoryMetadataMerge.merge( metadata, proxiedMetadata );
- }
-
- if ( metadata == null )
- {
- log.debug( "No metadata to update for {}", logicalResource );
- return;
- }
-
- Set<String> availableVersions = new HashSet<String>();
- List<String> metadataAvailableVersions = metadata.getAvailableVersions();
- if ( metadataAvailableVersions != null )
- {
- availableVersions.addAll( metadataAvailableVersions );
- }
- availableVersions = findPossibleVersions( availableVersions, metadataFile.getParent() );
-
- if ( availableVersions.size() > 0 )
- {
- updateMetadataVersions( availableVersions, metadata );
- }
-
- RepositoryMetadataWriter.write( metadata, metadataFile );
-
- ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
- checksum.fixChecksums( algorithms );
- }
-
- /**
- * Skims the parent directory of a metadata in vain hope of finding
- * subdirectories that contain poms.
- *
- * @param metadataParentDirectory
- * @return origional set plus newly found versions
- */
- private Set<String> findPossibleVersions( Set<String> versions, StorageAsset metadataParentDirectory )
- {
-
- Set<String> result = new HashSet<String>( versions );
-
- metadataParentDirectory.list().stream().filter(asset ->
- asset.isContainer()).filter(asset -> {
- return asset.list().stream().anyMatch(f -> !f.isContainer() && f.getName().endsWith(".pom"));
- }
- ).forEach( p -> result.add(p.getName()));
-
- return result;
- }
-
- private List<ArchivaRepositoryMetadata> getMetadatasForManagedRepository(
- ManagedRepositoryContent managedRepository, String logicalResource )
- {
- List<ArchivaRepositoryMetadata> metadatas = new ArrayList<>();
- StorageAsset file = managedRepository.getRepository().getAsset( logicalResource );
-
- if ( file.exists() )
- {
- try
- {
- ArchivaRepositoryMetadata existingMetadata = MavenMetadataReader.read( file );
- if ( existingMetadata != null )
- {
- metadatas.add( existingMetadata );
- }
- }
- catch (XMLException | IOException e )
- {
- log.debug( "Could not read metadata at {}. Metadata will be removed.", file.getPath() );
- try {
- file.getStorage().removeAsset(file);
- } catch (IOException ex) {
- log.error("Could not remove asset {}", file.getPath());
- }
- }
- }
-
- Set<String> proxyIds = proxies.get( managedRepository.getId() );
- if ( proxyIds != null )
- {
- for ( String proxyId : proxyIds )
- {
- ArchivaRepositoryMetadata proxyMetadata =
- readProxyMetadata( managedRepository, logicalResource, proxyId );
- if ( proxyMetadata != null )
- {
- metadatas.add( proxyMetadata );
- }
- }
- }
-
- return metadatas;
- }
-
-
- /**
- * Update the metadata to represent the all versions/plugins of
- * the provided groupId:artifactId project or group reference,
- * based off of information present in the repository,
- * the maven-metadata.xml files, and the proxy/repository specific
- * metadata file contents.
- * <p>
- * We must treat this as a group or a project metadata file as there is no way to know in advance
- *
- * @param managedRepository the managed repository where the metadata is kept.
- * @param reference the reference to update.
- * @throws LayoutException
- * @throws RepositoryMetadataException
- * @throws IOException
- * @throws ContentNotFoundException
- * @deprecated
- */
- public void updateMetadata( ManagedRepositoryContent managedRepository, ProjectReference reference )
- throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException
- {
-
- StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) );
-
- long lastUpdated = getExistingLastUpdated( metadataFile );
-
- ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
- metadata.setGroupId( reference.getGroupId() );
- metadata.setArtifactId( reference.getArtifactId() );
-
- // Gather up all versions found in the managed repository.
- Set<String> allVersions = managedRepository.getVersions( reference );
-
- // Gather up all plugins found in the managed repository.
- // TODO: do we know this information instead?
-// Set<Plugin> allPlugins = managedRepository.getPlugins( reference );
- Set<Plugin> allPlugins;
- if ( metadataFile.exists())
- {
- try
- {
- allPlugins = new LinkedHashSet<Plugin>( MavenMetadataReader.read( metadataFile ).getPlugins() );
- }
- catch ( XMLException e )
- {
- throw new RepositoryMetadataException( e.getMessage(), e );
- }
- }
- else
- {
- allPlugins = new LinkedHashSet<Plugin>();
- }
-
- // Does this repository have a set of remote proxied repositories?
- Set<String> proxiedRepoIds = this.proxies.get( managedRepository.getId() );
-
- if ( CollectionUtils.isNotEmpty( proxiedRepoIds ) )
- {
- // Add in the proxied repo version ids too.
- Iterator<String> it = proxiedRepoIds.iterator();
- while ( it.hasNext() )
- {
- String proxyId = it.next();
-
- ArchivaRepositoryMetadata proxyMetadata = readProxyMetadata( managedRepository, reference, proxyId );
- if ( proxyMetadata != null )
- {
- allVersions.addAll( proxyMetadata.getAvailableVersions() );
- allPlugins.addAll( proxyMetadata.getPlugins() );
- long proxyLastUpdated = getLastUpdated( proxyMetadata );
-
- lastUpdated = Math.max( lastUpdated, proxyLastUpdated );
- }
- }
- }
-
- if ( !allVersions.isEmpty() )
- {
- updateMetadataVersions( allVersions, metadata );
- }
- else
- {
- // Add the plugins to the metadata model.
- metadata.setPlugins( new ArrayList<>( allPlugins ) );
-
- // artifact ID was actually the last part of the group
- metadata.setGroupId( metadata.getGroupId() + "." + metadata.getArtifactId() );
- metadata.setArtifactId( null );
- }
-
- if ( lastUpdated > 0 )
- {
- metadata.setLastUpdatedTimestamp( toLastUpdatedDate( lastUpdated ) );
- }
-
- // Save the metadata model to disk.
- RepositoryMetadataWriter.write( metadata, metadataFile );
- ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
- checksum.fixChecksums( algorithms );
- }
-
- private void updateMetadataVersions( Collection<String> allVersions, ArchivaRepositoryMetadata metadata )
- {
- // Sort the versions
- List<String> sortedVersions = new ArrayList<>( allVersions );
- Collections.sort( sortedVersions, VersionComparator.getInstance() );
-
- // Split the versions into released and snapshots.
- List<String> releasedVersions = new ArrayList<>();
- List<String> snapshotVersions = new ArrayList<>();
-
- for ( String version : sortedVersions )
- {
- if ( VersionUtil.isSnapshot( version ) )
- {
- snapshotVersions.add( version );
- }
- else
- {
- releasedVersions.add( version );
- }
- }
-
- Collections.sort( releasedVersions, VersionComparator.getInstance() );
- Collections.sort( snapshotVersions, VersionComparator.getInstance() );
-
- String latestVersion = sortedVersions.get( sortedVersions.size() - 1 );
- String releaseVersion = null;
-
- if ( CollectionUtils.isNotEmpty( releasedVersions ) )
- {
- releaseVersion = releasedVersions.get( releasedVersions.size() - 1 );
- }
-
- // Add the versions to the metadata model.
- metadata.setAvailableVersions( sortedVersions );
-
- metadata.setLatestVersion( latestVersion );
- metadata.setReleasedVersion( releaseVersion );
- }
-
- private Date toLastUpdatedDate( long lastUpdated )
- {
- Calendar cal = Calendar.getInstance( TimeZone.getTimeZone("UTC") );
- cal.setTimeInMillis( lastUpdated );
-
- return cal.getTime();
- }
-
- private long toLastUpdatedLong( String timestampString )
- {
- try
- {
- Date date = lastUpdatedFormat.parse( timestampString );
- Calendar cal = Calendar.getInstance( TimeZone.getTimeZone("UTC"));
- cal.setTime( date );
-
- return cal.getTimeInMillis();
- }
- catch ( ParseException e )
- {
- return 0;
- }
- }
-
- private long getLastUpdated( ArchivaRepositoryMetadata metadata )
- {
- if ( metadata == null )
- {
- // Doesn't exist.
- return 0;
- }
-
- try
- {
- String lastUpdated = metadata.getLastUpdated();
- if ( StringUtils.isBlank( lastUpdated ) )
- {
- // Not set.
- return 0;
- }
-
- Date lastUpdatedDate = lastUpdatedFormat.parse( lastUpdated );
- return lastUpdatedDate.getTime();
- }
- catch ( ParseException e )
- {
- // Bad format on the last updated string.
- return 0;
- }
- }
-
- private long getExistingLastUpdated( StorageAsset metadataFile )
- {
- if ( !metadataFile.exists() )
- {
- // Doesn't exist.
- return 0;
- }
-
- try
- {
- ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile );
-
- return getLastUpdated( metadata );
- }
- catch (XMLException | IOException e )
- {
- // Error.
- return 0;
- }
- }
-
- /**
- * Update the metadata based on the following rules.
- * <p>
- * 1) If this is a SNAPSHOT reference, then utilize the proxy/repository specific
- * metadata files to represent the current / latest SNAPSHOT available.
- * 2) If this is a RELEASE reference, and the metadata file does not exist, then
- * create the metadata file with contents required of the VersionedReference
- *
- * @param managedRepository the managed repository where the metadata is kept.
- * @param reference the versioned reference to update
- * @throws LayoutException
- * @throws RepositoryMetadataException
- * @throws IOException
- * @throws ContentNotFoundException
- * @deprecated
- */
- public void updateMetadata( ManagedRepositoryContent managedRepository, VersionedReference reference )
- throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException
- {
- StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) );
-
- long lastUpdated = getExistingLastUpdated( metadataFile );
-
- ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
- metadata.setGroupId( reference.getGroupId() );
- metadata.setArtifactId( reference.getArtifactId() );
-
- if ( VersionUtil.isSnapshot( reference.getVersion() ) )
- {
- // Do SNAPSHOT handling.
- metadata.setVersion( VersionUtil.getBaseVersion( reference.getVersion() ) );
-
- // Gather up all of the versions found in the reference dir, and any
- // proxied maven-metadata.xml files.
- Set<String> snapshotVersions = gatherSnapshotVersions( managedRepository, reference );
-
- if ( snapshotVersions.isEmpty() )
- {
- throw new ContentNotFoundException(
- "No snapshot versions found on reference [" + VersionedReference.toKey( reference ) + "]." );
- }
-
- // sort the list to determine to aide in determining the Latest version.
- List<String> sortedVersions = new ArrayList<>();
- sortedVersions.addAll( snapshotVersions );
- Collections.sort( sortedVersions, new VersionComparator() );
-
- String latestVersion = sortedVersions.get( sortedVersions.size() - 1 );
-
- if ( VersionUtil.isUniqueSnapshot( latestVersion ) )
- {
- // The latestVersion will contain the full version string "1.0-alpha-5-20070821.213044-8"
- // This needs to be broken down into ${base}-${timestamp}-${build_number}
-
- Matcher m = VersionUtil.UNIQUE_SNAPSHOT_PATTERN.matcher( latestVersion );
- if ( m.matches() )
- {
- metadata.setSnapshotVersion( new SnapshotVersion() );
- int buildNumber = NumberUtils.toInt( m.group( 3 ), -1 );
- metadata.getSnapshotVersion().setBuildNumber( buildNumber );
-
- Matcher mtimestamp = VersionUtil.TIMESTAMP_PATTERN.matcher( m.group( 2 ) );
- if ( mtimestamp.matches() )
- {
- String tsDate = mtimestamp.group( 1 );
- String tsTime = mtimestamp.group( 2 );
-
- long snapshotLastUpdated = toLastUpdatedLong( tsDate + tsTime );
-
- lastUpdated = Math.max( lastUpdated, snapshotLastUpdated );
-
- metadata.getSnapshotVersion().setTimestamp( m.group( 2 ) );
- }
- }
- }
- else if ( VersionUtil.isGenericSnapshot( latestVersion ) )
- {
- // The latestVersion ends with the generic version string.
- // Example: 1.0-alpha-5-SNAPSHOT
-
- metadata.setSnapshotVersion( new SnapshotVersion() );
-
- /* Disabled due to decision in [MRM-535].
- * Do not set metadata.lastUpdated to file.lastModified.
- *
- * Should this be the last updated timestamp of the file, or in the case of an
- * archive, the most recent timestamp in the archive?
- *
- ArtifactReference artifact = getFirstArtifact( managedRepository, reference );
-
- if ( artifact == null )
- {
- throw new IOException( "Not snapshot artifact found to reference in " + reference );
- }
-
- File artifactFile = managedRepository.toFile( artifact );
-
- if ( artifactFile.exists() )
- {
- Date lastModified = new Date( artifactFile.lastModified() );
- metadata.setLastUpdatedTimestamp( lastModified );
- }
- */
- }
- else
- {
- throw new RepositoryMetadataException(
- "Unable to process snapshot version <" + latestVersion + "> reference <" + reference + ">" );
- }
- }
- else
- {
- // Do RELEASE handling.
- metadata.setVersion( reference.getVersion() );
- }
-
- // Set last updated
- if ( lastUpdated > 0 )
- {
- metadata.setLastUpdatedTimestamp( toLastUpdatedDate( lastUpdated ) );
- }
-
- // Save the metadata model to disk.
- RepositoryMetadataWriter.write( metadata, metadataFile );
- ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
- checksum.fixChecksums( algorithms );
- }
-
- private void initConfigVariables()
- {
- assert(this.artifactPatterns!=null);
- assert(proxies!=null);
- synchronized ( this.artifactPatterns )
- {
- this.artifactPatterns.clear();
-
- this.artifactPatterns.addAll( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
- }
-
- synchronized ( proxies )
- {
- this.proxies.clear();
-
- List<ProxyConnectorConfiguration> proxyConfigs = configuration.getConfiguration().getProxyConnectors();
- for ( ProxyConnectorConfiguration proxyConfig : proxyConfigs )
- {
- String key = proxyConfig.getSourceRepoId();
-
- Set<String> remoteRepoIds = this.proxies.get( key );
-
- if ( remoteRepoIds == null )
- {
- remoteRepoIds = new HashSet<String>();
- }
-
- remoteRepoIds.add( proxyConfig.getTargetRepoId() );
-
- this.proxies.put( key, remoteRepoIds );
- }
- }
- }
-
- /**
- * Get the first Artifact found in the provided VersionedReference location.
- *
- * @param managedRepository the repository to search within.
- * @param reference the reference to the versioned reference to search within
- * @return the ArtifactReference to the first artifact located within the versioned reference. or null if
- * no artifact was found within the versioned reference.
- * @throws IOException if the versioned reference is invalid (example: doesn't exist, or isn't a directory)
- * @throws LayoutException
- */
- public ArtifactReference getFirstArtifact( ManagedRepositoryContent managedRepository,
- VersionedReference reference )
- throws LayoutException, IOException
- {
- String path = toPath( reference );
-
- int idx = path.lastIndexOf( '/' );
- if ( idx > 0 )
- {
- path = path.substring( 0, idx );
- }
-
- Path repoDir = Paths.get( managedRepository.getRepoRoot(), path );
-
- if ( !Files.exists(repoDir))
- {
- throw new IOException( "Unable to gather the list of snapshot versions on a non-existant directory: "
- + repoDir.toAbsolutePath() );
- }
-
- if ( !Files.isDirectory( repoDir ))
- {
- throw new IOException(
- "Unable to gather the list of snapshot versions on a non-directory: " + repoDir.toAbsolutePath() );
- }
-
- try(Stream<Path> stream = Files.list(repoDir)) {
- String result = stream.filter( Files::isRegularFile ).map( path1 ->
- PathUtil.getRelative( managedRepository.getRepoRoot(), path1 )
- ).filter( filetypes::matchesArtifactPattern ).findFirst().orElse( null );
- if (result!=null) {
- return managedRepository.toArtifactReference( result );
- }
- }
- // No artifact was found.
- return null;
- }
-
- public ArchivaConfiguration getConfiguration()
- {
- return configuration;
- }
-
- public void setConfiguration( ArchivaConfiguration configuration )
- {
- this.configuration = configuration;
- }
-
- public FileTypes getFiletypes()
- {
- return filetypes;
- }
-
- public void setFiletypes( FileTypes filetypes )
- {
- this.filetypes = filetypes;
- }
-}
+++ /dev/null
-package org.apache.archiva.repository.metadata;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.repository.RepositoryException;
-
-/**
- * RepositoryMetadataException
- *
- *
- */
-public class RepositoryMetadataException
- extends RepositoryException
-{
-
- public RepositoryMetadataException()
- {
- super();
- }
-
- public RepositoryMetadataException( String message, Throwable cause )
- {
- super( message, cause );
- }
-
- public RepositoryMetadataException( String message )
- {
- super( message );
- }
-
- public RepositoryMetadataException( Throwable cause )
- {
- super( cause );
- }
-}
+++ /dev/null
-package org.apache.archiva.repository.metadata;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.model.ArchivaModelCloner;
-import org.apache.archiva.model.ArchivaRepositoryMetadata;
-import org.apache.archiva.model.Plugin;
-import org.apache.archiva.model.SnapshotVersion;
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * RepositoryMetadataMerge
- *
- *
- */
-public class RepositoryMetadataMerge
-{
- public static ArchivaRepositoryMetadata merge( final ArchivaRepositoryMetadata mainMetadata,
- final ArchivaRepositoryMetadata sourceMetadata )
- throws RepositoryMetadataException
- {
- if ( mainMetadata == null )
- {
- throw new RepositoryMetadataException( "Cannot merge a null main project." );
- }
-
- if ( sourceMetadata == null )
- {
- throw new RepositoryMetadataException( "Cannot copy to a null parent project." );
- }
-
- ArchivaRepositoryMetadata merged = new ArchivaRepositoryMetadata();
-
- merged.setGroupId( merge( mainMetadata.getGroupId(), sourceMetadata.getGroupId() ) );
- merged.setArtifactId( merge(mainMetadata.getArtifactId(), sourceMetadata.getArtifactId()));
- merged.setVersion( merge(mainMetadata.getVersion(), sourceMetadata.getVersion()) );
- merged.setReleasedVersion( merge( mainMetadata.getReleasedVersion(), sourceMetadata.getReleasedVersion() ) );
- merged.setSnapshotVersion( merge( mainMetadata.getSnapshotVersion(), sourceMetadata.getSnapshotVersion() ) );
- merged.setAvailableVersions( mergeAvailableVersions( mainMetadata.getAvailableVersions(), sourceMetadata.getAvailableVersions() ) );
- merged.setPlugins( mergePlugins( mainMetadata.getPlugins(), sourceMetadata.getPlugins() ) );
-
- //Don't set if merge was not possible
- long lastUpdated = mergeTimestamp( mainMetadata.getLastUpdated(), sourceMetadata.getLastUpdated());
- if (lastUpdated > -1)
- {
- merged.setLastUpdated( Long.toString(lastUpdated) );
- }
-
- return merged;
- }
-
- private static boolean empty( String val )
- {
- if ( val == null )
- {
- return true;
- }
-
- return ( val.trim().length() <= 0 );
- }
-
- private static long mergeTimestamp(String mainTimestamp, String sourceTimestamp)
- {
- if (sourceTimestamp == null && mainTimestamp != null)
- {
- return convertTimestampToLong(mainTimestamp);
- }
-
- if (mainTimestamp == null && sourceTimestamp != null)
- {
- return convertTimestampToLong(sourceTimestamp);
- }
-
- if (sourceTimestamp == null && mainTimestamp == null)
- {
- return -1;
- }
-
- return mergeTimestamp(convertTimestampToLong(mainTimestamp), convertTimestampToLong(sourceTimestamp));
- }
-
- private static long mergeTimestamp(long mainTimestamp, long sourceTimestamp)
- {
- return Math.max( mainTimestamp, sourceTimestamp );
- }
-
- private static SnapshotVersion merge( SnapshotVersion mainSnapshotVersion, SnapshotVersion sourceSnapshotVersion )
- {
- if ( sourceSnapshotVersion == null )
- {
- return mainSnapshotVersion;
- }
-
- if ( mainSnapshotVersion == null )
- {
- return ArchivaModelCloner.clone( sourceSnapshotVersion );
- }
-
- SnapshotVersion merged = new SnapshotVersion();
-
- long mainSnapshotLastUpdated = convertTimestampToLong(mainSnapshotVersion.getTimestamp());
- long sourceSnapshotLastUpdated = convertTimestampToLong(sourceSnapshotVersion.getTimestamp());
-
- long lastUpdated = mergeTimestamp(mainSnapshotLastUpdated, sourceSnapshotLastUpdated);
-
- if (lastUpdated == mainSnapshotLastUpdated)
- {
- merged.setTimestamp(mainSnapshotVersion.getTimestamp());
- merged.setBuildNumber(mainSnapshotVersion.getBuildNumber());
- }
- else
- {
- merged.setTimestamp(sourceSnapshotVersion.getTimestamp());
- merged.setBuildNumber(sourceSnapshotVersion.getBuildNumber());
- }
-
- return merged;
- }
-
- private static long convertTimestampToLong(String timestamp)
- {
- if (timestamp == null)
- {
- return -1;
- }
-
- return getLongFromTimestampSafely(StringUtils.replace(timestamp, ".", ""));
- }
-
- private static long getLongFromTimestampSafely( String timestampString )
- {
- try
- {
- return Long.parseLong(timestampString);
- }
- catch (NumberFormatException e)
- {
- return -1;
- }
- }
-
- private static String merge( String main, String source )
- {
- if ( empty( main ) && !empty( source ) )
- {
- return source;
- }
-
- return main;
- }
-
- private static List<Plugin> mergePlugins(List<Plugin> mainPlugins, List<Plugin> sourcePlugins)
- {
- if ( sourcePlugins == null )
- {
- return mainPlugins;
- }
-
- if ( mainPlugins == null )
- {
- return clonePlugins( sourcePlugins );
- }
-
- List<Plugin> merged = clonePlugins( mainPlugins );
-
- for ( Plugin plugin : sourcePlugins )
- {
- if ( !merged.contains( plugin ) )
- {
- merged.add( plugin );
- }
- }
-
- return merged;
- }
-
- /**
- * Clones a list of plugins.
- *
- * This method exists because ArchivaModelCloner.clonePlugins()
- * only works with artifact references.
- *
- * @param plugins
- * @return list of cloned plugins
- */
- private static List<Plugin> clonePlugins(List<Plugin> plugins)
- {
- if (plugins == null)
- {
- return null;
- }
-
- List<Plugin> result = new ArrayList<>();
-
- for (Plugin plugin : plugins)
- {
- Plugin clonedPlugin = new Plugin();
- clonedPlugin.setArtifactId(plugin.getArtifactId());
- clonedPlugin.setName(plugin.getName());
- clonedPlugin.setPrefix(plugin.getPrefix());
- result.add(plugin);
- }
-
- return result;
- }
-
- private static List<String> mergeAvailableVersions( List<String> mainAvailableVersions, List<String> sourceAvailableVersions )
- {
- if ( sourceAvailableVersions == null )
- {
- return mainAvailableVersions;
- }
-
- if ( mainAvailableVersions == null )
- {
- return ArchivaModelCloner.cloneAvailableVersions( sourceAvailableVersions );
- }
-
- List<String> merged = ArchivaModelCloner.cloneAvailableVersions( mainAvailableVersions );
-
- for ( String sourceVersion : sourceAvailableVersions )
- {
- if ( !merged.contains( sourceVersion ) )
- {
- merged.add( sourceVersion );
- }
- }
-
- return merged;
- }
-}
+++ /dev/null
-package org.apache.archiva.repository.metadata;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.model.ArchivaRepositoryMetadata;
-import org.apache.archiva.model.Plugin;
-import org.apache.archiva.repository.storage.StorageAsset;
-import org.apache.archiva.xml.XMLException;
-import org.apache.archiva.xml.XMLWriter;
-import org.apache.archiva.xml.XmlUtil;
-import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-
-import javax.xml.parsers.ParserConfigurationException;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * RepositoryMetadataWriter
- */
-public class RepositoryMetadataWriter
-{
- private static final Logger log = LoggerFactory.getLogger(RepositoryMetadataWriter.class);
-
- public static void write( ArchivaRepositoryMetadata metadata, StorageAsset outputFile )
- throws RepositoryMetadataException
- {
- boolean thrown = false;
- try (OutputStreamWriter writer = new OutputStreamWriter( outputFile.getWriteStream(true)))
- {
- write( metadata, writer );
- writer.flush();
- }
- catch ( IOException e )
- {
- thrown = true;
- throw new RepositoryMetadataException(
- "Unable to write metadata file: " + outputFile.getPath() + " - " + e.getMessage(), e );
- }
- finally
- {
- if ( thrown )
- {
- try {
- outputFile.getStorage().removeAsset(outputFile);
- } catch (IOException e) {
- log.error("Could not remove asset {}", outputFile);
- }
- }
- }
- }
-
- public static void write( ArchivaRepositoryMetadata metadata, Writer writer )
- throws RepositoryMetadataException
- {
- Document doc = null;
- try {
- doc = XmlUtil.createDocument();
- } catch (ParserConfigurationException e) {
- throw new RepositoryMetadataException("Could not create xml doc " + e.getMessage(), e);
- }
-
- Element root = doc.createElement( "metadata" );
- doc.appendChild(root);
-
- addOptionalElementText( root, "groupId", metadata.getGroupId() );
- addOptionalElementText( root, "artifactId", metadata.getArtifactId() );
- addOptionalElementText( root, "version", metadata.getVersion() );
-
- if ( CollectionUtils.isNotEmpty( metadata.getPlugins() ) )
- {
- Element plugins = XmlUtil.addChild(root, "plugins" );
-
- List<Plugin> pluginList = metadata.getPlugins();
- Collections.sort( pluginList, PluginComparator.INSTANCE );
-
- for ( Plugin plugin : metadata.getPlugins() )
- {
- Element p = XmlUtil.addChild(plugins, "plugin" );
- XmlUtil.addChild(doc, p, "prefix" ).setTextContent( plugin.getPrefix() );
- XmlUtil.addChild(doc, p, "artifactId" ).setTextContent( plugin.getArtifactId() );
- addOptionalElementText( p, "name", plugin.getName() );
- }
- }
-
- if ( CollectionUtils.isNotEmpty( metadata.getAvailableVersions() ) //
- || StringUtils.isNotBlank( metadata.getReleasedVersion() ) //
- || StringUtils.isNotBlank( metadata.getLatestVersion() ) //
- || StringUtils.isNotBlank( metadata.getLastUpdated() ) //
- || ( metadata.getSnapshotVersion() != null ) )
- {
- Element versioning = XmlUtil.addChild(root, "versioning" );
-
- addOptionalElementText( versioning, "latest", metadata.getLatestVersion() );
- addOptionalElementText( versioning, "release", metadata.getReleasedVersion() );
-
- if ( metadata.getSnapshotVersion() != null )
- {
- Element snapshot = XmlUtil.addChild(versioning, "snapshot" );
- String bnum = String.valueOf( metadata.getSnapshotVersion().getBuildNumber() );
- addOptionalElementText( snapshot, "buildNumber", bnum );
- addOptionalElementText( snapshot, "timestamp", metadata.getSnapshotVersion().getTimestamp() );
- }
-
- if ( CollectionUtils.isNotEmpty( metadata.getAvailableVersions() ) )
- {
- Element versions = XmlUtil.addChild(versioning, "versions" );
- Iterator<String> it = metadata.getAvailableVersions().iterator();
- while ( it.hasNext() )
- {
- String version = it.next();
- XmlUtil.addChild(versions, "version" ).setTextContent( version );
- }
- }
-
- addOptionalElementText( versioning, "lastUpdated", metadata.getLastUpdated() );
- }
-
- try
- {
- XMLWriter.write( doc, writer );
- }
- catch ( XMLException e )
- {
- throw new RepositoryMetadataException( "Unable to write xml contents to writer: " + e.getMessage(), e );
- }
- }
-
- private static void addOptionalElementText( Element elem, String elemName, String text )
- {
- if ( StringUtils.isBlank( text ) )
- {
- return;
- }
-
- XmlUtil.addChild(elem, elemName ).setTextContent( text );
- }
-
- private static class PluginComparator
- implements Comparator<Plugin>
- {
- private static final PluginComparator INSTANCE = new PluginComparator();
-
- @Override
- public int compare( Plugin plugin, Plugin plugin2 )
- {
- if ( plugin.getPrefix() != null && plugin2.getPrefix() != null )
- {
- return plugin.getPrefix().compareTo( plugin2.getPrefix() );
- }
- if ( plugin.getName() != null && plugin2.getName() != null )
- {
- return plugin.getName().compareTo( plugin2.getName() );
- }
- // we assume artifactId is not null which sounds good :-)
- return plugin.getArtifactId().compareTo( plugin2.getArtifactId() );
- }
- }
-}
--- /dev/null
+package org.apache.archiva.repository.internal.metadata;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.checksum.ChecksumAlgorithm;
+import org.apache.archiva.checksum.ChecksummedFile;
+import org.apache.archiva.common.utils.PathUtil;
+import org.apache.archiva.common.utils.VersionComparator;
+import org.apache.archiva.common.utils.VersionUtil;
+import org.apache.archiva.configuration.ArchivaConfiguration;
+import org.apache.archiva.configuration.ConfigurationNames;
+import org.apache.archiva.configuration.FileTypes;
+import org.apache.archiva.configuration.ProxyConnectorConfiguration;
+import org.apache.archiva.maven2.metadata.MavenMetadataReader;
+import org.apache.archiva.model.ArchivaRepositoryMetadata;
+import org.apache.archiva.model.ArtifactReference;
+import org.apache.archiva.model.Plugin;
+import org.apache.archiva.model.ProjectReference;
+import org.apache.archiva.model.SnapshotVersion;
+import org.apache.archiva.model.VersionedReference;
+import org.apache.archiva.redback.components.registry.Registry;
+import org.apache.archiva.redback.components.registry.RegistryListener;
+import org.apache.archiva.repository.ContentNotFoundException;
+import org.apache.archiva.repository.LayoutException;
+import org.apache.archiva.repository.ManagedRepositoryContent;
+import org.apache.archiva.repository.RemoteRepositoryContent;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.xml.XMLException;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.math.NumberUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+import javax.inject.Named;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.stream.Stream;
+
+/**
+ * MetadataTools
+ *
+ *
+ */
+@Service( "metadataTools#default" )
+public class MetadataTools
+ implements RegistryListener
+{
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+ public static final String MAVEN_METADATA = "maven-metadata.xml";
+
+ public static final String MAVEN_ARCHETYPE_CATALOG ="archetype-catalog.xml";
+
+ private static final char PATH_SEPARATOR = '/';
+
+ private static final char GROUP_SEPARATOR = '.';
+
+ /**
+ *
+ */
+ @Inject
+ @Named( value = "archivaConfiguration#default" )
+ private ArchivaConfiguration configuration;
+
+ /**
+ *
+ */
+ @Inject
+ @Named( value = "fileTypes" )
+ private FileTypes filetypes;
+
+ private List<ChecksumAlgorithm> algorithms = Arrays.asList(ChecksumAlgorithm.SHA256, ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 );
+
+ private List<String> artifactPatterns;
+
+ private Map<String, Set<String>> proxies;
+
+ private static final char NUMS[] = new char[]{ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' };
+
+ private SimpleDateFormat lastUpdatedFormat;
+
    /**
     * Initializes the timestamp format used for the maven metadata
     * "lastUpdated" field (yyyyMMddHHmmss, interpreted in UTC).
     * NOTE(review): SimpleDateFormat is not thread-safe and this shared
     * instance is used from several methods of a singleton service —
     * confirm single-threaded use or guard access.
     */
    public MetadataTools()
    {
        lastUpdatedFormat = new SimpleDateFormat( "yyyyMMddHHmmss" );
        lastUpdatedFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    }
+
+ @Override
+ public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
+ {
+ if ( ConfigurationNames.isProxyConnector( propertyName ) )
+ {
+ initConfigVariables();
+ }
+ }
+
    /**
     * No-op: this listener only reacts after a configuration change has
     * been applied (see {@link #afterConfigurationChange}).
     */
    @Override
    public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
    {
        /* nothing to do */
    }
+
+ /**
+ * Gather the set of snapshot versions found in a particular versioned reference.
+ *
+ * @return the Set of snapshot artifact versions found.
+ * @throws LayoutException
+ * @throws ContentNotFoundException
+ */
+ public Set<String> gatherSnapshotVersions( ManagedRepositoryContent managedRepository,
+ VersionedReference reference )
+ throws LayoutException, IOException, ContentNotFoundException
+ {
+ Set<String> foundVersions = managedRepository.getVersions( reference );
+
+ // Next gather up the referenced 'latest' versions found in any proxied repositories
+ // maven-metadata-${proxyId}.xml files that may be present.
+
+ // Does this repository have a set of remote proxied repositories?
+ Set<String> proxiedRepoIds = this.proxies.get( managedRepository.getId() );
+
+ if ( CollectionUtils.isNotEmpty( proxiedRepoIds ) )
+ {
+ String baseVersion = VersionUtil.getBaseVersion( reference.getVersion() );
+ baseVersion = baseVersion.substring( 0, baseVersion.indexOf( VersionUtil.SNAPSHOT ) - 1 );
+
+ // Add in the proxied repo version ids too.
+ Iterator<String> it = proxiedRepoIds.iterator();
+ while ( it.hasNext() )
+ {
+ String proxyId = it.next();
+
+ ArchivaRepositoryMetadata proxyMetadata = readProxyMetadata( managedRepository, reference, proxyId );
+ if ( proxyMetadata == null )
+ {
+ // There is no proxy metadata, skip it.
+ continue;
+ }
+
+ // Is there some snapshot info?
+ SnapshotVersion snapshot = proxyMetadata.getSnapshotVersion();
+ if ( snapshot != null )
+ {
+ String timestamp = snapshot.getTimestamp();
+ int buildNumber = snapshot.getBuildNumber();
+
+ // Only interested in the timestamp + buildnumber.
+ if ( StringUtils.isNotBlank( timestamp ) && ( buildNumber > 0 ) )
+ {
+ foundVersions.add( baseVersion + "-" + timestamp + "-" + buildNumber );
+ }
+ }
+ }
+ }
+
+ return foundVersions;
+ }
+
+ /**
+ * Take a path to a maven-metadata.xml, and attempt to translate it to a VersionedReference.
+ *
+ * @param path
+ * @return
+ */
+ public VersionedReference toVersionedReference( String path )
+ throws RepositoryMetadataException
+ {
+ if ( !path.endsWith( "/" + MAVEN_METADATA ) )
+ {
+ throw new RepositoryMetadataException( "Cannot convert to versioned reference, not a metadata file. " );
+ }
+
+ VersionedReference reference = new VersionedReference();
+
+ String normalizedPath = StringUtils.replace( path, "\\", "/" );
+ String pathParts[] = StringUtils.split( normalizedPath, '/' );
+
+ int versionOffset = pathParts.length - 2;
+ int artifactIdOffset = versionOffset - 1;
+ int groupIdEnd = artifactIdOffset - 1;
+
+ reference.setVersion( pathParts[versionOffset] );
+
+ if ( !hasNumberAnywhere( reference.getVersion() ) )
+ {
+ // Scary check, but without it, all paths are version references;
+ throw new RepositoryMetadataException(
+ "Not a versioned reference, as version id on path has no number in it." );
+ }
+
+ reference.setArtifactId( pathParts[artifactIdOffset] );
+
+ StringBuilder gid = new StringBuilder();
+ for ( int i = 0; i <= groupIdEnd; i++ )
+ {
+ if ( i > 0 )
+ {
+ gid.append( "." );
+ }
+ gid.append( pathParts[i] );
+ }
+
+ reference.setGroupId( gid.toString() );
+
+ return reference;
+ }
+
+ private boolean hasNumberAnywhere( String version )
+ {
+ return StringUtils.indexOfAny( version, NUMS ) != ( -1 );
+ }
+
+ public ProjectReference toProjectReference( String path )
+ throws RepositoryMetadataException
+ {
+ if ( !path.endsWith( "/" + MAVEN_METADATA ) )
+ {
+ throw new RepositoryMetadataException( "Cannot convert to versioned reference, not a metadata file. " );
+ }
+
+ ProjectReference reference = new ProjectReference();
+
+ String normalizedPath = StringUtils.replace( path, "\\", "/" );
+ String pathParts[] = StringUtils.split( normalizedPath, '/' );
+
+ // Assume last part of the path is the version.
+
+ int artifactIdOffset = pathParts.length - 2;
+ int groupIdEnd = artifactIdOffset - 1;
+
+ reference.setArtifactId( pathParts[artifactIdOffset] );
+
+ StringBuilder gid = new StringBuilder();
+ for ( int i = 0; i <= groupIdEnd; i++ )
+ {
+ if ( i > 0 )
+ {
+ gid.append( "." );
+ }
+ gid.append( pathParts[i] );
+ }
+
+ reference.setGroupId( gid.toString() );
+
+ return reference;
+ }
+
+
+
+ public String toPath( ProjectReference reference )
+ {
+ StringBuilder path = new StringBuilder();
+
+ path.append( formatAsDirectory( reference.getGroupId() ) ).append( PATH_SEPARATOR );
+ path.append( reference.getArtifactId() ).append( PATH_SEPARATOR );
+ path.append( MAVEN_METADATA );
+
+ return path.toString();
+ }
+
+ public String toPath( VersionedReference reference )
+ {
+ StringBuilder path = new StringBuilder();
+
+ path.append( formatAsDirectory( reference.getGroupId() ) ).append( PATH_SEPARATOR );
+ path.append( reference.getArtifactId() ).append( PATH_SEPARATOR );
+ if ( reference.getVersion() != null )
+ {
+ // add the version only if it is present
+ path.append( VersionUtil.getBaseVersion( reference.getVersion() ) ).append( PATH_SEPARATOR );
+ }
+ path.append( MAVEN_METADATA );
+
+ return path.toString();
+ }
+
    /**
     * Converts a dotted group id (org.apache.archiva) into its directory
     * form (org/apache/archiva).
     */
    private String formatAsDirectory( String directory )
    {
        return directory.replace( GROUP_SEPARATOR, PATH_SEPARATOR );
    }
+
    /**
     * Adjusts a path for a metadata.xml file to its repository specific path.
     *
     * @param repository the repository to base new path off of.
     * @param path the path to the metadata.xml file to adjust the name of.
     * @return the newly adjusted path reference to the repository specific metadata path.
     */
    public String getRepositorySpecificName( RemoteRepositoryContent repository, String path )
    {
        // delegates, using the remote repository's id as the proxy id
        return getRepositorySpecificName( repository.getId(), path );
    }
+
+ /**
+ * Adjusts a path for a metadata.xml file to its repository specific path.
+ *
+ * @param proxyId the repository id to base new path off of.
+ * @param path the path to the metadata.xml file to adjust the name of.
+ * @return the newly adjusted path reference to the repository specific metadata path.
+ */
+ public String getRepositorySpecificName( String proxyId, String path )
+ {
+ StringBuilder ret = new StringBuilder();
+
+ int idx = path.lastIndexOf( '/' );
+ if ( idx > 0 )
+ {
+ ret.append( path.substring( 0, idx + 1 ) );
+ }
+
+ // TODO: need to filter out 'bad' characters from the proxy id.
+ ret.append( "maven-metadata-" ).append( proxyId ).append( ".xml" );
+
+ return ret.toString();
+ }
+
+ @PostConstruct
+ public void initialize()
+ {
+ assert(configuration != null);
+ this.artifactPatterns = new ArrayList<>();
+ this.proxies = new HashMap<>();
+ initConfigVariables();
+
+ configuration.addChangeListener( this );
+ }
+
+ public ArchivaRepositoryMetadata readProxyMetadata( ManagedRepositoryContent managedRepository,
+ ProjectReference reference, String proxyId )
+ {
+ String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) );
+ StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
+
+ if ( !metadataFile.exists() || metadataFile.isContainer())
+ {
+ // Nothing to do. return null.
+ return null;
+ }
+
+ try
+ {
+ return MavenMetadataReader.read( metadataFile );
+ }
+ catch (XMLException | IOException e )
+ {
+ // TODO: [monitor] consider a monitor for this event.
+ // TODO: consider a read-redo on monitor return code?
+ log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
+ return null;
+ }
+ }
+
+ public ArchivaRepositoryMetadata readProxyMetadata( ManagedRepositoryContent managedRepository,
+ String logicalResource, String proxyId )
+ {
+ String metadataPath = getRepositorySpecificName( proxyId, logicalResource );
+ StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
+
+ if ( !metadataFile.exists() || metadataFile.isContainer())
+ {
+ // Nothing to do. return null.
+ return null;
+ }
+
+ try
+ {
+ return MavenMetadataReader.read( metadataFile );
+ }
+ catch (XMLException | IOException e )
+ {
+ // TODO: [monitor] consider a monitor for this event.
+ // TODO: consider a read-redo on monitor return code?
+ log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
+ return null;
+ }
+ }
+
+ public ArchivaRepositoryMetadata readProxyMetadata( ManagedRepositoryContent managedRepository,
+ VersionedReference reference, String proxyId )
+ {
+ String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) );
+ StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
+
+ if ( !metadataFile.exists() || metadataFile.isContainer())
+ {
+ // Nothing to do. return null.
+ return null;
+ }
+
+ try
+ {
+ return MavenMetadataReader.read( metadataFile );
+ }
+ catch (XMLException | IOException e )
+ {
+ // TODO: [monitor] consider a monitor for this event.
+ // TODO: consider a read-redo on monitor return code?
+ log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
+ return null;
+ }
+ }
+
    /**
     * Merges the managed repository's own metadata with every proxy-provided
     * metadata file for the given logical resource and writes the merged
     * result (plus regenerated checksums) back to the repository.
     *
     * @param managedRepository the repository whose metadata file is updated
     * @param logicalResource   the repository path of the maven-metadata.xml
     * @throws RepositoryMetadataException if merging or writing the metadata fails
     */
    public void updateMetadata( ManagedRepositoryContent managedRepository, String logicalResource )
        throws RepositoryMetadataException
    {
        final StorageAsset metadataFile = managedRepository.getRepository().getAsset( logicalResource );
        ArchivaRepositoryMetadata metadata = null;

        //Gather and merge all metadata available
        List<ArchivaRepositoryMetadata> metadatas =
            getMetadatasForManagedRepository( managedRepository, logicalResource );
        for ( ArchivaRepositoryMetadata proxiedMetadata : metadatas )
        {
            if ( metadata == null )
            {
                // first entry becomes the merge base; the rest are folded in
                metadata = proxiedMetadata;
                continue;
            }
            metadata = RepositoryMetadataMerge.merge( metadata, proxiedMetadata );
        }

        if ( metadata == null )
        {
            log.debug( "No metadata to update for {}", logicalResource );
            return;
        }

        Set<String> availableVersions = new HashSet<String>();
        List<String> metadataAvailableVersions = metadata.getAvailableVersions();
        if ( metadataAvailableVersions != null )
        {
            availableVersions.addAll( metadataAvailableVersions );
        }
        // also pick up versions only visible as sibling directories containing poms
        availableVersions = findPossibleVersions( availableVersions, metadataFile.getParent() );

        if ( availableVersions.size() > 0 )
        {
            updateMetadataVersions( availableVersions, metadata );
        }

        RepositoryMetadataWriter.write( metadata, metadataFile );

        // regenerate the checksum side-car files for the freshly written metadata
        ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
        checksum.fixChecksums( algorithms );
    }
+
+ /**
+ * Skims the parent directory of a metadata in vain hope of finding
+ * subdirectories that contain poms.
+ *
+ * @param metadataParentDirectory
+ * @return origional set plus newly found versions
+ */
+ private Set<String> findPossibleVersions( Set<String> versions, StorageAsset metadataParentDirectory )
+ {
+
+ Set<String> result = new HashSet<String>( versions );
+
+ metadataParentDirectory.list().stream().filter(asset ->
+ asset.isContainer()).filter(asset -> {
+ return asset.list().stream().anyMatch(f -> !f.isContainer() && f.getName().endsWith(".pom"));
+ }
+ ).forEach( p -> result.add(p.getName()));
+
+ return result;
+ }
+
+ private List<ArchivaRepositoryMetadata> getMetadatasForManagedRepository(
+ ManagedRepositoryContent managedRepository, String logicalResource )
+ {
+ List<ArchivaRepositoryMetadata> metadatas = new ArrayList<>();
+ StorageAsset file = managedRepository.getRepository().getAsset( logicalResource );
+
+ if ( file.exists() )
+ {
+ try
+ {
+ ArchivaRepositoryMetadata existingMetadata = MavenMetadataReader.read( file );
+ if ( existingMetadata != null )
+ {
+ metadatas.add( existingMetadata );
+ }
+ }
+ catch (XMLException | IOException e )
+ {
+ log.debug( "Could not read metadata at {}. Metadata will be removed.", file.getPath() );
+ try {
+ file.getStorage().removeAsset(file);
+ } catch (IOException ex) {
+ log.error("Could not remove asset {}", file.getPath());
+ }
+ }
+ }
+
+ Set<String> proxyIds = proxies.get( managedRepository.getId() );
+ if ( proxyIds != null )
+ {
+ for ( String proxyId : proxyIds )
+ {
+ ArchivaRepositoryMetadata proxyMetadata =
+ readProxyMetadata( managedRepository, logicalResource, proxyId );
+ if ( proxyMetadata != null )
+ {
+ metadatas.add( proxyMetadata );
+ }
+ }
+ }
+
+ return metadatas;
+ }
+
+
    /**
     * Update the metadata to represent the all versions/plugins of
     * the provided groupId:artifactId project or group reference,
     * based off of information present in the repository,
     * the maven-metadata.xml files, and the proxy/repository specific
     * metadata file contents.
     * <p>
     * We must treat this as a group or a project metadata file as there is no way to know in advance
     *
     * @param managedRepository the managed repository where the metadata is kept.
     * @param reference the reference to update.
     * @throws LayoutException
     * @throws RepositoryMetadataException if the existing metadata cannot be parsed or writing fails
     * @throws IOException
     * @throws ContentNotFoundException
     * @deprecated
     */
    public void updateMetadata( ManagedRepositoryContent managedRepository, ProjectReference reference )
        throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException
    {

        StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) );

        long lastUpdated = getExistingLastUpdated( metadataFile );

        ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
        metadata.setGroupId( reference.getGroupId() );
        metadata.setArtifactId( reference.getArtifactId() );

        // Gather up all versions found in the managed repository.
        Set<String> allVersions = managedRepository.getVersions( reference );

        // Gather up all plugins found in the managed repository.
        // TODO: do we know this information instead?
//        Set<Plugin> allPlugins = managedRepository.getPlugins( reference );
        Set<Plugin> allPlugins;
        if ( metadataFile.exists())
        {
            try
            {
                // seed the plugin set from the existing metadata file
                allPlugins = new LinkedHashSet<Plugin>( MavenMetadataReader.read( metadataFile ).getPlugins() );
            }
            catch ( XMLException e )
            {
                throw new RepositoryMetadataException( e.getMessage(), e );
            }
        }
        else
        {
            allPlugins = new LinkedHashSet<Plugin>();
        }

        // Does this repository have a set of remote proxied repositories?
        Set<String> proxiedRepoIds = this.proxies.get( managedRepository.getId() );

        if ( CollectionUtils.isNotEmpty( proxiedRepoIds ) )
        {
            // Add in the proxied repo version ids too.
            Iterator<String> it = proxiedRepoIds.iterator();
            while ( it.hasNext() )
            {
                String proxyId = it.next();

                ArchivaRepositoryMetadata proxyMetadata = readProxyMetadata( managedRepository, reference, proxyId );
                if ( proxyMetadata != null )
                {
                    allVersions.addAll( proxyMetadata.getAvailableVersions() );
                    allPlugins.addAll( proxyMetadata.getPlugins() );
                    long proxyLastUpdated = getLastUpdated( proxyMetadata );

                    // keep the newest lastUpdated timestamp seen
                    lastUpdated = Math.max( lastUpdated, proxyLastUpdated );
                }
            }
        }

        if ( !allVersions.isEmpty() )
        {
            // versions found: treat this as a project-level metadata file
            updateMetadataVersions( allVersions, metadata );
        }
        else
        {
            // no versions: treat this as a group-level metadata file
            // Add the plugins to the metadata model.
            metadata.setPlugins( new ArrayList<>( allPlugins ) );

            // artifact ID was actually the last part of the group
            metadata.setGroupId( metadata.getGroupId() + "." + metadata.getArtifactId() );
            metadata.setArtifactId( null );
        }

        if ( lastUpdated > 0 )
        {
            metadata.setLastUpdatedTimestamp( toLastUpdatedDate( lastUpdated ) );
        }

        // Save the metadata model to disk.
        RepositoryMetadataWriter.write( metadata, metadataFile );
        ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
        checksum.fixChecksums( algorithms );
    }
+
+ private void updateMetadataVersions( Collection<String> allVersions, ArchivaRepositoryMetadata metadata )
+ {
+ // Sort the versions
+ List<String> sortedVersions = new ArrayList<>( allVersions );
+ Collections.sort( sortedVersions, VersionComparator.getInstance() );
+
+ // Split the versions into released and snapshots.
+ List<String> releasedVersions = new ArrayList<>();
+ List<String> snapshotVersions = new ArrayList<>();
+
+ for ( String version : sortedVersions )
+ {
+ if ( VersionUtil.isSnapshot( version ) )
+ {
+ snapshotVersions.add( version );
+ }
+ else
+ {
+ releasedVersions.add( version );
+ }
+ }
+
+ Collections.sort( releasedVersions, VersionComparator.getInstance() );
+ Collections.sort( snapshotVersions, VersionComparator.getInstance() );
+
+ String latestVersion = sortedVersions.get( sortedVersions.size() - 1 );
+ String releaseVersion = null;
+
+ if ( CollectionUtils.isNotEmpty( releasedVersions ) )
+ {
+ releaseVersion = releasedVersions.get( releasedVersions.size() - 1 );
+ }
+
+ // Add the versions to the metadata model.
+ metadata.setAvailableVersions( sortedVersions );
+
+ metadata.setLatestVersion( latestVersion );
+ metadata.setReleasedVersion( releaseVersion );
+ }
+
+ private Date toLastUpdatedDate( long lastUpdated )
+ {
+ Calendar cal = Calendar.getInstance( TimeZone.getTimeZone("UTC") );
+ cal.setTimeInMillis( lastUpdated );
+
+ return cal.getTime();
+ }
+
+ private long toLastUpdatedLong( String timestampString )
+ {
+ try
+ {
+ Date date = lastUpdatedFormat.parse( timestampString );
+ Calendar cal = Calendar.getInstance( TimeZone.getTimeZone("UTC"));
+ cal.setTime( date );
+
+ return cal.getTimeInMillis();
+ }
+ catch ( ParseException e )
+ {
+ return 0;
+ }
+ }
+
+ private long getLastUpdated( ArchivaRepositoryMetadata metadata )
+ {
+ if ( metadata == null )
+ {
+ // Doesn't exist.
+ return 0;
+ }
+
+ try
+ {
+ String lastUpdated = metadata.getLastUpdated();
+ if ( StringUtils.isBlank( lastUpdated ) )
+ {
+ // Not set.
+ return 0;
+ }
+
+ Date lastUpdatedDate = lastUpdatedFormat.parse( lastUpdated );
+ return lastUpdatedDate.getTime();
+ }
+ catch ( ParseException e )
+ {
+ // Bad format on the last updated string.
+ return 0;
+ }
+ }
+
+ private long getExistingLastUpdated( StorageAsset metadataFile )
+ {
+ if ( !metadataFile.exists() )
+ {
+ // Doesn't exist.
+ return 0;
+ }
+
+ try
+ {
+ ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile );
+
+ return getLastUpdated( metadata );
+ }
+ catch (XMLException | IOException e )
+ {
+ // Error.
+ return 0;
+ }
+ }
+
    /**
     * Update the metadata based on the following rules.
     * <p>
     * 1) If this is a SNAPSHOT reference, then utilize the proxy/repository specific
     * metadata files to represent the current / latest SNAPSHOT available.
     * 2) If this is a RELEASE reference, and the metadata file does not exist, then
     * create the metadata file with contents required of the VersionedReference
     *
     * @param managedRepository the managed repository where the metadata is kept.
     * @param reference the versioned reference to update
     * @throws LayoutException
     * @throws RepositoryMetadataException if a snapshot version cannot be classified or writing fails
     * @throws IOException
     * @throws ContentNotFoundException if a SNAPSHOT reference has no snapshot versions at all
     * @deprecated
     */
    public void updateMetadata( ManagedRepositoryContent managedRepository, VersionedReference reference )
        throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException
    {
        StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) );

        long lastUpdated = getExistingLastUpdated( metadataFile );

        ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
        metadata.setGroupId( reference.getGroupId() );
        metadata.setArtifactId( reference.getArtifactId() );

        if ( VersionUtil.isSnapshot( reference.getVersion() ) )
        {
            // Do SNAPSHOT handling.
            metadata.setVersion( VersionUtil.getBaseVersion( reference.getVersion() ) );

            // Gather up all of the versions found in the reference dir, and any
            // proxied maven-metadata.xml files.
            Set<String> snapshotVersions = gatherSnapshotVersions( managedRepository, reference );

            if ( snapshotVersions.isEmpty() )
            {
                throw new ContentNotFoundException(
                    "No snapshot versions found on reference [" + VersionedReference.toKey( reference ) + "]." );
            }

            // sort the list to determine to aide in determining the Latest version.
            List<String> sortedVersions = new ArrayList<>();
            sortedVersions.addAll( snapshotVersions );
            Collections.sort( sortedVersions, new VersionComparator() );

            String latestVersion = sortedVersions.get( sortedVersions.size() - 1 );

            if ( VersionUtil.isUniqueSnapshot( latestVersion ) )
            {
                // The latestVersion will contain the full version string "1.0-alpha-5-20070821.213044-8"
                // This needs to be broken down into ${base}-${timestamp}-${build_number}

                Matcher m = VersionUtil.UNIQUE_SNAPSHOT_PATTERN.matcher( latestVersion );
                if ( m.matches() )
                {
                    metadata.setSnapshotVersion( new SnapshotVersion() );
                    // group(3) is the build number; -1 marks an unparsable value
                    int buildNumber = NumberUtils.toInt( m.group( 3 ), -1 );
                    metadata.getSnapshotVersion().setBuildNumber( buildNumber );

                    // group(2) is the yyyyMMdd.HHmmss timestamp portion
                    Matcher mtimestamp = VersionUtil.TIMESTAMP_PATTERN.matcher( m.group( 2 ) );
                    if ( mtimestamp.matches() )
                    {
                        String tsDate = mtimestamp.group( 1 );
                        String tsTime = mtimestamp.group( 2 );

                        long snapshotLastUpdated = toLastUpdatedLong( tsDate + tsTime );

                        // newest timestamp wins
                        lastUpdated = Math.max( lastUpdated, snapshotLastUpdated );

                        metadata.getSnapshotVersion().setTimestamp( m.group( 2 ) );
                    }
                }
            }
            else if ( VersionUtil.isGenericSnapshot( latestVersion ) )
            {
                // The latestVersion ends with the generic version string.
                // Example: 1.0-alpha-5-SNAPSHOT

                metadata.setSnapshotVersion( new SnapshotVersion() );

                /* Disabled due to decision in [MRM-535].
                 * Do not set metadata.lastUpdated to file.lastModified.
                 *
                 * Should this be the last updated timestamp of the file, or in the case of an
                 * archive, the most recent timestamp in the archive?
                 *
                ArtifactReference artifact = getFirstArtifact( managedRepository, reference );

                if ( artifact == null )
                {
                    throw new IOException( "Not snapshot artifact found to reference in " + reference );
                }

                File artifactFile = managedRepository.toFile( artifact );

                if ( artifactFile.exists() )
                {
                    Date lastModified = new Date( artifactFile.lastModified() );
                    metadata.setLastUpdatedTimestamp( lastModified );
                }
                */
            }
            else
            {
                throw new RepositoryMetadataException(
                    "Unable to process snapshot version <" + latestVersion + "> reference <" + reference + ">" );
            }
        }
        else
        {
            // Do RELEASE handling.
            metadata.setVersion( reference.getVersion() );
        }

        // Set last updated
        if ( lastUpdated > 0 )
        {
            metadata.setLastUpdatedTimestamp( toLastUpdatedDate( lastUpdated ) );
        }

        // Save the metadata model to disk.
        RepositoryMetadataWriter.write( metadata, metadataFile );
        ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
        checksum.fixChecksums( algorithms );
    }
+
+ private void initConfigVariables()
+ {
+ assert(this.artifactPatterns!=null);
+ assert(proxies!=null);
+ synchronized ( this.artifactPatterns )
+ {
+ this.artifactPatterns.clear();
+
+ this.artifactPatterns.addAll( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
+ }
+
+ synchronized ( proxies )
+ {
+ this.proxies.clear();
+
+ List<ProxyConnectorConfiguration> proxyConfigs = configuration.getConfiguration().getProxyConnectors();
+ for ( ProxyConnectorConfiguration proxyConfig : proxyConfigs )
+ {
+ String key = proxyConfig.getSourceRepoId();
+
+ Set<String> remoteRepoIds = this.proxies.get( key );
+
+ if ( remoteRepoIds == null )
+ {
+ remoteRepoIds = new HashSet<String>();
+ }
+
+ remoteRepoIds.add( proxyConfig.getTargetRepoId() );
+
+ this.proxies.put( key, remoteRepoIds );
+ }
+ }
+ }
+
    /**
     * Get the first Artifact found in the provided VersionedReference location.
     *
     * @param managedRepository the repository to search within.
     * @param reference the reference to the versioned reference to search within
     * @return the ArtifactReference to the first artifact located within the versioned reference. or null if
     *         no artifact was found within the versioned reference.
     * @throws IOException if the versioned reference is invalid (example: doesn't exist, or isn't a directory)
     * @throws LayoutException if the found file cannot be converted into an artifact reference
     */
    public ArtifactReference getFirstArtifact( ManagedRepositoryContent managedRepository,
                                               VersionedReference reference )
        throws LayoutException, IOException
    {
        String path = toPath( reference );

        // strip the trailing maven-metadata.xml, leaving the version directory
        int idx = path.lastIndexOf( '/' );
        if ( idx > 0 )
        {
            path = path.substring( 0, idx );
        }

        Path repoDir = Paths.get( managedRepository.getRepoRoot(), path );

        if ( !Files.exists(repoDir))
        {
            throw new IOException( "Unable to gather the list of snapshot versions on a non-existant directory: "
                + repoDir.toAbsolutePath() );
        }

        if ( !Files.isDirectory( repoDir ))
        {
            throw new IOException(
                "Unable to gather the list of snapshot versions on a non-directory: " + repoDir.toAbsolutePath() );
        }

        // try-with-resources: Files.list must be closed to release the directory handle
        try(Stream<Path> stream = Files.list(repoDir)) {
            // first regular file (relative to the repo root) matching the artifact patterns
            String result = stream.filter( Files::isRegularFile ).map( path1 ->
                PathUtil.getRelative( managedRepository.getRepoRoot(), path1 )
            ).filter( filetypes::matchesArtifactPattern ).findFirst().orElse( null );
            if (result!=null) {
                return managedRepository.toArtifactReference( result );
            }
        }
        // No artifact was found.
        return null;
    }
+
    /** @return the archiva configuration used by this instance. */
    public ArchivaConfiguration getConfiguration()
    {
        return configuration;
    }

    /** Replaces the archiva configuration used by this instance. */
    public void setConfiguration( ArchivaConfiguration configuration )
    {
        this.configuration = configuration;
    }

    /** @return the file type registry used for artifact pattern matching. */
    public FileTypes getFiletypes()
    {
        return filetypes;
    }

    /** Replaces the file type registry used for artifact pattern matching. */
    public void setFiletypes( FileTypes filetypes )
    {
        this.filetypes = filetypes;
    }
+}
--- /dev/null
+package org.apache.archiva.repository.internal.metadata;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.repository.RepositoryException;
+
+/**
+ * RepositoryMetadataException
+ *
+ *
+ */
public class RepositoryMetadataException
    extends RepositoryException
{

    /** Creates an exception without a message or cause. */
    public RepositoryMetadataException()
    {
        super();
    }

    /**
     * @param message description of the metadata failure
     * @param cause the underlying error
     */
    public RepositoryMetadataException( String message, Throwable cause )
    {
        super( message, cause );
    }

    /**
     * @param message description of the metadata failure
     */
    public RepositoryMetadataException( String message )
    {
        super( message );
    }

    /**
     * @param cause the underlying error
     */
    public RepositoryMetadataException( Throwable cause )
    {
        super( cause );
    }
}
--- /dev/null
+package org.apache.archiva.repository.internal.metadata;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.model.ArchivaModelCloner;
+import org.apache.archiva.model.ArchivaRepositoryMetadata;
+import org.apache.archiva.model.Plugin;
+import org.apache.archiva.model.SnapshotVersion;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * RepositoryMetadataMerge
+ *
+ *
+ */
+public class RepositoryMetadataMerge
+{
+    /**
+     * Merges the source metadata into the main metadata and returns a new instance.
+     * Scalar fields keep the main value and fall back to the source value when the
+     * main one is empty; list fields (available versions, plugins) are unioned.
+     *
+     * @param mainMetadata   the primary metadata, must not be null
+     * @param sourceMetadata the metadata merged in, must not be null
+     * @return a new merged metadata instance
+     * @throws RepositoryMetadataException if either argument is null
+     */
+    public static ArchivaRepositoryMetadata merge( final ArchivaRepositoryMetadata mainMetadata,
+                                                   final ArchivaRepositoryMetadata sourceMetadata )
+        throws RepositoryMetadataException
+    {
+        if ( mainMetadata == null )
+        {
+            throw new RepositoryMetadataException( "Cannot merge a null main project." );
+        }
+
+        if ( sourceMetadata == null )
+        {
+            throw new RepositoryMetadataException( "Cannot copy to a null parent project." );
+        }
+
+        ArchivaRepositoryMetadata merged = new ArchivaRepositoryMetadata();
+
+        merged.setGroupId( merge( mainMetadata.getGroupId(), sourceMetadata.getGroupId() ) );
+        merged.setArtifactId( merge( mainMetadata.getArtifactId(), sourceMetadata.getArtifactId() ) );
+        merged.setVersion( merge( mainMetadata.getVersion(), sourceMetadata.getVersion() ) );
+        merged.setReleasedVersion( merge( mainMetadata.getReleasedVersion(), sourceMetadata.getReleasedVersion() ) );
+        merged.setSnapshotVersion( merge( mainMetadata.getSnapshotVersion(), sourceMetadata.getSnapshotVersion() ) );
+        merged.setAvailableVersions( mergeAvailableVersions( mainMetadata.getAvailableVersions(), sourceMetadata.getAvailableVersions() ) );
+        merged.setPlugins( mergePlugins( mainMetadata.getPlugins(), sourceMetadata.getPlugins() ) );
+
+        // Don't set lastUpdated when neither side yielded a parsable timestamp (-1 sentinel).
+        long lastUpdated = mergeTimestamp( mainMetadata.getLastUpdated(), sourceMetadata.getLastUpdated() );
+        if ( lastUpdated > -1 )
+        {
+            merged.setLastUpdated( Long.toString( lastUpdated ) );
+        }
+
+        return merged;
+    }
+
+    // True when the value is null or contains only whitespace.
+    private static boolean empty( String val )
+    {
+        if ( val == null )
+        {
+            return true;
+        }
+
+        return ( val.trim().length() <= 0 );
+    }
+
+    /**
+     * Merges two string timestamps, returning the newer one as a long,
+     * or -1 when neither side provides a parsable value.
+     */
+    private static long mergeTimestamp( String mainTimestamp, String sourceTimestamp )
+    {
+        if ( sourceTimestamp == null && mainTimestamp != null )
+        {
+            return convertTimestampToLong( mainTimestamp );
+        }
+
+        if ( mainTimestamp == null && sourceTimestamp != null )
+        {
+            return convertTimestampToLong( sourceTimestamp );
+        }
+
+        if ( sourceTimestamp == null && mainTimestamp == null )
+        {
+            return -1;
+        }
+
+        return mergeTimestamp( convertTimestampToLong( mainTimestamp ), convertTimestampToLong( sourceTimestamp ) );
+    }
+
+    // The newer (larger) of the two timestamps wins.
+    private static long mergeTimestamp( long mainTimestamp, long sourceTimestamp )
+    {
+        return Math.max( mainTimestamp, sourceTimestamp );
+    }
+
+    /**
+     * Merges two snapshot versions, keeping the timestamp and build number of
+     * whichever side was updated more recently (main wins on a tie).
+     */
+    private static SnapshotVersion merge( SnapshotVersion mainSnapshotVersion, SnapshotVersion sourceSnapshotVersion )
+    {
+        if ( sourceSnapshotVersion == null )
+        {
+            return mainSnapshotVersion;
+        }
+
+        if ( mainSnapshotVersion == null )
+        {
+            return ArchivaModelCloner.clone( sourceSnapshotVersion );
+        }
+
+        SnapshotVersion merged = new SnapshotVersion();
+
+        long mainSnapshotLastUpdated = convertTimestampToLong( mainSnapshotVersion.getTimestamp() );
+        long sourceSnapshotLastUpdated = convertTimestampToLong( sourceSnapshotVersion.getTimestamp() );
+
+        long lastUpdated = mergeTimestamp( mainSnapshotLastUpdated, sourceSnapshotLastUpdated );
+
+        if ( lastUpdated == mainSnapshotLastUpdated )
+        {
+            merged.setTimestamp( mainSnapshotVersion.getTimestamp() );
+            merged.setBuildNumber( mainSnapshotVersion.getBuildNumber() );
+        }
+        else
+        {
+            merged.setTimestamp( sourceSnapshotVersion.getTimestamp() );
+            merged.setBuildNumber( sourceSnapshotVersion.getBuildNumber() );
+        }
+
+        return merged;
+    }
+
+    // Converts a dotted snapshot timestamp (e.g. "yyyyMMdd.HHmmss") to a long by
+    // stripping the dot; returns -1 for null or unparsable input.
+    private static long convertTimestampToLong( String timestamp )
+    {
+        if ( timestamp == null )
+        {
+            return -1;
+        }
+
+        return getLongFromTimestampSafely( StringUtils.replace( timestamp, ".", "" ) );
+    }
+
+    private static long getLongFromTimestampSafely( String timestampString )
+    {
+        try
+        {
+            return Long.parseLong( timestampString );
+        }
+        catch ( NumberFormatException e )
+        {
+            // Not a numeric timestamp - signal "no value" rather than failing the merge.
+            return -1;
+        }
+    }
+
+    // The main value wins unless it is empty and the source has content.
+    private static String merge( String main, String source )
+    {
+        if ( empty( main ) && !empty( source ) )
+        {
+            return source;
+        }
+
+        return main;
+    }
+
+    // Union of both plugin lists: cloned main plugins first, then source plugins
+    // that are not already present.
+    private static List<Plugin> mergePlugins( List<Plugin> mainPlugins, List<Plugin> sourcePlugins )
+    {
+        if ( sourcePlugins == null )
+        {
+            return mainPlugins;
+        }
+
+        if ( mainPlugins == null )
+        {
+            return clonePlugins( sourcePlugins );
+        }
+
+        List<Plugin> merged = clonePlugins( mainPlugins );
+
+        for ( Plugin plugin : sourcePlugins )
+        {
+            if ( !merged.contains( plugin ) )
+            {
+                merged.add( plugin );
+            }
+        }
+
+        return merged;
+    }
+
+    /**
+     * Clones a list of plugins.
+     *
+     * This method exists because ArchivaModelCloner.clonePlugins()
+     * only works with artifact references.
+     *
+     * @param plugins the plugins to clone, may be null
+     * @return list of cloned plugins, or null if the input was null
+     */
+    private static List<Plugin> clonePlugins( List<Plugin> plugins )
+    {
+        if ( plugins == null )
+        {
+            return null;
+        }
+
+        List<Plugin> result = new ArrayList<>();
+
+        for ( Plugin plugin : plugins )
+        {
+            Plugin clonedPlugin = new Plugin();
+            clonedPlugin.setArtifactId( plugin.getArtifactId() );
+            clonedPlugin.setName( plugin.getName() );
+            clonedPlugin.setPrefix( plugin.getPrefix() );
+            // Bug fix: add the clone, not the original. Previously the freshly
+            // built clonedPlugin was discarded and the returned list aliased the
+            // input's elements, so this was not a defensive copy at all.
+            result.add( clonedPlugin );
+        }
+
+        return result;
+    }
+
+    // Union of both version lists: cloned main versions first, then source
+    // versions that are not already present.
+    private static List<String> mergeAvailableVersions( List<String> mainAvailableVersions, List<String> sourceAvailableVersions )
+    {
+        if ( sourceAvailableVersions == null )
+        {
+            return mainAvailableVersions;
+        }
+
+        if ( mainAvailableVersions == null )
+        {
+            return ArchivaModelCloner.cloneAvailableVersions( sourceAvailableVersions );
+        }
+
+        List<String> merged = ArchivaModelCloner.cloneAvailableVersions( mainAvailableVersions );
+
+        for ( String sourceVersion : sourceAvailableVersions )
+        {
+            if ( !merged.contains( sourceVersion ) )
+            {
+                merged.add( sourceVersion );
+            }
+        }
+
+        return merged;
+    }
+}
--- /dev/null
+package org.apache.archiva.repository.internal.metadata;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.model.ArchivaRepositoryMetadata;
+import org.apache.archiva.model.Plugin;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.xml.XMLException;
+import org.apache.archiva.xml.XMLWriter;
+import org.apache.archiva.xml.XmlUtil;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+import javax.xml.parsers.ParserConfigurationException;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * RepositoryMetadataWriter
+ */
+public class RepositoryMetadataWriter
+{
+    private static final Logger log = LoggerFactory.getLogger( RepositoryMetadataWriter.class );
+
+    /**
+     * Writes the metadata to the given storage asset.
+     * On failure the partially written asset is removed so no corrupt
+     * metadata file is left behind.
+     *
+     * @param metadata the metadata to serialize
+     * @param outputFile the asset to write to
+     * @throws RepositoryMetadataException if the metadata could not be written
+     */
+    public static void write( ArchivaRepositoryMetadata metadata, StorageAsset outputFile )
+        throws RepositoryMetadataException
+    {
+        boolean thrown = false;
+        try (OutputStreamWriter writer = new OutputStreamWriter( outputFile.getWriteStream(true)))
+        {
+            write( metadata, writer );
+            writer.flush();
+        }
+        catch ( IOException e )
+        {
+            thrown = true;
+            throw new RepositoryMetadataException(
+                "Unable to write metadata file: " + outputFile.getPath() + " - " + e.getMessage(), e );
+        }
+        finally
+        {
+            // Remove the partially written asset when serialization failed.
+            if ( thrown )
+            {
+                try {
+                    outputFile.getStorage().removeAsset(outputFile);
+                } catch (IOException e) {
+                    // Bug fix: pass the exception as the last argument so SLF4J
+                    // logs the stack trace; previously it was silently dropped.
+                    log.error("Could not remove asset {}", outputFile, e);
+                }
+            }
+        }
+    }
+
+    /**
+     * Writes the metadata as a maven-metadata XML document to the given writer.
+     * The writer is not closed by this method.
+     *
+     * @param metadata the metadata to serialize
+     * @param writer the target writer
+     * @throws RepositoryMetadataException if the XML document could not be created or written
+     */
+    public static void write( ArchivaRepositoryMetadata metadata, Writer writer )
+        throws RepositoryMetadataException
+    {
+        Document doc = null;
+        try {
+            doc = XmlUtil.createDocument();
+        } catch (ParserConfigurationException e) {
+            throw new RepositoryMetadataException("Could not create xml doc " + e.getMessage(), e);
+        }
+
+        Element root = doc.createElement( "metadata" );
+        doc.appendChild(root);
+
+        // Blank values are skipped entirely (no empty elements).
+        addOptionalElementText( root, "groupId", metadata.getGroupId() );
+        addOptionalElementText( root, "artifactId", metadata.getArtifactId() );
+        addOptionalElementText( root, "version", metadata.getVersion() );
+
+        if ( CollectionUtils.isNotEmpty( metadata.getPlugins() ) )
+        {
+            Element plugins = XmlUtil.addChild(root, "plugins" );
+
+            List<Plugin> pluginList = metadata.getPlugins();
+            Collections.sort( pluginList, PluginComparator.INSTANCE );
+
+            // Consistency fix: iterate the sorted pluginList reference directly
+            // (same list instance as metadata.getPlugins(), but makes the
+            // dependency on the sort above explicit).
+            for ( Plugin plugin : pluginList )
+            {
+                Element p = XmlUtil.addChild(plugins, "plugin" );
+                XmlUtil.addChild(doc, p, "prefix" ).setTextContent( plugin.getPrefix() );
+                XmlUtil.addChild(doc, p, "artifactId" ).setTextContent( plugin.getArtifactId() );
+                addOptionalElementText( p, "name", plugin.getName() );
+            }
+        }
+
+        // Only emit <versioning> when at least one versioning field has content.
+        if ( CollectionUtils.isNotEmpty( metadata.getAvailableVersions() ) //
+            || StringUtils.isNotBlank( metadata.getReleasedVersion() ) //
+            || StringUtils.isNotBlank( metadata.getLatestVersion() ) //
+            || StringUtils.isNotBlank( metadata.getLastUpdated() ) //
+            || ( metadata.getSnapshotVersion() != null ) )
+        {
+            Element versioning = XmlUtil.addChild(root, "versioning" );
+
+            addOptionalElementText( versioning, "latest", metadata.getLatestVersion() );
+            addOptionalElementText( versioning, "release", metadata.getReleasedVersion() );
+
+            if ( metadata.getSnapshotVersion() != null )
+            {
+                Element snapshot = XmlUtil.addChild(versioning, "snapshot" );
+                String bnum = String.valueOf( metadata.getSnapshotVersion().getBuildNumber() );
+                addOptionalElementText( snapshot, "buildNumber", bnum );
+                addOptionalElementText( snapshot, "timestamp", metadata.getSnapshotVersion().getTimestamp() );
+            }
+
+            if ( CollectionUtils.isNotEmpty( metadata.getAvailableVersions() ) )
+            {
+                Element versions = XmlUtil.addChild(versioning, "versions" );
+                Iterator<String> it = metadata.getAvailableVersions().iterator();
+                while ( it.hasNext() )
+                {
+                    String version = it.next();
+                    XmlUtil.addChild(versions, "version" ).setTextContent( version );
+                }
+            }
+
+            addOptionalElementText( versioning, "lastUpdated", metadata.getLastUpdated() );
+        }
+
+        try
+        {
+            XMLWriter.write( doc, writer );
+        }
+        catch ( XMLException e )
+        {
+            throw new RepositoryMetadataException( "Unable to write xml contents to writer: " + e.getMessage(), e );
+        }
+    }
+
+    // Appends a child element with the given text, skipping blank/null values.
+    private static void addOptionalElementText( Element elem, String elemName, String text )
+    {
+        if ( StringUtils.isBlank( text ) )
+        {
+            return;
+        }
+
+        XmlUtil.addChild(elem, elemName ).setTextContent( text );
+    }
+
+    /**
+     * Orders plugins by prefix when both have one, else by name when both
+     * have one, else by artifactId (assumed non-null).
+     */
+    private static class PluginComparator
+        implements Comparator<Plugin>
+    {
+        private static final PluginComparator INSTANCE = new PluginComparator();
+
+        @Override
+        public int compare( Plugin plugin, Plugin plugin2 )
+        {
+            if ( plugin.getPrefix() != null && plugin2.getPrefix() != null )
+            {
+                return plugin.getPrefix().compareTo( plugin2.getPrefix() );
+            }
+            if ( plugin.getName() != null && plugin2.getName() != null )
+            {
+                return plugin.getName().compareTo( plugin2.getName() );
+            }
+            // we assume artifactId is not null which sounds good :-)
+            return plugin.getArtifactId().compareTo( plugin2.getArtifactId() );
+        }
+    }
+}
+++ /dev/null
-package org.apache.archiva.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.configuration.ArchivaConfiguration;
-import org.apache.archiva.configuration.Configuration;
-import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.archiva.configuration.RemoteRepositoryConfiguration;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.test.context.ContextConfiguration;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardCopyOption;
-import java.util.Collection;
-
-import static org.junit.Assert.*;
-
-/**
- * Test for RepositoryRegistry
- */
-@RunWith(ArchivaSpringJUnit4ClassRunner.class)
-@ContextConfiguration(locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" })
-public class RepositoryRegistryTest
-{
-
- @Inject
- RepositoryRegistry repositoryRegistry;
-
- @Inject
- ArchivaConfiguration archivaConfiguration;
-
- private static final Path userCfg = Paths.get(System.getProperty( "user.home" ), ".m2/archiva.xml");
-
- private static Path cfgCopy;
- private static Path archivaCfg;
-
- @BeforeClass
- public static void classSetup() throws IOException, URISyntaxException
- {
- URL archivaCfgUri = Thread.currentThread().getContextClassLoader().getResource( "archiva.xml" );
- if (archivaCfgUri!=null) {
- archivaCfg = Paths.get(archivaCfgUri.toURI());
- cfgCopy = Files.createTempFile( "archiva-backup", ".xml" );
- Files.copy( archivaCfg, cfgCopy, StandardCopyOption.REPLACE_EXISTING);
- }
- }
-
- @AfterClass
- public static void classTearDown() throws IOException
- {
- if (cfgCopy!=null) {
- Files.deleteIfExists( cfgCopy );
- }
- }
-
- @Before
- public void setUp( ) throws Exception
- {
- assertNotNull( repositoryRegistry );
- Files.deleteIfExists( userCfg );
- URL archivaCfgUri = Thread.currentThread().getContextClassLoader().getResource( "archiva.xml" );
- if (archivaCfgUri!=null) {
- archivaCfg = Paths.get(archivaCfgUri.toURI());
- if (Files.exists(cfgCopy))
- {
- Files.copy( cfgCopy, archivaCfg , StandardCopyOption.REPLACE_EXISTING);
- }
- }
- archivaConfiguration.reload();
- repositoryRegistry.reload();
- }
-
- @After
- public void tearDown( ) throws Exception
- {
- Files.deleteIfExists( userCfg );
- if (cfgCopy!=null && Files.exists(cfgCopy)) {
- Files.copy(cfgCopy, archivaCfg, StandardCopyOption.REPLACE_EXISTING);
- }
- }
-
- @Test
- public void getRepositories( ) throws Exception
- {
- Collection<Repository> repos = repositoryRegistry.getRepositories( );
- assertEquals( 5, repos.size( ) );
- assertTrue(repos.stream().anyMatch( rep -> rep.getId().equals("internal") ));
- assertTrue( repos.stream( ).anyMatch( rep -> rep.getId( ).equals( "snapshots") ) );
- assertTrue(repos.stream().anyMatch( rep -> rep.getId().equals( "central") ));
- }
-
- @Test
- public void getManagedRepositories( ) throws Exception
- {
- Collection<ManagedRepository> repos = repositoryRegistry.getManagedRepositories();
- assertEquals( 4, repos.size( ) );
- assertTrue(repos.stream().anyMatch( rep -> rep.getId().equals("internal") ));
- assertTrue( repos.stream( ).anyMatch( rep -> rep.getId( ).equals( "snapshots") ) );
- }
-
- @Test
- public void getRemoteRepositories( ) throws Exception
- {
- Collection<RemoteRepository> repos = repositoryRegistry.getRemoteRepositories( );
- assertEquals( 1, repos.size( ) );
- assertTrue(repos.stream().anyMatch( rep -> rep.getId().equals( "central") ));
- }
-
- @Test
- public void getRepository( ) throws Exception
- {
- Repository repo = repositoryRegistry.getRepository( "internal" );
- assertNotNull(repo);
- assertEquals("internal", repo.getId());
- assertEquals("Archiva Managed Internal Repository", repo.getName());
- assertEquals("This is internal repository.", repo.getDescription());
- assertEquals( "default", repo.getLayout( ) );
- assertEquals("0 0 * * * ?", repo.getSchedulingDefinition());
- assertTrue(repo instanceof ManagedRepository);
- assertTrue( repo.hasIndex( ) );
- assertTrue(repo.isScanned());
- assertEquals(RepositoryType.MAVEN, repo.getType());
- }
-
- @Test
- public void getManagedRepository( ) throws Exception
- {
- ManagedRepository repo = repositoryRegistry.getManagedRepository( "internal" );
- assertNotNull(repo);
- assertEquals("internal", repo.getId());
- assertEquals("Archiva Managed Internal Repository", repo.getName());
- assertEquals("This is internal repository.", repo.getDescription());
- assertEquals( "default", repo.getLayout( ) );
- assertEquals("0 0 * * * ?", repo.getSchedulingDefinition());
- assertTrue( repo.hasIndex( ) );
- assertTrue(repo.isScanned());
- assertEquals(RepositoryType.MAVEN, repo.getType());
- assertTrue(repo.getActiveReleaseSchemes().contains(ReleaseScheme.RELEASE));
- assertFalse( repo.getActiveReleaseSchemes( ).contains( ReleaseScheme.SNAPSHOT ) );
- assertNotNull(repo.getContent());
-
- assertNull(repositoryRegistry.getManagedRepository( "xyu" ));
-
- }
-
- @Test
- public void getRemoteRepository( ) throws Exception
- {
- RemoteRepository repo = repositoryRegistry.getRemoteRepository( "central" );
- assertNotNull(repo);
- assertEquals("central", repo.getId());
- assertEquals("Central Repository", repo.getName());
- assertEquals("", repo.getDescription());
- assertEquals( "default", repo.getLayout( ) );
- assertEquals("0 0 08 ? * SUN", repo.getSchedulingDefinition());
- assertTrue( repo.hasIndex( ) );
- assertTrue(repo.isScanned());
- assertEquals(RepositoryType.MAVEN, repo.getType());
-
- assertEquals(35, repo.getTimeout().getSeconds());
- }
-
- @Test
- public void putManagedRepository( ) throws Exception
- {
- BasicManagedRepository managedRepository = BasicManagedRepository.newFilesystemInstance("test001", "Test repo", archivaConfiguration.getRepositoryBaseDir().resolve("test001"));
- managedRepository.setDescription( managedRepository.getPrimaryLocale(), "This is just a test" );
- repositoryRegistry.putRepository(managedRepository);
-
- assertNotNull(managedRepository.getContent());
- assertEquals(6, repositoryRegistry.getRepositories().size());
-
- managedRepository = BasicManagedRepository.newFilesystemInstance("central", "Test repo", archivaConfiguration.getRepositoryBaseDir().resolve("central"));
- managedRepository.setDescription( managedRepository.getPrimaryLocale(), "This is just a test" );
- ManagedRepository updatedRepo = null;
- try {
- repositoryRegistry.putRepository( managedRepository );
- throw new RuntimeException("Repository exception should be thrown, if there exists a remote repository already with that id");
- } catch (RepositoryException e) {
- // OK
- }
- managedRepository = BasicManagedRepository.newFilesystemInstance("internal", "Test repo", archivaConfiguration.getRepositoryBaseDir().resolve("internal"));
- managedRepository.setDescription( managedRepository.getPrimaryLocale(), "This is just a test" );
- updatedRepo = repositoryRegistry.putRepository( managedRepository );
-
- assertTrue(updatedRepo==managedRepository);
- assertNotNull(managedRepository.getContent());
- assertEquals(6, repositoryRegistry.getRepositories().size());
- ManagedRepository managedRepository1 = repositoryRegistry.getManagedRepository( "internal" );
- assertEquals("Test repo", managedRepository1.getName());
- assertTrue(managedRepository1==managedRepository);
-
- }
-
- @Test
- public void putManagedRepositoryFromConfig( ) throws Exception
- {
- ManagedRepositoryConfiguration cfg = new ManagedRepositoryConfiguration();
- cfg.setId("test002");
- cfg.setName("This is test 002");
- ManagedRepository repo = repositoryRegistry.putRepository( cfg );
- assertNotNull(repo);
- assertEquals("test002", repo.getId());
- assertEquals("This is test 002", repo.getName());
- assertNotNull(repo.getContent());
- archivaConfiguration.reload();
- Collection<ManagedRepository> repos = repositoryRegistry.getManagedRepositories();
- assertEquals(5, repos.size());
-
- ManagedRepository internalRepo = repositoryRegistry.getManagedRepository( "internal" );
- cfg = new ManagedRepositoryConfiguration();
- cfg.setId("internal");
- cfg.setName("This is internal test 002");
- repo = repositoryRegistry.putRepository( cfg );
- assertTrue(internalRepo==repo);
- assertEquals("This is internal test 002",repo.getName());
- assertEquals(5, repositoryRegistry.getManagedRepositories().size());
-
- repositoryRegistry.reload();
- assertEquals(5, repositoryRegistry.getManagedRepositories().size());
-
- }
-
- @Test
- public void putManagedRepositoryFromConfigWithoutSave( ) throws Exception
- {
- Configuration configuration = archivaConfiguration.getConfiguration();
- ManagedRepositoryConfiguration cfg = new ManagedRepositoryConfiguration();
- cfg.setId("test002");
- cfg.setName("This is test 002");
- ManagedRepository repo = repositoryRegistry.putRepository( cfg, configuration );
- assertNotNull(repo);
- assertEquals("test002", repo.getId());
- assertEquals("This is test 002", repo.getName());
- assertNotNull(repo.getContent());
- archivaConfiguration.reload();
- assertEquals(3, archivaConfiguration.getConfiguration().getManagedRepositories().size());
- Collection<ManagedRepository> repos = repositoryRegistry.getManagedRepositories();
- assertEquals(5, repos.size());
-
- ManagedRepository internalRepo = repositoryRegistry.getManagedRepository( "internal" );
- cfg = new ManagedRepositoryConfiguration();
- cfg.setId("internal");
- cfg.setName("This is internal test 002");
- repo = repositoryRegistry.putRepository( cfg, configuration );
- assertTrue(internalRepo==repo);
- assertEquals("This is internal test 002",repo.getName());
- assertEquals(5, repositoryRegistry.getManagedRepositories().size());
-
- repositoryRegistry.reload();
- assertEquals(4, repositoryRegistry.getManagedRepositories().size());
- }
-
- @Test
- public void putRemoteRepository( ) throws Exception
- {
- BasicRemoteRepository remoteRepository = BasicRemoteRepository.newFilesystemInstance( "test001", "Test repo", archivaConfiguration.getRemoteRepositoryBaseDir() );
- remoteRepository.setDescription( remoteRepository.getPrimaryLocale(), "This is just a test" );
- RemoteRepository newRepo = repositoryRegistry.putRepository(remoteRepository);
-
- assertTrue(remoteRepository==newRepo);
- assertNotNull(remoteRepository.getContent());
- assertEquals(6, repositoryRegistry.getRepositories().size());
-
- remoteRepository = BasicRemoteRepository.newFilesystemInstance( "internal", "Test repo", archivaConfiguration.getRemoteRepositoryBaseDir() );
- remoteRepository.setDescription( remoteRepository.getPrimaryLocale(), "This is just a test" );
- RemoteRepository updatedRepo = null;
- try
- {
- updatedRepo = repositoryRegistry.putRepository( remoteRepository );
- throw new RuntimeException("Should throw repository exception, if repository exists already and is not the same type.");
- } catch (RepositoryException e) {
- // OK
- }
-
- remoteRepository = BasicRemoteRepository.newFilesystemInstance( "central", "Test repo", archivaConfiguration.getRemoteRepositoryBaseDir() );
- remoteRepository.setDescription( remoteRepository.getPrimaryLocale(), "This is just a test" );
- updatedRepo = repositoryRegistry.putRepository( remoteRepository );
-
- assertTrue(updatedRepo==remoteRepository);
- assertNotNull(remoteRepository.getContent());
- assertEquals(6, repositoryRegistry.getRepositories().size());
- RemoteRepository remoteRepository1 = repositoryRegistry.getRemoteRepository( "central" );
- assertEquals("Test repo", remoteRepository1.getName());
- assertTrue(remoteRepository1==remoteRepository);
- }
-
- @Test
- public void putRemoteRepositoryFromConfig( ) throws Exception
- {
- RemoteRepositoryConfiguration cfg = new RemoteRepositoryConfiguration();
- cfg.setId("test002");
- cfg.setName("This is test 002");
- RemoteRepository repo = repositoryRegistry.putRepository( cfg );
- assertNotNull(repo);
- assertEquals("test002", repo.getId());
- assertEquals("This is test 002", repo.getName());
- assertNotNull(repo.getContent());
- archivaConfiguration.reload();
- Collection<RemoteRepository> repos = repositoryRegistry.getRemoteRepositories();
- assertEquals(2, repos.size());
-
- RemoteRepository internalRepo = repositoryRegistry.getRemoteRepository( "central" );
- cfg = new RemoteRepositoryConfiguration();
- cfg.setId("central");
- cfg.setName("This is central test 002");
- repo = repositoryRegistry.putRepository( cfg );
- assertTrue(internalRepo==repo);
- assertEquals("This is central test 002",repo.getName());
- assertEquals(2, repositoryRegistry.getRemoteRepositories().size());
-
- repositoryRegistry.reload();
- assertEquals(2, repositoryRegistry.getRemoteRepositories().size());
- }
-
- @Test
- public void putRemoteRepositoryFromConfigWithoutSave( ) throws Exception
- {
- Configuration configuration = archivaConfiguration.getConfiguration();
- RemoteRepositoryConfiguration cfg = new RemoteRepositoryConfiguration();
- cfg.setId("test002");
- cfg.setName("This is test 002");
- RemoteRepository repo = repositoryRegistry.putRepository( cfg, configuration );
- assertNotNull(repo);
- assertEquals("test002", repo.getId());
- assertEquals("This is test 002", repo.getName());
- assertNotNull(repo.getContent());
- archivaConfiguration.reload();
- assertEquals(1, archivaConfiguration.getConfiguration().getRemoteRepositories().size());
- Collection<RemoteRepository> repos = repositoryRegistry.getRemoteRepositories();
- assertEquals(2, repos.size());
-
- RemoteRepository internalRepo = repositoryRegistry.getRemoteRepository( "central" );
- cfg = new RemoteRepositoryConfiguration();
- cfg.setId("central");
- cfg.setName("This is central test 002");
- repo = repositoryRegistry.putRepository( cfg, configuration );
- assertTrue(internalRepo==repo);
- assertEquals("This is central test 002",repo.getName());
- assertEquals(2, repositoryRegistry.getRemoteRepositories().size());
-
- repositoryRegistry.reload();
- assertEquals(1, repositoryRegistry.getRemoteRepositories().size());
- }
-
- @Test
- public void removeRepository( ) throws Exception
- {
- assertEquals(5, repositoryRegistry.getRepositories().size());
- Repository repo = repositoryRegistry.getRepository( "snapshots" );
- repositoryRegistry.removeRepository( repo );
- assertEquals(4, repositoryRegistry.getRepositories().size());
- assertTrue( repositoryRegistry.getRepositories( ).stream( ).noneMatch( rep -> rep.getId( ).equals( "snapshots" ) ) );
- archivaConfiguration.reload();
- repositoryRegistry.reload();
- assertEquals(4, repositoryRegistry.getRepositories().size());
- }
-
- @Test
- public void removeManagedRepository( ) throws Exception
- {
-
- assertEquals(4, repositoryRegistry.getManagedRepositories().size());
- ManagedRepository repo = repositoryRegistry.getManagedRepository( "snapshots" );
- repositoryRegistry.removeRepository( repo );
- assertEquals(3, repositoryRegistry.getManagedRepositories().size());
- assertTrue( repositoryRegistry.getManagedRepositories( ).stream( ).noneMatch( rep -> rep.getId( ).equals( "snapshots" ) ) );
- archivaConfiguration.reload();
- repositoryRegistry.reload();
- assertEquals(3, repositoryRegistry.getManagedRepositories().size());
- }
-
- @Test
- public void removeManagedRepositoryWithoutSave( ) throws Exception
- {
- Configuration configuration = archivaConfiguration.getConfiguration();
- assertEquals(4, repositoryRegistry.getManagedRepositories().size());
- ManagedRepository repo = repositoryRegistry.getManagedRepository( "snapshots" );
- repositoryRegistry.removeRepository( repo, configuration );
- assertEquals(3, repositoryRegistry.getManagedRepositories().size());
- assertTrue( repositoryRegistry.getManagedRepositories( ).stream( ).noneMatch( rep -> rep.getId( ).equals( "snapshots" ) ) );
- archivaConfiguration.reload();
- repositoryRegistry.reload();
- assertEquals(4, repositoryRegistry.getManagedRepositories().size());
- }
-
-
- @Test
- public void removeRemoteRepository( ) throws Exception
- {
- assertEquals(1, repositoryRegistry.getRemoteRepositories().size());
- RemoteRepository repo = repositoryRegistry.getRemoteRepository( "central" );
- repositoryRegistry.removeRepository( repo );
- assertEquals(0, repositoryRegistry.getRemoteRepositories().size());
- assertTrue( repositoryRegistry.getRemoteRepositories( ).stream( ).noneMatch( rep -> rep.getId( ).equals( "central" ) ) );
- archivaConfiguration.reload();
- repositoryRegistry.reload();
- assertEquals(0, repositoryRegistry.getRemoteRepositories().size());
- }
-
- @Test
- public void removeRemoteRepositoryWithoutSave( ) throws Exception
- {
- Configuration configuration = archivaConfiguration.getConfiguration();
- assertEquals(1, repositoryRegistry.getRemoteRepositories().size());
- RemoteRepository repo = repositoryRegistry.getRemoteRepository( "central" );
- repositoryRegistry.removeRepository( repo, configuration );
- assertEquals(0, repositoryRegistry.getRemoteRepositories().size());
- assertTrue( repositoryRegistry.getRemoteRepositories( ).stream( ).noneMatch( rep -> rep.getId( ).equals( "central" ) ) );
- archivaConfiguration.reload();
- repositoryRegistry.reload();
- assertEquals(1, repositoryRegistry.getRemoteRepositories().size());
- }
-
-
- @Test
- public void cloneManagedRepo( ) throws Exception
- {
- ManagedRepository managedRepository = repositoryRegistry.getManagedRepository( "internal" );
-
- try
- {
- repositoryRegistry.clone(managedRepository, "snapshots");
- throw new RuntimeException("RepositoryRegistry exception should be thrown if id exists already.");
- }
- catch ( RepositoryException e )
- {
- // OK
- }
-
- try
- {
- repositoryRegistry.clone(managedRepository, "central");
- throw new RuntimeException("RepositoryRegistry exception should be thrown if id exists already.");
- }
- catch ( RepositoryException e )
- {
- // OK
- }
-
- ManagedRepository clone = repositoryRegistry.clone( managedRepository, "newinternal" );
- assertNotNull(clone);
- assertNull(clone.getContent());
- assertEquals("Archiva Managed Internal Repository", clone.getName());
- assertFalse(managedRepository==clone);
-
- }
-
- @Test
- public void cloneRemoteRepo( ) throws Exception
- {
- RemoteRepository remoteRepository = repositoryRegistry.getRemoteRepository( "central" );
-
- try
- {
- repositoryRegistry.clone(remoteRepository, "snapshots");
- throw new RuntimeException("RepositoryRegistry exception should be thrown if id exists already.");
- }
- catch ( RepositoryException e )
- {
- // OK
- }
-
- try
- {
- repositoryRegistry.clone(remoteRepository, "central");
- throw new RuntimeException("RepositoryRegistry exception should be thrown if id exists already.");
- }
- catch ( RepositoryException e )
- {
- // OK
- }
-
- RemoteRepository clone = repositoryRegistry.clone( remoteRepository, "newCentral" );
- assertNotNull(clone);
- assertNull(clone.getContent());
- assertEquals("Central Repository", clone.getName());
- assertFalse(remoteRepository==clone);
-
- }
-
-}
\ No newline at end of file