@Named(value = "archivaConfiguration#default")
private ArchivaConfiguration archivaConfiguration;
+
+ /**
+  * No-argument default constructor.
+  * NOTE(review): presumably added so the DI container can instantiate or
+  * proxy this {@code @Named} bean — confirm against the framework config.
+  */
+ public FileTypes() {
+
+ }
+
/**
* Map of default values for the file types.
*/
import org.apache.archiva.consumers.ConsumerException;
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
+import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.model.ArtifactReference;
this.repository = repository;
log.info( "Beginning scan of repository [{}]", this.repository.getId() );
- repositorySession = repositorySessionFactory.createSession();
+ try
+ {
+ repositorySession = repositorySessionFactory.createSession( );
+ } catch (MetadataRepositoryException e) {
+ log.error("Could not create repository session {}", e.getMessage());
+ throw new ConsumerException( "Could not create repository session: " + e.getMessage( ), e );
+ }
}
public void processFile( String path )
ManagedRepositoryContent repositoryContent = repository.getContent();
ArtifactReference artifact = repositoryContent.toArtifactReference( path );
- repositorySession.getRepository().getArtifacts( repository.getId(), artifact.getGroupId(),
+ repositorySession.getRepository().getArtifacts( repositorySession, repository.getId(), artifact.getGroupId(),
artifact.getArtifactId(), artifact.getVersion() );
}
- catch ( LayoutException | MetadataResolutionException e )
+ catch ( LayoutException | MetadataResolutionException e )
{
throw new ConsumerException( e.getLocalizedMessage(), e );
}
}
catch ( org.apache.archiva.metadata.repository.MetadataRepositoryException e )
{
- e.printStackTrace( );
+ throw new ConsumerException( "Could not create session: " + e.getMessage( ), e );
}
if (repository.supportsFeature( ArtifactCleanupFeature.class ))
this.repositorySessionFactory = repositorySessionFactory;
}
- public RepositorySession getRepositorySession( )
- {
- return repositorySession;
- }
-
- public void setRepositorySession( RepositorySession repositorySession )
- {
- this.repositorySession = repositorySession;
- }
}
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.storage.maven2.Maven2RepositoryPathTranslator;
import org.apache.archiva.repository.BasicManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent;
protected RepositoryListener listener;
+ protected IMocksControl sessionControl;
protected RepositorySession repositorySession;
+ protected IMocksControl sessionFactoryControl;
+ protected RepositorySessionFactory sessionFactory;
+
protected MetadataRepository metadataRepository;
@Inject
listener = listenerControl.createMock( RepositoryListener.class );
- repositorySession = mock( RepositorySession.class );
- metadataRepository = mock( MetadataRepository.class );
- when( repositorySession.getRepository() ).thenReturn( metadataRepository );
+ sessionControl = EasyMock.createControl();
+ sessionFactoryControl = EasyMock.createControl( );
+ repositorySession = sessionControl.createMock( RepositorySession.class );
+ metadataRepository = mock( MetadataRepository.class );
+ sessionFactory = sessionFactoryControl.createMock( RepositorySessionFactory.class );
+ EasyMock.expect( repositorySession.getRepository() ).andStubReturn( metadataRepository );
+ EasyMock.expect( sessionFactory.createSession( ) ).andStubReturn( repositorySession );
}
listener = listenerControl.createMock( RepositoryListener.class );
List<RepositoryListener> listeners = Collections.singletonList( listener );
+
+ sessionControl.reset();
+ sessionFactoryControl.reset();
+ EasyMock.expect( sessionFactory.createSession( ) ).andStubReturn( repositorySession );
+ EasyMock.expect( repositorySession.getRepository()).andStubReturn( metadataRepository );
+ repositorySession.save();
+ EasyMock.expectLastCall().anyTimes();
+ sessionFactoryControl.replay();
+ sessionControl.replay();
repoPurge = new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), metadataTools,
applicationContext.getBean(
RepositoryRegistry.class ),
// Verify the metadataRepository invocations
// complete snapshot version removal for released
- verify(metadataRepository, times(1)).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
- verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.3") );
+ verify(metadataRepository, times(1)).removeProjectVersion(eq(repositorySession) , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion(eq(repositorySession) , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.3") );
// check if the snapshot was removed
assertDeleted( projectRoot + "/2.3-SNAPSHOT" );
// Verify the metadataRepository invocations
// Complete version removal for cleanup
- verify(metadataRepository, times(1)).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
- verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(RELEASES_TEST_REPO_ID), eq(projectNs), eq(projectName), eq(releaseVersion) );
+ verify(metadataRepository, times(1)).removeProjectVersion(eq(repositorySession) , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion(eq(repositorySession) , eq(RELEASES_TEST_REPO_ID), eq(projectNs), eq(projectName), eq(releaseVersion) );
// check if the snapshot was removed
// Verify the metadataRepository invocations
// No removal
- verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
- verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.0.3-SNAPSHOT") );
- verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.0.4-SNAPSHOT") );
- verify(metadataRepository, never()).removeArtifact(repositorySession , any(ArtifactMetadata.class), any(String.class) );
- verify(metadataRepository, never()).removeArtifact(repositorySession , any(String.class), any(String.class), any(String.class), any(String.class), any( MetadataFacet.class) );
+ verify(metadataRepository, never()).removeProjectVersion(eq(repositorySession) , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion(eq(repositorySession) , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.0.3-SNAPSHOT") );
+ verify(metadataRepository, never()).removeProjectVersion(eq(repositorySession) , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq("2.0.4-SNAPSHOT") );
+ verify(metadataRepository, never()).removeArtifact(eq(repositorySession) , any(ArtifactMetadata.class), any(String.class) );
+ verify(metadataRepository, never()).removeArtifact(eq(repositorySession) , any(String.class), any(String.class), any(String.class), any(String.class), any( MetadataFacet.class) );
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.apache.commons.lang.time.DateUtils;
+import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
{
org.apache.archiva.repository.ManagedRepository repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
ArtifactCleanupFeature atf = repoConfiguration.getFeature( ArtifactCleanupFeature.class ).get();
+
+
+ sessionControl.reset();
+ sessionFactoryControl.reset();
+ EasyMock.expect( sessionFactory.createSession( ) ).andStubReturn( repositorySession );
+ EasyMock.expect( repositorySession.getRepository()).andStubReturn( metadataRepository );
+ repositorySession.save();
+ EasyMock.expectLastCall().anyTimes();
+ sessionFactoryControl.replay();
+ sessionControl.replay();
+
repoPurge = new DaysOldRepositoryPurge( getRepository(), atf.getRetentionPeriod().getDays(),
atf.getRetentionCount(), repositorySession,
Collections.singletonList( listener ) );
listenerControl.verify();
// Verify the metadataRepository invocations
- verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion(eq(repositorySession) , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
- verify(metadataRepository, times(2)).removeArtifact(repositorySession , metadataArg.capture(), eq(projectVersion) );
+ verify(metadataRepository, times(2)).removeArtifact(eq(repositorySession) , metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
org.apache.archiva.repository.ManagedRepository repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
ArtifactCleanupFeature atf = repoConfiguration.getFeature( ArtifactCleanupFeature.class ).get();
List<RepositoryListener> listeners = Collections.singletonList( listener );
+
+ sessionControl.reset();
+ sessionFactoryControl.reset();
+ EasyMock.expect( sessionFactory.createSession( ) ).andStubReturn( repositorySession );
+ EasyMock.expect( repositorySession.getRepository()).andStubReturn( metadataRepository );
+ repositorySession.save();
+ EasyMock.expectLastCall().anyTimes();
+ sessionFactoryControl.replay();
+ sessionControl.replay();
repoPurge = new DaysOldRepositoryPurge( getRepository(), atf.getRetentionPeriod().getDays(),
atf.getRetentionCount(), repositorySession, listeners );
listenerControl.verify();
// Verify the metadataRepository invocations
- verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion(eq(repositorySession) , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
- verify(metadataRepository, times(deletedVersions.size())).removeArtifact(repositorySession , metadataArg.capture(), eq(projectVersion) );
+ verify(metadataRepository, times(deletedVersions.size())).removeArtifact(eq(repositorySession) , metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
org.apache.archiva.repository.ManagedRepository repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
ArtifactCleanupFeature atf = repoConfiguration.getFeature( ArtifactCleanupFeature.class ).get();
List<RepositoryListener> listeners = Collections.singletonList( listener );
+
+ sessionControl.reset();
+ sessionFactoryControl.reset();
+ EasyMock.expect( sessionFactory.createSession( ) ).andStubReturn( repositorySession );
+ EasyMock.expect( repositorySession.getRepository()).andStubReturn( metadataRepository );
+ repositorySession.save();
+ EasyMock.expectLastCall().anyTimes();
+ sessionFactoryControl.replay();
+ sessionControl.replay();
repoPurge = new DaysOldRepositoryPurge( getRepository(), atf.getRetentionPeriod().getDays(),
atf.getRetentionCount(), repositorySession, listeners );
listenerControl.verify();
// Verify the metadataRepository invocations
- verify(metadataRepository, never()).removeProjectVersion(repositorySession , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion(eq(repositorySession) , eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
- verify(metadataRepository, times(deletedVersions.size())).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
+ verify(metadataRepository, times(deletedVersions.size())).removeArtifact( eq(repositorySession), metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.custommonkey.xmlunit.XMLAssert;
+import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
{
super.setUp();
- MockRepositorySessionFactory factory = applicationContext.getBean( MockRepositorySessionFactory.class );
- factory.setRepository( metadataRepository );
}
@After
public void testConsumerByRetentionCount()
throws Exception
{
- KnownRepositoryContentConsumer repoPurgeConsumer =
+ RepositoryPurgeConsumer repoPurgeConsumer =
applicationContext.getBean( "knownRepositoryContentConsumer#repo-purge-consumer-by-retention-count",
- KnownRepositoryContentConsumer.class );
-
+ RepositoryPurgeConsumer.class );
+ repoPurgeConsumer.setRepositorySessionFactory( sessionFactory );
org.apache.archiva.repository.ManagedRepository repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
ArtifactCleanupFeature atf = repoConfiguration.getFeature( ArtifactCleanupFeature.class ).get();
atf.setRetentionPeriod( Period.ofDays( 0 ) ); // force days older off to allow retention count purge to execute.
atf.setRetentionCount( TEST_RETENTION_COUNT );
addRepoToConfiguration( "retention-count", repoConfiguration );
+ sessionControl.reset();
+ sessionFactoryControl.reset();
+ EasyMock.expect( sessionFactory.createSession( ) ).andStubReturn( repositorySession );
+ EasyMock.expect( repositorySession.getRepository()).andStubReturn( metadataRepository );
+ repositorySession.save();
+ EasyMock.expectLastCall().anyTimes();
+ sessionFactoryControl.replay();
+ sessionControl.replay();
+
repoPurgeConsumer.beginScan( repoConfiguration, null );
String repoRoot = prepareTestRepos();
// Provide the metadata list
List<ArtifactMetadata> ml = getArtifactMetadataFromDir( TEST_REPO_ID, projectName, repo, vDir );
+
+
+
when(metadataRepository.getArtifacts( repositorySession, TEST_REPO_ID,
projectNs, projectName, projectVersion )).thenReturn(ml);
Set<String> deletedVersions = new HashSet<>();
repoPurgeConsumer.processFile( PATH_TO_BY_RETENTION_COUNT_ARTIFACT );
// Verify the metadataRepository invocations
- verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion( eq(repositorySession), eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
- verify(metadataRepository, times(2)).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
+ verify(metadataRepository, times(2)).removeArtifact( eq(repositorySession), metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
public void testConsumerByDaysOld()
throws Exception
{
- KnownRepositoryContentConsumer repoPurgeConsumer =
+ RepositoryPurgeConsumer repoPurgeConsumer =
applicationContext.getBean( "knownRepositoryContentConsumer#repo-purge-consumer-by-days-old",
- KnownRepositoryContentConsumer.class );
+ RepositoryPurgeConsumer.class );
+
+ repoPurgeConsumer.setRepositorySessionFactory( sessionFactory );
org.apache.archiva.repository.ManagedRepository repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
ArtifactCleanupFeature atf = repoConfiguration.getFeature( ArtifactCleanupFeature.class ).get();
atf.setRetentionPeriod( Period.ofDays( TEST_DAYS_OLDER ) );
addRepoToConfiguration( "days-old", repoConfiguration );
+ sessionControl.reset();
+ sessionFactoryControl.reset();
+ EasyMock.expect( sessionFactory.createSession( ) ).andStubReturn( repositorySession );
+ EasyMock.expect( repositorySession.getRepository()).andStubReturn( metadataRepository );
+ repositorySession.save();
+ EasyMock.expectLastCall().anyTimes();
+ sessionFactoryControl.replay();
+ sessionControl.replay();
repoPurgeConsumer.beginScan( repoConfiguration, null );
String repoRoot = prepareTestRepos();
repoPurgeConsumer.processFile( PATH_TO_BY_DAYS_OLD_ARTIFACT );
// Verify the metadataRepository invocations
- verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion( eq(repositorySession), eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
- verify(metadataRepository, times(2)).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
+ verify(metadataRepository, times(2)).removeArtifact( eq(repositorySession), metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
+ assertTrue( metaL.size( ) > 0 );
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
assertTrue(deletedVersions.contains(meta.getVersion()));
repoPurgeConsumer.processFile(
CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
- verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion( eq(repositorySession), eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
- verify(metadataRepository, never()).removeArtifact( repositorySession, any(), any() );
- verify(metadataRepository, never()).removeArtifact( repositorySession, any(), any(), any(), any(), any(MetadataFacet.class) );
+ verify(metadataRepository, never()).removeArtifact( eq(repositorySession), any(), any() );
+ verify(metadataRepository, never()).removeArtifact( eq(repositorySession), any(), any(), any(), any(), any(MetadataFacet.class) );
// check if the snapshot wasn't removed
public void testReleasedSnapshotsWereCleaned()
throws Exception
{
- KnownRepositoryContentConsumer repoPurgeConsumer =
+ RepositoryPurgeConsumer repoPurgeConsumer =
applicationContext.getBean( "knownRepositoryContentConsumer#repo-purge-consumer-by-days-old",
- KnownRepositoryContentConsumer.class );
-
+ RepositoryPurgeConsumer.class );
+ repoPurgeConsumer.setRepositorySessionFactory( sessionFactory );
org.apache.archiva.repository.ManagedRepository repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
ArtifactCleanupFeature acf = repoConfiguration.getFeature( ArtifactCleanupFeature.class ).get();
acf.setDeleteReleasedSnapshots( true );
addRepoToConfiguration( "days-old", repoConfiguration );
-
+ sessionControl.reset();
+ sessionFactoryControl.reset();
+ EasyMock.expect( sessionFactory.createSession( ) ).andStubReturn( repositorySession );
+ EasyMock.expect( repositorySession.getRepository()).andStubReturn( metadataRepository );
+ repositorySession.save();
+ EasyMock.expectLastCall().anyTimes();
+ sessionFactoryControl.replay();
+ sessionControl.replay();
repoPurgeConsumer.beginScan( repoConfiguration, null );
String repoRoot = prepareTestRepos();
repoPurgeConsumer.processFile(
CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
- verify(metadataRepository, times(1)).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, times(1)).removeProjectVersion( eq(repositorySession), eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
- verify(metadataRepository, never()).removeArtifact( repositorySession, any(), any() );
+ verify(metadataRepository, never()).removeArtifact( eq(repositorySession), any(), any() );
// check if the snapshot was removed
assertDeleted( projectRoot + "/2.3-SNAPSHOT" );
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.archiva.repository.features.ArtifactCleanupFeature;
+import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
org.apache.archiva.repository.ManagedRepository repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
List<RepositoryListener> listeners = Collections.singletonList( listener );
ArtifactCleanupFeature acf = repoConfiguration.getFeature( ArtifactCleanupFeature.class ).get();
+
+ sessionControl.reset();
+ sessionFactoryControl.reset();
+ EasyMock.expect( sessionFactory.createSession( ) ).andStubReturn( repositorySession );
+ EasyMock.expect( repositorySession.getRepository()).andStubReturn( metadataRepository );
+ repositorySession.save();
+ EasyMock.expectLastCall().anyTimes();
+ sessionFactoryControl.replay();
+ sessionControl.replay();
+
repoPurge = new RetentionCountRepositoryPurge( getRepository(), acf.getRetentionCount(),
repositorySession, listeners );
}
listenerControl.verify();
// Verify the metadataRepository invocations
- verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion( eq(repositorySession), eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
- verify(metadataRepository, times(deletedVersions.size())).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
+ verify(metadataRepository, times(deletedVersions.size())).removeArtifact( eq(repositorySession), metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
listenerControl.verify();
// Verify the metadataRepository invocations
- verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion( eq(repositorySession), eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
- verify(metadataRepository, times(deletedVersions.size())).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
+ verify(metadataRepository, times(deletedVersions.size())).removeArtifact( eq(repositorySession), metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
listenerControl.verify();
// Verify the metadataRepository invocations
- verify(metadataRepository, never()).removeProjectVersion( repositorySession, eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
+ verify(metadataRepository, never()).removeProjectVersion( eq(repositorySession), eq(TEST_REPO_ID), eq(projectNs), eq(projectName), eq(projectVersion) );
ArgumentCaptor<ArtifactMetadata> metadataArg = ArgumentCaptor.forClass(ArtifactMetadata.class);
- verify(metadataRepository, times(deletedVersions.size())).removeArtifact( repositorySession, metadataArg.capture(), eq(projectVersion) );
+ verify(metadataRepository, times(deletedVersions.size())).removeArtifact( eq(repositorySession), metadataArg.capture(), eq(projectVersion) );
List<ArtifactMetadata> metaL = metadataArg.getAllValues();
for (ArtifactMetadata meta : metaL) {
assertTrue(meta.getId().startsWith(projectName));
if ( !stagedOne )
{
- RepositorySession repositorySession = null;
- try
- {
- repositorySession = getRepositorySessionFactory().createSession();
- }
- catch ( MetadataRepositoryException e )
- {
- e.printStackTrace( );
- }
- try
+ boolean success=false;
+ try(RepositorySession repositorySession = getRepositorySessionFactory().createSession())
{
MetadataRepository metadataRepository = repositorySession.getRepository();
metadataRepository.removeRepository(repositorySession , repository.getId() );
//invalidate cache
namespacesCache.remove( repository.getId() );
- log.debug( "call repositoryStatisticsManager.deleteStatistics" );
- getRepositoryStatisticsManager().deleteStatistics( metadataRepository, repository.getId() );
repositorySession.save();
+ success=true;
}
catch ( MetadataRepositoryException e )
{
//throw new RepositoryAdminException( e.getMessage(), e );
log.warn( "skip error during removing repository from MetadataRepository:{}", e.getMessage(), e );
+ success = false;
} catch (MetadataSessionException e) {
log.warn( "skip error during removing repository from MetadataRepository:{}", e.getMessage(), e );
- } finally
+ success = false;
+ }
+ if (success)
{
- repositorySession.close();
+ log.debug( "call repositoryStatisticsManager.deleteStatistics" );
+ try
+ {
+ getRepositoryStatisticsManager( ).deleteStatistics( repository.getId( ) );
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ e.printStackTrace( );
+ }
}
+
}
if ( deleteContent )
if ( resetStats )
{
log.debug( "call repositoryStatisticsManager.deleteStatistics" );
- getRepositoryStatisticsManager().deleteStatistics( repositorySession.getRepository(),
- managedRepository.getId() );
+ getRepositoryStatisticsManager().deleteStatistics(
+ managedRepository.getId() );
repositorySession.save();
}
if ( !task.isScanAll() )
{
RepositoryStatistics previousStats =
- repositoryStatisticsManager.getLastStatistics( metadataRepository, repoId );
+ repositoryStatisticsManager.getLastStatistics( repoId );
if ( previousStats != null )
{
sinceWhen = previousStats.getScanStartTime().getTime();
log.info( "Gathering repository statistics" );
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, repoId,
+ repositoryStatisticsManager.addStatisticsAfterScan( repoId,
stats.getWhenGathered(), endTime,
stats.getTotalFileCount(),
stats.getTotalFileCount() - previousFileCount );
{
long start = System.currentTimeMillis();
- boolean res = repositoryStatisticsManager.hasStatistics( metadataRepository, repoConfig.getId() );
+ boolean res = repositoryStatisticsManager.hasStatistics( repoConfig.getId() );
long end = System.currentTimeMillis();
// check correctness of new stats
RepositoryStatistics newStats =
- repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
+ repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertEquals( 0, newStats.getNewFileCount() );
assertEquals( 31, newStats.getTotalFileCount() );
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
// check correctness of new stats
RepositoryStatistics updatedStats =
- repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
+ repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertEquals( 2, updatedStats.getNewFileCount() );
assertEquals( 33, updatedStats.getTotalFileCount() );
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
// check correctness of new stats
RepositoryStatistics newStats =
- repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
+ repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertEquals( 2, newStats.getNewFileCount() );
assertEquals( 33, newStats.getTotalFileCount() );
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
// check correctness of new stats
RepositoryStatistics newStats =
- repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
+ repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertEquals( 2, newStats.getNewFileCount() );
assertEquals( 33, newStats.getTotalFileCount() );
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
repoTask.setScanAll( true );
Date date = Calendar.getInstance().getTime();
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, //
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, //
new Date( date.getTime() - 1234567 ), //
date, 8, 8 ); //
stats.setTotalProjectCount( 5 );
stats.setTotalArtifactFileSize( 38545 );
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, //
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, //
new Date( date.getTime() - 1234567 ), //
date, 31, 31 );
}
* under the License.
*/
-import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.stats.model.DefaultRepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.model.RepositoryStatistics;
private Map<String, List<RepositoryStatistics>> repoStats = new HashMap<>();
@Override
- public boolean hasStatistics( MetadataRepository metadataRepository, String repositoryId )
+ public boolean hasStatistics( String repositoryId )
throws MetadataRepositoryException
{
return !repoStats.isEmpty();
}
@Override
- public RepositoryStatistics getLastStatistics( MetadataRepository metadataRepository, String repositoryId )
+ public RepositoryStatistics getLastStatistics( String repositoryId )
{
List<RepositoryStatistics> repositoryStatisticsList = getStatsList( repositoryId );
return !repositoryStatisticsList.isEmpty()
}
@Override
- public void addStatisticsAfterScan( MetadataRepository metadataRepository, String repositoryId, Date startTime,
+ public void addStatisticsAfterScan( String repositoryId, Date startTime,
Date endTime, long totalFiles, long newFiles )
{
List<RepositoryStatistics> stats = getStatsList( repositoryId );
}
@Override
- public void deleteStatistics( MetadataRepository metadataRepository, String repositoryId )
+ // Drops all in-memory statistics recorded for the given repository id.
+ public void deleteStatistics( String repositoryId )
{
repoStats.remove( repositoryId );
}
@Override
- public List<RepositoryStatistics> getStatisticsInRange( MetadataRepository metadataRepository, String repositoryId,
+ public List<RepositoryStatistics> getStatisticsInRange( String repositoryId,
Date startDate, Date endDate )
{
throw new UnsupportedOperationException();
public List<Artifact> searchArtifacts( String text, String repositoryId, Boolean exact )
throws ArchivaRestServiceException
{
- RepositorySession repositorySession = null;
- try
- {
- repositorySession = repositorySessionFactory.createSession();
- }
- catch ( MetadataRepositoryException e )
- {
- e.printStackTrace( );
- }
- try
+ try(RepositorySession repositorySession = repositorySessionFactory.createSession())
{
List<ArtifactMetadata> artifactMetadatas =
repositorySession.getRepository().searchArtifacts(repositorySession , repositoryId, text, exact == null ? false : exact );
{
throw new ArchivaRestServiceException( e.getMessage(), e );
}
- finally
- {
- repositorySession.close();
- }
}
@Override
RepositoryStatistics stats = null;
try
{
- stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, repositoryId );
+ stats = repositoryStatisticsManager.getLastStatistics( repositoryId );
}
catch ( MetadataRepositoryException e )
{
{
try
{
- stats.add( repositoryStatisticsManager.getLastStatistics( metadataRepository, repo ) );
+ stats.add( repositoryStatisticsManager.getLastStatistics( repo ) );
}
catch ( MetadataRepositoryException e )
{
List<RepositoryStatistics> stats = null;
try
{
- stats = repositoryStatisticsManager.getStatisticsInRange( metadataRepository, repositoryId, startDate,
+ stats = repositoryStatisticsManager.getStatisticsInRange( repositoryId, startDate,
endDate );
}
catch ( MetadataRepositoryException e )
import java.util.Date;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Function;
/**
* @author Olivier Lamy
return this.reuseServer;
}
+ /*
+ * Used by tryAssert to allow to throw exceptions in the lambda expression.
+ */
+ @FunctionalInterface
+ protected interface AssertFunction
+ {
+ void accept( ) throws Exception;
+ }
+
+ protected void tryAssert( AssertFunction func ) throws Exception
+ {
+ tryAssert( func, 10, 500 );
+ }
+ /*
+ * Runs the assert method until the assert is successful or the number of retries
+ * is reached. This is needed because the JCR Oak index update is asynchronous, so updates
+ * may not be visible immediately after the modification.
+ */
+ private void tryAssert( AssertFunction func, int retries, int sleepMillis ) throws Exception
+ {
+ Throwable t = null;
+ int retry = retries;
+ while ( retry-- > 0 )
+ {
+ try
+ {
+ func.accept( );
+ return;
+ }
+ catch ( Exception | AssertionError e )
+ {
+ t = e;
+                Thread.sleep( sleepMillis );
+ log.warn( "Retrying assert {}: {}", retry, e.getMessage( ) );
+ }
+ }
+        log.warn( "Retries: {}, Exception: {}", retries, t == null ? "<none>" : t.getMessage( ) );
+ if ( retry <= 0 && t != null )
+ {
+ if ( t instanceof RuntimeException )
+ {
+ throw (RuntimeException) t;
+ }
+ else if ( t instanceof Exception )
+ {
+ throw (Exception) t;
+ }
+ else if ( t instanceof Error )
+ {
+ throw (Error) t;
+ }
+ }
+ }
// START SNIPPET: authz-header
// guest with an empty password
browseService.addMetadata( "commons-cli", "commons-cli", "1.0", "wine", "bordeaux", TEST_REPO_ID );
- List<Artifact> artifactDownloadInfos =
- browseService.getArtifactsByProjectVersionMetadata( "wine", "bordeaux", TEST_REPO_ID );
+ tryAssert( ( ) -> {
+ List<Artifact> artifactDownloadInfos =
+ browseService.getArtifactsByProjectVersionMetadata( "wine", "bordeaux", TEST_REPO_ID );
- assertThat( artifactDownloadInfos ).isNotNull().isNotEmpty().hasSize( 3 );
- // END SNIPPET: get-artifacts-by-project-version-metadata
+ assertThat( artifactDownloadInfos ).isNotNull( ).isNotEmpty( ).hasSize( 3 );
+ // END SNIPPET: get-artifacts-by-project-version-metadata
+ } );
}
public void getArtifactsByProjectVersionMetadataWithNoRepository()
throws Exception
{
- BrowseService browseService = getBrowseService( authorizationHeader, true );
+ final BrowseService browseService = getBrowseService( authorizationHeader, true );
browseService.addMetadata( "commons-cli", "commons-cli", "1.0", "wine", "bordeaux", TEST_REPO_ID );
- List<Artifact> artifactDownloadInfos =
- browseService.getArtifactsByProjectVersionMetadata( "wine", "bordeaux", null );
- assertThat( artifactDownloadInfos ).isNotNull().isNotEmpty().hasSize( 3 );
+ tryAssert( ( ) -> {
+ List<Artifact> artifactDownloadInfos =
+ null;
+ try
+ {
+ artifactDownloadInfos = browseService.getArtifactsByProjectVersionMetadata( "wine", "bordeaux", null );
+ }
+ catch ( ArchivaRestServiceException e )
+ {
+ throw new AssertionError( "ArchivaRestServiceException", e );
+ }
+ assertThat( artifactDownloadInfos ).isNotNull( ).isNotEmpty( ).hasSize( 3 );
+ });
}
// START SNIPPET: search-artifacts
BrowseService browseService = getBrowseService( authorizationHeader, true );
- List<Artifact> artifactDownloadInfos =
- browseService.searchArtifacts( "The Apache Software Foundation", TEST_REPO_ID, true );
+ tryAssert( ( ) -> {
+ List<Artifact> artifactDownloadInfos =
+ browseService.searchArtifacts( "The Apache Software Foundation", TEST_REPO_ID, true );
- assertThat( artifactDownloadInfos ).isNotNull().isNotEmpty().hasSize( 7 );
+ assertThat( artifactDownloadInfos ).isNotNull( ).isNotEmpty( ).hasSize( 7 );
+ } );
// END SNIPPET: search-artifacts
}
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.io.FeedException;
import org.apache.archiva.metadata.model.ArtifactMetadata;
-import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
* new versions of artifact.
*/
@Override
- public SyndFeed process( Map<String, String> reqParams, MetadataRepository metadataRepository )
+ public SyndFeed process( Map<String, String> reqParams )
throws FeedException
{
log.debug( "Process new artifacts into rss feeds." );
String repoId = reqParams.get( RssFeedProcessor.KEY_REPO_ID );
if ( repoId != null )
{
- return processNewArtifactsInRepo( repoId, metadataRepository );
+ return processNewArtifactsInRepo( repoId );
}
return null;
}
- private SyndFeed processNewArtifactsInRepo( String repoId, MetadataRepository metadataRepository )
+ private SyndFeed processNewArtifactsInRepo( String repoId )
throws FeedException
{
Calendar greaterThanThisDate = Calendar.getInstance( GMT_TIME_ZONE );
List<ArtifactMetadata> artifacts;
try(RepositorySession session = repositorySessionFactory.createSession())
{
- artifacts = metadataRepository.getArtifactsByDateRange(session , repoId, greaterThanThisDate.getTime(), null );
+ artifacts = session.getRepository().getArtifactsByDateRange(session , repoId, greaterThanThisDate.getTime(), null );
}
catch ( MetadataRepositoryException e )
{
{
this.numberOfDaysBeforeNow = numberOfDaysBeforeNow;
}
+
+ public RepositorySessionFactory getRepositorySessionFactory( )
+ {
+ return repositorySessionFactory;
+ }
+
+ public void setRepositorySessionFactory( RepositorySessionFactory repositorySessionFactory )
+ {
+ this.repositorySessionFactory = repositorySessionFactory;
+ }
}
* Process all versions of the artifact which had a rss feed request.
*/
@Override
- public SyndFeed process( Map<String, String> reqParams, MetadataRepository metadataRepository )
+ public SyndFeed process( Map<String, String> reqParams )
throws FeedException
{
String groupId = reqParams.get( RssFeedProcessor.KEY_GROUP_ID );
if ( groupId != null && artifactId != null )
{
- return processNewVersionsOfArtifact( groupId, artifactId, metadataRepository );
+ return processNewVersionsOfArtifact( groupId, artifactId );
}
return null;
}
- private SyndFeed processNewVersionsOfArtifact( String groupId, String artifactId,
- MetadataRepository metadataRepository )
+ private SyndFeed processNewVersionsOfArtifact( String groupId, String artifactId )
throws FeedException
{
List<ArtifactMetadata> artifacts = new ArrayList<>();
try(RepositorySession session = repositorySessionFactory.createSession())
{
+ final MetadataRepository metadataRepository = session.getRepository( );
for ( Repository repo : repositoryRegistry.getRepositories() )
{
final String repoId = repo.getId();
{
this.generator = generator;
}
+
+ public RepositorySessionFactory getRepositorySessionFactory( )
+ {
+ return repositorySessionFactory;
+ }
+
+ public void setRepositorySessionFactory( RepositorySessionFactory repositorySessionFactory )
+ {
+ this.repositorySessionFactory = repositorySessionFactory;
+ }
+
+ public RepositoryRegistry getRepositoryRegistry( )
+ {
+ return repositoryRegistry;
+ }
+
+ public void setRepositoryRegistry( RepositoryRegistry repositoryRegistry )
+ {
+ this.repositoryRegistry = repositoryRegistry;
+ }
}
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.io.FeedException;
-import org.apache.archiva.metadata.repository.MetadataRepository;
import java.util.Map;
String KEY_ARTIFACT_ID = "artifactId";
- SyndFeed process( Map<String, String> reqParams, MetadataRepository metadataRepository )
+ SyndFeed process( Map<String, String> reqParams )
throws FeedException;
}
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.AbstractMetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.rss.RssFeedGenerator;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
+import org.easymock.EasyMock;
+import org.easymock.IMocksControl;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
private MetadataRepositoryMock metadataRepository;
+ private IMocksControl sessionFactoryControl;
+ private RepositorySessionFactory sessionFactory;
+
+ private IMocksControl sessionControl;
+ private RepositorySession session;
+
@Before
@Override
public void setUp()
newArtifactsProcessor.setGenerator( new RssFeedGenerator() );
metadataRepository = new MetadataRepositoryMock();
+
+ sessionFactoryControl = EasyMock.createControl();
+ sessionControl = EasyMock.createControl();
+ sessionControl.resetToNice();
+
+ sessionFactory = sessionFactoryControl.createMock( RepositorySessionFactory.class );
+ session = sessionControl.createMock( RepositorySession.class );
+
+ EasyMock.expect( sessionFactory.createSession() ).andStubReturn( session );
+ EasyMock.expect( session.getRepository( ) ).andStubReturn( metadataRepository );
+
+ sessionFactoryControl.replay();
+ sessionControl.replay();
+
+ newArtifactsProcessor.setRepositorySessionFactory( sessionFactory );
+
}
@SuppressWarnings ("unchecked")
metadataRepository.setArtifactsByDateRange( newArtifacts );
+
Map<String, String> reqParams = new HashMap<>();
reqParams.put( RssFeedProcessor.KEY_REPO_ID, TEST_REPO );
- SyndFeed feed = newArtifactsProcessor.process( reqParams, metadataRepository );
+ SyndFeed feed = newArtifactsProcessor.process( reqParams );
// check that the date used in the call is close to the one passed (5 seconds difference at most)
Calendar cal = Calendar.getInstance( TimeZone.getTimeZone( "GMT" ) );
import com.sun.syndication.feed.synd.SyndEntry;
import com.sun.syndication.feed.synd.SyndFeed;
import junit.framework.TestCase;
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+import org.apache.archiva.repository.BasicManagedRepository;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.rss.RssFeedGenerator;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
+import org.easymock.EasyMock;
import org.easymock.IMocksControl;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
+import java.nio.file.Paths;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
private MetadataRepository metadataRepository;
- private IMocksControl factoryControl;
- private RepositorySessionFactory repositorySessionFactory;
+ private IMocksControl sessionFactoryControl;
+ private RepositorySessionFactory sessionFactory;
+
+ private IMocksControl sessionControl;
+ private RepositorySession session;
+
+ private IMocksControl repositoryRegistryControl;
+ private RepositoryRegistry repositoryRegistry;
@Before
metadataRepositoryControl = createControl();
metadataRepository = metadataRepositoryControl.createMock( MetadataRepository.class );
- factoryControl = createControl();
- repositorySessionFactory = factoryControl.createMock(RepositorySessionFactory.class);
+ sessionFactoryControl = EasyMock.createControl();
+ sessionControl = EasyMock.createControl();
+ sessionControl.resetToNice();
+
+ sessionFactory = sessionFactoryControl.createMock( RepositorySessionFactory.class );
+ session = sessionControl.createMock( RepositorySession.class );
+
+ EasyMock.expect( sessionFactory.createSession() ).andStubReturn( session );
+ EasyMock.expect( session.getRepository( ) ).andStubReturn( metadataRepository );
+ sessionFactoryControl.replay();
+ sessionControl.replay();
+
+ repositoryRegistryControl = EasyMock.createControl();
+ repositoryRegistry = repositoryRegistryControl.createMock( RepositoryRegistry.class );
+
+ List<Repository> reg = new ArrayList<>( );
+ reg.add( new BasicManagedRepository( TEST_REPO, TEST_REPO, new FilesystemStorage( Paths.get("target/test-storage"), new DefaultFileLockManager() ) ) );
+ EasyMock.expect( repositoryRegistry.getRepositories() ).andStubReturn( reg );
+ repositoryRegistryControl.replay();
+
+ newVersionsProcessor.setRepositorySessionFactory( sessionFactory );
+ newVersionsProcessor.setRepositoryRegistry( repositoryRegistry );
}
@SuppressWarnings("unchecked")
reqParams.put( RssFeedProcessor.KEY_GROUP_ID, GROUP_ID );
reqParams.put( RssFeedProcessor.KEY_ARTIFACT_ID, ARTIFACT_ID );
- try(RepositorySession session = repositorySessionFactory.createSession()) {
expect(metadataRepository.getProjectVersions(session, TEST_REPO, GROUP_ID, ARTIFACT_ID)).andReturn(
Arrays.asList("1.0.1", "1.0.2", "1.0.3-SNAPSHOT"));
expect(metadataRepository.getArtifacts(session, TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.1")).andReturn(
Collections.singletonList(artifact2));
expect(metadataRepository.getArtifacts(session, TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.3-SNAPSHOT")).andReturn(
Collections.singletonList(artifact3));
- }
metadataRepositoryControl.replay();
- SyndFeed feed = newVersionsProcessor.process( reqParams, metadataRepository );
+ SyndFeed feed = newVersionsProcessor.process( reqParams );
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two'", feed.getTitle() );
assertEquals( "New versions of artifact 'org.apache.archiva:artifact-two' found during repository scan.",
return;
}
- RepositorySession repositorySession = repositorySessionFactory.createSession();
- try
- {
- feed = processor.process( map, repositorySession.getRepository() );
- }
- finally
- {
- repositorySession.close();
- }
+ feed = processor.process( map );
+
if ( feed == null )
{
res.sendError( HttpServletResponse.SC_NO_CONTENT, "No information available." );
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, USER_NOT_AUTHORIZED );
}
- catch ( org.apache.archiva.metadata.repository.MetadataRepositoryException e )
- {
- e.printStackTrace( );
- }
}
/**
import junit.framework.TestCase;
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.configuration.ArchivaConfiguration;
+import org.apache.archiva.repository.BasicManagedRepository;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.codec.Encoder;
import org.apache.commons.codec.binary.Base64;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.lang.annotation.Annotation;
+import java.nio.file.Paths;
import java.util.Locale;
import java.util.Map;
@Inject
protected ApplicationContext applicationContext;
+ @Inject
+ protected RepositoryRegistry repositoryRegistry;
+
@BeforeClass
public static void initConfigurationPath()
throws Exception
}
};
+ repositoryRegistry.reload();
+ repositoryRegistry.putRepository( new BasicManagedRepository( "internal", "internal",
+ new FilesystemStorage( Paths.get( "target/appserver-base/repositories/internal" ), new DefaultFileLockManager( ) ) ) );
rssFeedServlet.init( mockServletConfig );
}
assertNull( metadata1.getScm( ) );
assertNull( metadata1.getIssueManagement( ) );
assertNull( metadata1.getOrganization( ) );
- assertEquals( "", metadata1.getDescription( ) );
- assertEquals( "", metadata1.getName( ) );
+ assertTrue( metadata1.getDescription( )==null || "".equals(metadata1.getDescription()) );
+ assertTrue( metadata1.getName( )==null || "".equals(metadata1.getName()) );
assertEquals( TEST_PROJECT_VERSION, metadata1.getId( ) );
assertEquals( TEST_PROJECT_VERSION, metadata1.getVersion( ) );
assertTrue( metadata1.getMailingLists( ).isEmpty( ) );
* under the License.
*/
-import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import java.util.Date;
*/
public interface RepositoryStatisticsManager
{
- RepositoryStatistics getLastStatistics( MetadataRepository metadataRepository, String repositoryId )
+ RepositoryStatistics getLastStatistics( String repositoryId )
throws MetadataRepositoryException;
- boolean hasStatistics( MetadataRepository metadataRepository, String repositoryId )
+ boolean hasStatistics( String repositoryId )
throws MetadataRepositoryException;
- void addStatisticsAfterScan( MetadataRepository metadataRepository, String repositoryId, Date startTime,
+ void addStatisticsAfterScan( String repositoryId, Date startTime,
Date endTime, long totalFiles, long newFiles )
throws MetadataRepositoryException;
- void deleteStatistics( MetadataRepository metadataRepository, String repositoryId )
+ void deleteStatistics( String repositoryId )
throws MetadataRepositoryException;
- List<RepositoryStatistics> getStatisticsInRange( MetadataRepository metadataRepository, String repositoryId,
+ List<RepositoryStatistics> getStatisticsInRange( String repositoryId,
Date startTime, Date endTime )
throws MetadataRepositoryException;
}
return other.name.compareTo( name );
}
}
+
+ public RepositorySessionFactory getRepositorySessionFactory( )
+ {
+ return repositorySessionFactory;
+ }
+
+ public void setRepositorySessionFactory( RepositorySessionFactory repositorySessionFactory )
+ {
+ this.repositorySessionFactory = repositorySessionFactory;
+ }
}
private RepositorySessionFactory repositorySessionFactory;
+ private RepositorySession session;
+
private static final String AUDIT_EVENT_BASE = "2010/01/18/123456.";
private static final String TEST_REPO_ID = "test-repo";
private static final DecimalFormat MILLIS_FORMAT = new DecimalFormat( "000" );
private IMocksControl factoryControl;
+ private IMocksControl sessionControl;
private static SimpleDateFormat createTimestampFormat()
{
factoryControl = EasyMock.createControl();
repositorySessionFactory = factoryControl.createMock(RepositorySessionFactory.class);
+ sessionControl = EasyMock.createControl( );
+ session = sessionControl.createMock( RepositorySession.class );
+
+ auditManager.setRepositorySessionFactory( repositorySessionFactory );
+
ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
repository.setId( TEST_REPO_ID );
repository.setLocation( "" );
expectedEvents.add( event );
}
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
getEventNames(expectedEvents));
metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, event.getName())).andReturn(
event);
}
- }
metadataRepositoryControl.replay();
List<AuditEvent> events =
expectedEvents.add( createEvent( AUDIT_EVENT_BASE + MILLIS_FORMAT.format( i ) ) );
}
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
getEventNames(expectedEvents));
for (AuditEvent event : expectedEvents) {
metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, event.getName())).andReturn(
event);
}
- }
metadataRepositoryControl.replay();
List<AuditEvent> events =
events.add( event );
eventNames.get( repositoryId ).add( event.getName() );
}
-
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
eventNames.get(TEST_REPO_ID));
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID_2, AuditEvent.FACET_ID)).andReturn(
EasyMock.expect(metadataRepository.getMetadataFacet(session, event.getRepositoryId(),
AuditEvent.FACET_ID, event.getName())).andReturn(event);
}
- }
metadataRepositoryControl.replay();
events =
throws Exception
{
-
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Collections.<String>emptyList());
- }
metadataRepositoryControl.replay();
assertTrue( auditManager.getMostRecentAuditEvents( metadataRepository,
{
AuditEvent event = createEvent( new Date() );
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
metadataRepository.addMetadataFacet(session, TEST_REPO_ID, event);
- }
metadataRepositoryControl.replay();
throws Exception
{
- try(RepositorySession session = repositorySessionFactory.createSession()) {
- metadataRepository.removeMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID);
- }
+
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
+
+ metadataRepository.removeMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID);
metadataRepositoryControl.replay();
Date expectedTimestamp = new Date( current.getTime() - 3000 );
AuditEvent expectedEvent = createEvent( expectedTimestamp );
AuditEvent event3 = createEvent( new Date( current.getTime() - 1000 ) );
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
- try(RepositorySession session = repositorySessionFactory.createSession()) {
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(event1.getName(), expectedEvent.getName(), event3.getName()));
// only match the middle one
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent.getName())).andReturn(expectedEvent);
- }
metadataRepositoryControl.replay();
Date ts3 = new Date( current.getTime() - 1000 );
AuditEvent expectedEvent3 = createEvent( ts3 );
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
+
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(event1.getName(), expectedEvent2.getName(), expectedEvent3.getName()));
AuditEvent.FACET_ID, expectedEvent2.getName())).andReturn(expectedEvent2);
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent3.getName())).andReturn(expectedEvent3);
- }
metadataRepositoryControl.replay();
List<AuditEvent> events =
AuditEvent expectedEvent2 = createEvent( expectedTimestamp );
AuditEvent event3 = createEvent( new Date( current.getTime() - 1000 ) );
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
+
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(expectedEvent1.getName(), expectedEvent2.getName(), event3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent2.getName())).andReturn(expectedEvent2);
- }
metadataRepositoryControl.replay();
List<AuditEvent> events =
Date ts3 = new Date( current.getTime() - 1000 );
AuditEvent expectedEvent3 = createEvent( ts3 );
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(expectedEvent1.getName(), expectedEvent2.getName(), expectedEvent3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent3.getName())).andReturn(expectedEvent3);
- }
metadataRepositoryControl.replay();
List<AuditEvent> events =
Date ts3 = new Date( current.getTime() - 1000 );
AuditEvent expectedEvent3 = createEvent( ts3 );
-
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(expectedEvent1.getName(), expectedEvent2.getName(), expectedEvent3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
AuditEvent.FACET_ID, expectedEvent3.getName())).andReturn(expectedEvent3);
- }
metadataRepositoryControl.replay();
List<AuditEvent> events =
expectedEvent2.setResource( "different-resource" );
AuditEvent expectedEvent3 = createEvent( new Date( current.getTime() - 1000 ) );
-
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID)).andReturn(
Arrays.asList(expectedEvent1.getName(), expectedEvent2.getName(), expectedEvent3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, expectedEvent3.getName())).andReturn(expectedEvent3);
- }
metadataRepositoryControl.replay();
List<AuditEvent> events =
Date ts3 = new Date( current.getTime() - 1000 );
AuditEvent expectedEvent3 = createEvent( ts3 );
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID))
.andReturn(Arrays.asList(expectedEvent1.getName(), expectedEvent3.getName()));
EasyMock.expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID, AuditEvent.FACET_ID, expectedEvent3.getName()))
.andReturn(expectedEvent3);
- }
metadataRepositoryControl.replay();
List<AuditEvent> events =
String name2 = createEvent( expectedTimestamp ).getName();
String name3 = createEvent( new Date( current.getTime() - 1000 ) ).getName();
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+ sessionControl.reset();
+ factoryControl.reset();
+ EasyMock.expect( repositorySessionFactory.createSession() ).andStubReturn( session );
+ factoryControl.replay();
EasyMock.expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, AuditEvent.FACET_ID))
.andReturn(Arrays.asList(name1, name2, name3));
- }
metadataRepositoryControl.replay();
List<AuditEvent> events =
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.cassandra.model.ProjectVersionMetadataModel;
+import org.easymock.EasyMock;
+import org.easymock.IMocksControl;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
CassandraMetadataRepository cmr;
+ IMocksControl sessionFactoryControl;
+ RepositorySessionFactory sessionFactory;
+
+ IMocksControl sessionControl;
+ RepositorySession session;
+
+
+
@Override
protected RepositorySessionFactory getSessionFactory( )
{
- return null;
+ return sessionFactory;
}
@Override
this.cmr = new CassandraMetadataRepository( factories, null, cassandraArchivaManager );
+ sessionFactoryControl = EasyMock.createControl( );
+ sessionFactory = sessionFactoryControl.createMock( RepositorySessionFactory.class );
+ sessionControl = EasyMock.createControl( );
+ session = sessionControl.createMock( RepositorySession.class );
+
+ EasyMock.expect( sessionFactory.createSession( ) ).andStubReturn( session );
+
+ sessionFactoryControl.replay();
+
clearReposAndNamespace( cassandraArchivaManager );
}
{
private FileMetadataRepository repository;
- private RepositorySessionFactory sessionFactory = null;
+ private RepositorySessionFactory sessionFactory = new FileRepositorySessionFactory();
@Override
protected MetadataRepository getRepository( )
" AND [projectVersion].[jcr:path] LIKE '/repositories/%/content/%'" :
" AND ISDESCENDANTNODE(projectVersion,'/" + getRepositoryContentPath( repositoryId ) + "')";
List<ArtifactMetadata> result = new ArrayList<>( );
- if (key!=null && Arrays.binarySearch( PROJECT_VERSION_VERSION_PROPERTIES, key )>=0)
+        if (key==null || Arrays.binarySearch( PROJECT_VERSION_VERSION_PROPERTIES, key )>=0)
{
// We search only for project version properties if the key is a valid property name
String q1 =
initIncludes();
configuration.addChangeListener( this );
}
+
+ public RepositorySessionFactory getRepositorySessionFactory( )
+ {
+ return repositorySessionFactory;
+ }
+
+ public void setRepositorySessionFactory( RepositorySessionFactory repositorySessionFactory )
+ {
+ this.repositorySessionFactory = repositorySessionFactory;
+ }
}
RepositorySessionFactory repositorySessionFactory;
+ RepositorySession session;
+
@Before
@Override
public void setUp()
metadataRepository = mock( MetadataRepository.class );
repositorySessionFactory = mock(RepositorySessionFactory.class);
- RepositorySession session = mock( RepositorySession.class );
- when( session.getRepository() ).thenReturn( metadataRepository );
+ session = mock( RepositorySession.class );
- RepositorySessionFactory factory = applicationContext.getBean( RepositorySessionFactory.class );
- //(RepositorySessionFactory) lookup( RepositorySessionFactory.class );
- when( factory.createSession() ).thenReturn( session );
+ when( repositorySessionFactory.createSession( ) ).thenReturn( session );
+ when( session.getRepository() ).thenReturn( metadataRepository );
when( pathTranslator.getArtifactForPath( TEST_REPO, TEST_FILE ) ).thenReturn( TEST_METADATA );
+
+ consumer.setRepositorySessionFactory( repositorySessionFactory );
+
}
@Test
public void testConsumerArtifactNotDuplicated()
throws Exception
{
- RepositorySession session = repositorySessionFactory.createSession();
- when( metadataRepository.getArtifactsByChecksum(session , TEST_REPO, TEST_CHECKSUM ) ).thenReturn(
+ // All arguments are now matchers: Mockito rejects mixing raw values with
+ // matchers in one stubbing, and the shared mock session is created in setUp().
+ when( metadataRepository.getArtifactsByChecksum(eq(session) , eq(TEST_REPO), eq(TEST_CHECKSUM) ) ).thenReturn(
Arrays.asList( TEST_METADATA ) );
consumer.beginScan( config, new Date() );
consumer.processFile( TEST_FILE );
consumer.completeScan();
- verify( metadataRepository, never() ).addMetadataFacet(session , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
+ // Only one artifact carries this checksum, so no duplicate problem facet may be stored.
+ verify( metadataRepository, never() ).addMetadataFacet(eq(session) , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
}
// TODO: Doesn't currently work
public void testConsumerArtifactDuplicated()
throws Exception
{
- RepositorySession session = repositorySessionFactory.createSession();
+ // NOTE(review): this method is disabled (no @Test). The stubbing below still
+ // passes raw arguments — valid only because no matchers are mixed in; consider
+ // eq(...) for consistency with the enabled tests when re-activating it.
when( metadataRepository.getArtifactsByChecksum(session , TEST_REPO, TEST_CHECKSUM ) ).thenReturn(
Arrays.asList( TEST_METADATA, createMetadata( "1.0" ) ) );
consumer.completeScan();
ArgumentCaptor<RepositoryProblemFacet> argument = ArgumentCaptor.forClass( RepositoryProblemFacet.class );
- verify( metadataRepository ).addMetadataFacet(session , eq( TEST_REPO ), argument.capture() );
+ // Matchers throughout the verify, capturing the stored problem facet for assertion.
+ verify( metadataRepository ).addMetadataFacet(eq(session) , eq( TEST_REPO ), argument.capture() );
RepositoryProblemFacet problem = argument.getValue();
assertProblem( problem );
}
public void testConsumerArtifactDuplicatedButSelfNotInMetadataRepository()
throws Exception
{
- RepositorySession session = repositorySessionFactory.createSession();
+ // NOTE(review): raw-argument stubbing kept here (legal — no matchers mixed in);
+ // the verify below was switched to matchers because it combines eq(...) args.
when( metadataRepository.getArtifactsByChecksum(session , TEST_REPO, TEST_CHECKSUM ) ).thenReturn(
Arrays.asList( createMetadata( "1.0" ) ) );
consumer.completeScan();
ArgumentCaptor<RepositoryProblemFacet> argument = ArgumentCaptor.forClass( RepositoryProblemFacet.class );
- verify( metadataRepository ).addMetadataFacet(session , eq( TEST_REPO ), argument.capture() );
+ verify( metadataRepository ).addMetadataFacet(eq(session) , eq( TEST_REPO ), argument.capture() );
RepositoryProblemFacet problem = argument.getValue();
assertProblem( problem );
}
{
consumer.completeScan();
}
- RepositorySession session = repositorySessionFactory.createSession();
- verify( metadataRepository, never() ).addMetadataFacet(session , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
+ verify( metadataRepository, never() ).addMetadataFacet(eq(session) , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
}
@Test
// No exception unnecessarily for something we can't report on
consumer.processFile( "com/example/invalid-artifact.txt" );
consumer.completeScan();
- RepositorySession session = repositorySessionFactory.createSession();
- verify( metadataRepository, never() ).addMetadataFacet(session , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
+ verify( metadataRepository, never() ).addMetadataFacet(eq(session) , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
}
@Test
public void testConsumerArtifactNotAnArtifactPathResults()
throws Exception
{
- RepositorySession session = repositorySessionFactory.createSession();
- when( metadataRepository.getArtifactsByChecksum(session , eq( TEST_REPO ), anyString() ) ).thenReturn(
+ // Old stubbing mixed a raw session with eq()/anyString() matchers, which
+ // Mockito rejects at runtime; every argument is now a matcher.
+ when( metadataRepository.getArtifactsByChecksum(eq(session) , eq( TEST_REPO ), anyString() ) ).thenReturn(
Arrays.asList( TEST_METADATA, createMetadata( "1.0" ) ) );
// override, this feels a little overspecified though
consumer.processFile( "com/example/invalid-artifact.txt" );
consumer.completeScan();
- verify( metadataRepository, never() ).addMetadataFacet(session , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
+ // A path that is not an artifact must never produce a duplicate problem facet.
+ verify( metadataRepository, never() ).addMetadataFacet(eq(session) , eq( TEST_REPO ), Matchers.<MetadataFacet>anyObject() );
}
private static void assertProblem( RepositoryProblemFacet problem )
private RepositoryWalkingStatisticsProvider walkingProvider = new RepositoryWalkingStatisticsProvider();
+
+
@Inject
RepositorySessionFactory repositorySessionFactory;
@Override
- public boolean hasStatistics( MetadataRepository metadataRepository, String repositoryId )
+ // The MetadataRepository parameter was removed from the signature; the
+ // repository is now resolved from a session created by the injected factory.
+ public boolean hasStatistics( String repositoryId )
throws MetadataRepositoryException
{
try(RepositorySession session = repositorySessionFactory.createSession()) {
+ // Obtain the metadata repository from the freshly opened session.
+ final MetadataRepository metadataRepository = session.getRepository( );
return metadataRepository.hasMetadataFacet(session, repositoryId, DefaultRepositoryStatistics.FACET_ID);
}
}
@Override
- public RepositoryStatistics getLastStatistics( MetadataRepository metadataRepository, String repositoryId )
+ public RepositoryStatistics getLastStatistics( String repositoryId )
throws MetadataRepositoryException
{
StopWatch stopWatch = new StopWatch();
stopWatch.start();
try(RepositorySession session = repositorySessionFactory.createSession()) {
+ final MetadataRepository metadataRepository = session.getRepository( );
+
// TODO: consider a more efficient implementation that directly gets the last one from the content repository
List<String> scans = metadataRepository.getMetadataFacets(session, repositoryId, DefaultRepositoryStatistics.FACET_ID);
if (scans == null) {
}
@Override
- public void addStatisticsAfterScan( MetadataRepository metadataRepository, String repositoryId, Date startTime,
+ public void addStatisticsAfterScan( String repositoryId, Date startTime,
Date endTime, long totalFiles, long newFiles )
throws MetadataRepositoryException
{
try(RepositorySession session = repositorySessionFactory.createSession()) {
+ final MetadataRepository metadataRepository = session.getRepository( );
+
DefaultRepositoryStatistics repositoryStatistics = new DefaultRepositoryStatistics();
repositoryStatistics.setRepositoryId(repositoryId);
repositoryStatistics.setScanStartTime(startTime);
}
@Override
- public void deleteStatistics( MetadataRepository metadataRepository, String repositoryId )
+ // Signature narrowed: the repository comes from the session, not the caller.
+ public void deleteStatistics( String repositoryId )
throws MetadataRepositoryException
{
try(RepositorySession session = repositorySessionFactory.createSession()) {
+ final MetadataRepository metadataRepository = session.getRepository( );
metadataRepository.removeMetadataFacets(session, repositoryId, DefaultRepositoryStatistics.FACET_ID);
}
}
@Override
- public List<RepositoryStatistics> getStatisticsInRange( MetadataRepository metadataRepository, String repositoryId,
+ public List<RepositoryStatistics> getStatisticsInRange( String repositoryId,
Date startTime, Date endTime )
throws MetadataRepositoryException
{
try(RepositorySession session = repositorySessionFactory.createSession()) {
+ final MetadataRepository metadataRepository = session.getRepository( );
List<RepositoryStatistics> results = new ArrayList<>();
List<String> list = metadataRepository.getMetadataFacets(session, repositoryId, DefaultRepositoryStatistics.FACET_ID);
Collections.sort(list, Collections.reverseOrder());
fmt.setTimeZone( UTC_TIME_ZONE );
return fmt;
}
+
+ /**
+  * Returns the factory used to open metadata repository sessions.
+  */
+ public RepositorySessionFactory getRepositorySessionFactory( )
+ {
+ return repositorySessionFactory;
+ }
+
+ /**
+  * Replaces the session factory; lets tests inject a mock factory.
+  */
+ public void setRepositorySessionFactory( RepositorySessionFactory repositorySessionFactory )
+ {
+ this.repositorySessionFactory = repositorySessionFactory;
+ }
}
import java.util.Map;
import java.util.TimeZone;
-import static org.easymock.EasyMock.createControl;
-import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.*;
@RunWith( ArchivaBlockJUnit4ClassRunner.class )
public class RepositoryStatisticsManagerTest
private RepositorySessionFactory repositorySessionFactory;
private IMocksControl factoryControl;
+ private IMocksControl sessionControl;
+ private RepositorySession session;
private static SimpleDateFormat createTimestampFormat()
{
factoryControl = createControl();
repositorySessionFactory = factoryControl.createMock(RepositorySessionFactory.class);
+
+ repositoryStatisticsManager.setRepositorySessionFactory( repositorySessionFactory );
+
+ sessionControl = createControl( );
+ session = sessionControl.createMock( RepositorySession.class );
+
}
@Test
stats.setTotalFileCount( 56229 );
- try(RepositorySession session = repositorySessionFactory.createSession()) {
- expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID)).andReturn(
+ sessionControl.reset();
+ factoryControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ expect( session.getRepository() ).andStubReturn( metadataRepository );
+ session.close();
+ expectLastCall( ).anyTimes( );
+ factoryControl.replay();
+ sessionControl.replay();
+
+ expect(metadataRepository.getMetadataFacets(session, TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID)).andReturn(
Arrays.asList(FIRST_TEST_SCAN, SECOND_TEST_SCAN));
- expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
+ expect(metadataRepository.getMetadataFacet(session, TEST_REPO_ID,
DefaultRepositoryStatistics.FACET_ID, SECOND_TEST_SCAN)).andReturn(stats);
- }
metadataRepositoryControl.replay();
- stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
+ stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertNotNull( stats );
assertEquals( 1314527915L, stats.getTotalArtifactFileSize() );
assertEquals( 123, stats.getNewFileCount() );
throws Exception
{
- RepositorySession session = repositorySessionFactory.createSession();
+ sessionControl.reset();
+ factoryControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ expect( session.getRepository() ).andStubReturn( metadataRepository );
+ session.close();
+ expectLastCall( ).anyTimes( );
+ factoryControl.replay();
+ sessionControl.replay();
+
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
Collections.<String>emptyList() );
metadataRepositoryControl.replay();
- RepositoryStatistics stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
+ RepositoryStatistics stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertNull( stats );
metadataRepositoryControl.verify();
RepositoryStatistics stats = createTestStats( startTime, current );
walkRepository( 1 );
- RepositorySession session = repositorySessionFactory.createSession();
+ sessionControl.reset();
+ factoryControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ expect( session.getRepository() ).andStubReturn( metadataRepository );
+ session.close();
+ expectLastCall( ).anyTimes( );
+ factoryControl.replay();
+ sessionControl.replay();
metadataRepository.addMetadataFacet(session , TEST_REPO_ID, stats );
metadataRepositoryControl.replay();
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, startTime, current, 56345,
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime, current, 56345,
45 );
- stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
+ stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertNotNull( stats );
assertEquals( 246900, stats.getTotalArtifactFileSize() );
assertEquals( 45, stats.getNewFileCount() );
Date startTime1 = new Date( current.getTime() - 12345 );
DefaultRepositoryStatistics stats1 = createTestStats( startTime1, new Date( current.getTime() - 6000 ) );
- RepositorySession session = repositorySessionFactory.createSession();
+
+ sessionControl.reset();
+ factoryControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ expect( session.getRepository() ).andStubReturn( metadataRepository );
+ session.close();
+ expectLastCall( ).anyTimes( );
+ factoryControl.replay();
+ sessionControl.replay();
metadataRepository.addMetadataFacet(session , TEST_REPO_ID, stats1 );
metadataRepositoryControl.replay();
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, startTime1,
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime1,
stats1.getScanEndTime(), 56345, 45 );
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, startTime2,
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime2,
stats2.getScanEndTime(), 56345, 45 );
- assertNotNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
+ assertNotNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
- repositoryStatisticsManager.deleteStatistics( metadataRepository, TEST_REPO_ID );
+ repositoryStatisticsManager.deleteStatistics( TEST_REPO_ID );
- assertNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
+ assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
metadataRepositoryControl.verify();
}
public void testDeleteStatsWhenEmpty()
throws Exception
{
- RepositorySession session = repositorySessionFactory.createSession();
+ sessionControl.reset();
+ factoryControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ expect( session.getRepository() ).andStubReturn( metadataRepository );
+ session.close();
+ expectLastCall( ).anyTimes( );
+ factoryControl.replay();
+ sessionControl.replay();
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn(
Collections.<String>emptyList() ).times( 2 );
metadataRepositoryControl.replay();
- assertNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
+ assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
- repositoryStatisticsManager.deleteStatistics( metadataRepository, TEST_REPO_ID );
+ repositoryStatisticsManager.deleteStatistics( TEST_REPO_ID );
- assertNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
+ assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
metadataRepositoryControl.verify();
}
Date current = new Date();
+ sessionControl.reset();
+ factoryControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ expect( session.getRepository() ).andStubReturn( metadataRepository );
+ session.close();
+ expectLastCall( ).anyTimes( );
+ factoryControl.replay();
+ sessionControl.replay();
+
+
addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
addStats( new Date( current.getTime() - 3000 ), new Date( current.getTime() - 2000 ) );
addStats( new Date( current.getTime() - 1000 ), current );
ArrayList<String> keys = new ArrayList<>( statsCreated.keySet() );
- RepositorySession session = repositorySessionFactory.createSession();
-
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
// only match the middle one
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID,
stats.getScanStartTime(), stats.getScanEndTime(), 56345,
45 );
}
List<RepositoryStatistics> list =
- repositoryStatisticsManager.getStatisticsInRange( metadataRepository, TEST_REPO_ID,
+ repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID,
new Date( current.getTime() - 4000 ),
new Date( current.getTime() - 2000 ) );
Date current = new Date();
+ sessionControl.reset();
+ factoryControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ expect( session.getRepository() ).andStubReturn( metadataRepository );
+ session.close();
+ expectLastCall( ).anyTimes( );
+ factoryControl.replay();
+ sessionControl.replay();
+
addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
addStats( new Date( current.getTime() - 3000 ), new Date( current.getTime() - 2000 ) );
addStats( new Date( current.getTime() - 1000 ), current );
List<String> keys = new ArrayList<>( statsCreated.keySet() );
- RepositorySession session = repositorySessionFactory.createSession();
-
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
String key = keys.get( 1 );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID,
stats.getScanStartTime(), stats.getScanEndTime(), 56345,
45 );
}
List<RepositoryStatistics> list =
- repositoryStatisticsManager.getStatisticsInRange( metadataRepository, TEST_REPO_ID,
+ repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID,
new Date( current.getTime() - 4000 ), current );
assertEquals( 2, list.size() );
Date current = new Date();
+ sessionControl.reset();
+ factoryControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ expect( session.getRepository() ).andStubReturn( metadataRepository );
+ session.close();
+ expectLastCall( ).anyTimes( );
+ factoryControl.replay();
+ sessionControl.replay();
+
addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
addStats( new Date( current.getTime() - 3000 ), new Date( current.getTime() - 2000 ) );
addStats( new Date( current.getTime() - 1000 ), current );
List<String> keys = new ArrayList<>( statsCreated.keySet() );
- RepositorySession session = repositorySessionFactory.createSession();
-
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
String key = keys.get( 0 );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID,
stats.getScanStartTime(), stats.getScanEndTime(), 56345,
45 );
}
List<RepositoryStatistics> list =
- repositoryStatisticsManager.getStatisticsInRange( metadataRepository, TEST_REPO_ID,
+ repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID,
new Date( current.getTime() - 20000 ),
new Date( current.getTime() - 2000 ) );
Date current = new Date();
+ sessionControl.reset();
+ factoryControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ expect( session.getRepository() ).andStubReturn( metadataRepository );
+ session.close();
+ expectLastCall( ).anyTimes( );
+ factoryControl.replay();
+ sessionControl.replay();
+
addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
addStats( new Date( current.getTime() - 3000 ), new Date( current.getTime() - 2000 ) );
addStats( new Date( current.getTime() - 1000 ), current );
ArrayList<String> keys = new ArrayList<>( statsCreated.keySet() );
- RepositorySession session = repositorySessionFactory.createSession();
-
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
String key = keys.get( 0 );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID,
stats.getScanStartTime(), stats.getScanEndTime(), 56345,
45 );
}
List<RepositoryStatistics> list =
- repositoryStatisticsManager.getStatisticsInRange( metadataRepository, TEST_REPO_ID,
+ repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID,
new Date( current.getTime() - 20000 ), current );
assertEquals( 3, list.size() );
walkRepository( 3 );
Date current = new Date();
+ sessionControl.reset();
+ factoryControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ expect( session.getRepository() ).andStubReturn( metadataRepository );
+ session.close();
+ expectLastCall( ).anyTimes( );
+ factoryControl.replay();
+ sessionControl.replay();
addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
addStats( new Date( current.getTime() - 3000 ), new Date( current.getTime() - 2000 ) );
ArrayList<String> keys = new ArrayList<>( statsCreated.keySet() );
- RepositorySession session = repositorySessionFactory.createSession();
-
expect( metadataRepository.getMetadataFacets(session , TEST_REPO_ID, DefaultRepositoryStatistics.FACET_ID ) ).andReturn( keys );
metadataRepositoryControl.replay();
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID,
stats.getScanStartTime(), stats.getScanEndTime(), 56345,
45 );
}
List<RepositoryStatistics> list =
- repositoryStatisticsManager.getStatisticsInRange( metadataRepository, TEST_REPO_ID,
+ repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID,
new Date( current.getTime() - 20000 ),
new Date( current.getTime() - 16000 ) );
private void walkRepository( int count )
throws Exception
{
- RepositorySession session = repositorySessionFactory.createSession();
+ sessionControl.reset();
+ expect( repositorySessionFactory.createSession( ) ).andStubReturn( session );
+ factoryControl.replay();
for ( int i = 0; i < count; i++ )
{
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-model</artifactId>
</dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>metadata-store-jcr</artifactId>
- </dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
}
}
+ /**
+  * Returns the factory used to create metadata repository sessions.
+  */
+ public RepositorySessionFactory getRepositorySessionFactory( )
+ {
+ return repositorySessionFactory;
+ }
+ /**
+  * Replaces the session factory; lets tests inject a mock factory.
+  */
+ public void setRepositorySessionFactory( RepositorySessionFactory repositorySessionFactory )
+ {
+ this.repositorySessionFactory = repositorySessionFactory;
+ }
}
public class StubConfiguration
implements ArchivaConfiguration
{
- private Configuration configuration;
+ private Configuration configuration = new Configuration();
@Override
public Configuration getConfiguration()
@Override
public void addListener( ConfigurationListener listener )
{
- throw new UnsupportedOperationException();
+ // Intentionally a no-op: this stub configuration does not notify listeners.
}
@Override
@Override
public void addChangeListener( RegistryListener listener )
{
- throw new UnsupportedOperationException();
+ // Intentionally a no-op: registry change notifications are unused by this stub.
}
@Override
@Override
public Path getDataDirectory() {
- if (configuration!=null && StringUtils.isNotEmpty(configuration.getArchivaRuntimeConfiguration().getDataDirectory())) {
+ if (configuration!=null && configuration.getArchivaRuntimeConfiguration()!=null && StringUtils.isNotEmpty(configuration.getArchivaRuntimeConfiguration().getDataDirectory())) {
Path dataDir = Paths.get(configuration.getArchivaRuntimeConfiguration().getDataDirectory());
if (dataDir.isAbsolute()) {
return dataDir;
import org.apache.archiva.configuration.RepositoryScanningConfiguration;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+import org.apache.archiva.repository.Repository;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.junit.Before;
+import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
private MetadataRepository metadataRepository;
- private RepositorySessionFactory repositorySessionFactory;
+ private static RepositorySessionFactory repositorySessionFactory;
+
+ private static RepositorySession session;
+
+ // The factory and session mocks are created in a static initializer so the
+ // Spring test context can obtain the very same factory instance through the
+ // static getRepositorySessionFactory() factory-method bean declaration.
+ static
+ {
+ repositorySessionFactory = mock(RepositorySessionFactory.class);
+ session = mock( RepositorySession.class );
+
+ try
+ {
+ when( repositorySessionFactory.createSession( ) ).thenReturn( session );
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ // createSession() declares this checked exception, so stubbing must
+ // handle it even though the mock itself will not throw here.
+ throw new RuntimeException( e );
+ }
+
+ }
+
+ // Factory method referenced from the Spring XML test context bean definition.
+ public static RepositorySessionFactory getRepositorySessionFactory() {
+ return repositorySessionFactory;
+ }
+
+
@Before
@Override
super.setUp();
MockitoAnnotations.initMocks( this );
metadataRepository = mock( MetadataRepository.class );
- repositorySessionFactory = mock(RepositorySessionFactory.class);
+ repositoryMerger.setRepositorySessionFactory( repositorySessionFactory );
+
}
private List<ArtifactMetadata> getArtifacts()
c.addManagedRepository( targetRepo );
configuration.save( c );
- try(RepositorySession session = repositorySessionFactory.createSession()) {
+
when(metadataRepository.getArtifacts(session, TEST_REPO_ID)).thenReturn(getArtifacts());
repositoryMerger.merge(metadataRepository, TEST_REPO_ID, "target-rep");
verify(metadataRepository).getArtifacts(session, TEST_REPO_ID);
- }
assertTrue( Files.exists(mergedArtifact) );
assertTrue( Files.exists(mavenMetadata) );
assertTrue( Files.exists(pom) );
"/target/test-repository/com/example/test/test-artifact/1.0-SNAPSHOT/test-artifact-1.0-20100308.230825-1.jar" );
targetRepoFile.toFile().setReadOnly();
- try(RepositorySession session = repositorySessionFactory.createSession()) {
when(metadataRepository.getArtifacts(session, sourceRepoId)).thenReturn(sourceRepoArtifactsList);
when(metadataRepository.getArtifacts(session, TEST_REPO_ID)).thenReturn(targetRepoArtifactsList);
assertEquals(1, repositoryMerger.getConflictingArtifacts(metadataRepository, sourceRepoId,
TEST_REPO_ID).size());
verify(metadataRepository).getArtifacts(session, TEST_REPO_ID);
- }
}
}
\ No newline at end of file
<alias name="archivaConfiguration#mocked" alias="archivaConfiguration"/>
<alias name="archivaConfiguration#mocked" alias="archivaConfiguration#default" />
+ <bean name="repositorySessionFactory#mock"
+ class="org.apache.archiva.stagerepository.merge.Maven2RepositoryMergerTest" factory-method="getRepositorySessionFactory" />
+ <alias alias="repositorySessionFactory#jcr" name="repositorySessionFactory#mock" />
+
</beans>
\ No newline at end of file