<artifactId>xmlunit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
</project>
*/
import org.apache.archiva.audit.AuditEvent;
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
protected final ManagedRepositoryContent repository;
+ protected final RepositorySession repositorySession;
+
protected final List<RepositoryListener> listeners;
private Logger logger = LoggerFactory.getLogger( "org.apache.archiva.AuditLog" );
private static final char DELIM = ' ';
- public AbstractRepositoryPurge( ManagedRepositoryContent repository, List<RepositoryListener> listeners )
+ public AbstractRepositoryPurge( ManagedRepositoryContent repository, RepositorySession repositorySession,
+ List<RepositoryListener> listeners )
{
this.repository = repository;
+ this.repositorySession = repositorySession;
this.listeners = listeners;
}
{
if ( references != null && !references.isEmpty() )
{
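+ // resolve the metadata repository from the current session so listeners can delete stored metadata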
+ MetadataRepository metadataRepository = repositorySession.getRepository();
for ( ArtifactReference reference : references )
{
File artifactFile = repository.toFile( reference );
// FIXME: looks incomplete, might not delete related metadata?
for ( RepositoryListener listener : listeners )
{
- listener.deleteArtifact( repository.getId(), reference.getGroupId(), reference.getArtifactId(),
- reference.getVersion(), artifactFile.getName() );
+ listener.deleteArtifact( metadataRepository, repository.getId(), reference.getGroupId(),
+ reference.getArtifactId(), reference.getVersion(),
+ artifactFile.getName() );
}
// TODO: this needs to be logged
artifactFile.delete();
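+ // persist any metadata changes the listeners made through the session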
+ repositorySession.save();
+
triggerAuditEvent( repository.getRepository().getId(), ArtifactReference.toKey( reference ),
AuditEvent.PURGE_ARTIFACT );
purgeSupportFiles( artifactFile );
* under the License.
*/
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
public CleanupReleasedSnapshotsRepositoryPurge( ManagedRepositoryContent repository, MetadataTools metadataTools,
ArchivaConfiguration archivaConfig,
RepositoryContentFactory repoContentFactory,
+ RepositorySession repositorySession,
List<RepositoryListener> listeners )
{
- super( repository, listeners );
+ super( repository, repositorySession, listeners );
this.metadataTools = metadataTools;
this.archivaConfig = archivaConfig;
this.repoContentFactory = repoContentFactory;
artifactRef.getVersion(), artifactRef.getClassifier(),
artifactRef.getType(), repository.getId() );
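+ // resolve the metadata repository once, before iterating the snapshot versions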
+ MetadataRepository metadataRepository = repositorySession.getRepository();
for ( String version : snapshotVersions )
{
if ( releasedVersions.contains( VersionUtil.getReleaseVersion( version ) ) )
// FIXME: looks incomplete, might not delete related metadata?
for ( RepositoryListener listener : listeners )
{
- listener.deleteArtifact( repository.getId(), artifact.getGroupId(), artifact.getArtifactId(),
- artifact.getVersion(), artifactFile.getName() );
+ listener.deleteArtifact( metadataRepository, repository.getId(), artifact.getGroupId(),
+ artifact.getArtifactId(), artifact.getVersion(),
+ artifactFile.getName() );
}
needsMetadataUpdate = true;
* under the License.
*/
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.commons.lang.time.DateUtils;
import org.apache.maven.archiva.common.utils.VersionComparator;
/**
* Purge from the repository all snapshots older than the number of days specified in the repository configuration.
- *
*/
public class DaysOldRepositoryPurge
extends AbstractRepositoryPurge
private int retentionCount;
- public DaysOldRepositoryPurge( ManagedRepositoryContent repository, int daysOlder,
- int retentionCount, List<RepositoryListener> listeners )
+ public DaysOldRepositoryPurge( ManagedRepositoryContent repository, int daysOlder, int retentionCount,
+ RepositorySession repositorySession, List<RepositoryListener> listeners )
{
- super( repository, listeners );
+ super( repository, repositorySession, listeners );
this.daysOlder = daysOlder;
this.retentionCount = retentionCount;
timestampParser = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
break;
}
- ArtifactReference newArtifactReference =
- repository.toArtifactReference( artifactFile.getAbsolutePath() );
+ ArtifactReference newArtifactReference = repository.toArtifactReference(
+ artifactFile.getAbsolutePath() );
newArtifactReference.setVersion( version );
File newArtifactFile = repository.toFile( newArtifactReference );
* under the License.
*/
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
* Consumer for removing old snapshots in the repository based on the criteria
* specified by the user.
*
- *
- * @plexus.component
- * role="org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer"
- * role-hint="repository-purge"
- * instantiation-strategy="per-lookup"
+ * @plexus.component role="org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer"
+ * role-hint="repository-purge"
+ * instantiation-strategy="per-lookup"
*/
public class RepositoryPurgeConsumer
extends AbstractMonitoredConsumer
private boolean deleteReleasedSnapshots;
- /** @plexus.requirement role="org.apache.archiva.repository.events.RepositoryListener" */
+ /**
+ * @plexus.requirement role="org.apache.archiva.repository.events.RepositoryListener"
+ */
private List<RepositoryListener> listeners = Collections.emptyList();
-
+
+ /**
+ * TODO: there could be multiple implementations of this, and the one used needs to be configured.
+ *
+ * @plexus.requirement
+ */
+ private RepositorySessionFactory repositorySessionFactory;
+
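+ /**
+ * The session for the current scan; opened in beginScan() and closed in completeScan().
+ */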
+ private RepositorySession repositorySession;
+
public String getId()
{
return this.id;
public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered )
throws ConsumerException
{
+ ManagedRepositoryContent repositoryContent;
try
{
- ManagedRepositoryContent repositoryContent = repositoryFactory.getManagedRepositoryContent( repository
- .getId() );
-
- if ( repository.getDaysOlder() != 0 )
- {
- repoPurge = new DaysOldRepositoryPurge( repositoryContent, repository.getDaysOlder(),
- repository.getRetentionCount(), listeners );
- }
- else
- {
- repoPurge = new RetentionCountRepositoryPurge( repositoryContent, repository.getRetentionCount(),
- listeners );
- }
-
- cleanUp =
- new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, metadataTools, configuration,
- repositoryFactory, listeners );
-
- deleteReleasedSnapshots = repository.isDeleteReleasedSnapshots();
+ repositoryContent = repositoryFactory.getManagedRepositoryContent( repository.getId() );
}
catch ( RepositoryNotFoundException e )
{
{
throw new ConsumerException( "Can't run repository purge: " + e.getMessage(), e );
}
+
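+ // open a session that lives for the duration of the scan; completeScan() closes it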
+ repositorySession = repositorySessionFactory.createSession();
+
+ if ( repository.getDaysOlder() != 0 )
+ {
+ repoPurge = new DaysOldRepositoryPurge( repositoryContent, repository.getDaysOlder(),
+ repository.getRetentionCount(), repositorySession, listeners );
+ }
+ else
+ {
+ repoPurge = new RetentionCountRepositoryPurge( repositoryContent, repository.getRetentionCount(),
+ repositorySession, listeners );
+ }
+
+ cleanUp = new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, metadataTools, configuration,
+ repositoryFactory, repositorySession, listeners );
+
+ deleteReleasedSnapshots = repository.isDeleteReleasedSnapshots();
}
public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
public void completeScan()
{
- /* do nothing */
+ repositorySession.close();
}
public void completeScan( boolean executeOnEntireRepo )
* under the License.
*/
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
/**
* Purge the repository by retention count. Retain only the specified number of snapshots.
- *
*/
public class RetentionCountRepositoryPurge
extends AbstractRepositoryPurge
{
private int retentionCount;
- public RetentionCountRepositoryPurge( ManagedRepositoryContent repository,
- int retentionCount, List<RepositoryListener> listeners )
+ public RetentionCountRepositoryPurge( ManagedRepositoryContent repository, int retentionCount,
+ RepositorySession repositorySession, List<RepositoryListener> listeners )
{
- super( repository, listeners );
+ super( repository, repositorySession, listeners );
this.retentionCount = retentionCount;
}
{
return;
}
-
+
ArtifactReference artifact = repository.toArtifactReference( path );
if ( VersionUtil.isSnapshot( artifact.getVersion() ) )
artifact.setVersion( version );
artifact.setClassifier( reference.getClassifier() );
artifact.setType( reference.getType() );
-
+
try
{
Set<ArtifactReference> related = repository.getRelatedArtifacts( artifact );
--- /dev/null
+package org.apache.archiva.metadata.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+public class TestRepositorySessionFactory
+ implements RepositorySessionFactory
+{
+ private MetadataRepository repository;
+
+ private MetadataResolver resolver;
+
+ public RepositorySession createSession()
+ {
+ return new RepositorySession( repository, resolver );
+ }
+
+ public void setRepository( MetadataRepository repository )
+ {
+ this.repository = repository;
+ }
+
+ public void setResolver( MetadataResolver resolver )
+ {
+ this.resolver = resolver;
+ }
+}
* under the License.
*/
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.easymock.MockControl;
import java.io.File;
import java.io.IOException;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
/**
*/
public abstract class AbstractRepositoryPurgeTest
public static final int TEST_DAYS_OLDER = 30;
- public static final String PATH_TO_BY_DAYS_OLD_ARTIFACT = "org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-20061118.060401-2.jar";
+ public static final String PATH_TO_BY_DAYS_OLD_ARTIFACT =
+ "org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-20061118.060401-2.jar";
- public static final String PATH_TO_BY_DAYS_OLD_METADATA_DRIVEN_ARTIFACT = "org/codehaus/plexus/plexus-utils/1.4.3-SNAPSHOT/plexus-utils-1.4.3-20070113.163208-4.jar";
+ public static final String PATH_TO_BY_DAYS_OLD_METADATA_DRIVEN_ARTIFACT =
+ "org/codehaus/plexus/plexus-utils/1.4.3-SNAPSHOT/plexus-utils-1.4.3-20070113.163208-4.jar";
- public static final String PATH_TO_BY_RETENTION_COUNT_ARTIFACT = "org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar";
+ public static final String PATH_TO_BY_RETENTION_COUNT_ARTIFACT =
+ "org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar";
- public static final String PATH_TO_BY_RETENTION_COUNT_POM = "org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom";
+ public static final String PATH_TO_BY_RETENTION_COUNT_POM =
+ "org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom";
- public static final String PATH_TO_TEST_ORDER_OF_DELETION = "org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070615.105019-3.jar";
+ public static final String PATH_TO_TEST_ORDER_OF_DELETION =
+ "org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070615.105019-3.jar";
protected static final String RELEASES_TEST_REPO_ID = "releases-test-repo-one";
protected RepositoryListener listener;
+ protected RepositorySession repositorySession;
+
+ protected MetadataRepository metadataRepository;
+
@Override
protected void setUp()
throws Exception
{
super.setUp();
-
+
listenerControl = MockControl.createControl( RepositoryListener.class );
listener = (RepositoryListener) listenerControl.getMock();
+
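+ // mock the session/repository pair; the purge code retrieves the metadata repository via the session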
+ repositorySession = mock( RepositorySession.class );
+ metadataRepository = mock( MetadataRepository.class );
+ when( repositorySession.getRepository() ).thenReturn( metadataRepository );
}
-
+
@Override
protected void tearDown()
throws Exception
config.setSnapshots( true );
config.setDeleteReleasedSnapshots( true );
config.setRetentionCount( TEST_RETENTION_COUNT );
-
+
return config;
}
{
if ( repo == null )
{
- repo = (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class, "default" );
+ repo = (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class, "default" );
repo.setRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
}
{
assertTrue( "File should exist: " + path, new File( path ).exists() );
}
-
+
protected File getTestRepoRoot()
{
return getTestFile( "target/test-" + getName() + "/" + TEST_REPO_ID );
File testDir = getTestRepoRoot();
FileUtils.deleteDirectory( testDir );
FileUtils.copyDirectory( getTestFile( "target/test-classes/" + TEST_REPO_ID ), testDir );
-
+
File releasesTestDir = getTestFile( "target/test-" + getName() + "/" + RELEASES_TEST_REPO_ID );
FileUtils.deleteDirectory( releasesTestDir );
FileUtils.copyDirectory( getTestFile( "target/test-classes/" + RELEASES_TEST_REPO_ID ), releasesTestDir );
-
- return testDir.getAbsolutePath();
- }
- protected ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
- {
- return new ArchivaArtifact( groupId, artifactId, version, null, type, TEST_REPO_ID );
+ return testDir.getAbsolutePath();
}
}
import java.io.File;
import java.util.Collections;
+import java.util.List;
/**
public static final String PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO =
"org/apache/archiva/released-artifact-in-diff-repo/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.jar";
-
- public static final String PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO = "org/apache/maven/plugins/maven-source-plugin/2.0.3-SNAPSHOT/maven-source-plugin-2.0.3-SNAPSHOT.jar";
- public static final String PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO = "org/apache/maven/plugins/maven-plugin-plugin/2.3-SNAPSHOT/maven-plugin-plugin-2.3-SNAPSHOT.jar";
-
+ public static final String PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO =
+ "org/apache/maven/plugins/maven-source-plugin/2.0.3-SNAPSHOT/maven-source-plugin-2.0.3-SNAPSHOT.jar";
+
+ public static final String PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO =
+ "org/apache/maven/plugins/maven-plugin-plugin/2.3-SNAPSHOT/maven-plugin-plugin-2.3-SNAPSHOT.jar";
+
@Override
protected void setUp()
throws Exception
{
super.setUp();
-
+
MetadataTools metadataTools = (MetadataTools) lookup( MetadataTools.class );
- RepositoryContentFactory factory = (RepositoryContentFactory) lookup( RepositoryContentFactory.class, "cleanup-released-snapshots");
-
- archivaConfiguration =
- (ArchivaConfiguration) lookup( ArchivaConfiguration.class, "cleanup-released-snapshots" );
+ RepositoryContentFactory factory = (RepositoryContentFactory) lookup( RepositoryContentFactory.class,
+ "cleanup-released-snapshots" );
+
+ archivaConfiguration = (ArchivaConfiguration) lookup( ArchivaConfiguration.class,
+ "cleanup-released-snapshots" );
listenerControl = MockControl.createControl( RepositoryListener.class );
-
+
listener = (RepositoryListener) listenerControl.getMock();
- repoPurge =
- new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), metadataTools, archivaConfiguration, factory,
- Collections.singletonList( listener ) );
+ List<RepositoryListener> listeners = Collections.singletonList( listener );
+ repoPurge = new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), metadataTools, archivaConfiguration,
+ factory, repositorySession, listeners );
}
public void testReleasedSnapshotsExistsInSameRepo()
Configuration config = archivaConfiguration.getConfiguration();
config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
-
- String repoRoot = prepareTestRepos();
+
+ String repoRoot = prepareTestRepos();
// test listeners for the correct artifacts
- listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-plugin-plugin",
- "2.3-SNAPSHOT", "maven-plugin-plugin-2.3-SNAPSHOT.jar" );
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
+ "maven-plugin-plugin", "2.3-SNAPSHOT", "maven-plugin-plugin-2.3-SNAPSHOT.jar" );
listenerControl.replay();
-
+
repoPurge.process( PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
-
+
listenerControl.verify();
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-plugin-plugin";
-
+
// check if the snapshot was removed
assertDeleted( projectRoot + "/2.3-SNAPSHOT" );
assertDeleted( projectRoot + "/2.3-SNAPSHOT/maven-plugin-plugin-2.3-SNAPSHOT.jar" );
// check if metadata file was updated
File artifactMetadataFile = new File( projectRoot + "/maven-metadata.xml" );
-
+
String metadataXml = FileUtils.readFileToString( artifactMetadataFile, null );
-
- String expectedVersions = "<expected><versions><version>2.2</version>" +
- "<version>2.3</version></versions></expected>";
-
+
+ String expectedVersions =
+ "<expected><versions><version>2.2</version>" + "<version>2.3</version></versions></expected>";
+
XMLAssert.assertXpathEvaluatesTo( "2.3", "//metadata/versioning/release", metadataXml );
XMLAssert.assertXpathEvaluatesTo( "2.3", "//metadata/versioning/latest", metadataXml );
XMLAssert.assertXpathsEqual( "//expected/versions/version", expectedVersions,
"//metadata/versioning/versions/version", metadataXml );
XMLAssert.assertXpathEvaluatesTo( "20070315032817", "//metadata/versioning/lastUpdated", metadataXml );
}
-
+
public void testNonArtifactFile()
throws Exception
{
public void testReleasedSnapshotsExistsInDifferentRepo()
throws Exception
- {
+ {
Configuration config = archivaConfiguration.getConfiguration();
config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
config.addManagedRepository( getRepoConfiguration( RELEASES_TEST_REPO_ID, RELEASES_TEST_REPO_NAME ) );
-
- String repoRoot = prepareTestRepos();
+
+ String repoRoot = prepareTestRepos();
// test listeners for the correct artifacts
- listener.deleteArtifact( getRepository().getId(), "org.apache.archiva", "released-artifact-in-diff-repo",
- "1.0-SNAPSHOT", "released-artifact-in-diff-repo-1.0-SNAPSHOT.jar" );
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.archiva",
+ "released-artifact-in-diff-repo", "1.0-SNAPSHOT",
+ "released-artifact-in-diff-repo-1.0-SNAPSHOT.jar" );
listenerControl.replay();
-
+
repoPurge.process( PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO );
listenerControl.verify();
-
+
String projectRoot = repoRoot + "/org/apache/archiva/released-artifact-in-diff-repo";
-
+
// check if the snapshot was removed
assertDeleted( projectRoot + "/1.0-SNAPSHOT" );
assertDeleted( projectRoot + "/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.jar" );
assertDeleted( projectRoot + "/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.pom.md5" );
assertDeleted( projectRoot + "/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.pom.sha1" );
- String releasesProjectRoot =
- getTestFile( "target/test-" + getName() + "/releases-test-repo-one" ).getAbsolutePath() +
- "/org/apache/archiva/released-artifact-in-diff-repo";
-
+ String releasesProjectRoot = getTestFile(
+ "target/test-" + getName() + "/releases-test-repo-one" ).getAbsolutePath() +
+ "/org/apache/archiva/released-artifact-in-diff-repo";
+
// check if the released version was not removed
- assertExists( releasesProjectRoot + "/1.0" );
+ assertExists( releasesProjectRoot + "/1.0" );
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.jar" );
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.jar.md5" );
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.jar.sha1" );
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.pom" );
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.pom.md5" );
- assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.pom.sha1" );
+ assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.pom.sha1" );
}
public void testHigherSnapshotExistsInSameRepo()
throws Exception
- {
+ {
Configuration config = archivaConfiguration.getConfiguration();
config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
-
+
String repoRoot = prepareTestRepos();
// test listeners for the correct artifacts - no deletions
listenerControl.replay();
-
+
repoPurge.process( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO );
listenerControl.verify();
-
+
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-source-plugin";
-
+
// check if the snapshot was not removed
assertExists( projectRoot + "/2.0.3-SNAPSHOT" );
assertExists( projectRoot + "/2.0.3-SNAPSHOT/maven-source-plugin-2.0.3-SNAPSHOT.jar" );
File artifactMetadataFile = new File( projectRoot + "/maven-metadata.xml" );
String metadataXml = FileUtils.readFileToString( artifactMetadataFile, null );
-
+
String expectedVersions = "<expected><versions><version>2.0.3-SNAPSHOT</version>" +
- "<version>2.0.4-SNAPSHOT</version></versions></expected>";
-
+ "<version>2.0.4-SNAPSHOT</version></versions></expected>";
+
XMLAssert.assertXpathEvaluatesTo( "2.0.4-SNAPSHOT", "//metadata/versioning/latest", metadataXml );
XMLAssert.assertXpathsEqual( "//expected/versions/version", expectedVersions,
"//metadata/versioning/versions/version", metadataXml );
* under the License.
*/
+import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.commons.lang.time.DateUtils;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Collections;
+import java.util.List;
/**
*/
public void testByLastModified()
throws Exception
{
- repoPurge =
- new DaysOldRepositoryPurge( getRepository(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
- Collections.singletonList( listener ) );
+ ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
+ repoPurge = new DaysOldRepositoryPurge( getRepository(), repoConfiguration.getDaysOlder(),
+ repoConfiguration.getRetentionCount(), repositorySession,
+ Collections.singletonList( listener ) );
String repoRoot = prepareTestRepos();
setLastModified( projectRoot + "/2.2-SNAPSHOT/", OLD_TIMESTAMP );
// test listeners for the correct artifacts
- listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-install-plugin",
- "2.2-SNAPSHOT", "maven-install-plugin-2.2-SNAPSHOT.jar" );
- listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-install-plugin",
- "2.2-SNAPSHOT", "maven-install-plugin-2.2-SNAPSHOT.pom" );
- listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-install-plugin",
- "2.2-20061118.060401-2", "maven-install-plugin-2.2-20061118.060401-2.jar" );
- listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-install-plugin",
- "2.2-20061118.060401-2", "maven-install-plugin-2.2-20061118.060401-2.pom" );
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
+ "maven-install-plugin", "2.2-SNAPSHOT", "maven-install-plugin-2.2-SNAPSHOT.jar" );
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
+ "maven-install-plugin", "2.2-SNAPSHOT", "maven-install-plugin-2.2-SNAPSHOT.pom" );
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
+ "maven-install-plugin", "2.2-20061118.060401-2",
+ "maven-install-plugin-2.2-20061118.060401-2.jar" );
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
+ "maven-install-plugin", "2.2-20061118.060401-2",
+ "maven-install-plugin-2.2-20061118.060401-2.pom" );
listenerControl.replay();
repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT );
public void testOrderOfDeletion()
throws Exception
{
- repoPurge =
- new DaysOldRepositoryPurge( getRepository(), getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
- Collections.singletonList( listener ) );
+ ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
+ List<RepositoryListener> listeners = Collections.singletonList( listener );
+ repoPurge = new DaysOldRepositoryPurge( getRepository(), repoConfiguration.getDaysOlder(),
+ repoConfiguration.getRetentionCount(), repositorySession, listeners );
String repoRoot = prepareTestRepos();
setLastModified( projectRoot + "/1.1.2-SNAPSHOT/", OLD_TIMESTAMP );
// test listeners for the correct artifacts
- listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-assembly-plugin",
- "1.1.2-20070427.065136-1", "maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
- listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-assembly-plugin",
- "1.1.2-20070427.065136-1", "maven-assembly-plugin-1.1.2-20070427.065136-1.pom" );
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
+ "maven-assembly-plugin", "1.1.2-20070427.065136-1",
+ "maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
+ "maven-assembly-plugin", "1.1.2-20070427.065136-1",
+ "maven-assembly-plugin-1.1.2-20070427.065136-1.pom" );
listenerControl.replay();
repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );
public void testMetadataDrivenSnapshots()
throws Exception
{
- repoPurge =
- new DaysOldRepositoryPurge( getRepository(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
- Collections.singletonList( listener ) );
+ ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
+ List<RepositoryListener> listeners = Collections.singletonList( listener );
+ repoPurge = new DaysOldRepositoryPurge( getRepository(), repoConfiguration.getDaysOlder(),
+ repoConfiguration.getRetentionCount(), repositorySession, listeners );
String repoRoot = prepareTestRepos();
}
// test listeners for the correct artifacts
- listener.deleteArtifact( getRepository().getId(), "org.codehaus.plexus", "plexus-utils",
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.codehaus.plexus", "plexus-utils",
"1.4.3-20070113.163208-4", "plexus-utils-1.4.3-20070113.163208-4.jar" );
- listener.deleteArtifact( getRepository().getId(), "org.codehaus.plexus", "plexus-utils",
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.codehaus.plexus", "plexus-utils",
"1.4.3-20070113.163208-4", "plexus-utils-1.4.3-20070113.163208-4.pom" );
listenerControl.replay();
* under the License.
*/
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+import org.apache.archiva.metadata.repository.TestRepositorySessionFactory;
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.common.utils.BaseFile;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
FileTypes fileTypes = (FileTypes) lookup( FileTypes.class );
fileTypes.afterConfigurationChange( null, "repositoryScanning.fileTypes", null );
- KnownRepositoryContentConsumer repoPurgeConsumer =
- (KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class, "repository-purge" );
+ KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
+ KnownRepositoryContentConsumer.class, "repository-purge" );
File repoLocation = getTestFile( "target/test-" + getName() + "/test-repo" );
- File localFile =
- new File( repoLocation, path );
+ File localFile = new File( repoLocation, path );
ConsumerWantsFilePredicate predicate = new ConsumerWantsFilePredicate();
BaseFile baseFile = new BaseFile( repoLocation, localFile );
File[] contents = dir.listFiles();
for ( int i = 0; i < contents.length; i++ )
{
- contents[i].setLastModified( 1179382029 );
+ contents[i].setLastModified( 1179382029 );
}
}
public void testConsumerByRetentionCount()
throws Exception
{
- KnownRepositoryContentConsumer repoPurgeConsumer =
- (KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
- "repo-purge-consumer-by-retention-count" );
+ KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
+ KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-retention-count" );
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDaysOlder( 0 ); // force days older off to allow retention count purge to execute.
private void addRepoToConfiguration( String configHint, ManagedRepositoryConfiguration repoConfiguration )
throws Exception
{
- ArchivaConfiguration archivaConfiguration =
- (ArchivaConfiguration) lookup( ArchivaConfiguration.class, configHint );
+ ArchivaConfiguration archivaConfiguration = (ArchivaConfiguration) lookup( ArchivaConfiguration.class,
+ configHint );
Configuration configuration = archivaConfiguration.getConfiguration();
configuration.removeManagedRepository( configuration.findManagedRepositoryById( repoConfiguration.getId() ) );
configuration.addManagedRepository( repoConfiguration );
public void testConsumerByDaysOld()
throws Exception
{
- KnownRepositoryContentConsumer repoPurgeConsumer =
- (KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
- "repo-purge-consumer-by-days-old" );
+ KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
+ KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-days-old" );
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDaysOlder( TEST_DAYS_OLDER );
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070513.034619-5.pom" );
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070513.034619-5.pom.md5" );
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070513.034619-5.pom.sha1" );
-
+
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070510.010101-4.jar" );
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070510.010101-4.jar.md5" );
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070510.010101-4.jar.sha1" );
public void testReleasedSnapshotsWereNotCleaned()
throws Exception
{
- KnownRepositoryContentConsumer repoPurgeConsumer =
- (KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
- "repo-purge-consumer-by-retention-count" );
+ KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
+ KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-retention-count" );
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDeleteReleasedSnapshots( false ); // Set to NOT delete released snapshots.
String repoRoot = prepareTestRepos();
- repoPurgeConsumer.processFile( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
+ repoPurgeConsumer.processFile(
+ CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
// check if the snapshot wasn't removed
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-plugin-plugin";
public void testReleasedSnapshotsWereCleaned()
throws Exception
{
- KnownRepositoryContentConsumer repoPurgeConsumer =
- (KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
- "repo-purge-consumer-by-days-old" );
+ KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
+ KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-days-old" );
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDeleteReleasedSnapshots( true );
String repoRoot = prepareTestRepos();
- repoPurgeConsumer.processFile( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
+ repoPurgeConsumer.processFile(
+ CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-plugin-plugin";
"//metadata/versioning/versions/version", metadataXml );
XMLAssert.assertXpathEvaluatesTo( "20070315032817", "//metadata/versioning/lastUpdated", metadataXml );
}
+
+ @Override
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
+ factory.setRepository( metadataRepository );
+ }
}
package org.apache.maven.archiva.consumers.core.repository;
-import java.util.Collections;
-
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* under the License.
*/
+import org.apache.archiva.repository.events.RepositoryListener;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+
+import java.util.Collections;
+import java.util.List;
+
/**
* Test RetentionCountRepositoryPurge
- *
*/
public class RetentionCountRepositoryPurgeTest
extends AbstractRepositoryPurgeTest
{
-
protected void setUp()
throws Exception
{
super.setUp();
- repoPurge =
- new RetentionCountRepositoryPurge(
- getRepository(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
- Collections.singletonList( listener ) );
+ ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
+ List<RepositoryListener> listeners = Collections.singletonList( listener );
+ repoPurge = new RetentionCountRepositoryPurge( getRepository(), repoConfiguration.getRetentionCount(),
+ repositorySession, listeners );
}
/**
String repoRoot = prepareTestRepos();
// test listeners for the correct artifacts
- listener.deleteArtifact( getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
"1.0RC1-20070504.153317-1", "jruby-rake-plugin-1.0RC1-20070504.153317-1.jar" );
- listener.deleteArtifact( getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
"1.0RC1-20070504.153317-1", "jruby-rake-plugin-1.0RC1-20070504.153317-1.pom" );
- listener.deleteArtifact( getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
"1.0RC1-20070504.160758-2", "jruby-rake-plugin-1.0RC1-20070504.160758-2.jar" );
- listener.deleteArtifact( getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
"1.0RC1-20070504.160758-2", "jruby-rake-plugin-1.0RC1-20070504.160758-2.pom" );
listenerControl.replay();
-
+
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT );
-
+
listenerControl.verify();
String versionRoot = repoRoot + "/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT";
String repoRoot = prepareTestRepos();
// test listeners for the correct artifacts
- listener.deleteArtifact( getRepository().getId(), "org.codehaus.castor", "castor-anttasks",
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.codehaus.castor", "castor-anttasks",
"1.1.2-20070427.065136-1", "castor-anttasks-1.1.2-20070427.065136-1.jar" );
- listener.deleteArtifact( getRepository().getId(), "org.codehaus.castor", "castor-anttasks",
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.codehaus.castor", "castor-anttasks",
"1.1.2-20070427.065136-1", "castor-anttasks-1.1.2-20070427.065136-1.pom" );
listenerControl.replay();
-
+
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM );
-
+
listenerControl.verify();
String versionRoot = repoRoot + "/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT";
-
+
// assert if removed from repo
assertDeleted( versionRoot + "/castor-anttasks-1.1.2-20070427.065136-1.jar" );
assertDeleted( versionRoot + "/castor-anttasks-1.1.2-20070427.065136-1.jar.md5" );
String repoRoot = prepareTestRepos();
// test listeners for the correct artifacts
- listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-assembly-plugin",
- "1.1.2-20070427.065136-1", "maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
- listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-assembly-plugin",
- "1.1.2-20070427.065136-1", "maven-assembly-plugin-1.1.2-20070427.065136-1.pom" );
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
+ "maven-assembly-plugin", "1.1.2-20070427.065136-1",
+ "maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
+ listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
+ "maven-assembly-plugin", "1.1.2-20070427.065136-1",
+ "maven-assembly-plugin-1.1.2-20070427.065136-1.pom" );
listenerControl.replay();
-
+
repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );
listenerControl.verify();
- String versionRoot = repoRoot +
- "/org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT";
-
+ String versionRoot = repoRoot + "/org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT";
+
assertDeleted( versionRoot + "/maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
assertDeleted( versionRoot + "/maven-assembly-plugin-1.1.2-20070427.065136-1.jar.sha1" );
assertDeleted( versionRoot + "/maven-assembly-plugin-1.1.2-20070427.065136-1.jar.md5" );
<role>org.apache.maven.archiva.configuration.FileTypes</role>
<role-hint>retention-count</role-hint>
</requirement>
+ <requirement>
+ <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
+ </requirement>
</requirements>
<configuration>
<id>repository-purge</id>
<role>org.apache.maven.archiva.configuration.FileTypes</role>
<role-hint>days-old</role-hint>
</requirement>
+ <requirement>
+ <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
+ </requirement>
</requirements>
<configuration>
<id>repository-purge</id>
</requirement>
</requirements>
</component>
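+ <!-- test implementation of the session factory so consumers can obtain a RepositorySession in tests -->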
+ <component>
+ <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
+ <implementation>org.apache.archiva.metadata.repository.TestRepositorySessionFactory</implementation>
+ </component>
</components>
</component-set>
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
-import org.apache.archiva.metadata.repository.MetadataResolutionException;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
+import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
+import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
private List<String> includes = new ArrayList<String>();
/**
+ * FIXME: can be of other types
+ *
* @plexus.requirement
*/
- private MetadataRepository metadataRepository;
+ private RepositorySessionFactory repositorySessionFactory;
/**
* FIXME: this needs to be configurable based on storage type - and could also be instantiated per repo. Change to a
project.setId( artifact.getProject() );
String projectVersion = VersionUtil.getBaseVersion( artifact.getVersion() );
- // FIXME: maybe not too efficient since it may have already been read and stored for this artifact
- ProjectVersionMetadata versionMetadata = null;
+
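+ // process each artifact in its own session: save on success, revert on error, close in the finally block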
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
- versionMetadata = repositoryStorage.readProjectVersionMetadata( repoId, artifact.getNamespace(),
- artifact.getProject(), projectVersion );
- }
- catch ( MetadataResolutionException e )
- {
- log.warn( "Error occurred resolving POM for artifact: " + path + "; message: " + e.getMessage() );
- }
+ MetadataRepository metadataRepository = repositorySession.getRepository();
- boolean createVersionMetadata = false;
- if ( versionMetadata == null )
- {
- log.warn( "Missing or invalid POM for artifact: " + path + "; creating empty metadata" );
- versionMetadata = new ProjectVersionMetadata();
- versionMetadata.setId( projectVersion );
- versionMetadata.setIncomplete( true );
- createVersionMetadata = true;
- }
+ boolean createVersionMetadata = false;
+
+ // FIXME: maybe not too efficient since it may have already been read and stored for this artifact
+ ProjectVersionMetadata versionMetadata = null;
+ try
+ {
+ versionMetadata = repositoryStorage.readProjectVersionMetadata( repoId, artifact.getNamespace(),
+ artifact.getProject(), projectVersion );
+ }
+ catch ( RepositoryStorageMetadataNotFoundException e )
+ {
+ log.warn( "Missing or invalid POM for artifact: " + path + "; creating empty metadata" );
+
+ versionMetadata = new ProjectVersionMetadata();
+ versionMetadata.setId( projectVersion );
+ versionMetadata.setIncomplete( true );
+ createVersionMetadata = true;
+ }
+ catch ( RepositoryStorageMetadataInvalidException e )
+ {
+ log.warn( "Error occurred resolving POM for artifact: " + path + "; message: " + e.getMessage() );
+ }
- try
- {
- // FIXME: transaction
// read the metadata and update it if it is newer or doesn't exist
artifact.setWhenGathered( whenGathered );
metadataRepository.updateArtifact( repoId, project.getNamespace(), project.getId(), projectVersion,
versionMetadata );
}
metadataRepository.updateProject( repoId, project );
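+ // all updates succeeded; persist the accumulated metadata changes in one save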
+ repositorySession.save();
}
catch ( MetadataRepositoryException e )
{
log.warn( "Error occurred persisting metadata for artifact: " + path + "; message: " + e.getMessage(), e );
+ repositorySession.revert();
+ }
+ finally
+ {
+ repositorySession.close();
}
}
--- /dev/null
+package org.apache.archiva.metadata.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+public class TestRepositorySessionFactory
+ implements RepositorySessionFactory
+{
+ private MetadataResolver resolver;
+
+ public RepositorySession createSession()
+ {
+ return new RepositorySession( new TestMetadataRepository(), resolver );
+ }
+
+ public void setResolver( MetadataResolver resolver )
+ {
+ this.resolver = resolver;
+ }
+}
* under the License.
*/
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Collection;
-import java.util.Date;
-import java.util.Locale;
-
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.easymock.ArgumentsMatcher;
import org.easymock.MockControl;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.Date;
+import java.util.Locale;
+
/**
* AbstractProxyTestCase
*
protected static final String REPOPATH_LEGACY_MANAGED_TARGET = "target/test-repository/legacy-managed";
- protected static final ArgumentsMatcher customWagonGetIfNewerMatcher = new ArgumentsMatcher() {
+ protected static final ArgumentsMatcher customWagonGetIfNewerMatcher = new ArgumentsMatcher()
+ {
- public boolean matches(Object[] expected, Object[] actual) {
- if (expected.length < 1 || actual.length < 1)
+ public boolean matches( Object[] expected, Object[] actual )
+ {
+ if ( expected.length < 1 || actual.length < 1 )
{
return false;
}
- return MockControl.ARRAY_MATCHER.matches(ArrayUtils.remove(expected, 1), ArrayUtils.remove(actual, 1));
+ return MockControl.ARRAY_MATCHER.matches( ArrayUtils.remove( expected, 1 ), ArrayUtils.remove( actual,
+ 1 ) );
}
- public String toString(Object[] arguments) {
- return ArrayUtils.toString(arguments);
+ public String toString( Object[] arguments )
+ {
+ return ArrayUtils.toString( arguments );
}
};
- protected static final ArgumentsMatcher customWagonGetMatcher = new ArgumentsMatcher() {
+ protected static final ArgumentsMatcher customWagonGetMatcher = new ArgumentsMatcher()
+ {
- public boolean matches(Object[] expected, Object[] actual)
+ public boolean matches( Object[] expected, Object[] actual )
+ {
+ if ( expected.length == 2 && actual.length == 2 )
{
- if (expected.length == 2 && actual.length == 2)
+ if ( expected[0] == null && actual[0] == null )
{
- if (expected[0] == null && actual[0] == null)
- {
- return true;
- }
-
- if (expected[0] == null)
- {
- return actual[0] == null;
- }
+ return true;
+ }
- if (actual[0] == null)
- {
- return expected[0] == null;
- }
+ if ( expected[0] == null )
+ {
+ return actual[0] == null;
+ }
- return expected[0].equals(actual[0]);
+ if ( actual[0] == null )
+ {
+ return expected[0] == null;
}
- return false;
- }
- public String toString(Object[] arguments)
- {
- return ArrayUtils.toString(arguments);
+ return expected[0].equals( actual[0] );
}
- };
+ return false;
+ }
+
+ public String toString( Object[] arguments )
+ {
+ return ArrayUtils.toString( arguments );
+ }
+ };
protected MockControl wagonMockControl;
assertNotNull( "Actual File should not be null.", actualFile );
assertTrue( "Check actual file exists.", actualFile.exists() );
- assertEquals( "Check filename path is appropriate.", expectedFile.getCanonicalPath(), actualFile.getCanonicalPath() );
+ assertEquals( "Check filename path is appropriate.", expectedFile.getCanonicalPath(),
+ actualFile.getCanonicalPath() );
assertEquals( "Check file path matches.", expectedFile.getAbsolutePath(), actualFile.getAbsolutePath() );
String expectedContents = FileUtils.readFileToString( sourceFile, null );
assertNull( "Found file: " + downloadedFile + "; but was expecting a failure", downloadedFile );
}
- @SuppressWarnings("unchecked")
+ @SuppressWarnings( "unchecked" )
protected void assertNoTempFiles( File expectedFile )
{
File workingDir = expectedFile.getParentFile();
return;
}
- Collection<File> tmpFiles = FileUtils.listFiles( workingDir, new String[] { "tmp" }, false );
+ Collection<File> tmpFiles = FileUtils.listFiles( workingDir, new String[]{"tmp"}, false );
if ( !tmpFiles.isEmpty() )
{
StringBuffer emsg = new StringBuffer();
{
if ( !destination.exists() && !destination.mkdirs() )
{
- throw new IOException( "Could not create destination directory '"
- + destination.getAbsolutePath() + "'." );
+ throw new IOException(
+ "Could not create destination directory '" + destination.getAbsolutePath() + "'." );
}
copyDirectoryStructure( file, destination );
protected void saveConnector( String sourceRepoId, String targetRepoId, boolean disabled )
{
- saveConnector( sourceRepoId, targetRepoId, ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
- SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, disabled );
+ saveConnector( sourceRepoId, targetRepoId, ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, SnapshotsPolicy.ALWAYS,
+ CachedFailuresPolicy.NO, disabled );
}
protected void saveConnector( String sourceRepoId, String targetRepoId, String checksumPolicy, String releasePolicy,
}
protected void saveConnector( String sourceRepoId, String targetRepoId, String checksumPolicy, String releasePolicy,
- String snapshotPolicy, String cacheFailuresPolicy, String errorPolicy, boolean disabled )
+ String snapshotPolicy, String cacheFailuresPolicy, String errorPolicy,
+ boolean disabled )
{
saveConnector( sourceRepoId, targetRepoId, checksumPolicy, releasePolicy, snapshotPolicy, cacheFailuresPolicy,
errorPolicy, PropagateErrorsOnUpdateDownloadPolicy.NOT_PRESENT, disabled );
connectorConfig.addPolicy( ProxyConnectorConfiguration.POLICY_CACHE_FAILURES, cacheFailuresPolicy );
connectorConfig.addPolicy( ProxyConnectorConfiguration.POLICY_PROPAGATE_ERRORS, errorPolicy );
connectorConfig.addPolicy( ProxyConnectorConfiguration.POLICY_PROPAGATE_ERRORS_ON_UPDATE, errorOnUpdatePolicy );
- connectorConfig.setDisabled(disabled);
+ connectorConfig.setDisabled( disabled );
int count = config.getConfiguration().getProxyConnectors().size();
config.getConfiguration().addProxyConnector( connectorConfig );
config.triggerChange( prefix + ".policies.checksum", connectorConfig.getPolicy( "checksum", "" ) );
config.triggerChange( prefix + ".policies.snapshots", connectorConfig.getPolicy( "snapshots", "" ) );
config.triggerChange( prefix + ".policies.cache-failures", connectorConfig.getPolicy( "cache-failures", "" ) );
- config.triggerChange( prefix + ".policies.propagate-errors",
- connectorConfig.getPolicy( "propagate-errors", "" ) );
- config.triggerChange( prefix + ".policies.propagate-errors-on-update",
- connectorConfig.getPolicy( "propagate-errors-on-update", "" ) );
+ config.triggerChange( prefix + ".policies.propagate-errors", connectorConfig.getPolicy( "propagate-errors",
+ "" ) );
+ config.triggerChange( prefix + ".policies.propagate-errors-on-update", connectorConfig.getPolicy(
+ "propagate-errors-on-update", "" ) );
}
protected void saveManagedRepositoryConfig( String id, String name, String path, String layout )
/**
* {@inheritDoc}
+ *
* @see org.codehaus.plexus.spring.PlexusInSpringTestCase#getConfigLocation()
*/
@Override
config.getConfiguration().addManagedRepository( repoConfig );
// Setup target (proxied to) repository.
- saveRemoteRepositoryConfig( ID_PROXIED1, "Proxied Repository 1", new File( REPOPATH_PROXIED1 ).toURL()
- .toExternalForm(), "default" );
+ saveRemoteRepositoryConfig( ID_PROXIED1, "Proxied Repository 1", new File(
+ REPOPATH_PROXIED1 ).toURL().toExternalForm(), "default" );
// Setup target (proxied to) repository.
- saveRemoteRepositoryConfig( ID_PROXIED2, "Proxied Repository 2", new File( REPOPATH_PROXIED2 ).toURL()
- .toExternalForm(), "default" );
+ saveRemoteRepositoryConfig( ID_PROXIED2, "Proxied Repository 2", new File(
+ REPOPATH_PROXIED2 ).toURL().toExternalForm(), "default" );
// Setup target (proxied to) repository using legacy layout.
- saveRemoteRepositoryConfig( ID_LEGACY_PROXIED, "Proxied Legacy Repository", new File( REPOPATH_PROXIED_LEGACY )
- .toURL().toExternalForm(), "legacy" );
+ saveRemoteRepositoryConfig( ID_LEGACY_PROXIED, "Proxied Legacy Repository", new File(
+ REPOPATH_PROXIED_LEGACY ).toURL().toExternalForm(), "legacy" );
// Setup the proxy handler.
- proxyHandler = (RepositoryProxyConnectors) lookup( RepositoryProxyConnectors.class.getName() );
+ try
+ {
+ proxyHandler = (RepositoryProxyConnectors) lookup( RepositoryProxyConnectors.class.getName() );
+ }
+ catch ( Exception e )
+ {
+ // TODO: handle in plexus-spring instead
+ applicationContext.close();
+ throw e;
+ }
// Setup the wagon mock.
wagonMockControl = MockControl.createNiceControl( Wagon.class );
if ( !sourceDir.exists() )
{
// This is just a warning.
- System.err.println( "[WARN] Skipping setup of testable managed repository, source dir does not exist: "
- + sourceDir );
+ System.err.println(
+ "[WARN] Skipping setup of testable managed repository, source dir does not exist: " + sourceDir );
}
else
{
protected void assertNotModified( File file, long expectedModificationTime )
{
- assertEquals( "File <" + file.getAbsolutePath() + "> not have been modified.",
- expectedModificationTime, file.lastModified() );
+ assertEquals( "File <" + file.getAbsolutePath() + "> not have been modified.", expectedModificationTime,
+ file.lastModified() );
}
protected void assertNotExistsInManagedLegacyRepo( File file )
String managedLegacyPath = managedLegacyDir.getCanonicalPath();
String testFile = file.getCanonicalPath();
- assertTrue( "Unit Test Failure: File <" + testFile
- + "> should be have been defined within the legacy managed path of <" + managedLegacyPath + ">", testFile
- .startsWith( managedLegacyPath ) );
+ assertTrue( "Unit Test Failure: File <" + testFile +
+ "> should be have been defined within the legacy managed path of <" + managedLegacyPath + ">",
+ testFile.startsWith( managedLegacyPath ) );
assertFalse( "File < " + testFile + "> should not exist in managed legacy repository.", file.exists() );
}
String managedDefaultPath = managedDefaultDir.getCanonicalPath();
String testFile = file.getCanonicalPath();
- assertTrue( "Unit Test Failure: File <" + testFile
- + "> should be have been defined within the managed default path of <" + managedDefaultPath + ">", testFile
- .startsWith( managedDefaultPath ) );
+ assertTrue( "Unit Test Failure: File <" + testFile +
+ "> should be have been defined within the managed default path of <" + managedDefaultPath + ">",
+ testFile.startsWith( managedDefaultPath ) );
assertFalse( "File < " + testFile + "> should not exist in managed default repository.", file.exists() );
}
* under the License.
*/
-import java.io.File;
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.handler.AbstractHandler;
+import java.io.File;
+import java.io.IOException;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
/**
 * Integration test for connecting over an HTTP proxy.
- *
+ *
* @version $Id: ManagedDefaultTransferTest.java 677852 2008-07-18 08:16:24Z brett $
*/
public class HttpProxyTransferTest
throws Exception
{
super.setUp();
-
+
// Setup source repository (using default layout)
String repoPath = "target/test-repository/managed/" + getName();
repo.setLocation( repoPath );
repo.setLayout( "default" );
- ManagedRepositoryContent repoContent =
- (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class, "default" );
+ ManagedRepositoryContent repoContent = (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class,
+ "default" );
repoContent.setRepository( repo );
managedDefaultRepository = repoContent;
response.setStatus( HttpServletResponse.SC_OK );
response.getWriter().print( "get-default-layout-1.0.jar\n\n" );
assertNotNull( request.getHeader( "Proxy-Connection" ) );
-
+
( (Request) request ).setHandled( true );
}
};
proxyConfig.setProtocol( "http" );
proxyConfig.setId( PROXY_ID );
config.getConfiguration().addNetworkProxy( proxyConfig );
-
+
// Setup target (proxied to) repository.
RemoteRepositoryConfiguration repoConfig = new RemoteRepositoryConfiguration();
config.getConfiguration().addRemoteRepository( repoConfig );
// Setup the proxy handler.
- proxyHandler = (RepositoryProxyConnectors) lookup( RepositoryProxyConnectors.class.getName() );
+ try
+ {
+ proxyHandler = (RepositoryProxyConnectors) lookup( RepositoryProxyConnectors.class.getName() );
+ }
+ catch ( Exception e )
+ {
+ server.stop();
+ applicationContext.close();
+ throw e;
+ }
}
@Override
throws Exception
{
super.tearDown();
-
+
server.stop();
}
{
assertNull( System.getProperty( "http.proxyHost" ) );
assertNull( System.getProperty( "http.proxyPort" ) );
-
+
String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
// Configure Connector (usually done within archiva.xml configuration)
String expectedContents = FileUtils.readFileToString( sourceFile, null );
String actualContents = FileUtils.readFileToString( downloadedFile, null );
assertEquals( "Check file contents.", expectedContents, actualContents );
-
+
assertNull( System.getProperty( "http.proxyHost" ) );
assertNull( System.getProperty( "http.proxyPort" ) );
}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">
+
+ <bean id="repositorySessionFactory" class="org.apache.archiva.metadata.repository.TestRepositorySessionFactory"/>
+</beans>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+ xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>archiva-scheduler</artifactId>
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
<plugins>
* under the License.
*/
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.archiva.repository.scanner.RepositoryContentConsumers;
*/
private RepositoryStatisticsManager repositoryStatisticsManager;
+ /**
+ * TODO: may be different implementations
+ *
+ * @plexus.requirement
+ */
+ private RepositorySessionFactory repositorySessionFactory;
+
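Every consumer converted by this patch follows the same session lifecycle: create a session from the factory, work through it, save, and close in a finally block. A minimal sketch of that pattern, using only the API this patch introduces:

    RepositorySession repositorySession = repositorySessionFactory.createSession();
    try
    {
        MetadataRepository metadataRepository = repositorySession.getRepository();
        // ... read or update metadata through metadataRepository ...
        repositorySession.save();    // persist any changes made during the unit of work
    }
    finally
    {
        repositorySession.close();   // always release the session, even on failure
    }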
public void initialize()
throws InitializationException
{
long sinceWhen = RepositoryScanner.FRESH_SCAN;
long previousFileCount = 0;
- if ( !repoTask.isScanAll() )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ try
{
- RepositoryStatistics previousStats;
- try
+ if ( !repoTask.isScanAll() )
{
- previousStats = repositoryStatisticsManager.getLastStatistics( repoId );
+ RepositoryStatistics previousStats = repositoryStatisticsManager.getLastStatistics(
+ metadataRepository, repoId );
+ if ( previousStats != null )
+ {
+ sinceWhen = previousStats.getScanStartTime().getTime();
+ previousFileCount = previousStats.getTotalFileCount();
+ }
}
- catch ( MetadataRepositoryException e )
+
+ RepositoryScanStatistics stats;
+ try
{
- throw new TaskExecutionException( "Unable to get previous statistics: " + e.getMessage(), e );
+ stats = repoScanner.scan( arepo, sinceWhen );
}
- if ( previousStats != null )
+ catch ( RepositoryScannerException e )
{
- sinceWhen = previousStats.getScanStartTime().getTime();
- previousFileCount = previousStats.getTotalFileCount();
+ throw new TaskExecutionException( "Repository error when executing repository job.", e );
}
- }
- RepositoryScanStatistics stats;
- try
- {
- stats = repoScanner.scan( arepo, sinceWhen );
- }
- catch ( RepositoryScannerException e )
- {
- throw new TaskExecutionException( "Repository error when executing repository job.", e );
- }
+ log.info( "Finished first scan: " + stats.toDump( arepo ) );
- log.info( "Finished first scan: " + stats.toDump( arepo ) );
-
- // further statistics will be populated by the following method
- Date endTime = new Date( stats.getWhenGathered().getTime() + stats.getDuration() );
- try
- {
- repositoryStatisticsManager.addStatisticsAfterScan( repoId, stats.getWhenGathered(), endTime,
- stats.getTotalFileCount(),
+ // further statistics will be populated by the following method
+ Date endTime = new Date( stats.getWhenGathered().getTime() + stats.getDuration() );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, repoId, stats.getWhenGathered(),
+ endTime, stats.getTotalFileCount(),
stats.getTotalFileCount() - previousFileCount );
+ repositorySession.save();
}
catch ( MetadataRepositoryException e )
{
throw new TaskExecutionException( "Unable to store updated statistics: " + e.getMessage(), e );
}
+ finally
+ {
+ repositorySession.close();
+ }
// log.info( "Scanning for removed repository content" );
* under the License.
*/
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
import org.apache.maven.archiva.common.ArchivaException;
*/
private RepositoryStatisticsManager repositoryStatisticsManager;
+ /**
+ * TODO: could have multiple implementations
+ *
+ * @plexus.requirement
+ */
+ private RepositorySessionFactory repositorySessionFactory;
+
private static final String REPOSITORY_SCAN_GROUP = "rg";
private static final String REPOSITORY_JOB = "rj";
List<ManagedRepositoryConfiguration> repositories =
archivaConfiguration.getConfiguration().getManagedRepositories();
- for ( ManagedRepositoryConfiguration repoConfig : repositories )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- if ( repoConfig.isScanned() )
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ for ( ManagedRepositoryConfiguration repoConfig : repositories )
{
- try
+ if ( repoConfig.isScanned() )
{
- scheduleRepositoryJobs( repoConfig );
- }
- catch ( SchedulerException e )
- {
- throw new StartingException( "Unable to start scheduler: " + e.getMessage(), e );
- }
+ try
+ {
+ scheduleRepositoryJobs( repoConfig );
+ }
+ catch ( SchedulerException e )
+ {
+ throw new StartingException( "Unable to start scheduler: " + e.getMessage(), e );
+ }
- try
- {
- if ( !isPreviouslyScanned( repoConfig ) )
+ try
{
- queueInitialRepoScan( repoConfig );
+ if ( !isPreviouslyScanned( repoConfig, metadataRepository ) )
+ {
+ queueInitialRepoScan( repoConfig );
+ }
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ log.warn( "Unable to determine if a repository is already scanned, skipping initial scan: " +
+ e.getMessage(), e );
}
- }
- catch ( MetadataRepositoryException e )
- {
- log.warn( "Unable to determine if a repository is already scanned, skipping initial scan: " +
- e.getMessage(), e );
}
}
}
+ finally
+ {
+ repositorySession.close();
+ }
}
public void stop()
}
@SuppressWarnings( "unchecked" )
- private boolean isPreviouslyScanned( ManagedRepositoryConfiguration repoConfig )
+ private boolean isPreviouslyScanned( ManagedRepositoryConfiguration repoConfig,
+ MetadataRepository metadataRepository )
throws MetadataRepositoryException
{
- return repositoryStatisticsManager.getLastStatistics( repoConfig.getId() ) != null;
+ return repositoryStatisticsManager.getLastStatistics( metadataRepository, repoConfig.getId() ) != null;
}
// MRM-848: Pre-configured repository initially appear to be empty
--- /dev/null
+package org.apache.archiva.metadata.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+public class TestRepositorySessionFactory
+ implements RepositorySessionFactory
+{
+ private MetadataRepository repository;
+
+ private MetadataResolver resolver;
+
+ public RepositorySession createSession()
+ {
+ return new RepositorySession( repository, resolver );
+ }
+
+ public void setRepository( MetadataRepository repository )
+ {
+ this.repository = repository;
+ }
+
+ public void setResolver( MetadataResolver resolver )
+ {
+ this.resolver = resolver;
+ }
+}
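For context, a sketch of how a test might wire this factory; the mock comes from the mockito-all dependency added above, while the JUnit harness around it is assumed:

    MetadataRepository metadataRepository = mock( MetadataRepository.class );

    TestRepositorySessionFactory factory = new TestRepositorySessionFactory();
    factory.setRepository( metadataRepository );

    RepositorySession session = factory.createSession();
    try
    {
        // code under test obtains the mocked repository through the session
        assertSame( metadataRepository, session.getRepository() );
    }
    finally
    {
        session.close();
    }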
* under the License.
*/
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import java.util.Date;
import java.util.List;
+import static org.mockito.Mockito.mock;
+
/**
* ArchivaRepositoryScanningTaskExecutorTest
*
private TestConsumer testConsumer;
+ private MetadataRepository metadataRepository;
+
protected void setUp()
throws Exception
{
super.setUp();
- taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-repository-scanning" );
+ try
+ {
+ taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-repository-scanning" );
+ }
+ catch ( Exception e )
+ {
+ // TODO: handle cleanup in plexus-spring lookup method instead
+ applicationContext.close();
+ throw e;
+ }
File sourceRepoDir = new File( getBasedir(), "src/test/repositories/default-repository" );
repoDir = new File( getBasedir(), "target/default-repository" );
repositoryStatisticsManager = (RepositoryStatisticsManager) lookup( RepositoryStatisticsManager.class );
testConsumer = (TestConsumer) lookup( KnownRepositoryContentConsumer.class, "test-consumer" );
+
+ metadataRepository = mock( MetadataRepository.class );
}
protected void tearDown()
unprocessedResultList.size() );
// check correctness of new stats
- RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
+ RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
+ TEST_REPO_ID );
assertEquals( 0, newStats.getNewFileCount() );
assertEquals( 31, newStats.getTotalFileCount() );
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
unprocessedResultList.size() );
// check correctness of new stats
- RepositoryStatistics updatedStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
+ RepositoryStatistics updatedStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
+ TEST_REPO_ID );
assertEquals( 2, updatedStats.getNewFileCount() );
assertEquals( 33, updatedStats.getTotalFileCount() );
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
unprocessedResultList.size() );
// check correctness of new stats
- RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
+ RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
+ TEST_REPO_ID );
assertEquals( 2, newStats.getNewFileCount() );
assertEquals( 33, newStats.getTotalFileCount() );
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
unprocessedResultList.size() );
// check correctness of new stats
- RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
+ RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
+ TEST_REPO_ID );
assertEquals( 2, newStats.getNewFileCount() );
assertEquals( 33, newStats.getTotalFileCount() );
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
repoTask.setScanAll( true );
Date date = Calendar.getInstance().getTime();
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, new Date( date.getTime() - 1234567 ), date, 8,
- 8 );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, new Date(
+ date.getTime() - 1234567 ), date, 8, 8 );
taskExecutor.executeTask( repoTask );
stats.setTotalProjectCount( 5 );
stats.setTotalArtifactFileSize( 38545 );
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, new Date( date.getTime() - 1234567 ), date,
- 31, 31 );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, new Date(
+ date.getTime() - 1234567 ), date, 31, 31 );
}
}
* under the License.
*/
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
+import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
+
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
-import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
-
public class TestRepositoryStatisticsManager
implements RepositoryStatisticsManager
{
private Map<String, List<RepositoryStatistics>> repoStats = new HashMap<String, List<RepositoryStatistics>>();
- public RepositoryStatistics getLastStatistics( String repositoryId )
+ public RepositoryStatistics getLastStatistics( MetadataRepository metadataRepository, String repositoryId )
{
List<RepositoryStatistics> repositoryStatisticsList = getStatsList( repositoryId );
return !repositoryStatisticsList.isEmpty()
: null;
}
- public void addStatisticsAfterScan( String repositoryId, Date startTime, Date endTime, long totalFiles,
- long newFiles )
+ public void addStatisticsAfterScan( MetadataRepository metadataRepository, String repositoryId, Date startTime,
+ Date endTime, long totalFiles, long newFiles )
{
List<RepositoryStatistics> stats = getStatsList( repositoryId );
stats.add( repositoryStatistics );
}
- public void deleteStatistics( String repositoryId )
+ public void deleteStatistics( MetadataRepository metadataRepository, String repositoryId )
{
repoStats.remove( repositoryId );
}
- public List<RepositoryStatistics> getStatisticsInRange( String repositoryId, Date startDate, Date endDate )
+ public List<RepositoryStatistics> getStatisticsInRange( MetadataRepository metadataRepository, String repositoryId,
+ Date startDate, Date endDate )
{
throw new UnsupportedOperationException();
}
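A bare Mockito mock is sufficient in the executor test above precisely because this stub ignores the repository argument and keys everything off the repository id; a hedged illustration (repository id invented):

    MetadataRepository metadataRepository = mock( MetadataRepository.class );
    RepositoryStatisticsManager manager = new TestRepositoryStatisticsManager();

    Date endTime = new Date();
    Date startTime = new Date( endTime.getTime() - 1000 );
    manager.addStatisticsAfterScan( metadataRepository, "internal", startTime, endTime, 31, 31 );

    // the mock is never consulted; statistics are tracked per repository id
    assertNotNull( manager.getLastStatistics( metadataRepository, "internal" ) );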
<requirement>
<role>org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager</role>
</requirement>
+ <requirement>
+ <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
+ </requirement>
</requirements>
</component>
</requirement>
</requirements>
</component>
+
+ <component>
+ <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
+ <implementation>org.apache.archiva.metadata.repository.TestRepositorySessionFactory</implementation>
+ </component>
</components>
</component-set>
* under the License.
*/
-import org.apache.archiva.metadata.repository.MetadataRepository;
-
public abstract class AbstractArtifactsRssFeedProcessor
implements RssFeedProcessor
{
- /**
- * @plexus.requirement
- */
- protected MetadataRepository metadataRepository;
-
protected abstract String getTitle();
protected abstract String getDescription();
-
- public void setMetadataRepository( MetadataRepository metadataRepository )
- {
- this.metadataRepository = metadataRepository;
- }
}
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.io.FeedException;
import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.rss.RssFeedEntry;
import org.apache.archiva.rss.RssFeedGenerator;
* Process the newly discovered artifacts in the repository. Generate feeds for new artifacts in the repository and
 * new versions of artifacts.
*/
- public SyndFeed process( Map<String, String> reqParams )
+ public SyndFeed process( Map<String, String> reqParams, MetadataRepository metadataRepository )
throws FeedException
{
log.debug( "Process new artifacts into rss feeds." );
String repoId = reqParams.get( RssFeedProcessor.KEY_REPO_ID );
if ( repoId != null )
{
- return processNewArtifactsInRepo( repoId );
+ return processNewArtifactsInRepo( repoId, metadataRepository );
}
return null;
}
- private SyndFeed processNewArtifactsInRepo( String repoId )
+ private SyndFeed processNewArtifactsInRepo( String repoId, MetadataRepository metadataRepository )
throws FeedException
{
Calendar greaterThanThisDate = Calendar.getInstance( GMT_TIME_ZONE );
greaterThanThisDate.add( Calendar.DATE, -( getNumberOfDaysBeforeNow() ) );
greaterThanThisDate.clear( Calendar.MILLISECOND );
- List<ArtifactMetadata> artifacts = null;
+ List<ArtifactMetadata> artifacts;
try
{
artifacts = metadataRepository.getArtifactsByDateRange( repoId, greaterThanThisDate.getTime(), null );
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.io.FeedException;
import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.rss.RssFeedEntry;
/**
* Process all versions of the artifact which had a rss feed request.
*/
- public SyndFeed process( Map<String, String> reqParams )
+ public SyndFeed process( Map<String, String> reqParams, MetadataRepository metadataRepository )
throws FeedException
{
String groupId = reqParams.get( RssFeedProcessor.KEY_GROUP_ID );
if ( groupId != null && artifactId != null )
{
- return processNewVersionsOfArtifact( groupId, artifactId );
+ return processNewVersionsOfArtifact( groupId, artifactId, metadataRepository );
}
return null;
}
- private SyndFeed processNewVersionsOfArtifact( String groupId, String artifactId )
+ private SyndFeed processNewVersionsOfArtifact( String groupId, String artifactId,
+ MetadataRepository metadataRepository )
throws FeedException
{
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>();
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.io.FeedException;
+import org.apache.archiva.metadata.repository.MetadataRepository;
import java.util.Map;
public static final String KEY_ARTIFACT_ID = "artifactId";
- SyndFeed process( Map<String, String> reqParams )
+ SyndFeed process( Map<String, String> reqParams, MetadataRepository metadataRepository )
throws FeedException;
}
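Callers now supply the repository themselves, typically from a session they own. A sketch of the calling convention, assuming a processor, request-parameter map, and session factory already in scope:

    RepositorySession repositorySession = repositorySessionFactory.createSession();
    try
    {
        SyndFeed feed = processor.process( reqParams, repositorySession.getRepository() );
        // ... render or cache the feed ...
    }
    finally
    {
        repositorySession.close();
    }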
newArtifactsProcessor.setGenerator( new RssFeedGenerator() );
metadataRepository = new MetadataRepositoryMock();
- newArtifactsProcessor.setMetadataRepository( metadataRepository );
}
@SuppressWarnings( "unchecked" )
Map<String, String> reqParams = new HashMap<String, String>();
reqParams.put( RssFeedProcessor.KEY_REPO_ID, TEST_REPO );
- SyndFeed feed = newArtifactsProcessor.process( reqParams );
+ SyndFeed feed = newArtifactsProcessor.process( reqParams, metadataRepository );
// check that the date used in the call is close to the one passed (5 seconds difference at most)
Calendar cal = Calendar.getInstance( TimeZone.getTimeZone( "GMT" ) );
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
- newVersionsProcessor.setMetadataRepository( metadataRepository );
}
- @SuppressWarnings("unchecked")
+ @SuppressWarnings( "unchecked" )
public void testProcess()
throws Exception
{
reqParams.put( RssFeedProcessor.KEY_GROUP_ID, GROUP_ID );
reqParams.put( RssFeedProcessor.KEY_ARTIFACT_ID, ARTIFACT_ID );
- metadataRepositoryControl.expectAndReturn( metadataRepository.getRepositories(),
- Collections.singletonList( TEST_REPO ) );
- metadataRepositoryControl.expectAndReturn(
- metadataRepository.getProjectVersions( TEST_REPO, GROUP_ID, ARTIFACT_ID ),
- Arrays.asList( "1.0.1", "1.0.2", "1.0.3-SNAPSHOT" ) );
- metadataRepositoryControl.expectAndReturn(
- metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.1" ),
- Collections.singletonList( artifact1 ) );
- metadataRepositoryControl.expectAndReturn(
- metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.2" ),
- Collections.singletonList( artifact2 ) );
- metadataRepositoryControl.expectAndReturn(
- metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.3-SNAPSHOT" ),
- Collections.singletonList( artifact3 ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getRepositories(), Collections.singletonList(
+ TEST_REPO ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjectVersions( TEST_REPO, GROUP_ID,
+ ARTIFACT_ID ), Arrays.asList(
+ "1.0.1", "1.0.2", "1.0.3-SNAPSHOT" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID,
+ "1.0.1" ),
+ Collections.singletonList( artifact1 ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID,
+ "1.0.2" ),
+ Collections.singletonList( artifact2 ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID,
+ "1.0.3-SNAPSHOT" ),
+ Collections.singletonList( artifact3 ) );
metadataRepositoryControl.replay();
- SyndFeed feed = newVersionsProcessor.process( reqParams );
+ SyndFeed feed = newVersionsProcessor.process( reqParams, metadataRepository );
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two'", feed.getTitle() );
assertEquals( "New versions of artifact 'org.apache.archiva:artifact-two' found during repository scan.",
assertEquals( 2, entries.size() );
- assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two' as of " + whenGathered,
- entries.get( 0 ).getTitle() );
+ assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two' as of " + whenGathered, entries.get(
+ 0 ).getTitle() );
assertEquals( whenGathered, entries.get( 0 ).getPublishedDate() );
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two' as of " + whenGatheredNext,
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.archiva</groupId>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>1.4</version>
- </dependency>
+ </dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<version>1.0.1</version>
<scope>test</scope>
<exclusions>
- <exclusion>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
- </exclusion>
+ <exclusion>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
</exclusions>
</dependency>
<groupId>jaxen</groupId>
<artifactId>jaxen</artifactId>
</dependency>
-
+
<!-- Dependencies below are provided by the appserver -->
<dependency>
<groupId>org.apache.derby</groupId>
</execution>
</executions>
</plugin>
-
+
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<name>browser</name>
<value>${selenium.browser}</value>
</property>
- <property>
+ <property>
<name>baseUrl</name>
<value>${baseUrl}</value>
</property>
<goal>copy-dependencies</goal>
</goals>
<configuration>
+ <!-- TODO: this sometimes copies everything, causing problems with the server start up -->
<includeGroupIds>org.apache.derby,javax.mail,javax.activation</includeGroupIds>
<overWriteReleases>false</overWriteReleases>
<overWriteSnapshots>true</overWriteSnapshots>
<configuration>
<tasks>
<copy todir="${project.build.directory}/appserver-base">
- <fileset dir="src/test/resources/appserver-base" />
+ <fileset dir="src/test/resources/appserver-base"/>
</copy>
<copy todir="${project.build.directory}/repository">
- <fileset dir="src/test/resources/repository" />
+ <fileset dir="src/test/resources/repository"/>
</copy>
<copy todir="${project.build.directory}/index">
- <fileset dir="src/test/resources/index" />
+ <fileset dir="src/test/resources/index"/>
</copy>
<copy todir="${project.build.directory}/snapshots">
- <fileset dir="src/test/resources/snapshots" />
+ <fileset dir="src/test/resources/snapshots"/>
</copy>
<copy todir="${project.build.directory}/projects">
- <fileset dir="src/test/resources/projects" />
+ <fileset dir="src/test/resources/projects"/>
</copy>
<copy todir="${project.build.directory}/local-repo">
- <fileset dir="src/test/resources/local-repo" />
+ <fileset dir="src/test/resources/local-repo"/>
</copy>
</tasks>
</configuration>
<configuration>
<tasks>
<copy todir="${project.build.directory}/${container.name}conf">
- <fileset dir="src/test/${container.name}" />
+ <fileset dir="src/test/${container.name}"/>
</copy>
<copy
- todir="${cargo.install.dir}/${container.name}/apache-tomcat-${tomcat5x.version}/apache-tomcat-${tomcat5x.version}/common/lib">
+ todir="${cargo.install.dir}/${container.name}/apache-tomcat-${tomcat5x.version}/apache-tomcat-${tomcat5x.version}/common/lib">
<fileset dir="${project.build.directory}/providedDependencies">
- <include name="**/*.jar" />
+ <include name="**/*.jar"/>
</fileset>
</copy>
</tasks>
<phase>integration-test</phase>
<configuration>
<tasks>
- <get src="http://localhost:9696/archiva/" dest="${project.build.directory}/index.html" />
- <delete file="${project.build.directory}/index.html" />
+ <get src="http://localhost:9696/archiva/" dest="${project.build.directory}/index.html"/>
+ <delete file="${project.build.directory}/index.html"/>
</tasks>
</configuration>
<goals>
<configuration>
<background>true</background>
<port>${seleniumPort}</port>
- <logOutput>true</logOutput>
+ <logOutput>true</logOutput>
</configuration>
</execution>
</executions>
</activation>
<properties>
<container.name>tomcat5x</container.name>
- <container.url>http://archive.apache.org/dist/tomcat/tomcat-5/v${tomcat5x.version}/bin/apache-tomcat-${tomcat5x.version}.zip</container.url>
+ <container.url>
+ http://archive.apache.org/dist/tomcat/tomcat-5/v${tomcat5x.version}/bin/apache-tomcat-${tomcat5x.version}.zip
+ </container.url>
</properties>
</profile>
<profile>
</activation>
<properties>
<selenium.browser>*firefox</selenium.browser>
- <excluded.groups />
+ <excluded.groups/>
</properties>
</profile>
<profile>
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.MetadataResolver;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.storage.maven2.MavenProjectFacet;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
public class BrowseAction
extends AbstractRepositoryBasedAction
{
- /**
- * @plexus.requirement
- */
- private MetadataResolver metadataResolver;
-
private String groupId;
private String artifactId;
// TODO: this logic should be optional, particularly remembering we want to keep this code simple
// it is located here to avoid the content repository implementation needing to do too much for what
// is essentially presentation code
- Set<String> namespacesToCollapse = new LinkedHashSet<String>();
- for ( String repoId : selectedRepos )
+ Set<String> namespacesToCollapse;
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- namespacesToCollapse.addAll( metadataResolver.resolveRootNamespaces( repoId ) );
- }
+ MetadataResolver metadataResolver = repositorySession.getResolver();
+ namespacesToCollapse = new LinkedHashSet<String>();
+ for ( String repoId : selectedRepos )
+ {
+ namespacesToCollapse.addAll( metadataResolver.resolveRootNamespaces( repositorySession, repoId ) );
+ }
- for ( String n : namespacesToCollapse )
+ for ( String n : namespacesToCollapse )
+ {
+ // TODO: check performance of this
+ namespaces.add( collapseNamespaces( repositorySession, metadataResolver, selectedRepos, n ) );
+ }
+ }
+ finally
{
- // TODO: check performance of this
- namespaces.add( collapseNamespaces( selectedRepos, n ) );
+ repositorySession.close();
}
this.namespaces = getSortedList( namespaces );
return SUCCESS;
}
- private String collapseNamespaces( Collection<String> repoIds, String n )
+ private String collapseNamespaces( RepositorySession repositorySession, MetadataResolver metadataResolver,
+ Collection<String> repoIds, String n )
throws MetadataResolutionException
{
Set<String> subNamespaces = new LinkedHashSet<String>();
for ( String repoId : repoIds )
{
- subNamespaces.addAll( metadataResolver.resolveNamespaces( repoId, n ) );
+ subNamespaces.addAll( metadataResolver.resolveNamespaces( repositorySession, repoId, n ) );
}
if ( subNamespaces.size() != 1 )
{
{
for ( String repoId : repoIds )
{
- Collection<String> projects = metadataResolver.resolveProjects( repoId, n );
+ Collection<String> projects = metadataResolver.resolveProjects( repositorySession, repoId, n );
if ( projects != null && !projects.isEmpty() )
{
if ( log.isDebugEnabled() )
return n;
}
}
- return collapseNamespaces( repoIds, n + "." + subNamespaces.iterator().next() );
+ return collapseNamespaces( repositorySession, metadataResolver, repoIds,
+ n + "." + subNamespaces.iterator().next() );
}
}
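For intuition: the recursion keeps descending while a level resolves to exactly one sub-namespace and contains no projects, so sparse group-id trees collapse to a single browse entry. A hypothetical illustration (repository contents invented):

    // Given repositories whose only project lives under org.apache.archiva:
    collapseNamespaces( repositorySession, metadataResolver, selectedRepos, "org" );
    // -> "org.apache.archiva": each intermediate level had one child and no projects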
Set<String> projects = new LinkedHashSet<String>();
- Set<String> namespacesToCollapse = new LinkedHashSet<String>();
- for ( String repoId : selectedRepos )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ Set<String> namespaces;
+ try
{
- namespacesToCollapse.addAll( metadataResolver.resolveNamespaces( repoId, groupId ) );
+ MetadataResolver metadataResolver = repositorySession.getResolver();
- projects.addAll( metadataResolver.resolveProjects( repoId, groupId ) );
- }
+ Set<String> namespacesToCollapse = new LinkedHashSet<String>();
+ for ( String repoId : selectedRepos )
+ {
+ namespacesToCollapse.addAll( metadataResolver.resolveNamespaces( repositorySession, repoId, groupId ) );
- // TODO: this logic should be optional, particularly remembering we want to keep this code simple
- // it is located here to avoid the content repository implementation needing to do too much for what
- // is essentially presentation code
- Set<String> namespaces = new LinkedHashSet<String>();
- for ( String n : namespacesToCollapse )
+ projects.addAll( metadataResolver.resolveProjects( repositorySession, repoId, groupId ) );
+ }
+
+ // TODO: this logic should be optional, particularly remembering we want to keep this code simple
+ // it is located here to avoid the content repository implementation needing to do too much for what
+ // is essentially presentation code
+ namespaces = new LinkedHashSet<String>();
+ for ( String n : namespacesToCollapse )
+ {
+ // TODO: check performance of this
+ namespaces.add( collapseNamespaces( repositorySession, metadataResolver, selectedRepos,
+ groupId + "." + n ) );
+ }
+ }
+ finally
{
- // TODO: check performance of this
- namespaces.add( collapseNamespaces( selectedRepos, groupId + "." + n ) );
+ repositorySession.close();
}
this.namespaces = getSortedList( namespaces );
return GlobalResults.ACCESS_TO_NO_REPOS;
}
- Set<String> versions = new LinkedHashSet<String>();
- for ( String repoId : selectedRepos )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- versions.addAll( metadataResolver.resolveProjectVersions( repoId, groupId, artifactId ) );
- }
+ MetadataResolver metadataResolver = repositorySession.getResolver();
- // TODO: sort by known version ordering method
- this.projectVersions = new ArrayList<String>( versions );
+ Set<String> versions = new LinkedHashSet<String>();
+ for ( String repoId : selectedRepos )
+ {
+ versions.addAll( metadataResolver.resolveProjectVersions( repositorySession, repoId, groupId,
+ artifactId ) );
+ }
- populateSharedModel( selectedRepos, versions );
+ // TODO: sort by known version ordering method
+ this.projectVersions = new ArrayList<String>( versions );
+
+ populateSharedModel( repositorySession, metadataResolver, selectedRepos, versions );
+ }
+ finally
+ {
+ repositorySession.close();
+ }
return SUCCESS;
}
- private void populateSharedModel( Collection<String> selectedRepos, Collection<String> projectVersions )
+ private void populateSharedModel( RepositorySession repositorySession, MetadataResolver metadataResolver,
+ Collection<String> selectedRepos, Collection<String> projectVersions )
{
sharedModel = new ProjectVersionMetadata();
{
try
{
- versionMetadata = metadataResolver.resolveProjectVersion( repoId, groupId, artifactId,
- version );
+ versionMetadata = metadataResolver.resolveProjectVersion( repositorySession, repoId, groupId,
+ artifactId, version );
}
catch ( MetadataResolutionException e )
{
return sharedModel;
}
- public MetadataResolver getMetadataResolver()
- {
- return metadataResolver;
- }
-
public Collection<String> getProjectIds()
{
return projectIds;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
private ChecksumAlgorithm[] algorithms = new ChecksumAlgorithm[]{ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5};
- /**
- * @plexus.requirement
- */
- private MetadataRepository metadataRepository;
-
public String getGroupId()
{
return groupId;
public String doDelete()
{
- try
- {
- Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
+ Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
- TimeZone timezone = TimeZone.getTimeZone( "UTC" );
- DateFormat fmt = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
- fmt.setTimeZone( timezone );
- ManagedRepositoryConfiguration repoConfig = configuration.getConfiguration().findManagedRepositoryById(
- repositoryId );
+ TimeZone timezone = TimeZone.getTimeZone( "UTC" );
+ DateFormat fmt = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
+ fmt.setTimeZone( timezone );
+ ManagedRepositoryConfiguration repoConfig = configuration.getConfiguration().findManagedRepositoryById(
+ repositoryId );
- VersionedReference ref = new VersionedReference();
- ref.setArtifactId( artifactId );
- ref.setGroupId( groupId );
- ref.setVersion( version );
+ VersionedReference ref = new VersionedReference();
+ ref.setArtifactId( artifactId );
+ ref.setGroupId( groupId );
+ ref.setVersion( version );
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
+ {
ManagedRepositoryContent repository = repositoryFactory.getManagedRepositoryContent( repositoryId );
String path = repository.toMetadataPath( ref );
updateMetadata( metadata, metadataFile, lastUpdatedTimestamp );
+ MetadataRepository metadataRepository = repositorySession.getRepository();
Collection<ArtifactMetadata> artifacts = metadataRepository.getArtifacts( repositoryId, groupId, artifactId,
version );
// repository metadata to an artifact
for ( RepositoryListener listener : listeners )
{
- listener.deleteArtifact( repository.getId(), artifact.getNamespace(), artifact.getProject(),
- artifact.getVersion(), artifact.getId() );
+ listener.deleteArtifact( metadataRepository, repository.getId(), artifact.getNamespace(),
+ artifact.getProject(), artifact.getVersion(), artifact.getId() );
}
triggerAuditEvent( repositoryId, path, AuditEvent.REMOVE_FILE );
}
}
-
- String msg = "Artifact \'" + groupId + ":" + artifactId + ":" + version +
- "\' was successfully deleted from repository \'" + repositoryId + "\'";
-
- addActionMessage( msg );
-
- reset();
- return SUCCESS;
+ repositorySession.save();
}
catch ( ContentNotFoundException e )
{
addActionError( "Repository exception: " + e.getMessage() );
return ERROR;
}
+ finally
+ {
+ repositorySession.close();
+ }
+
+ String msg = "Artifact \'" + groupId + ":" + artifactId + ":" + version +
+ "\' was successfully deleted from repository \'" + repositoryId + "\'";
+
+ addActionMessage( msg );
+
+ reset();
+ return SUCCESS;
}
private File getMetadata( String targetPath )
{
this.configuration = configuration;
}
-
- public void setMetadataRepository( MetadataRepository metadataRepository )
- {
- this.metadataRepository = metadataRepository;
- }
}
* under the License.
*/
-import com.opensymphony.xwork2.Validateable;
import com.opensymphony.xwork2.Preparable;
-import org.apache.archiva.audit.Auditable;
+import com.opensymphony.xwork2.Validateable;
import org.apache.archiva.audit.AuditEvent;
-import org.apache.archiva.stagerepository.merge.Maven2RepositoryMerger;
+import org.apache.archiva.audit.Auditable;
import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.filter.Filter;
import org.apache.archiva.metadata.repository.filter.IncludesFilter;
-import org.apache.archiva.metadata.repository.MetadataRepository;
-import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.archiva.stagerepository.merge.Maven2RepositoryMerger;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.web.action.admin.SchedulerAction;
-import java.util.List;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
+import java.util.List;
/**
* @plexus.component role="com.opensymphony.xwork2.Action" role-hint="mergeAction" instantiation-strategy="per-lookup"
*/
-public class
- MergeAction
+public class MergeAction
extends PlexusActionSupport
implements Validateable, Preparable, Auditable
*/
protected ArchivaConfiguration archivaConfiguration;
- /**
- * @plexus.requirement role-hint="default"
- */
- private MetadataRepository metadataRepository;
-
/**
* @plexus.requirement role="com.opensymphony.xwork2.Action" role-hint="schedulerAction"
*/
public String doMerge()
throws Exception
{
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
+ MetadataRepository metadataRepository = repositorySession.getRepository();
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( sourceRepoId );
if ( repository.isReleases() && !repository.isSnapshots() )
{
- mergeWithOutSnapshots( sourceArtifacts, sourceRepoId, repoid );
+ mergeWithOutSnapshots( metadataRepository, sourceArtifacts, sourceRepoId, repoid );
}
else
{
- repositoryMerger.merge( sourceRepoId, repoid );
+ repositoryMerger.merge( metadataRepository, sourceRepoId, repoid );
for ( ArtifactMetadata metadata : sourceArtifacts )
{
addActionError( "Error occurred while merging the repositories." );
return ERROR;
}
+ finally
+ {
+ repositorySession.close();
+ }
}
public String mergeBySkippingConflicts()
{
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
+ MetadataRepository metadataRepository = repositorySession.getRepository();
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( sourceRepoId );
sourceArtifacts.removeAll( conflictSourceArtifacts );
if ( repository.isReleases() && !repository.isSnapshots() )
{
- mergeWithOutSnapshots( sourceArtifacts, sourceRepoId, repoid );
+ mergeWithOutSnapshots( metadataRepository, sourceArtifacts, sourceRepoId, repoid );
}
else
{
- Filter<ArtifactMetadata> artifactsWithOutConflicts =
- new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
- repositoryMerger.merge( sourceRepoId, repoid, artifactsWithOutConflicts );
+ Filter<ArtifactMetadata> artifactsWithOutConflicts = new IncludesFilter<ArtifactMetadata>(
+ sourceArtifacts );
+ repositoryMerger.merge( metadataRepository, sourceRepoId, repoid, artifactsWithOutConflicts );
for ( ArtifactMetadata metadata : sourceArtifacts )
{
triggerAuditEvent( repoid, metadata.getId(), AuditEvent.MERGING_REPOSITORIES );
addActionError( "Error occurred while merging the repositories." );
return ERROR;
}
+ finally
+ {
+ repositorySession.close();
+ }
}
- public String mergeWithOutConlficts()
+ public String mergeWithOutConlficts()
{
-
sourceRepoId = repoid + "-stage";
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
- conflictSourceArtifacts = repositoryMerger.getConflictingArtifacts( sourceRepoId, repoid );
+ conflictSourceArtifacts = repositoryMerger.getConflictingArtifacts( repositorySession.getRepository(),
+ sourceRepoId, repoid );
}
catch ( Exception e )
{
addActionError( "Error occurred while merging the repositories." );
return ERROR;
}
+ finally
+ {
+ repositorySession.close();
+ }
addActionMessage( "Repository '" + sourceRepoId + "' successfully merged to '" + repoid + "'." );
throws Exception
{
sourceRepoId = repoid + "-stage";
- conflictSourceArtifacts = repositoryMerger.getConflictingArtifacts( sourceRepoId, repoid );
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
+ {
+ conflictSourceArtifacts = repositoryMerger.getConflictingArtifacts( repositorySession.getRepository(),
+ sourceRepoId, repoid );
+ }
+ finally
+ {
+ repositorySession.close();
+ }
this.scheduler.setRepoid( repoid );
-
+
Configuration config = archivaConfiguration.getConfiguration();
this.repository = config.findManagedRepositoryById( repoid );
setConflictSourceArtifactsToBeDisplayed( conflictSourceArtifacts );
}
}
- private void mergeWithOutSnapshots( List<ArtifactMetadata> sourceArtifacts, String sourceRepoId, String repoid )
+ private void mergeWithOutSnapshots( MetadataRepository metadataRepository, List<ArtifactMetadata> sourceArtifacts,
+ String sourceRepoId, String repoid )
throws Exception
{
List<ArtifactMetadata> artifactsWithOutSnapshots = new ArrayList<ArtifactMetadata>();
sourceArtifacts.removeAll( artifactsWithOutSnapshots );
Filter<ArtifactMetadata> artifactListWithOutSnapShots = new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
- repositoryMerger.merge( sourceRepoId, repoid, artifactListWithOutSnapShots );
+ repositoryMerger.merge( metadataRepository, sourceRepoId, repoid, artifactListWithOutSnapShots );
}
}
import org.apache.archiva.audit.AuditEvent;\r
import org.apache.archiva.audit.AuditListener;\r
import org.apache.archiva.audit.Auditable;\r
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;\r
import org.apache.maven.archiva.security.ArchivaXworkUser;\r
import org.apache.struts2.ServletActionContext;\r
import org.apache.struts2.interceptor.SessionAware;\r
*/\r
private List<AuditListener> auditListeners = new ArrayList<AuditListener>();\r
\r
+ /**\r
+ * @plexus.requirement\r
+ */\r
+ protected RepositorySessionFactory repositorySessionFactory;\r
+\r
private String principal;\r
\r
- @SuppressWarnings("unchecked")\r
+ @SuppressWarnings( "unchecked" )\r
public void setSession( Map map )\r
{\r
this.session = map;\r
{\r
AuditEvent event = new AuditEvent( repositoryId, getPrincipal(), resource, action );\r
event.setRemoteIP( getRemoteAddr() );\r
- \r
+\r
for ( AuditListener listener : auditListeners )\r
{\r
listener.auditEvent( event );\r
{\r
AuditEvent event = new AuditEvent( null, getPrincipal(), resource, action );\r
event.setRemoteIP( getRemoteAddr() );\r
- \r
+\r
for ( AuditListener listener : auditListeners )\r
{\r
listener.auditEvent( event );\r
{\r
AuditEvent event = new AuditEvent( null, getPrincipal(), null, action );\r
event.setRemoteIP( getRemoteAddr() );\r
- \r
+\r
for ( AuditListener listener : auditListeners )\r
{\r
listener.auditEvent( event );\r
}\r
return ArchivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );\r
}\r
- \r
+\r
void setPrincipal( String principal )\r
{\r
this.principal = principal;\r
{\r
this.auditListeners = auditListeners;\r
}\r
+\r
+ public void setRepositorySessionFactory( RepositorySessionFactory repositorySessionFactory )\r
+ {\r
+ this.repositorySessionFactory = repositorySessionFactory;\r
+ }\r
}\r
package org.apache.maven.archiva.web.action;
-
+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* under the License.
*/
-import java.net.MalformedURLException;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
import com.opensymphony.xwork2.Preparable;
import org.apache.archiva.indexer.search.RepositorySearch;
import org.apache.archiva.indexer.search.RepositorySearchException;
import org.apache.archiva.indexer.search.SearchResults;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
+import java.net.MalformedURLException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
/**
* Search all indexed fields by the given criteria.
*
* @plexus.component role="com.opensymphony.xwork2.Action" role-hint="searchAction" instantiation-strategy="per-lookup"
*/
-public class SearchAction
+public class SearchAction
extends AbstractRepositoryBasedAction
implements Preparable
{
private static final String ARTIFACT = "artifact";
private List<ArtifactMetadata> databaseResults;
-
+
private int currentPage = 0;
-
+
private int totalPages;
-
+
private boolean searchResultsOnly;
-
+
private String completeQueryString;
-
+
private static final String COMPLETE_QUERY_STRING_SEPARATOR = ";";
private List<String> managedRepositoryList;
private boolean fromResultsPage;
private RepositorySearch nexusSearch;
-
+
private Map<String, String> searchFields;
private String infoMessage;
- /**
- * @plexus.requirement
- */
- private MetadataRepository metadataRepository;
-
public boolean isFromResultsPage()
{
return fromResultsPage;
{
managedRepositoryList.add( "all" );
}
-
+
searchFields = new LinkedHashMap<String, String>();
searchFields.put( "groupId", "Group ID" );
searchFields.put( "artifactId", "Artifact ID" );
searchFields.put( "version", "Version" );
- searchFields.put( "className", "Class/Package Name" );
+ searchFields.put( "className", "Class/Package Name" );
searchFields.put( "rowCount", "Row Count" );
-
- super.clearErrorsAndMessages();
+
+ super.clearErrorsAndMessages();
clearSearchFields();
}
-
+
private void clearSearchFields()
{
repositoryId = "";
artifactId = "";
groupId = "";
version = "";
- className = "";
+ className = "";
rowCount = 30;
currentPage = 0;
}
// advanced search MRM-90 -- filtered search
public String filteredSearch()
throws MalformedURLException
- {
- if ( ( groupId == null || "".equals( groupId ) ) &&
- ( artifactId == null || "".equals( artifactId ) ) && ( className == null || "".equals( className ) ) &&
- ( version == null || "".equals( version ) ) )
- {
+ {
+ if ( ( groupId == null || "".equals( groupId ) ) && ( artifactId == null || "".equals( artifactId ) ) &&
+ ( className == null || "".equals( className ) ) && ( version == null || "".equals( version ) ) )
+ {
addActionError( "Advanced Search - At least one search criteria must be provided." );
return INPUT;
}
-
+
fromFilterSearch = true;
-
+
if ( CollectionUtils.isEmpty( managedRepositoryList ) )
- {
+ {
return GlobalResults.ACCESS_TO_NO_REPOS;
}
SearchResultLimits limits = new SearchResultLimits( currentPage );
limits.setPageSize( rowCount );
List<String> selectedRepos = new ArrayList<String>();
-
- if ( repositoryId == null || StringUtils.isBlank( repositoryId ) ||
- "all".equals( StringUtils.stripToEmpty( repositoryId ) ) )
+
+ if ( repositoryId == null || StringUtils.isBlank( repositoryId ) || "all".equals( StringUtils.stripToEmpty(
+ repositoryId ) ) )
{
selectedRepos = getObservableRepos();
}
else
{
selectedRepos.add( repositoryId );
- }
+ }
if ( CollectionUtils.isEmpty( selectedRepos ) )
- {
+ {
return GlobalResults.ACCESS_TO_NO_REPOS;
}
- SearchFields searchFields =
- new SearchFields( groupId, artifactId, version, null, className, selectedRepos );
-
+ SearchFields searchFields = new SearchFields( groupId, artifactId, version, null, className, selectedRepos );
+
// TODO: add packaging in the list of fields for advanced search (UI)?
try
{
addActionError( e.getMessage() );
return ERROR;
}
-
+
if ( results.isEmpty() )
{
addActionError( "No results found" );
totalPages = totalPages + 1;
}
- for (SearchResultHit hit : results.getHits())
+ for ( SearchResultHit hit : results.getHits() )
{
final String version = hit.getVersion();
- if (version != null)
+ if ( version != null )
{
- hit.setVersion(VersionUtil.getBaseVersion(version));
+ hit.setVersion( VersionUtil.getBaseVersion( version ) );
}
}
return SUCCESS;
}
- @SuppressWarnings("unchecked")
+ @SuppressWarnings( "unchecked" )
public String quickSearch()
throws MalformedURLException
{
try
{
- if( searchResultsOnly && !completeQueryString.equals( "" ) )
- {
- results = getNexusSearch().search( getPrincipal(), selectedRepos, q, limits, parseCompleteQueryString() );
+ if ( searchResultsOnly && !completeQueryString.equals( "" ) )
+ {
+ results = getNexusSearch().search( getPrincipal(), selectedRepos, q, limits,
+ parseCompleteQueryString() );
}
else
{
- completeQueryString = "";
- results = getNexusSearch().search( getPrincipal(), selectedRepos, q, limits, null );
+ completeQueryString = "";
+ results = getNexusSearch().search( getPrincipal(), selectedRepos, q, limits, null );
}
}
catch ( RepositorySearchException e )
totalPages = results.getTotalHits() / limits.getPageSize();
- if( (results.getTotalHits() % limits.getPageSize()) != 0 )
+ if ( ( results.getTotalHits() % limits.getPageSize() ) != 0 )
{
totalPages = totalPages + 1;
}
- if( !isEqualToPreviousSearchTerm( q ) )
+ if ( !isEqualToPreviousSearchTerm( q ) )
{
buildCompleteQueryString( q );
}
-
+
return SUCCESS;
}
}
databaseResults = new ArrayList<ArtifactMetadata>();
- for ( String repoId : getObservableRepos() )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
+ {
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ for ( String repoId : getObservableRepos() )
+ {
+ databaseResults.addAll( metadataRepository.getArtifactsByChecksum( repoId, q ) );
+ }
+ }
+ finally
{
- databaseResults.addAll( metadataRepository.getArtifactsByChecksum( repoId, q ) );
+ repositorySession.close();
}
if ( databaseResults.isEmpty() )
return RESULTS;
}
-
+
public String doInput()
{
return INPUT;
public RepositorySearch getNexusSearch()
{
// no need to do this when wiring is already in spring
- if( nexusSearch == null )
+ if ( nexusSearch == null )
{
- WebApplicationContext wac =
- WebApplicationContextUtils.getRequiredWebApplicationContext( ServletActionContext.getServletContext() );
- nexusSearch = ( RepositorySearch ) wac.getBean( "nexusSearch" );
+ WebApplicationContext wac = WebApplicationContextUtils.getRequiredWebApplicationContext(
+ ServletActionContext.getServletContext() );
+ nexusSearch = (RepositorySearch) wac.getBean( "nexusSearch" );
}
return nexusSearch;
}
{
this.searchFields = searchFields;
}
-
+
public String getInfoMessage()
{
return infoMessage;
{
this.infoMessage = infoMessage;
}
-
- public void setMetadataRepository( MetadataRepository metadataRepository )
- {
- this.metadataRepository = metadataRepository;
- }
}
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.MetadataResolver;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.storage.maven2.MavenArtifactFacet;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.model.ArtifactReference;
{
/* .\ Not Exposed \._____________________________________________ */
- /**
- * @plexus.requirement
- */
- private MetadataResolver metadataResolver;
-
/**
* @plexus.requirement
*/
private RepositoryContentFactory repositoryFactory;
- /**
- * @plexus.requirement
- */
- private MetadataRepository metadataRepository;
-
/* .\ Exposed Output Objects \.__________________________________ */
private String groupId;
*/
public String artifact()
{
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
+ {
+ return handleArtifact( repositorySession );
+ }
+ finally
+ {
+ repositorySession.close();
+ }
+ }
+ private String handleArtifact( RepositorySession session )
+ {
// In the future, this should be replaced by the repository grouping mechanism, so that we are only making
// simple resource requests here and letting the resolver take care of it
- String errorMsg = null;
- ProjectVersionMetadata versionMetadata = getProjectVersionMetadata();
+ ProjectVersionMetadata versionMetadata = getProjectVersionMetadata( session );
if ( versionMetadata == null )
{
- addActionError( errorMsg != null ? errorMsg : "Artifact not found" );
+ addActionError( "Artifact not found" );
return ERROR;
}
return SUCCESS;
}
- private ProjectVersionMetadata getProjectVersionMetadata()
+ private ProjectVersionMetadata getProjectVersionMetadata( RepositorySession session )
{
ProjectVersionMetadata versionMetadata = null;
artifacts = new LinkedHashMap<String, List<ArtifactDownloadInfo>>();
List<String> repos = getObservableRepos();
+ MetadataResolver metadataResolver = session.getResolver();
for ( String repoId : repos )
{
if ( versionMetadata == null )
// "just-in-time" nature of picking up the metadata (if appropriate for the repository type) is used
try
{
- versionMetadata = metadataResolver.resolveProjectVersion( repoId, groupId, artifactId, version );
+ versionMetadata = metadataResolver.resolveProjectVersion( session, repoId, groupId, artifactId,
+ version );
}
catch ( MetadataResolutionException e )
{
List<ArtifactMetadata> artifacts;
try
{
- artifacts = new ArrayList<ArtifactMetadata>( metadataResolver.resolveArtifacts( repoId, groupId,
+ artifacts = new ArrayList<ArtifactMetadata>( metadataResolver.resolveArtifacts( session, repoId,
+ groupId,
artifactId,
version ) );
}
{
List<ProjectVersionReference> references = new ArrayList<ProjectVersionReference>();
// TODO: what if we get duplicates across repositories?
- for ( String repoId : getObservableRepos() )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- // TODO: what about if we want to see this irrespective of version?
- references.addAll( metadataResolver.resolveProjectReferences( repoId, groupId, artifactId, version ) );
+ MetadataResolver metadataResolver = repositorySession.getResolver();
+ for ( String repoId : getObservableRepos() )
+ {
+ // TODO: what about if we want to see this irrespective of version?
+ references.addAll( metadataResolver.resolveProjectReferences( repositorySession, repoId, groupId,
+ artifactId, version ) );
+ }
+ }
+ finally
+ {
+ repositorySession.close();
}
this.dependees = references;
public String addMetadataProperty()
{
- String errorMsg = null;
-
- ProjectVersionMetadata projectMetadata = getProjectVersionMetadata();
- if ( projectMetadata == null )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ ProjectVersionMetadata projectMetadata;
+ try
{
- addActionError( errorMsg != null ? errorMsg : "Artifact not found" );
- return ERROR;
- }
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ projectMetadata = getProjectVersionMetadata( repositorySession );
+ if ( projectMetadata == null )
+ {
+ addActionError( "Artifact not found" );
+ return ERROR;
+ }
- if ( projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ) == null )
- {
- genericMetadata = new HashMap<String, String>();
- }
- else
- {
- genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
- }
+ if ( projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ) == null )
+ {
+ genericMetadata = new HashMap<String, String>();
+ }
+ else
+ {
+ genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
+ }
- if ( propertyName == null || "".equals( propertyName.trim() ) || propertyValue == null || "".equals(
- propertyValue.trim() ) )
- {
- model = projectMetadata;
- addActionError( errorMsg != null ? errorMsg : "Property Name and Property Value are required." );
- return INPUT;
- }
+ if ( propertyName == null || "".equals( propertyName.trim() ) || propertyValue == null || "".equals(
+ propertyValue.trim() ) )
+ {
+ model = projectMetadata;
+ addActionError( "Property Name and Property Value are required." );
+ return INPUT;
+ }
- genericMetadata.put( propertyName, propertyValue );
+ genericMetadata.put( propertyName, propertyValue );
- try
- {
- updateProjectMetadata( projectMetadata );
+ try
+ {
+ updateProjectMetadata( projectMetadata, metadataRepository );
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ log.warn( "Unable to persist modified project metadata after adding entry: " + e.getMessage(), e );
+ addActionError(
+ "Unable to add metadata item to underlying content storage - consult application logs." );
+ return ERROR;
+ }
+
+ // TODO: why re-retrieve?
+ projectMetadata = getProjectVersionMetadata( repositorySession );
}
- catch ( MetadataRepositoryException e )
+ finally
{
- log.warn( "Unable to persist modified project metadata after adding entry: " + e.getMessage(), e );
- addActionError( "Unable to add metadata item to underlying content storage - consult application logs." );
- return ERROR;
+ repositorySession.close();
}
- projectMetadata = getProjectVersionMetadata();
-
genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
model = projectMetadata;
public String deleteMetadataEntry()
{
- ProjectVersionMetadata projectMetadata = getProjectVersionMetadata();
- String errorMsg = null;
-
- if ( projectMetadata == null )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- addActionError( "Artifact not found" );
- return ERROR;
- }
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ ProjectVersionMetadata projectMetadata = getProjectVersionMetadata( repositorySession );
- if ( projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ) != null )
- {
- genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
+ if ( projectMetadata == null )
+ {
+ addActionError( "Artifact not found" );
+ return ERROR;
+ }
- if ( !StringUtils.isEmpty( deleteItem ) )
+ if ( projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ) != null )
{
- genericMetadata.remove( deleteItem );
+ genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
- try
- {
- updateProjectMetadata( projectMetadata );
- }
- catch ( MetadataRepositoryException e )
+ if ( !StringUtils.isEmpty( deleteItem ) )
{
- log.warn( "Unable to persist modified project metadata after removing entry: " + e.getMessage(),
- e );
- addActionError(
- "Unable to remove metadata item to underlying content storage - consult application logs." );
- return ERROR;
- }
+ genericMetadata.remove( deleteItem );
- projectMetadata = getProjectVersionMetadata();
+ try
+ {
+ updateProjectMetadata( projectMetadata, metadataRepository );
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ log.warn( "Unable to persist modified project metadata after removing entry: " + e.getMessage(),
+ e );
+ addActionError(
+ "Unable to remove metadata item to underlying content storage - consult application logs." );
+ return ERROR;
+ }
- genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
+ // TODO: why re-retrieve?
+ projectMetadata = getProjectVersionMetadata( repositorySession );
- model = projectMetadata;
+ genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
- addActionMessage( "Property successfully deleted." );
- }
+ model = projectMetadata;
+
+ addActionMessage( "Property successfully deleted." );
+ }
- deleteItem = "";
+ deleteItem = "";
+ }
+ else
+ {
+ addActionError( "No generic metadata facet for this artifact." );
+ return ERROR;
+ }
}
- else
+ finally
{
- addActionError( "No generic metadata facet for this artifact." );
- return ERROR;
+ repositorySession.close();
}
return SUCCESS;
}
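Both addMetadataProperty and deleteMetadataEntry persist their edits through updateProjectMetadata, which now receives the MetadataRepository from the caller's session rather than from an injected field. A hedged sketch of what such a helper plausibly does; fromProperties and updateProjectVersion are assumptions here, not confirmed by this patch:

    // sketch only: rebuild the generic facet from the edited properties and store it
    GenericMetadataFacet facet = new GenericMetadataFacet();
    facet.fromProperties( genericMetadata ); // assumed MetadataFacet method
    projectMetadata.addFacet( facet );
    // assumed MetadataRepository signature
    metadataRepository.updateProjectVersion( repositoryId, groupId, artifactId, projectMetadata );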
- private void updateProjectMetadata( ProjectVersionMetadata projectMetadata )
+ private void updateProjectMetadata( ProjectVersionMetadata projectMetadata, MetadataRepository metadataRepository )
throws MetadataRepositoryException
{
GenericMetadataFacet genericMetadataFacet = new GenericMetadataFacet();
this.repositoryId = repositoryId;
}
- public MetadataResolver getMetadataResolver()
- {
- return metadataResolver;
- }
-
public Map<String, List<ArtifactDownloadInfo>> getArtifacts()
{
return artifacts;
this.repositoryFactory = repositoryFactory;
}
- public void setMetadataRepository( MetadataRepository metadataRepository )
- {
- this.metadataRepository = metadataRepository;
- }
-
// TODO: move this into the artifact metadata itself via facets where necessary
public class ArtifactDownloadInfo
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.Configuration;
*/
private RepositoryStatisticsManager repositoryStatisticsManager;
- /**
- * @plexus.requirement
- */
- private MetadataRepository metadataRepository;
-
public void prepare()
{
if ( StringUtils.isNotBlank( repoid ) )
String result;
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
Configuration configuration = archivaConfiguration.getConfiguration();
if ( attachedStagingRepo != null )
{
- cleanupRepositoryData( attachedStagingRepo );
+ cleanupRepositoryData( attachedStagingRepo, repositorySession );
removeRepository( repoid + "-stage", configuration );
triggerAuditEvent( repoid + "-stage", null, AuditEvent.DELETE_MANAGED_REPO );
}
- cleanupRepositoryData( existingRepository );
+ cleanupRepositoryData( existingRepository, repositorySession );
removeRepository( repoid, configuration );
triggerAuditEvent( repoid, null, AuditEvent.DELETE_MANAGED_REPO );
result = saveConfiguration( configuration );
"Unable to delete repository, content may already be partially removed: " + e.getMessage() );
result = ERROR;
}
+ finally
+ {
+ repositorySession.close();
+ }
return result;
}
- private void cleanupRepositoryData( ManagedRepositoryConfiguration cleanupRepository )
+ private void cleanupRepositoryData( ManagedRepositoryConfiguration cleanupRepository,
+ RepositorySession repositorySession )
throws RoleManagerException, MetadataRepositoryException
{
removeRepositoryRoles( cleanupRepository );
- cleanupDatabase( cleanupRepository.getId() );
- repositoryStatisticsManager.deleteStatistics( cleanupRepository.getId() );
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ cleanupDatabase( metadataRepository, cleanupRepository.getId() );
+ repositoryStatisticsManager.deleteStatistics( metadataRepository, cleanupRepository.getId() );
// TODO: delete all content for a repository from the content API?
+ repositorySession.save();
List<ProxyConnectorConfiguration> proxyConnectors = getProxyConnectors();
for ( ProxyConnectorConfiguration proxyConnector : proxyConnectors )
}
}
- private void cleanupDatabase( String repoId )
+ private void cleanupDatabase( MetadataRepository metadataRepository, String repoId )
throws MetadataRepositoryException
{
metadataRepository.removeRepository( repoId );
{
this.repositoryStatisticsManager = repositoryStatisticsManager;
}
-
- public void setMetadataRepository( MetadataRepository metadataRepository )
- {
- this.metadataRepository = metadataRepository;
- }
}
import com.opensymphony.xwork2.Validateable;
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.Configuration;
// Save the repository configuration.
String result;
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
addRepository( repository, configuration );
result = saveConfiguration( configuration );
if ( resetStats )
{
- resetStatistics();
+ repositoryStatisticsManager.deleteStatistics( repositorySession.getRepository(), repository.getId() );
+ repositorySession.save();
}
}
catch ( IOException e )
addActionError( "Metadata Exception: " + e.getMessage() );
result = ERROR;
}
+ finally
+ {
+ repositorySession.close();
+ }
return result;
}
}
}
- private void resetStatistics()
- throws MetadataRepositoryException
- {
- repositoryStatisticsManager.deleteStatistics( repository.getId() );
- }
-
public String getRepoid()
{
return repoid;
*/
import com.opensymphony.xwork2.Preparable;
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
Collections.sort( remoteRepositories, new RepositoryConfigurationComparator() );
repositoryStatistics = new HashMap<String, RepositoryStatistics>();
- for ( ManagedRepositoryConfiguration repo : managedRepositories )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- RepositoryStatistics stats = null;
- try
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ for ( ManagedRepositoryConfiguration repo : managedRepositories )
{
- stats = repositoryStatisticsManager.getLastStatistics( repo.getId() );
- }
- catch ( MetadataRepositoryException e )
- {
- addActionError(
- "Error retrieving statistics for repository " + repo.getId() + " - consult application logs" );
- log.warn( "Error retrieving repository statistics: " + e.getMessage(), e );
- }
- if ( stats != null )
- {
- repositoryStatistics.put( repo.getId(), stats );
+ RepositoryStatistics stats = null;
+ try
+ {
+ stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, repo.getId() );
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ addActionError(
+ "Error retrieving statistics for repository " + repo.getId() + " - consult application logs" );
+ log.warn( "Error retrieving repository statistics: " + e.getMessage(), e );
+ }
+ if ( stats != null )
+ {
+ repositoryStatistics.put( repo.getId(), stats );
+ }
}
}
+ finally
+ {
+ repositorySession.close();
+ }
}
public List<ManagedRepositoryConfiguration> getManagedRepositories()
import com.opensymphony.xwork2.Preparable;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.archiva.reports.RepositoryProblemFacet;
private boolean lastPage;
- /**
- * @plexus.requirement
- */
- private MetadataRepository metadataRepository;
-
@SuppressWarnings( "unchecked" )
public void prepare()
{
Date startDateInDF;
Date endDateInDF;
- if ( selectedRepositories.size() > 1 )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- numPages = 1;
-
- try
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ if ( selectedRepositories.size() > 1 )
{
- startDateInDF = getStartDateInDateFormat();
- endDateInDF = getEndDateInDateFormat();
- }
- catch ( ParseException e )
- {
- addActionError( "Error parsing date(s)." );
- return ERROR;
- }
+ numPages = 1;
- if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
- {
- addFieldError( "startDate", "Start Date must be earlier than the End Date" );
- return INPUT;
- }
-
- // multiple repos
- for ( String repo : selectedRepositories )
- {
- List<RepositoryStatistics> stats = null;
try
{
- stats = repositoryStatisticsManager.getStatisticsInRange( repo, startDateInDF, endDateInDF );
- }
- catch ( MetadataRepositoryException e )
- {
- log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
+ startDateInDF = getStartDateInDateFormat();
+ endDateInDF = getEndDateInDateFormat();
}
- if ( stats == null || stats.isEmpty() )
+ catch ( ParseException e )
{
- log.info( "No statistics available for repository '" + repo + "'." );
- // TODO set repo's stats to 0
- continue;
+ addActionError( "Error parsing date(s)." );
+ return ERROR;
}
- repositoryStatistics.add( stats.get( 0 ) );
- }
- }
- else if ( selectedRepositories.size() == 1 )
- {
- repositoryId = selectedRepositories.get( 0 );
- try
- {
- startDateInDF = getStartDateInDateFormat();
- endDateInDF = getEndDateInDateFormat();
-
if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
{
addFieldError( "startDate", "Start Date must be earlier than the End Date" );
return INPUT;
}
- List<RepositoryStatistics> stats = null;
- try
+ // multiple repos
+ for ( String repo : selectedRepositories )
{
- stats = repositoryStatisticsManager.getStatisticsInRange( repositoryId, startDateInDF,
- endDateInDF );
+ List<RepositoryStatistics> stats = null;
+ try
+ {
+ stats = repositoryStatisticsManager.getStatisticsInRange( metadataRepository, repo,
+ startDateInDF, endDateInDF );
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
+ }
+ if ( stats == null || stats.isEmpty() )
+ {
+ log.info( "No statistics available for repository '" + repo + "'." );
+ // TODO set repo's stats to 0
+ continue;
+ }
+
+ repositoryStatistics.add( stats.get( 0 ) );
}
- catch ( MetadataRepositoryException e )
+ }
+ else if ( selectedRepositories.size() == 1 )
+ {
+ repositoryId = selectedRepositories.get( 0 );
+ try
{
- log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
+ startDateInDF = getStartDateInDateFormat();
+ endDateInDF = getEndDateInDateFormat();
+
+ if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
+ {
+ addFieldError( "startDate", "Start Date must be earlier than the End Date" );
+ return INPUT;
+ }
+
+ List<RepositoryStatistics> stats = null;
+ try
+ {
+ stats = repositoryStatisticsManager.getStatisticsInRange( metadataRepository, repositoryId,
+ startDateInDF, endDateInDF );
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
+ }
+ if ( stats == null || stats.isEmpty() )
+ {
+ addActionError(
+ "No statistics available for repository. Repository might not have been scanned." );
+ return ERROR;
+ }
+
+ int rowCount = getRowCount();
+ int extraPage = ( stats.size() % rowCount ) != 0 ? 1 : 0;
+ int totalPages = ( stats.size() / rowCount ) + extraPage;
+ numPages = totalPages;
+
+ int currentPage = getPage();
+ if ( currentPage > totalPages )
+ {
+ addActionError(
+ "Error encountered while generating report :: The requested page exceeds the total number of pages." );
+ return ERROR;
+ }
+
+ int start = rowCount * ( currentPage - 1 );
+ int end = ( start + rowCount ) - 1;
+
+ if ( end > stats.size() )
+ {
+ end = stats.size() - 1;
+ }
+
+ repositoryStatistics = stats.subList( start, end + 1 );
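+
+ // paging arithmetic above, e.g. stats.size() = 25 with rowCount = 10: extraPage = 1, totalPages = 3;
+ // requesting page 3 gives start = 20, end clamped to 24, i.e. subList( 20, 25 )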
}
- if ( stats == null || stats.isEmpty() )
+ catch ( ParseException pe )
{
- addActionError( "No statistics available for repository. Repository might not have been scanned." );
+ addActionError( pe.getMessage() );
return ERROR;
}
-
- int rowCount = getRowCount();
- int extraPage = ( stats.size() % rowCount ) != 0 ? 1 : 0;
- int totalPages = ( stats.size() / rowCount ) + extraPage;
- numPages = totalPages;
-
- int currentPage = getPage();
- if ( currentPage > totalPages )
- {
- addActionError(
- "Error encountered while generating report :: The requested page exceeds the total number of pages." );
- return ERROR;
- }
-
- int start = rowCount * ( currentPage - 1 );
- int end = ( start + rowCount ) - 1;
-
- if ( end > stats.size() )
- {
- end = stats.size() - 1;
- }
-
- repositoryStatistics = stats.subList( start, end + 1 );
}
- catch ( ParseException pe )
+ else
{
- addActionError( pe.getMessage() );
- return ERROR;
+ addFieldError( "availableRepositories", "Please select a repository (or repositories) from the list." );
+ return INPUT;
}
}
- else
+ finally
{
- addFieldError( "availableRepositories", "Please select a repository (or repositories) from the list." );
- return INPUT;
+ repositorySession.close();
}
if ( repositoryStatistics.isEmpty() )
List<RepositoryStatistics> repositoryStatistics = new ArrayList<RepositoryStatistics>();
StringBuffer input;
- if ( selectedRepositories.size() > 1 )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- try
- {
- startDateInDF = getStartDateInDateFormat();
- endDateInDF = getEndDateInDateFormat();
- }
- catch ( ParseException e )
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ if ( selectedRepositories.size() > 1 )
{
- addActionError( "Error parsing date(s)." );
- return ERROR;
- }
-
- if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
- {
- addFieldError( "startDate", "Start Date must be earlier than the End Date" );
- return INPUT;
- }
-
- input = new StringBuffer(
- "Repository,Total File Count,Total Size,Artifact Count,Group Count,Project Count,Plugins,Archetypes," +
- "Jars,Wars\n" );
-
- // multiple repos
- for ( String repo : selectedRepositories )
- {
- List<RepositoryStatistics> stats = null;
try
{
- stats = repositoryStatisticsManager.getStatisticsInRange( repo, startDateInDF, endDateInDF );
+ startDateInDF = getStartDateInDateFormat();
+ endDateInDF = getEndDateInDateFormat();
}
- catch ( MetadataRepositoryException e )
+ catch ( ParseException e )
{
- log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
- }
- if ( stats == null || stats.isEmpty() )
- {
- log.info( "No statistics available for repository '" + repo + "'." );
- // TODO set repo's stats to 0
- continue;
+ addActionError( "Error parsing date(s)." );
+ return ERROR;
}
- // only the first one
- RepositoryStatistics repositoryStats = stats.get( 0 );
- repositoryStatistics.add( repositoryStats );
-
- input.append( repo ).append( "," );
- input.append( repositoryStats.getTotalFileCount() ).append( "," );
- input.append( repositoryStats.getTotalArtifactFileSize() ).append( "," );
- input.append( repositoryStats.getTotalArtifactCount() ).append( "," );
- input.append( repositoryStats.getTotalGroupCount() ).append( "," );
- input.append( repositoryStats.getTotalProjectCount() ).append( "," );
- input.append( repositoryStats.getTotalCountForType( "maven-plugin" ) ).append( "," );
- input.append( repositoryStats.getTotalCountForType( "maven-archetype" ) ).append( "," );
- input.append( repositoryStats.getTotalCountForType( "jar" ) ).append( "," );
- input.append( repositoryStats.getTotalCountForType( "war" ) );
- input.append( "\n" );
- }
- }
- else if ( selectedRepositories.size() == 1 )
- {
- repositoryId = selectedRepositories.get( 0 );
- try
- {
- startDateInDF = getStartDateInDateFormat();
- endDateInDF = getEndDateInDateFormat();
-
if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
{
addFieldError( "startDate", "Start Date must be earlier than the End Date" );
return INPUT;
}
- List<RepositoryStatistics> stats = null;
- try
- {
- stats = repositoryStatisticsManager.getStatisticsInRange( repositoryId, startDateInDF,
- endDateInDF );
- }
- catch ( MetadataRepositoryException e )
- {
- log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
- }
- if ( stats == null || stats.isEmpty() )
- {
- addActionError( "No statistics available for repository. Repository might not have been scanned." );
- return ERROR;
- }
-
input = new StringBuffer(
- "Date of Scan,Total File Count,Total Size,Artifact Count,Group Count,Project Count,Plugins," +
- "Archetypes,Jars,Wars\n" );
+ "Repository,Total File Count,Total Size,Artifact Count,Group Count,Project Count,Plugins,Archetypes," +
+ "Jars,Wars\n" );
- for ( RepositoryStatistics repositoryStats : stats )
+ // multiple repos
+ for ( String repo : selectedRepositories )
{
- input.append( repositoryStats.getScanStartTime() ).append( "," );
+ List<RepositoryStatistics> stats = null;
+ try
+ {
+ stats = repositoryStatisticsManager.getStatisticsInRange( metadataRepository, repo,
+ startDateInDF, endDateInDF );
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
+ }
+ if ( stats == null || stats.isEmpty() )
+ {
+ log.info( "No statistics available for repository '" + repo + "'." );
+ // TODO set repo's stats to 0
+ continue;
+ }
+
+ // only the first one
+ RepositoryStatistics repositoryStats = stats.get( 0 );
+ repositoryStatistics.add( repositoryStats );
+
+ input.append( repo ).append( "," );
input.append( repositoryStats.getTotalFileCount() ).append( "," );
input.append( repositoryStats.getTotalArtifactFileSize() ).append( "," );
input.append( repositoryStats.getTotalArtifactCount() ).append( "," );
input.append( repositoryStats.getTotalCountForType( "war" ) );
input.append( "\n" );
}
-
- repositoryStatistics = stats;
}
- catch ( ParseException pe )
+ else if ( selectedRepositories.size() == 1 )
+ {
+ repositoryId = selectedRepositories.get( 0 );
+ try
+ {
+ startDateInDF = getStartDateInDateFormat();
+ endDateInDF = getEndDateInDateFormat();
+
+ if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
+ {
+ addFieldError( "startDate", "Start Date must be earlier than the End Date" );
+ return INPUT;
+ }
+
+ List<RepositoryStatistics> stats = null;
+ try
+ {
+ stats = repositoryStatisticsManager.getStatisticsInRange( metadataRepository, repositoryId,
+ startDateInDF, endDateInDF );
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
+ }
+ if ( stats == null || stats.isEmpty() )
+ {
+ addActionError(
+ "No statistics available for repository. Repository might not have been scanned." );
+ return ERROR;
+ }
+
+ input = new StringBuffer(
+ "Date of Scan,Total File Count,Total Size,Artifact Count,Group Count,Project Count,Plugins," +
+ "Archetypes,Jars,Wars\n" );
+
+ for ( RepositoryStatistics repositoryStats : stats )
+ {
+ input.append( repositoryStats.getScanStartTime() ).append( "," );
+ input.append( repositoryStats.getTotalFileCount() ).append( "," );
+ input.append( repositoryStats.getTotalArtifactFileSize() ).append( "," );
+ input.append( repositoryStats.getTotalArtifactCount() ).append( "," );
+ input.append( repositoryStats.getTotalGroupCount() ).append( "," );
+ input.append( repositoryStats.getTotalProjectCount() ).append( "," );
+ input.append( repositoryStats.getTotalCountForType( "maven-plugin" ) ).append( "," );
+ input.append( repositoryStats.getTotalCountForType( "maven-archetype" ) ).append( "," );
+ input.append( repositoryStats.getTotalCountForType( "jar" ) ).append( "," );
+ input.append( repositoryStats.getTotalCountForType( "war" ) );
+ input.append( "\n" );
+ }
+
+ repositoryStatistics = stats;
+ }
+ catch ( ParseException pe )
+ {
+ addActionError( pe.getMessage() );
+ return ERROR;
+ }
+ }
+ else
{
- addActionError( pe.getMessage() );
- return ERROR;
+ addFieldError( "availableRepositories", "Please select a repository (or repositories) from the list." );
+ return INPUT;
}
}
- else
+ finally
{
- addFieldError( "availableRepositories", "Please select a repository (or repositories) from the list." );
- return INPUT;
+ repositorySession.close();
}
if ( repositoryStatistics.isEmpty() )
}
List<RepositoryProblemFacet> problemArtifacts = new ArrayList<RepositoryProblemFacet>();
- for ( String repoId : repoIds )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- // TODO: improve performance by navigating into a group subtree. Currently group is property, not part of name of item
- for ( String name : metadataRepository.getMetadataFacets( repoId, RepositoryProblemFacet.FACET_ID ) )
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ for ( String repoId : repoIds )
{
- RepositoryProblemFacet metadataFacet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet(
- repoId, RepositoryProblemFacet.FACET_ID, name );
-
- if ( StringUtils.isEmpty( groupId ) || groupId.equals( metadataFacet.getNamespace() ) )
+ // TODO: improve performance by navigating into a group subtree. Currently group is property, not part of name of item
+ for ( String name : metadataRepository.getMetadataFacets( repoId, RepositoryProblemFacet.FACET_ID ) )
{
- problemArtifacts.add( metadataFacet );
+ RepositoryProblemFacet metadataFacet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet(
+ repoId, RepositoryProblemFacet.FACET_ID, name );
+
+ if ( StringUtils.isEmpty( groupId ) || groupId.equals( metadataFacet.getNamespace() ) )
+ {
+ problemArtifacts.add( metadataFacet );
+ }
}
}
}
+ finally
+ {
+ repositorySession.close();
+ }
// TODO: getting range only after reading is not efficient for a large number of artifacts
int lowerBound = ( page - 1 ) * rowCount;
{
this.repositoryStatisticsManager = repositoryStatisticsManager;
}
-
- public void setMetadataRepository( MetadataRepository metadataRepository )
- {
- this.metadataRepository = metadataRepository;
- }
}
import com.opensymphony.xwork2.Preparable;
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.audit.AuditManager;
+import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;
import org.apache.maven.archiva.security.AccessDeniedException;
this.request = request;
}
- @SuppressWarnings("unchecked")
+ @SuppressWarnings( "unchecked" )
public void prepare()
throws Exception
{
headerName = HEADER_RESULTS;
}
- auditLogs = auditManager.getMostRecentAuditEvents( repos );
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
+ {
+ auditLogs = auditManager.getMostRecentAuditEvents( repositorySession.getRepository(), repos );
+ }
+ finally
+ {
+ repositorySession.close();
+ }
}
public String execute()
}
}
- auditLogs = auditManager.getAuditEventsInRange( repos, resource, startDateInDF, endDateInDF );
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
+ {
+ auditLogs = auditManager.getAuditEventsInRange( repositorySession.getRepository(), repos, resource,
+ startDateInDF, endDateInDF );
+ }
+ finally
+ {
+ repositorySession.close();
+ }
if ( auditLogs.isEmpty() )
{
* under the License.
*/
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.io.FeedException;
import com.sun.syndication.io.SyndFeedOutput;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.rss.processor.RssFeedProcessor;
import org.apache.commons.codec.Decoder;
import org.apache.commons.codec.DecoderException;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
/**
* Servlet for handling rss feed requests.
- *
- * @version
*/
public class RssFeedServlet
extends HttpServlet
private ServletAuthenticator servletAuth;
private HttpAuthenticator httpAuth;
-
+
+ private RepositorySessionFactory repositorySessionFactory;
+
public void init( javax.servlet.ServletConfig servletConfig )
throws ServletException
{
super.init( servletConfig );
wac = WebApplicationContextUtils.getRequiredWebApplicationContext( servletConfig.getServletContext() );
- userRepositories =
- (UserRepositories) wac.getBean( PlexusToSpringUtils.buildSpringId( UserRepositories.class.getName() ) );
- servletAuth =
- (ServletAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId( ServletAuthenticator.class.getName() ) );
- httpAuth =
- (HttpAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId( HttpAuthenticator.ROLE, "basic" ) );
+ userRepositories = (UserRepositories) wac.getBean( PlexusToSpringUtils.buildSpringId(
+ UserRepositories.class.getName() ) );
+ servletAuth = (ServletAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId(
+ ServletAuthenticator.class.getName() ) );
+ httpAuth = (HttpAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId( HttpAuthenticator.ROLE,
+ "basic" ) );
+ // TODO: what if there are other types?
+ repositorySessionFactory = (RepositorySessionFactory) wac.getBean( PlexusToSpringUtils.buildSpringId(
+ RepositorySessionFactory.class.getName() ) );
}
public void doGet( HttpServletRequest req, HttpServletResponse res )
String repoId = null;
String groupId = null;
String artifactId = null;
-
- String url = StringUtils.removeEnd( req.getRequestURL().toString(), "/" );
- if( StringUtils.countMatches( StringUtils.substringAfter( url, "feeds/" ), "/" ) > 0 )
+
+ String url = StringUtils.removeEnd( req.getRequestURL().toString(), "/" );
+ if ( StringUtils.countMatches( StringUtils.substringAfter( url, "feeds/" ), "/" ) > 0 )
{
artifactId = StringUtils.substringAfterLast( url, "/" );
- groupId = StringUtils.substringBeforeLast( StringUtils.substringAfter( url, "feeds/" ), "/");
+ groupId = StringUtils.substringBeforeLast( StringUtils.substringAfter( url, "feeds/" ), "/" );
groupId = StringUtils.replaceChars( groupId, '/', '.' );
}
- else if( StringUtils.countMatches( StringUtils.substringAfter( url, "feeds/" ), "/" ) == 0 )
+ else if ( StringUtils.countMatches( StringUtils.substringAfter( url, "feeds/" ), "/" ) == 0 )
{
repoId = StringUtils.substringAfterLast( url, "/" );
}
{
res.sendError( HttpServletResponse.SC_BAD_REQUEST, "Invalid request url." );
return;
- }
-
+ }
+
try
{
Map<String, String> map = new HashMap<String, String>();
SyndFeed feed = null;
-
+
if ( isAllowed( req, repoId, groupId, artifactId ) )
{
if ( repoId != null )
{
// new artifacts in repo feed request
- processor =
- (RssFeedProcessor) wac.getBean( PlexusToSpringUtils.buildSpringId(
- RssFeedProcessor.class.getName(),
- "new-artifacts" ) );
+ processor = (RssFeedProcessor) wac.getBean( PlexusToSpringUtils.buildSpringId(
+ RssFeedProcessor.class.getName(), "new-artifacts" ) );
map.put( RssFeedProcessor.KEY_REPO_ID, repoId );
}
else if ( ( groupId != null ) && ( artifactId != null ) )
{
// TODO: this only works for guest - we could pass in the list of repos
// new versions of artifact feed request
- processor =
- (RssFeedProcessor) wac.getBean( PlexusToSpringUtils.buildSpringId(
- RssFeedProcessor.class.getName(),
- "new-versions" ) );
+ processor = (RssFeedProcessor) wac.getBean( PlexusToSpringUtils.buildSpringId(
+ RssFeedProcessor.class.getName(), "new-versions" ) );
map.put( RssFeedProcessor.KEY_GROUP_ID, groupId );
map.put( RssFeedProcessor.KEY_ARTIFACT_ID, artifactId );
}
return;
}
- feed = processor.process( map );
- if( feed == null )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
+ {
+ feed = processor.process( map, repositorySession.getRepository() );
+ }
+ finally
+ {
+ repositorySession.close();
+ }
+ if ( feed == null )
{
res.sendError( HttpServletResponse.SC_NO_CONTENT, "No information available." );
return;
}
-
+
res.setContentType( MIME_TYPE );
-
+
if ( repoId != null )
- {
+ {
feed.setLink( req.getRequestURL().toString() );
}
else if ( ( groupId != null ) && ( artifactId != null ) )
{
- feed.setLink( req.getRequestURL().toString() );
+ feed.setLink( req.getRequestURL().toString() );
}
SyndFeedOutput output = new SyndFeedOutput();
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
}
catch ( AccountLockedException acce )
- {
+ {
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
}
catch ( AuthenticationException authe )
- {
+ {
log.debug( COULD_NOT_AUTHENTICATE_USER, authe );
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
}
res.sendError( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, COULD_NOT_GENERATE_FEED_ERROR );
}
catch ( MustChangePasswordException e )
- {
+ {
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
}
catch ( UnauthorizedException e )
log.debug( e.getMessage() );
if ( repoId != null )
{
- res.setHeader("WWW-Authenticate", "Basic realm=\"Repository Archiva Managed " + repoId + " Repository" );
+ res.setHeader( "WWW-Authenticate",
+ "Basic realm=\"Repository Archiva Managed " + repoId + " Repository" );
}
else
{
- res.setHeader("WWW-Authenticate", "Basic realm=\"Artifact " + groupId + ":" + artifactId );
+ res.setHeader( "WWW-Authenticate", "Basic realm=\"Artifact " + groupId + ":" + artifactId );
}
-
+
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, USER_NOT_AUTHORIZED );
}
}
/**
* Basic authentication.
- *
+ *
* @param req
* @param repositoryId TODO
- * @param groupId TODO
- * @param artifactId TODO
+ * @param groupId TODO
+ * @param artifactId TODO
* @return
*/
private boolean isAllowed( HttpServletRequest req, String repositoryId, String groupId, String artifactId )
AuthenticationResult result = httpAuth.getAuthenticationResult( req, null );
SecuritySession securitySession = httpAuth.getSecuritySession( req.getSession( true ) );
- if ( servletAuth.isAuthenticated( req, result )
- && servletAuth.isAuthorized( req, securitySession, repoId,
- ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS ) )
+ if ( servletAuth.isAuthenticated( req, result ) && servletAuth.isAuthorized( req, securitySession,
+ repoId,
+ ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS ) )
{
return true;
}
}
catch ( AuthorizationException e )
{
-
+
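+ // ignored: the request is simply treated as not authorized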
}
catch ( UnauthorizedException e )
{
-
+
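+ // ignored: the request is simply treated as not authorized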
}
}
<bean id="loggerManager" class="org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager"
init-method="initialize"/>
+ <alias name="repositorySessionFactory#file" alias="repositorySessionFactory"/>
+
<bean name="wagon#http" class="org.apache.maven.wagon.providers.http.LightweightHttpWagon" scope="prototype">
<property name="httpHeaders">
<map>
<bean name="searchService" lazy-init="true" scope="singleton"
class="org.apache.archiva.web.xmlrpc.services.SearchServiceImpl">
<constructor-arg ref="xmlRpcUserRepositories"/>
- <constructor-arg ref="metadataResolver"/>
- <constructor-arg ref="metadataRepository"/>
+ <constructor-arg ref="repositorySessionFactory"/>
<constructor-arg ref="nexusSearch"/>
</bean>
<constructor-arg ref="archivaConfiguration"/>
<constructor-arg ref="repositoryContentConsumers"/>
<constructor-arg ref="repositoryContentFactory"/>
- <constructor-arg ref="metadataRepository"/>
+ <constructor-arg ref="repositorySessionFactory"/>
<constructor-arg ref="archivaTaskScheduler#repository"/>
<constructor-arg>
<bean class="org.apache.archiva.web.spring.RepositoryListenerFactoryBean"/>
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.repository.MetadataResolver;
+import org.apache.archiva.metadata.repository.RepositorySession;
import java.util.Collection;
import java.util.Collections;
private Map<String, Collection<String>> versionsInProject = new HashMap<String, Collection<String>>();
- public ProjectVersionMetadata resolveProjectVersion( String repoId, String namespace, String projectId,
- String projectVersion )
+ public ProjectVersionMetadata resolveProjectVersion( RepositorySession repositorySession, String repoId,
+ String namespace, String projectId, String projectVersion )
{
return projectVersions.get( createMapKey( repoId, namespace, projectId, projectVersion ) );
}
- public Collection<ProjectVersionReference> resolveProjectReferences( String repoId, String namespace,
+ public Collection<ProjectVersionReference> resolveProjectReferences( RepositorySession repositorySession,
+ String repoId, String namespace,
String projectId, String projectVersion )
{
return references.get( createMapKey( repoId, namespace, projectId, projectVersion ) );
}
- public Collection<String> resolveRootNamespaces( String repoId )
+ public Collection<String> resolveRootNamespaces( RepositorySession repositorySession, String repoId )
{
- return resolveNamespaces( repoId, null );
+ return resolveNamespaces( repositorySession, repoId, null );
}
- public Collection<String> resolveNamespaces( String repoId, String baseNamespace )
+ public Collection<String> resolveNamespaces( RepositorySession repositorySession, String repoId,
+ String baseNamespace )
{
Set<String> namespaces = new LinkedHashSet<String>();
int fromIndex = baseNamespace != null ? baseNamespace.length() + 1 : 0;
return namespaces;
}
- public Collection<String> resolveProjects( String repoId, String namespace )
+ public Collection<String> resolveProjects( RepositorySession repositorySession, String repoId, String namespace )
{
Collection<String> list = projectsInNamespace.get( namespace );
return list != null ? list : Collections.<String>emptyList();
}
- public Collection<String> resolveProjectVersions( String repoId, String namespace, String projectId )
+ public Collection<String> resolveProjectVersions( RepositorySession repositorySession, String repoId,
+ String namespace, String projectId )
{
Collection<String> list = versionsInProject.get( namespace + ":" + projectId );
return list != null ? list : Collections.<String>emptyList();
}
- public Collection<ArtifactMetadata> resolveArtifacts( String repoId, String namespace, String projectId,
- String projectVersion )
+ public Collection<ArtifactMetadata> resolveArtifacts( RepositorySession repositorySession, String repoId,
+ String namespace, String projectId, String projectVersion )
{
List<ArtifactMetadata> artifacts = this.artifacts.get( createMapKey( repoId, namespace, projectId,
projectVersion ) );
--- /dev/null
+package org.apache.archiva.metadata.repository.memory;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+
+public class TestRepositorySessionFactory
+ implements RepositorySessionFactory
+{
+ private RepositorySession repositorySession;
+
+ public void setRepositorySession( RepositorySession repositorySession )
+ {
+ this.repositorySession = repositorySession;
+ }
+
+ public RepositorySession createSession()
+ {
+ return repositorySession != null ? repositorySession : new RepositorySession( new TestMetadataRepository(),
+ new TestMetadataResolver() );
+ }
+}
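Tests wire this factory by looking it up from the container and handing it a mocked session, as the updated test cases below do. A typical setup, using only calls that appear elsewhere in this patch:

    // stub a session whose resolver is a test double, then register it with the factory
    RepositorySession repositorySession = mock( RepositorySession.class );
    when( repositorySession.getResolver() ).thenReturn( new TestMetadataResolver() );
    TestRepositorySessionFactory factory =
        (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
    factory.setRepositorySession( repositorySession );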
import com.opensymphony.xwork2.Action;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.memory.TestMetadataResolver;
+import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
public class BrowseActionTest
extends AbstractActionTestCase
{
{
super.setUp();
action = (BrowseAction) lookup( Action.class, ACTION_HINT );
- metadataResolver = (TestMetadataResolver) action.getMetadataResolver();
+ metadataResolver = new TestMetadataResolver();
+ RepositorySession repositorySession = mock( RepositorySession.class );
+ when( repositorySession.getResolver() ).thenReturn( metadataResolver );
+ TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
+ factory.setRepositorySession( repositorySession );
}
}
\ No newline at end of file
import com.opensymphony.xwork2.Action;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import java.io.File;
import java.util.ArrayList;
-import java.util.Collection;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
public class DeleteArtifactActionTest
extends PlexusInSpringTestCase
assertNotNull( action );
configurationControl = MockControl.createControl( ArchivaConfiguration.class );
- configuration = ( ArchivaConfiguration ) configurationControl.getMock();
+ configuration = (ArchivaConfiguration) configurationControl.getMock();
repositoryFactoryControl = MockClassControl.createControl( RepositoryContentFactory.class );
- repositoryFactory = ( RepositoryContentFactory ) repositoryFactoryControl.getMock();
+ repositoryFactory = (RepositoryContentFactory) repositoryFactoryControl.getMock();
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
- metadataRepository = ( MetadataRepository ) metadataRepositoryControl.getMock();
+ metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
+
+ RepositorySession repositorySession = mock( RepositorySession.class );
+ when( repositorySession.getRepository() ).thenReturn( metadataRepository );
+
+ TestRepositorySessionFactory repositorySessionFactory = (TestRepositorySessionFactory) lookup(
+ RepositorySessionFactory.class );
+ repositorySessionFactory.setRepositorySession( repositorySession );
action.setConfiguration( configuration );
action.setRepositoryFactory( repositoryFactory );
- action.setMetadataRepository( metadataRepository );
}
@Override
throws Exception
{
action = null;
-
+
super.tearDown();
}
repoContent.setRepository( config.findManagedRepositoryById( REPOSITORY_ID ) );
configurationControl.expectAndReturn( configuration.getConfiguration(), config );
- repositoryFactoryControl.expectAndReturn( repositoryFactory.getManagedRepositoryContent( REPOSITORY_ID ), repoContent );
- metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( REPOSITORY_ID, GROUP_ID, ARTIFACT_ID, VERSION ),
+ repositoryFactoryControl.expectAndReturn( repositoryFactory.getManagedRepositoryContent( REPOSITORY_ID ),
+ repoContent );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( REPOSITORY_ID, GROUP_ID,
+ ARTIFACT_ID, VERSION ),
new ArrayList<ArtifactMetadata>() );
configurationControl.replay();
assertFalse( new File( artifactPath + ".jar" ).exists() );
assertFalse( new File( artifactPath + ".jar.sha1" ).exists() );
assertFalse( new File( artifactPath + ".jar.md5" ).exists() );
-
+
assertFalse( new File( artifactPath + ".pom" ).exists() );
assertFalse( new File( artifactPath + ".pom.sha1" ).exists() );
assertFalse( new File( artifactPath + ".pom.md5" ).exists() );
import org.apache.archiva.indexer.util.SearchUtil;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.security.UserRepositories;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import java.util.Collections;
import java.util.List;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
/**
*
*/
private static final String GUEST = "guest";
+ private RepositorySession session;
+
@Override
protected void setUp()
throws Exception
action = new SearchAction();
+ session = mock( RepositorySession.class );
+ TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
+ factory.setRepositorySession( session );
+ action.setRepositorySessionFactory( factory );
+
MockControl archivaConfigControl = MockControl.createControl( ArchivaConfiguration.class );
ArchivaConfiguration archivaConfig = (ArchivaConfiguration) archivaConfigControl.getMock();
userReposControl = MockControl.createControl( UserRepositories.class );
- userRepos = ( UserRepositories ) userReposControl.getMock();
+ userRepos = (UserRepositories) userReposControl.getMock();
searchControl = MockControl.createControl( RepositorySearch.class );
searchControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
- search = ( RepositorySearch ) searchControl.getMock();
+ search = (RepositorySearch) searchControl.getMock();
action.setArchivaConfiguration( archivaConfig );
action.setUserRepositories( userRepos );
action.setNexusSearch( search );
}
- @Override
- protected void tearDown()
- throws Exception
- {
- super.tearDown();
- }
-
// quick search...
public void testQuickSearch()
assertEquals( Action.INPUT, result );
assertFalse( action.getActionErrors().isEmpty() );
- assertEquals( "No results found",( String ) action.getActionErrors().iterator().next() );
+ assertEquals( "No results found", (String) action.getActionErrors().iterator().next() );
searchControl.verify();
}
MockControl control = MockControl.createControl( MetadataRepository.class );
MetadataRepository metadataRepository = (MetadataRepository) control.getMock();
- action.setMetadataRepository( metadataRepository );
+ when( session.getRepository() ).thenReturn( metadataRepository );
ArtifactMetadata artifact = createArtifact( "archiva-configuration", "1.0" );
control.expectAndReturn( metadataRepository.getArtifactsByChecksum( TEST_REPO, TEST_CHECKSUM ),
Collections.singletonList( artifact ) );
- userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ),
- Collections.singletonList( TEST_REPO ) );
+ userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ), Collections.singletonList(
+ TEST_REPO ) );
control.replay();
userReposControl.replay();
MockControl control = MockControl.createControl( MetadataRepository.class );
MetadataRepository metadataRepository = (MetadataRepository) control.getMock();
- action.setMetadataRepository( metadataRepository );
+ when( session.getRepository() ).thenReturn( metadataRepository );
List<ArtifactMetadata> artifacts = Arrays.asList( createArtifact( "archiva-configuration", "1.0" ),
createArtifact( "archiva-indexer", "1.0" ) );
control.expectAndReturn( metadataRepository.getArtifactsByChecksum( TEST_REPO, TEST_CHECKSUM ), artifacts );
- userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ),
- Collections.singletonList( TEST_REPO ) );
+ userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ), Collections.singletonList(
+ TEST_REPO ) );
control.replay();
userReposControl.replay();
assertEquals( Action.INPUT, result );
assertFalse( action.getActionErrors().isEmpty() );
- assertEquals( "Unable to search for a blank checksum", ( String ) action.getActionErrors().iterator().next() );
+ assertEquals( "Unable to search for a blank checksum", (String) action.getActionErrors().iterator().next() );
}
public void testFindArtifactNoResults()
MockControl control = MockControl.createControl( MetadataRepository.class );
MetadataRepository metadataRepository = (MetadataRepository) control.getMock();
- action.setMetadataRepository( metadataRepository );
+ when( session.getRepository() ).thenReturn( metadataRepository );
control.expectAndReturn( metadataRepository.getArtifactsByChecksum( TEST_REPO, TEST_CHECKSUM ),
Collections.<ArtifactMetadata>emptyList() );
- userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ),
- Collections.singletonList( TEST_REPO ) );
+ userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ), Collections.singletonList(
+ TEST_REPO ) );
control.replay();
userReposControl.replay();
String result = action.findArtifact();
assertEquals( Action.INPUT, result );
assertFalse( action.getActionErrors().isEmpty() );
- assertEquals( "No results found", ( String )action.getActionErrors().iterator().next() );
+ assertEquals( "No results found", (String) action.getActionErrors().iterator().next() );
control.verify();
userReposControl.verify();
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.memory.TestMetadataResolver;
+import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
import org.apache.archiva.metadata.repository.storage.maven2.MavenArtifactFacet;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
action.setPropertyValue( "bar" );
action.setRepositoryId( TEST_REPO );
- MetadataRepository repo = mock( MetadataRepository.class );
- action.setMetadataRepository( repo );
-
String result = action.addMetadataProperty();
assertActionSuccess( action, result );
{
super.setUp();
action = (ShowArtifactAction) lookup( Action.class, ACTION_HINT );
- metadataResolver = (TestMetadataResolver) action.getMetadataResolver();
+
+ metadataResolver = new TestMetadataResolver();
+ MetadataRepository repo = mock( MetadataRepository.class );
+ RepositorySession repositorySession = mock( RepositorySession.class );
+ when( repositorySession.getResolver() ).thenReturn( metadataResolver );
+ when( repositorySession.getRepository() ).thenReturn( repo );
+ TestRepositorySessionFactory repositorySessionFactory = (TestRepositorySessionFactory) lookup(
+ RepositorySessionFactory.class );
+ repositorySessionFactory.setRepositorySession( repositorySession );
RepositoryContentFactory factory = mock( RepositoryContentFactory.class );
action.setRepositoryFactory( factory );
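The hunk above shows the wiring pattern this changeset repeats across the web action tests: instead of injecting a MetadataRepository into the action, each setUp() builds a Mockito-stubbed RepositorySession and registers it on the TestRepositorySessionFactory looked up from the container. A minimal sketch of the idiom, assuming the archiva-metadata test classes imported above (the helper class itself is illustrative, not part of the patch):

import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataResolver;
import org.apache.archiva.metadata.repository.RepositorySession;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Illustrative helper capturing the stub wiring used by these setUp() methods.
public final class StubRepositorySessions
{
    private StubRepositorySessions()
    {
        // static helper only
    }

    public static RepositorySession stub( MetadataRepository repository, MetadataResolver resolver )
    {
        RepositorySession session = mock( RepositorySession.class );
        // the code under test obtains both collaborators from the session
        when( session.getRepository() ).thenReturn( repository );
        when( session.getResolver() ).thenReturn( resolver );
        return session;
    }
}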
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.audit.AuditListener;
import org.apache.archiva.metadata.repository.MetadataRepository;
-import org.apache.archiva.metadata.repository.MetadataRepositoryException;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import java.util.Collections;
import java.util.List;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
/**
* DeleteManagedRepositoryActionTest
*
private RepositoryStatisticsManager repositoryStatisticsManager;
+ private MetadataRepository metadataRepository;
+
+ private RepositorySession repositorySession;
+
+ private MockControl metadataRepositoryControl;
+
protected void setUp()
throws Exception
{
repositoryStatisticsManager = (RepositoryStatisticsManager) repositoryStatisticsManagerControl.getMock();
action.setRepositoryStatisticsManager( repositoryStatisticsManager );
- MockControl metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
- MetadataRepository metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
+ metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
+ metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
metadataRepository.removeRepository( REPO_ID );
- action.setMetadataRepository( metadataRepository );
+
+ repositorySession = mock( RepositorySession.class );
+ when( repositorySession.getRepository() ).thenReturn( metadataRepository );
+ TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
+ factory.setRepositorySession( repositorySession );
+ action.setRepositorySessionFactory( factory );
metadataRepositoryControl.replay();
}
throws Exception
{
// even when we keep the content, we don't keep the metadata at this point
- repositoryStatisticsManager.deleteStatistics( REPO_ID );
+ repositoryStatisticsManager.deleteStatistics( metadataRepository, REPO_ID );
repositoryStatisticsManagerControl.replay();
prepareRoleManagerMock();
MockControl control = mockAuditListeners();
- MockControl metadataRepositoryControl = mockMetadataRepository();
+ when( repositorySession.getRepository() ).thenReturn( metadataRepository );
String status = action.deleteEntry();
metadataRepositoryControl.verify();
}
- private MockControl mockMetadataRepository()
- throws MetadataRepositoryException
- {
- MockControl metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
- MetadataRepository metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
- metadataRepository.removeRepository( REPO_ID );
- metadataRepositoryControl.replay();
- action.setMetadataRepository( metadataRepository );
- return metadataRepositoryControl;
- }
-
private MockControl mockAuditListeners()
{
MockControl control = MockControl.createControl( AuditListener.class );
public void testDeleteRepositoryDeleteContent()
throws Exception
{
- repositoryStatisticsManager.deleteStatistics( REPO_ID );
+ repositoryStatisticsManager.deleteStatistics( metadataRepository, REPO_ID );
repositoryStatisticsManagerControl.replay();
prepareRoleManagerMock();
MockControl control = mockAuditListeners();
- MockControl metadataRepositoryControl = mockMetadataRepository();
+ when( repositorySession.getRepository() ).thenReturn( metadataRepository );
String status = action.deleteContents();
public void testDeleteRepositoryAndAssociatedProxyConnectors()
throws Exception
{
- repositoryStatisticsManager.deleteStatistics( REPO_ID );
+ repositoryStatisticsManager.deleteStatistics( metadataRepository, REPO_ID );
repositoryStatisticsManagerControl.replay();
Configuration configuration = prepDeletionTest( createRepository(), 5 );
assertEquals( 1, configuration.getProxyConnectors().size() );
MockControl control = mockAuditListeners();
- MockControl metadataRepositoryControl = mockMetadataRepository();
+ when( repositorySession.getRepository() ).thenReturn( metadataRepository );
String status = action.deleteContents();
assertEquals( Action.SUCCESS, status );
* under the License.
*/
-import java.io.File;
-import java.io.IOException;
-import java.util.Collections;
-
import com.opensymphony.xwork2.Action;
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.codehaus.redback.integration.interceptor.SecureActionException;
import org.easymock.MockControl;
+import java.io.File;
+import java.io.IOException;
+import java.util.Collections;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
/**
* EditManagedRepositoryActionTest
*
private File location;
+ private MetadataRepository metadataRepository;
+
protected void setUp()
throws Exception
{
roleManager = (RoleManager) roleManagerControl.getMock();
action.setRoleManager( roleManager );
location = getTestFile( "target/test/location" );
+
+ metadataRepository = mock( MetadataRepository.class );
+ RepositorySession repositorySession = mock( RepositorySession.class );
+ when( repositorySession.getRepository() ).thenReturn( metadataRepository );
+ TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
+ factory.setRepositorySession( repositorySession );
+ action.setRepositorySessionFactory( factory );
}
public void testSecureActionBundle()
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID );
roleManagerControl.setReturnValue( false );
- roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID +"-stage" );
+ roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID + "-stage" );
roleManagerControl.setReturnValue( false );
roleManager.createTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID );
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID );
roleManagerControl.setReturnValue( false );
- roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID +"-stage");
+ roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID + "-stage" );
roleManagerControl.setReturnValue( false );
roleManager.createTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID );
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID );
roleManagerControl.setReturnValue( false );
- roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID +"-stage");
+ roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID + "-stage" );
roleManagerControl.setReturnValue( false );
roleManager.createTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID );
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID );
roleManagerControl.setReturnValue( false );
- roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID +"-stage");
+ roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID + "-stage" );
roleManagerControl.setReturnValue( false );
roleManager.createTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID );
stageRepoConfiguration.addManagedRepository( createStagingRepository() );
archivaConfigurationControl.setReturnValue( stageRepoConfiguration );
-
archivaConfigurationControl.setReturnValue( configuration );
archivaConfigurationControl.setReturnValue( configuration );
RepositoryStatisticsManager repositoryStatisticsManager =
(RepositoryStatisticsManager) repositoryStatisticsManagerControl.getMock();
action.setRepositoryStatisticsManager( repositoryStatisticsManager );
- repositoryStatisticsManager.deleteStatistics( REPO_ID );
+ repositoryStatisticsManager.deleteStatistics( metadataRepository, REPO_ID );
repositoryStatisticsManagerControl.replay();
action.setRepoid( REPO_ID );
repository.setScanned( false );
repository.setDeleteReleasedSnapshots( true );
}
+
private void populateStagingRepository( ManagedRepositoryConfiguration repository )
throws IOException
{
- repository.setId( REPO_ID + "-stage");
+ repository.setId( REPO_ID + "-stage" );
repository.setName( "repo name" );
repository.setLocation( location.getCanonicalPath() );
repository.setLayout( "default" );
* under the License.
*/
-import java.util.Arrays;
-
import com.meterware.servletunit.ServletRunner;
import com.meterware.servletunit.ServletUnitClient;
import com.opensymphony.xwork2.Action;
import org.apache.archiva.metadata.repository.MetadataRepository;
-import org.apache.archiva.metadata.repository.stats.DefaultRepositoryStatisticsManager;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
-import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.redback.integration.interceptor.SecureActionBundle;
import org.codehaus.redback.integration.interceptor.SecureActionException;
import org.easymock.MockControl;
+import java.util.Arrays;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
/**
* Test the repositories action returns the correct data.
*/
{
super.setUp();
- action = (RepositoriesAction) lookup( Action.class.getName(), "repositoriesAction" );
+ try
+ {
+ action = (RepositoriesAction) lookup( Action.class.getName(), "repositoriesAction" );
+ }
+ catch ( Exception e )
+ {
+ // clean up cache - TODO: move handling to plexus-spring
+ applicationContext.close();
+ throw e;
+ }
}
public void testGetRepositories()
MetadataRepository metadataRepository = (MetadataRepository) control.getMock();
control.expectAndReturn( metadataRepository.getMetadataFacets( "internal", RepositoryStatistics.FACET_ID ),
Arrays.asList( "20091125.123456.678" ) );
- control.expectAndReturn(
- metadataRepository.getMetadataFacet( "internal", RepositoryStatistics.FACET_ID, "20091125.123456.678" ),
- new RepositoryStatistics() );
+ control.expectAndReturn( metadataRepository.getMetadataFacet( "internal", RepositoryStatistics.FACET_ID,
+ "20091125.123456.678" ),
+ new RepositoryStatistics() );
control.expectAndReturn( metadataRepository.getMetadataFacets( "snapshots", RepositoryStatistics.FACET_ID ),
Arrays.asList( "20091112.012345.012" ) );
- control.expectAndReturn(
- metadataRepository.getMetadataFacet( "snapshots", RepositoryStatistics.FACET_ID, "20091112.012345.012" ),
- new RepositoryStatistics() );
+ control.expectAndReturn( metadataRepository.getMetadataFacet( "snapshots", RepositoryStatistics.FACET_ID,
+ "20091112.012345.012" ),
+ new RepositoryStatistics() );
control.replay();
- DefaultRepositoryStatisticsManager statsManager =
- (DefaultRepositoryStatisticsManager) lookup( RepositoryStatisticsManager.class );
- statsManager.setMetadataRepository( metadataRepository );
+ RepositorySession session = mock( RepositorySession.class );
+ when( session.getRepository() ).thenReturn( metadataRepository );
+ TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
+ factory.setRepositorySession( session );
ServletRunner sr = new ServletRunner();
ServletUnitClient sc = sr.newClient();
import com.opensymphony.xwork2.Action;
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.archiva.reports.RepositoryProblemFacet;
import java.util.Date;
import java.util.List;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
/**
* Test the GenerationReportAction. Note that we are testing for <i>current</i> behaviour, however there are several
* instances below where other behaviour may actually be more appropriate (eg the error handling, download stats should
{
super.setUp();
- action = (GenerateReportAction) lookup( Action.class, "generateReport" );
+ try
+ {
+ action = (GenerateReportAction) lookup( Action.class, "generateReport" );
+ }
+ catch ( Exception e )
+ {
+ // clean up cache - TODO: move handling to plexus-spring
+ applicationContext.close();
+ throw e;
+ }
repositoryStatisticsManagerControl = MockControl.createControl( RepositoryStatisticsManager.class );
repositoryStatisticsManager = (RepositoryStatisticsManager) repositoryStatisticsManagerControl.getMock();
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
- action.setMetadataRepository( metadataRepository );
+
+ RepositorySession repositorySession = mock( RepositorySession.class );
+ when( repositorySession.getRepository() ).thenReturn( metadataRepository );
+
+ TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
+ factory.setRepositorySession( repositorySession );
}
private void prepareAction( List<String> selectedRepositories, List<String> availableRepositories )
public void testGenerateStatisticsSingleRepo()
throws Exception
{
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
- null,
- null ),
- Collections.singletonList( createDefaultStats() ) );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, INTERNAL, null, null ), Collections.singletonList( createDefaultStats() ) );
repositoryStatisticsManagerControl.replay();
prepareAction( Collections.singletonList( INTERNAL ), Collections.singletonList( SNAPSHOTS ) );
throws Exception
{
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
- null,
- null ),
- Collections.<Object>emptyList() );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, INTERNAL, null, null ), Collections.<Object>emptyList() );
repositoryStatisticsManagerControl.replay();
prepareAction( Collections.singletonList( INTERNAL ), Collections.singletonList( SNAPSHOTS ) );
throws Exception
{
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
- null,
- null ),
- Collections.singletonList( createDefaultStats() ) );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, INTERNAL, null, null ), Collections.singletonList( createDefaultStats() ) );
repositoryStatisticsManagerControl.replay();
action.setPage( 2 );
prepareAction( Collections.singletonList( INTERNAL ), Collections.singletonList( SNAPSHOTS ) );
throws Exception
{
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
- null,
- null ),
- Collections.<Object>emptyList() );
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
- null,
- null ),
- Collections.<Object>emptyList() );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, SNAPSHOTS, null, null ), Collections.<Object>emptyList() );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, INTERNAL, null, null ), Collections.<Object>emptyList() );
repositoryStatisticsManagerControl.replay();
prepareAction( Arrays.asList( SNAPSHOTS, INTERNAL ), Collections.<String>emptyList() );
throws Exception
{
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
- null,
- null ),
- Collections.singletonList( createDefaultStats() ) );
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
- null,
- null ),
- Collections.singletonList( createDefaultStats() ) );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, SNAPSHOTS, null, null ), Collections.singletonList( createDefaultStats() ) );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, INTERNAL, null, null ), Collections.singletonList( createDefaultStats() ) );
repositoryStatisticsManagerControl.replay();
prepareAction( Arrays.asList( SNAPSHOTS, INTERNAL ), Collections.<String>emptyList() );
throws Exception
{
Date date = new Date();
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
- null,
- null ),
- Collections.singletonList( createStats( date ) ) );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, SNAPSHOTS, null, null ), Collections.singletonList( createStats( date ) ) );
repositoryStatisticsManagerControl.replay();
prepareAction( Arrays.asList( SNAPSHOTS ), Arrays.asList( INTERNAL ) );
public void testDownloadStatisticsMultipleRepos()
throws Exception
{
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
- null,
- null ),
- Collections.singletonList( createDefaultStats() ) );
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
- null,
- null ),
- Collections.singletonList( createDefaultStats() ) );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, SNAPSHOTS, null, null ), Collections.singletonList( createDefaultStats() ) );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, INTERNAL, null, null ), Collections.singletonList( createDefaultStats() ) );
repositoryStatisticsManagerControl.replay();
prepareAction( Arrays.asList( SNAPSHOTS, INTERNAL ), Collections.<String>emptyList() );
throws Exception
{
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
- null,
- null ),
- Collections.<Object>emptyList() );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, INTERNAL, null, null ), Collections.<Object>emptyList() );
repositoryStatisticsManagerControl.replay();
prepareAction( Collections.singletonList( INTERNAL ), Collections.singletonList( SNAPSHOTS ) );
throws Exception
{
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
- null,
- null ),
- Collections.<Object>emptyList() );
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
- null,
- null ),
- Collections.<Object>emptyList() );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, SNAPSHOTS, null, null ), Collections.<Object>emptyList() );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, INTERNAL, null, null ), Collections.<Object>emptyList() );
repositoryStatisticsManagerControl.replay();
prepareAction( Arrays.asList( SNAPSHOTS, INTERNAL ), Collections.<String>emptyList() );
public void testDownloadStatisticsMultipleRepoInStrutsFormat()
throws Exception
{
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
- null,
- null ),
- Collections.singletonList( createDefaultStats() ) );
- repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
- null,
- null ),
- Collections.singletonList( createDefaultStats() ) );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, SNAPSHOTS, null, null ), Collections.singletonList( createDefaultStats() ) );
+ repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
+ metadataRepository, INTERNAL, null, null ), Collections.singletonList( createDefaultStats() ) );
repositoryStatisticsManagerControl.replay();
prepareAction( Arrays.asList( SNAPSHOTS, INTERNAL ), Collections.<String>emptyList() );
* under the License.
*/
-import javax.servlet.http.HttpServletResponse;
-
import com.meterware.httpunit.GetMethodWebRequest;
import com.meterware.httpunit.HttpException;
import com.meterware.httpunit.WebRequest;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import sun.misc.BASE64Encoder;
+import javax.servlet.http.HttpServletResponse;
+
public class RssFeedServletTest
extends PlexusInSpringTestCase
{
public void testRetrieveServlet()
throws Exception
{
- RssFeedServlet servlet =
- (RssFeedServlet) client.newInvocation( "http://localhost/feeds/test-repo" ).getServlet();
+ RssFeedServlet servlet = (RssFeedServlet) client.newInvocation(
+ "http://localhost/feeds/test-repo" ).getServlet();
assertNotNull( servlet );
}
public void testRequestNewArtifactsInRepo()
throws Exception
{
- RssFeedServlet servlet =
- (RssFeedServlet) client.newInvocation( "http://localhost/feeds/test-repo" ).getServlet();
+ RssFeedServlet servlet = (RssFeedServlet) client.newInvocation(
+ "http://localhost/feeds/test-repo" ).getServlet();
assertNotNull( servlet );
WebRequest request = new GetMethodWebRequest( "http://localhost/feeds/test-repo" );
public void XXX_testInvalidRequest()
throws Exception
{
- RssFeedServlet servlet =
- (RssFeedServlet) client.newInvocation( "http://localhost/feeds?invalid_param=xxx" ).getServlet();
+ RssFeedServlet servlet = (RssFeedServlet) client.newInvocation(
+ "http://localhost/feeds?invalid_param=xxx" ).getServlet();
assertNotNull( servlet );
try
public void XXX_testInvalidAuthenticationRequest()
throws Exception
{
- RssFeedServlet servlet =
- (RssFeedServlet) client.newInvocation( "http://localhost/feeds/unauthorized-repo" ).getServlet();
+ RssFeedServlet servlet = (RssFeedServlet) client.newInvocation(
+ "http://localhost/feeds/unauthorized-repo" ).getServlet();
assertNotNull( servlet );
WebRequest request = new GetMethodWebRequest( "http://localhost/feeds/unauthorized-repo" );
public void XXX_testUnauthorizedRequest()
throws Exception
{
- RssFeedServlet servlet =
- (RssFeedServlet) client.newInvocation( "http://localhost/feeds/unauthorized-repo" ).getServlet();
+ RssFeedServlet servlet = (RssFeedServlet) client.newInvocation(
+ "http://localhost/feeds/unauthorized-repo" ).getServlet();
assertNotNull( servlet );
WebRequest request = new GetMethodWebRequest( "http://localhost/feeds/unauthorized-repo" );
* under the License.
*/
-import java.util.List;
-
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataResolver;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.memory.TestMetadataResolver;
+import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+import java.util.List;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
public class DependencyTreeTest
extends PlexusInSpringTestCase
{
ProjectVersionMetadata metadata = new ProjectVersionMetadata();
metadata.setId( TEST_VERSION );
metadataResolver.setProjectVersion( TEST_REPO_ID, TEST_GROUP_ID, TEST_ARTIFACT_ID, metadata );
+
+ RepositorySession repositorySession = mock( RepositorySession.class );
+ when( repositorySession.getResolver() ).thenReturn( metadataResolver );
+ TestRepositorySessionFactory repositorySessionFactory = (TestRepositorySessionFactory) lookup(
+ RepositorySessionFactory.class );
+ repositorySessionFactory.setRepositorySession( repositorySession );
}
public void testTree()
<resource>archiva-mime-types.txt</resource>
</configuration>
</component>
+ <component>
+ <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
+ <role-hint>default</role-hint>
+ <implementation>org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory</implementation>
+ </component>
</components>
</component-set>
\ No newline at end of file
<role-hint>default</role-hint>
<implementation>org.apache.maven.archiva.security.UserRepositoriesStub</implementation>
</component>
- <component>
- <role>org.apache.archiva.metadata.repository.MetadataResolver</role>
- <role-hint>default</role-hint>
- <implementation>org.apache.archiva.metadata.repository.memory.TestMetadataResolver</implementation>
- <instantiation-strategy>per-lookup</instantiation-strategy>
- </component>
</components>
</plexus>
<role-hint>default</role-hint>
<implementation>org.apache.maven.archiva.security.UserRepositoriesStub</implementation>
</component>
- <component>
- <role>org.apache.archiva.metadata.repository.MetadataResolver</role>
- <role-hint>default</role-hint>
- <implementation>org.apache.archiva.metadata.repository.memory.TestMetadataResolver</implementation>
- <instantiation-strategy>per-lookup</instantiation-strategy>
- </component>
</components>
</plexus>
--- /dev/null
+package org.apache.archiva.metadata.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+public class TestRepositorySessionFactory
+ implements RepositorySessionFactory
+{
+ private MetadataResolver resolver;
+
+ public RepositorySession createSession()
+ {
+ return new RepositorySession( new TestMetadataRepository(), resolver );
+ }
+
+ public void setResolver( MetadataResolver resolver )
+ {
+ this.resolver = resolver;
+ }
+}
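In use, a test seeds this stub through the container and lets production code create sessions as normal. A hedged usage sketch (lookup() is the PlexusInSpringTestCase helper seen elsewhere in this changeset; someTestResolver stands for whatever MetadataResolver stub the test supplies):

TestRepositorySessionFactory factory =
    (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
factory.setResolver( someTestResolver ); // any MetadataResolver stub; the name is hypothetical

// each call wraps a fresh TestMetadataRepository plus the injected resolver
RepositorySession session = factory.createSession();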
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
+ <component>
+ <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
+ <implementation>org.apache.archiva.metadata.repository.TestRepositorySessionFactory</implementation>
+ </component>
</components>
</component-set>
\ No newline at end of file
~ under the License.
-->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.archiva</groupId>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>audit</artifactId>
- </dependency>
+ </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-scanner</artifactId>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
- </dependency>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
</project>
import org.apache.archiva.audit.AuditListener;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.filter.Filter;
import org.apache.archiva.metadata.repository.filter.IncludesFilter;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
private Collection<RepositoryListener> listeners;
- private MetadataRepository metadataRepository;
-
private RepositoryStatisticsManager repositoryStatisticsManager;
private RepositoryMerger repositoryMerger;
private AuditListener auditListener;
+ private RepositorySessionFactory repositorySessionFactory;
+
public AdministrationServiceImpl( ArchivaConfiguration archivaConfig, RepositoryContentConsumers repoConsumersUtil,
- RepositoryContentFactory repoFactory, MetadataRepository metadataRepository,
+ RepositoryContentFactory repoFactory,
+ RepositorySessionFactory repositorySessionFactory,
RepositoryArchivaTaskScheduler repositoryTaskScheduler,
Collection<RepositoryListener> listeners,
RepositoryStatisticsManager repositoryStatisticsManager,
this.repoFactory = repoFactory;
this.repositoryTaskScheduler = repositoryTaskScheduler;
this.listeners = listeners;
- this.metadataRepository = metadataRepository;
+ this.repositorySessionFactory = repositorySessionFactory;
this.repositoryStatisticsManager = repositoryStatisticsManager;
this.repositoryMerger = repositoryMerger;
this.auditListener = auditListener;
throw new Exception( "Repository does not exist." );
}
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
ManagedRepositoryContent repoContent = repoFactory.getManagedRepositoryContent( repoId );
// delete from file system
repoContent.deleteVersion( ref );
+ MetadataRepository metadataRepository = repositorySession.getRepository();
Collection<ArtifactMetadata> artifacts = metadataRepository.getArtifacts( repoId, groupId, artifactId,
version );
// repository metadata to an artifact
for ( RepositoryListener listener : listeners )
{
- listener.deleteArtifact( repoId, artifact.getNamespace(), artifact.getProject(),
- artifact.getVersion(), artifact.getId() );
+ listener.deleteArtifact( metadataRepository, repoId, artifact.getNamespace(),
+ artifact.getProject(), artifact.getVersion(), artifact.getId() );
}
}
}
+ repositorySession.save();
}
catch ( ContentNotFoundException e )
{
{
throw new Exception( "Repository exception occurred." );
}
+ finally
+ {
+ repositorySession.close();
+ }
return true;
}
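The deleteArtifact() rework above establishes the session lifecycle the rest of this service follows: create the session before the work, save() only on the success path, close() unconditionally. Reduced to a sketch (the commented body is a placeholder):

RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
    MetadataRepository metadataRepository = repositorySession.getRepository();
    // ... mutate metadata and notify listeners via metadataRepository ...
    repositorySession.save();   // persist only when the work completed
}
finally
{
    repositorySession.close();  // always release the session, even on failure
}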
throw new Exception( "A repository with that id does not exist" );
}
- metadataRepository.removeRepository( repository.getId() );
- repositoryStatisticsManager.deleteStatistics( repository.getId() );
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
+ {
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ metadataRepository.removeRepository( repository.getId() );
+ repositoryStatisticsManager.deleteStatistics( metadataRepository, repository.getId() );
+ repositorySession.save();
+ }
+ finally
+ {
+ repositorySession.close();
+ }
config.removeManagedRepository( repository );
try
log.debug( "Retrieved repository configuration for repo '" + repoId + "'" );
- if ( repoConfig != null )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- stagingConfig = config.findManagedRepositoryById( stagingId );
-
- if ( stagingConfig != null )
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+ if ( repoConfig != null )
{
- List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( stagingId );
+ stagingConfig = config.findManagedRepositoryById( stagingId );
- if ( repoConfig.isReleases() && !repoConfig.isSnapshots() )
+ if ( stagingConfig != null )
{
- log.info( "Repository to be merged contains releases only.." );
- if ( skipConflicts )
- {
- List<ArtifactMetadata> conflicts = repositoryMerger.getConflictingArtifacts( repoId,
- stagingId );
+ List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( stagingId );
- if ( log.isDebugEnabled() )
+ if ( repoConfig.isReleases() && !repoConfig.isSnapshots() )
+ {
+ log.info( "Repository to be merged contains releases only.." );
+ if ( skipConflicts )
{
- log.debug( "Artifacts in conflict.." );
- for ( ArtifactMetadata metadata : conflicts )
+ List<ArtifactMetadata> conflicts = repositoryMerger.getConflictingArtifacts(
+ metadataRepository, repoId, stagingId );
+
+ if ( log.isDebugEnabled() )
{
- log.debug( metadata.getNamespace() + ":" + metadata.getProject() + ":" +
- metadata.getProjectVersion() );
+ log.debug( "Artifacts in conflict.." );
+ for ( ArtifactMetadata metadata : conflicts )
+ {
+ log.debug( metadata.getNamespace() + ":" + metadata.getProject() + ":" +
+ metadata.getProjectVersion() );
+ }
}
- }
- sourceArtifacts.removeAll( conflicts );
+ sourceArtifacts.removeAll( conflicts );
- log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
- mergeWithOutSnapshots( sourceArtifacts, stagingId, repoId );
+ log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
+ mergeWithOutSnapshots( sourceArtifacts, stagingId, repoId, metadataRepository );
+ }
+ else
+ {
+ log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
+ mergeWithOutSnapshots( sourceArtifacts, stagingId, repoId, metadataRepository );
+ }
}
else
{
- log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
- mergeWithOutSnapshots( sourceArtifacts, stagingId, repoId );
- }
- }
- else
- {
- log.info( "Repository to be merged has snapshot artifacts.." );
- if ( skipConflicts )
- {
- List<ArtifactMetadata> conflicts = repositoryMerger.getConflictingArtifacts( repoId,
- stagingId );
-
- if ( log.isDebugEnabled() )
+ log.info( "Repository to be merged has snapshot artifacts.." );
+ if ( skipConflicts )
{
- log.debug( "Artifacts in conflict.." );
- for ( ArtifactMetadata metadata : conflicts )
+ List<ArtifactMetadata> conflicts = repositoryMerger.getConflictingArtifacts(
+ metadataRepository, repoId, stagingId );
+
+ if ( log.isDebugEnabled() )
{
- log.debug( metadata.getNamespace() + ":" + metadata.getProject() + ":" +
- metadata.getProjectVersion() );
+ log.debug( "Artifacts in conflict.." );
+ for ( ArtifactMetadata metadata : conflicts )
+ {
+ log.debug( metadata.getNamespace() + ":" + metadata.getProject() + ":" +
+ metadata.getProjectVersion() );
+ }
}
- }
- sourceArtifacts.removeAll( conflicts );
+ sourceArtifacts.removeAll( conflicts );
- log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
+ log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
- Filter<ArtifactMetadata> artifactsWithOutConflicts = new IncludesFilter<ArtifactMetadata>(
- sourceArtifacts );
- repositoryMerger.merge( stagingId, repoId, artifactsWithOutConflicts );
+ Filter<ArtifactMetadata> artifactsWithOutConflicts = new IncludesFilter<ArtifactMetadata>(
+ sourceArtifacts );
+ repositoryMerger.merge( metadataRepository, stagingId, repoId, artifactsWithOutConflicts );
- log.info(
- "Staging repository '" + stagingId + "' merged successfully with managed repo '" + repoId +
- "'." );
- }
- else
- {
- repositoryMerger.merge( stagingId, repoId );
+ log.info( "Staging repository '" + stagingId + "' merged successfully with managed repo '" +
+ repoId + "'." );
+ }
+ else
+ {
+ repositoryMerger.merge( metadataRepository, stagingId, repoId );
- log.info(
- "Staging repository '" + stagingId + "' merged successfully with managed repo '" + repoId +
- "'." );
+ log.info( "Staging repository '" + stagingId + "' merged successfully with managed repo '" +
+ repoId + "'." );
+ }
}
}
+ else
+ {
+ throw new Exception( "Staging Id : " + stagingId + " not found." );
+ }
}
else
{
- throw new Exception( "Staging Id : " + stagingId + " not found." );
+ throw new Exception( "Repository Id : " + repoId + " not found." );
}
- }
- else
- {
- throw new Exception( "Repository Id : " + repoId + " not found." );
- }
- if ( !repositoryTaskScheduler.isProcessingRepositoryTask( repoId ) )
- {
- RepositoryTask task = new RepositoryTask();
- task.setRepositoryId( repoId );
+ if ( !repositoryTaskScheduler.isProcessingRepositoryTask( repoId ) )
+ {
+ RepositoryTask task = new RepositoryTask();
+ task.setRepositoryId( repoId );
- repositoryTaskScheduler.queueTask( task );
- }
+ repositoryTaskScheduler.queueTask( task );
+ }
- AuditEvent event = createAuditEvent( repoConfig );
+ AuditEvent event = createAuditEvent( repoConfig );
- // add event for audit log reports
- metadataRepository.addMetadataFacet( event.getRepositoryId(), event );
+ // add event for audit log reports
+ metadataRepository.addMetadataFacet( event.getRepositoryId(), event );
- // log event in archiva audit log
- auditListener.auditEvent( createAuditEvent( repoConfig ) );
+ // log event in archiva audit log
+ auditListener.auditEvent( createAuditEvent( repoConfig ) );
+ repositorySession.save();
+ }
+ finally
+ {
+ repositorySession.close();
+ }
return true;
}
return event;
}
- private void mergeWithOutSnapshots( List<ArtifactMetadata> sourceArtifacts, String sourceRepoId, String repoid )
+ private void mergeWithOutSnapshots( List<ArtifactMetadata> sourceArtifacts, String sourceRepoId, String repoid,
+ MetadataRepository metadataRepository )
throws Exception
{
List<ArtifactMetadata> artifactsWithOutSnapshots = new ArrayList<ArtifactMetadata>();
Filter<ArtifactMetadata> artifactListWithOutSnapShots = new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
- repositoryMerger.merge( sourceRepoId, repoid, artifactListWithOutSnapShots );
+ repositoryMerger.merge( metadataRepository, sourceRepoId, repoid, artifactListWithOutSnapShots );
}
private ManagedRepositoryConfiguration getStageRepoConfig( ManagedRepositoryConfiguration repository )
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataResolver;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.storage.maven2.MavenArtifactFacet;
import org.apache.archiva.metadata.repository.storage.maven2.MavenProjectFacet;
import org.apache.archiva.web.xmlrpc.api.SearchService;
private XmlRpcUserRepositories xmlRpcUserRepositories;
- private MetadataResolver metadataResolver;
+ private RepositorySessionFactory repositorySessionFactory;
- private MetadataRepository metadataRepository;
-
- public SearchServiceImpl( XmlRpcUserRepositories xmlRpcUserRepositories, MetadataResolver metadataResolver,
- MetadataRepository metadataRepository, RepositorySearch search )
+ public SearchServiceImpl( XmlRpcUserRepositories xmlRpcUserRepositories,
+ RepositorySessionFactory repositorySessionFactory, RepositorySearch search )
{
this.xmlRpcUserRepositories = xmlRpcUserRepositories;
this.search = search;
- this.metadataResolver = metadataResolver;
- this.metadataRepository = metadataRepository;
+ this.repositorySessionFactory = repositorySessionFactory;
}
@SuppressWarnings( "unchecked" )
results = search.search( "", observableRepos, queryString, limits, null );
- for ( SearchResultHit resultHit : results.getHits() )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- List<String> resultHitVersions = resultHit.getVersions();
- if ( resultHitVersions != null )
+ MetadataResolver metadataResolver = repositorySession.getResolver();
+
+ for ( SearchResultHit resultHit : results.getHits() )
{
- for ( String version : resultHitVersions )
+ List<String> resultHitVersions = resultHit.getVersions();
+ if ( resultHitVersions != null )
{
- Artifact artifact = null;
- for ( String repoId : observableRepos )
+ for ( String version : resultHitVersions )
{
- // slight behaviour change to previous implementation: instead of allocating "jar" when not
- // found in the database, we can rely on the metadata repository to create it on the fly. We
- // just allocate the default packaging if the Maven facet is not found.
- FacetedMetadata model = metadataResolver.resolveProjectVersion( repoId, resultHit.getGroupId(),
- resultHit.getArtifactId(),
- version );
-
- if ( model != null )
+ Artifact artifact = null;
+ for ( String repoId : observableRepos )
{
- String packaging = "jar";
-
- MavenProjectFacet facet = (MavenProjectFacet) model.getFacet( MavenProjectFacet.FACET_ID );
- if ( facet != null && facet.getPackaging() != null )
+ // slight behaviour change to previous implementation: instead of allocating "jar" when not
+ // found in the database, we can rely on the metadata repository to create it on the fly. We
+ // just allocate the default packaging if the Maven facet is not found.
+ FacetedMetadata model = metadataResolver.resolveProjectVersion( repositorySession, repoId,
+ resultHit.getGroupId(),
+ resultHit.getArtifactId(),
+ version );
+
+ if ( model != null )
{
- packaging = facet.getPackaging();
+ String packaging = "jar";
+
+ MavenProjectFacet facet = (MavenProjectFacet) model.getFacet(
+ MavenProjectFacet.FACET_ID );
+ if ( facet != null && facet.getPackaging() != null )
+ {
+ packaging = facet.getPackaging();
+ }
+ artifact = new Artifact( repoId, resultHit.getGroupId(), resultHit.getArtifactId(),
+ version, packaging );
+ break;
}
- artifact = new Artifact( repoId, resultHit.getGroupId(), resultHit.getArtifactId(), version,
- packaging );
- break;
}
- }
- if ( artifact != null )
- {
- artifacts.add( artifact );
+ if ( artifact != null )
+ {
+ artifacts.add( artifact );
+ }
}
}
}
}
+ finally
+ {
+ repositorySession.close();
+ }
return artifacts;
}
{
List<String> observableRepos = xmlRpcUserRepositories.getObservableRepositories();
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+
List<Artifact> results = new ArrayList<Artifact>();
- for ( String repoId : observableRepos )
+ try
{
- for ( ArtifactMetadata artifact : metadataRepository.getArtifactsByChecksum( repoId, checksum ) )
+ MetadataRepository metadataRepository = repositorySession.getRepository();
+
+ for ( String repoId : observableRepos )
{
- // TODO: customise XMLRPC to handle non-Maven artifacts
- MavenArtifactFacet facet = (MavenArtifactFacet) artifact.getFacet( MavenArtifactFacet.FACET_ID );
+ for ( ArtifactMetadata artifact : metadataRepository.getArtifactsByChecksum( repoId, checksum ) )
+ {
+ // TODO: customise XMLRPC to handle non-Maven artifacts
+ MavenArtifactFacet facet = (MavenArtifactFacet) artifact.getFacet( MavenArtifactFacet.FACET_ID );
- results.add( new Artifact( artifact.getRepositoryId(), artifact.getNamespace(), artifact.getProject(),
- artifact.getVersion(), facet != null ? facet.getType() : null ) );
+ results.add( new Artifact( artifact.getRepositoryId(), artifact.getNamespace(),
+ artifact.getProject(), artifact.getVersion(),
+ facet != null ? facet.getType() : null ) );
+ }
}
}
+ finally
+ {
+ repositorySession.close();
+ }
return results;
}
List<Artifact> artifacts = new ArrayList<Artifact>();
List<String> observableRepos = xmlRpcUserRepositories.getObservableRepositories();
- for ( String repoId : observableRepos )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- Collection<String> results = metadataResolver.resolveProjectVersions( repoId, groupId, artifactId );
+ MetadataResolver metadataResolver = repositorySession.getResolver();
- for ( final String version : results )
+ for ( String repoId : observableRepos )
{
- final Artifact artifact = new Artifact( repoId, groupId, artifactId, version, "pom" );
+ Collection<String> results = metadataResolver.resolveProjectVersions( repositorySession, repoId,
+ groupId, artifactId );
- artifacts.add( artifact );
+ for ( final String version : results )
+ {
+ final Artifact artifact = new Artifact( repoId, groupId, artifactId, version, "pom" );
+
+ artifacts.add( artifact );
+ }
}
}
+ finally
+ {
+ repositorySession.close();
+ }
return artifacts;
}
{
List<String> observableRepos = xmlRpcUserRepositories.getObservableRepositories();
- for ( String repoId : observableRepos )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- ProjectVersionMetadata model = metadataResolver.resolveProjectVersion( repoId, groupId, artifactId,
- version );
- if ( model != null )
+ MetadataResolver metadataResolver = repositorySession.getResolver();
+
+ for ( String repoId : observableRepos )
{
- List<Dependency> dependencies = new ArrayList<Dependency>();
- List<org.apache.archiva.metadata.model.Dependency> modelDeps = model.getDependencies();
- for ( org.apache.archiva.metadata.model.Dependency dep : modelDeps )
+ ProjectVersionMetadata model = metadataResolver.resolveProjectVersion( repositorySession, repoId,
+ groupId, artifactId, version );
+ if ( model != null )
{
- Dependency dependency = new Dependency( dep.getGroupId(), dep.getArtifactId(), dep.getVersion(),
- dep.getClassifier(), dep.getType(), dep.getScope() );
- dependencies.add( dependency );
+ List<Dependency> dependencies = new ArrayList<Dependency>();
+ List<org.apache.archiva.metadata.model.Dependency> modelDeps = model.getDependencies();
+ for ( org.apache.archiva.metadata.model.Dependency dep : modelDeps )
+ {
+ Dependency dependency = new Dependency( dep.getGroupId(), dep.getArtifactId(), dep.getVersion(),
+ dep.getClassifier(), dep.getType(), dep.getScope() );
+ dependencies.add( dependency );
+ }
+ return dependencies;
}
- return dependencies;
}
}
+ finally
+ {
+ repositorySession.close();
+ }
throw new Exception( "Artifact does not exist." );
}
List<Artifact> artifacts = new ArrayList<Artifact>();
List<String> observableRepos = xmlRpcUserRepositories.getObservableRepositories();
- for ( String repoId : observableRepos )
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
{
- Collection<ProjectVersionReference> refs = metadataResolver.resolveProjectReferences( repoId, groupId,
- artifactId, version );
- for ( ProjectVersionReference ref : refs )
+ MetadataResolver metadataResolver = repositorySession.getResolver();
+
+ for ( String repoId : observableRepos )
{
- artifacts.add( new Artifact( repoId, ref.getNamespace(), ref.getProjectId(), ref.getProjectVersion(),
- "" ) );
+ Collection<ProjectVersionReference> refs = metadataResolver.resolveProjectReferences( repositorySession,
+ repoId, groupId,
+ artifactId,
+ version );
+ for ( ProjectVersionReference ref : refs )
+ {
+ artifacts.add( new Artifact( repoId, ref.getNamespace(), ref.getProjectId(),
+ ref.getProjectVersion(), "" ) );
+ }
}
}
+ finally
+ {
+ repositorySession.close();
+ }
return artifacts;
}
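As these hunks show, every MetadataResolver call in this service now takes the active RepositorySession as its first argument, so the resolver can reach session-scoped resources. The recurring call shape, sketched:

MetadataResolver metadataResolver = repositorySession.getResolver();
Collection<String> versions = metadataResolver.resolveProjectVersions( repositorySession, repoId,
                                                                       groupId, artifactId );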
import org.apache.archiva.audit.AuditListener;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.filter.Filter;
import org.apache.archiva.metadata.repository.filter.IncludesFilter;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import java.util.List;
import java.util.Map;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
/**
* AdministrationServiceImplTest
*
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
+ RepositorySession repositorySession = mock( RepositorySession.class );
+ when( repositorySession.getRepository() ).thenReturn( metadataRepository );
+
+ RepositorySessionFactory repositorySessionFactory = mock( RepositorySessionFactory.class );
+ when( repositorySessionFactory.createSession() ).thenReturn( repositorySession );
+
listenerControl = MockControl.createControl( RepositoryListener.class );
listener = (RepositoryListener) listenerControl.getMock();
auditListener = (AuditListener) auditListenerControl.getMock();
service = new AdministrationServiceImpl( archivaConfig, repoConsumersUtil, repositoryFactory,
- metadataRepository, repositoryTaskScheduler, Collections.singletonList(
- listener ), repositoryStatisticsManager, repositoryMerger, auditListener );
+ repositorySessionFactory, repositoryTaskScheduler,
+ Collections.singletonList( listener ), repositoryStatisticsManager,
+ repositoryMerger, auditListener );
}
/* Tests for repository consumers */
metadataRepository.removeArtifact( repoContent.getId(), artifact.getNamespace(), artifact.getProject(),
artifact.getVersion(), artifact.getId() );
- listener.deleteArtifact( repoContent.getId(), artifact.getNamespace(), artifact.getProject(),
- artifact.getVersion(), artifact.getId() );
+ listener.deleteArtifact( metadataRepository, repoContent.getId(), artifact.getNamespace(),
+ artifact.getProject(), artifact.getVersion(), artifact.getId() );
listenerControl.setVoidCallable( 1 );
archivaConfigControl.replay();
metadataRepository.removeArtifact( repoContent.getId(), artifact.getNamespace(), artifact.getProject(),
artifact.getVersion(), artifact.getId() );
- listener.deleteArtifact( repoContent.getId(), artifact.getNamespace(), artifact.getProject(),
- artifact.getVersion(), artifact.getId() );
+ listener.deleteArtifact( metadataRepository, repoContent.getId(), artifact.getNamespace(),
+ artifact.getProject(), artifact.getVersion(), artifact.getId() );
listenerControl.setVoidCallable( 1 );
archivaConfigControl.replay();
configControl.expectAndReturn( config.findManagedRepositoryById( "merge-stage" ), staging );
metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( staging.getId() ), sources );
- repositoryMergerControl.expectAndDefaultReturn( repositoryMerger.getConflictingArtifacts( staging.getId(),
+ repositoryMergerControl.expectAndDefaultReturn( repositoryMerger.getConflictingArtifacts( metadataRepository,
+ staging.getId(),
merge.getId() ),
sources );
- repositoryMerger.merge( staging.getId(), merge.getId() );
+ repositoryMerger.merge( metadataRepository, staging.getId(), merge.getId() );
repositoryMergerControl.setVoidCallable();
repositoryTaskSchedulerControl.expectAndReturn( repositoryTaskScheduler.isProcessingRepositoryTask( "merge" ),
false );
configControl.expectAndReturn( config.findManagedRepositoryById( "repo-stage" ), staging );
metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( staging.getId() ), sources );
- repositoryMergerControl.expectAndDefaultReturn( repositoryMerger.getConflictingArtifacts( staging.getId(),
+ repositoryMergerControl.expectAndDefaultReturn( repositoryMerger.getConflictingArtifacts( metadataRepository,
+ staging.getId(),
repo.getId() ),
conflicts );
- repositoryMerger.merge( staging.getId(), repo.getId(), artifactsWithOutConflicts );
+ repositoryMerger.merge( metadataRepository, staging.getId(), repo.getId(), artifactsWithOutConflicts );
repositoryMergerControl.setMatcher( MockControl.ALWAYS_MATCHER );
repositoryMergerControl.setVoidCallable();
repositoryTaskSchedulerControl.expectAndReturn( repositoryTaskScheduler.isProcessingRepositoryTask( "repo" ),
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataResolver;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.storage.maven2.MavenArtifactFacet;
import org.apache.archiva.metadata.repository.storage.maven2.MavenProjectFacet;
import org.apache.archiva.web.xmlrpc.api.SearchService;
import java.util.Date;
import java.util.List;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
/**
* SearchServiceImplTest
*
private static final String TEST_REPO = "test-repo";
+ private RepositorySession repositorySession;
+
@Override
public void setUp()
throws Exception
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
- searchService = new SearchServiceImpl( userRepos, metadataResolver, metadataRepository, search );
+ repositorySession = mock( RepositorySession.class );
+ when( repositorySession.getResolver() ).thenReturn( metadataResolver );
+ when( repositorySession.getRepository() ).thenReturn( metadataRepository );
+ RepositorySessionFactory repositorySessionFactory = mock( RepositorySessionFactory.class );
+ when( repositorySessionFactory.createSession() ).thenReturn( repositorySession );
+
+ searchService = new SearchServiceImpl( userRepos, repositorySessionFactory, search );
}
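This setUp() deliberately mixes the two mocking styles: EasyMock MockControls keep driving the resolver and repository expectations that individual tests verify, while Mockito supplies passive stubs for the new session plumbing. The essentials, as a sketch (field names mirror the test above):

// EasyMock: expectations are still set and verified per test.
MockControl metadataResolverControl = MockControl.createControl( MetadataResolver.class );
MetadataResolver metadataResolver = (MetadataResolver) metadataResolverControl.getMock();

// Mockito: the session and factory only need canned answers, never verification.
RepositorySession repositorySession = mock( RepositorySession.class );
when( repositorySession.getResolver() ).thenReturn( metadataResolver );

RepositorySessionFactory repositorySessionFactory = mock( RepositorySessionFactory.class );
when( repositorySessionFactory.createSession() ).thenReturn( repositorySession );

searchService = new SearchServiceImpl( userRepos, repositorySessionFactory, search );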
// MRM-1230
facet.setPackaging( "war" );
model.addFacet( facet );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( "repo1.mirror",
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession,
+ "repo1.mirror",
ARCHIVA_TEST_GROUP_ID,
"archiva-webapp", "1.0" ),
model );
searchControl.expectAndDefaultReturn( search.search( "", observableRepoIds, "archiva", limits, null ),
results );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( "repo1.mirror",
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession,
+ "repo1.mirror",
ARCHIVA_TEST_GROUP_ID,
ARCHIVA_TEST_ARTIFACT_ID,
"1.0" ), null );
ProjectVersionMetadata model = new ProjectVersionMetadata();
model.setId( "1.0" );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( "public.releases",
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession,
+ "public.releases",
ARCHIVA_TEST_GROUP_ID,
ARCHIVA_TEST_ARTIFACT_ID,
"1.0" ), model );
facet.setPackaging( "jar" );
model.addFacet( facet );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( "repo1.mirror",
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession,
+ "repo1.mirror",
ARCHIVA_TEST_GROUP_ID,
ARCHIVA_TEST_ARTIFACT_ID,
"1.0" ), model );
observableRepoIds.add( "public.releases" );
userReposControl.expectAndReturn( userRepos.getObservableRepositories(), observableRepoIds );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersions( "repo1.mirror",
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersions( repositorySession,
+ "repo1.mirror",
ARCHIVA_TEST_GROUP_ID,
ARCHIVA_TEST_ARTIFACT_ID ),
Arrays.asList( "1.0", "1.1-beta-2", "1.2" ) );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersions( "public.releases",
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersions( repositorySession,
+ "public.releases",
ARCHIVA_TEST_GROUP_ID,
ARCHIVA_TEST_ARTIFACT_ID ),
Arrays.asList( "1.1-beta-1", "1.1", "1.2.1-SNAPSHOT" ) );
model.addDependency( dependency );
userReposControl.expectAndReturn( userRepos.getObservableRepositories(), Collections.singletonList( repoId ) );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repoId, ARCHIVA_TEST_GROUP_ID,
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession, repoId,
+ ARCHIVA_TEST_GROUP_ID,
ARCHIVA_TEST_ARTIFACT_ID,
"1.0" ), model );
String repoId = "repo1.mirror";
userReposControl.expectAndReturn( userRepos.getObservableRepositories(), Collections.singletonList( repoId ) );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repoId, ARCHIVA_TEST_GROUP_ID,
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession, repoId,
+ ARCHIVA_TEST_GROUP_ID,
ARCHIVA_TEST_ARTIFACT_ID,
"1.0" ), null );
dependeeModels.add( dependeeModel );
userReposControl.expectAndReturn( userRepos.getObservableRepositories(), observableRepoIds );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( repoId,
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( repositorySession, repoId,
ARCHIVA_TEST_GROUP_ID,
ARCHIVA_TEST_ARTIFACT_ID,
"1.0" ), dependeeModels );
// no longer differentiating between a project not being present and a project that is present but with
// no references. If it is later determined to be needed, we will need to modify the metadata content repository
userReposControl.expectAndReturn( userRepos.getObservableRepositories(), observableRepoIds );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( "repo1.mirror",
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( repositorySession,
+ "repo1.mirror",
ARCHIVA_TEST_GROUP_ID,
ARCHIVA_TEST_ARTIFACT_ID,
"1.0" ),
Collections.<ProjectVersionReference>emptyList() );
- metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( "public.releases",
+ metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( repositorySession,
+ "public.releases",
ARCHIVA_TEST_GROUP_ID,
ARCHIVA_TEST_ARTIFACT_ID,
"1.0" ),
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.repository.filter.ExcludesFilter;
import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
+import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
+import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException;
+import org.apache.archiva.repository.events.RepositoryListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.List;
/**
+ * Default implementation of the metadata resolver API. At present it will handle updating the content repository
+ * from new or changed information in the model and artifacts from the repository storage.
+ *
+ * This is a singleton component to allow an alternate implementation to be provided. It is intended to be the same
+ * system-wide for the whole content repository instead of on a per-managed-repository basis. Therefore, the session is
+ * passed in as an argument to obtain any necessary resources, rather than the class being instantiated within the
+ * session in the context of a single managed repository's resolution needs.
+ *
+ * Note that the caller is responsible for the session, such as closing and saving (which is implied by the resolver
+ * being obtained from within the session). The {@link RepositorySession#markDirty()} method is used as a hint to ensure
+ * that the session knows we've made changes at close. We cannot ensure the changes will be persisted if the caller
+ * chooses to revert first. This is preferable to storing the metadata immediately - a separate session would require
+ * having a bi-directional link with the session factory, and saving the existing session might unknowingly persist
+ * other changes made by the caller.
+ *
* @plexus.component role="org.apache.archiva.metadata.repository.MetadataResolver"
*/
public class DefaultMetadataResolver
implements MetadataResolver
{
- /**
- * @plexus.requirement
- */
- private MetadataRepository metadataRepository;
-
/**
* FIXME: this needs to be configurable based on storage type - and could also be instantiated per repo. Change to a
- * factory.
+ * factory, and perhaps retrieve from the session. We should avoid creating one per request, however.
*
* TODO: Also need to accommodate availability of proxy module
* ... could be a different type since we need methods to modify the storage metadata, which would also allow more
*/
private RepositoryStorage repositoryStorage;
+ /**
+ * @plexus.requirement role="org.apache.archiva.repository.events.RepositoryListener"
+ */
+ private List<RepositoryListener> listeners;
+
private static final Logger log = LoggerFactory.getLogger( DefaultMetadataResolver.class );
- public ProjectVersionMetadata resolveProjectVersion( String repoId, String namespace, String projectId,
- String projectVersion )
+ public ProjectVersionMetadata resolveProjectVersion( RepositorySession session, String repoId, String namespace,
+ String projectId, String projectVersion )
throws MetadataResolutionException
{
+ MetadataRepository metadataRepository = session.getRepository();
+
ProjectVersionMetadata metadata = metadataRepository.getProjectVersion( repoId, namespace, projectId,
projectVersion );
// TODO: do we want to detect changes as well by comparing timestamps? isProjectVersionNewerThan(updated)
// may then work here and be more efficient than always trying again)
if ( metadata == null || metadata.isIncomplete() )
{
- metadata = repositoryStorage.readProjectVersionMetadata( repoId, namespace, projectId, projectVersion );
- if ( metadata != null )
+ try
{
+ metadata = repositoryStorage.readProjectVersionMetadata( repoId, namespace, projectId, projectVersion );
+
if ( log.isDebugEnabled() )
{
log.debug( "Resolved project version metadata from storage: " + metadata );
}
try
{
+ for ( RepositoryListener listener : listeners )
+ {
+ listener.addArtifact( session, repoId, namespace, projectId, metadata );
+ }
metadataRepository.updateProjectVersion( repoId, namespace, projectId, metadata );
}
catch ( MetadataRepositoryException e )
{
log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
}
+
+ session.markDirty();
+ }
+ catch ( RepositoryStorageMetadataInvalidException e )
+ {
+ for ( RepositoryListener listener : listeners )
+ {
+ listener.addArtifactProblem( session, repoId, namespace, projectId, projectVersion, e );
+ }
+ throw new MetadataResolutionException( e.getMessage(), e );
+ }
+ catch ( RepositoryStorageMetadataNotFoundException e )
+ {
+ for ( RepositoryListener listener : listeners )
+ {
+ listener.addArtifactProblem( session, repoId, namespace, projectId, projectVersion, e );
+ }
+ // no need to rethrow - return null
}
}
return metadata;
}
- public Collection<ProjectVersionReference> resolveProjectReferences( String repoId, String namespace,
- String projectId, String projectVersion )
+ public Collection<ProjectVersionReference> resolveProjectReferences( RepositorySession session, String repoId,
+ String namespace, String projectId,
+ String projectVersion )
throws MetadataResolutionException
{
// TODO: is this assumption correct? could a storage mech. actually know all references in a non-Maven scenario?
// not passed to the storage mechanism as resolving references would require iterating all artifacts
+ MetadataRepository metadataRepository = session.getRepository();
return metadataRepository.getProjectReferences( repoId, namespace, projectId, projectVersion );
}
- public Collection<String> resolveRootNamespaces( String repoId )
+ public Collection<String> resolveRootNamespaces( RepositorySession session, String repoId )
throws MetadataResolutionException
{
+ MetadataRepository metadataRepository = session.getRepository();
Collection<String> namespaces = metadataRepository.getRootNamespaces( repoId );
Collection<String> storageNamespaces = repositoryStorage.listRootNamespaces( repoId, new ExcludesFilter<String>(
namespaces ) );
log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
}
}
+ session.markDirty();
+
namespaces = new ArrayList<String>( namespaces );
namespaces.addAll( storageNamespaces );
}
return namespaces;
}
- public Collection<String> resolveNamespaces( String repoId, String namespace )
+ public Collection<String> resolveNamespaces( RepositorySession session, String repoId, String namespace )
throws MetadataResolutionException
{
+ MetadataRepository metadataRepository = session.getRepository();
Collection<String> namespaces = metadataRepository.getNamespaces( repoId, namespace );
Collection<String> exclusions = new ArrayList<String>( namespaces );
exclusions.addAll( metadataRepository.getProjects( repoId, namespace ) );
log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
}
}
+ session.markDirty();
+
namespaces = new ArrayList<String>( namespaces );
namespaces.addAll( storageNamespaces );
}
return namespaces;
}
- public Collection<String> resolveProjects( String repoId, String namespace )
+ public Collection<String> resolveProjects( RepositorySession session, String repoId, String namespace )
throws MetadataResolutionException
{
+ MetadataRepository metadataRepository = session.getRepository();
Collection<String> projects = metadataRepository.getProjects( repoId, namespace );
Collection<String> exclusions = new ArrayList<String>( projects );
exclusions.addAll( metadataRepository.getNamespaces( repoId, namespace ) );
}
}
}
+ session.markDirty();
+
projects = new ArrayList<String>( projects );
projects.addAll( storageProjects );
}
return projects;
}
- public Collection<String> resolveProjectVersions( String repoId, String namespace, String projectId )
+ public Collection<String> resolveProjectVersions( RepositorySession session, String repoId, String namespace,
+ String projectId )
throws MetadataResolutionException
{
+ MetadataRepository metadataRepository = session.getRepository();
Collection<String> projectVersions = metadataRepository.getProjectVersions( repoId, namespace, projectId );
Collection<String> storageProjectVersions = repositoryStorage.listProjectVersions( repoId, namespace, projectId,
new ExcludesFilter<String>(
namespace,
projectId,
projectVersion );
- if ( versionMetadata != null )
+ for ( RepositoryListener listener : listeners )
{
- metadataRepository.updateProjectVersion( repoId, namespace, projectId, versionMetadata );
+ listener.addArtifact( session, repoId, namespace, projectId, versionMetadata );
}
+
+ metadataRepository.updateProjectVersion( repoId, namespace, projectId, versionMetadata );
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
}
- catch ( MetadataResolutionException e )
+ catch ( RepositoryStorageMetadataInvalidException e )
{
log.warn( "Not update project in metadata repository due to an error resolving it from storage: " +
e.getMessage() );
+
+ for ( RepositoryListener listener : listeners )
+ {
+ listener.addArtifactProblem( session, repoId, namespace, projectId, projectVersion, e );
+ }
}
- catch ( MetadataRepositoryException e )
+ catch ( RepositoryStorageMetadataNotFoundException e )
{
- log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
+ for ( RepositoryListener listener : listeners )
+ {
+ listener.addArtifactProblem( session, repoId, namespace, projectId, projectVersion, e );
+ }
}
}
+ session.markDirty();
+
projectVersions = new ArrayList<String>( projectVersions );
projectVersions.addAll( storageProjectVersions );
}
return projectVersions;
}
- public Collection<ArtifactMetadata> resolveArtifacts( String repoId, String namespace, String projectId,
- String projectVersion )
+ public Collection<ArtifactMetadata> resolveArtifacts( RepositorySession session, String repoId, String namespace,
+ String projectId, String projectVersion )
throws MetadataResolutionException
{
+ MetadataRepository metadataRepository = session.getRepository();
Collection<ArtifactMetadata> artifacts = metadataRepository.getArtifacts( repoId, namespace, projectId,
projectVersion );
+ ExcludesFilter<String> filter = new ExcludesFilter<String>( createArtifactIdList( artifacts ) );
Collection<ArtifactMetadata> storageArtifacts = repositoryStorage.readArtifactsMetadata( repoId, namespace,
projectId,
projectVersion,
- new ExcludesFilter<String>(
- createArtifactIdList(
- artifacts ) ) );
+ filter );
if ( storageArtifacts != null && !storageArtifacts.isEmpty() )
{
if ( log.isDebugEnabled() )
log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
}
}
+ session.markDirty();
+
artifacts = new ArrayList<ArtifactMetadata>( artifacts );
artifacts.addAll( storageArtifacts );
}
public interface MetadataResolver
{
- ProjectVersionMetadata resolveProjectVersion( String repoId, String namespace, String projectId,
- String projectVersion )
+ ProjectVersionMetadata resolveProjectVersion( RepositorySession session, String repoId, String namespace,
+ String projectId, String projectVersion )
throws MetadataResolutionException;
/**
* @param projectVersion the version of the project to get references to
* @return a list of project references
*/
- Collection<ProjectVersionReference> resolveProjectReferences( String repoId, String namespace, String projectId,
+ Collection<ProjectVersionReference> resolveProjectReferences( RepositorySession session, String repoId,
+ String namespace, String projectId,
String projectVersion )
throws MetadataResolutionException;
- Collection<String> resolveRootNamespaces( String repoId )
+ Collection<String> resolveRootNamespaces( RepositorySession session, String repoId )
throws MetadataResolutionException;
- Collection<String> resolveNamespaces( String repoId, String namespace )
+ Collection<String> resolveNamespaces( RepositorySession session, String repoId, String namespace )
throws MetadataResolutionException;
- Collection<String> resolveProjects( String repoId, String namespace )
+ Collection<String> resolveProjects( RepositorySession session, String repoId, String namespace )
throws MetadataResolutionException;
- Collection<String> resolveProjectVersions( String repoId, String namespace, String projectId )
+ Collection<String> resolveProjectVersions( RepositorySession session, String repoId, String namespace,
+ String projectId )
throws MetadataResolutionException;
- Collection<ArtifactMetadata> resolveArtifacts( String repoId, String namespace, String projectId,
- String projectVersion )
+ Collection<ArtifactMetadata> resolveArtifacts( RepositorySession session, String repoId, String namespace,
+ String projectId, String projectVersion )
throws MetadataResolutionException;
}
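For illustration only, a minimal sketch of the new calling convention: the caller owns the session and threads it through every resolver method. The injected repositorySessionFactory field and the repository and artifact coordinates below are assumptions for the sketch, not part of the patch.

    RepositorySession session = repositorySessionFactory.createSession();
    try
    {
        MetadataResolver resolver = session.getResolver();
        // the session gives the resolver access to the metadata repository, and the
        // resolver marks the session dirty if it updates content during resolution
        Collection<String> versions = resolver.resolveProjectVersions( session, "internal",
                                                                       "org.apache.archiva", "archiva-common" );
    }
    finally
    {
        session.close(); // saves if the session was marked dirty
    }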
--- /dev/null
+package org.apache.archiva.metadata.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * The repository session provides a single interface for accessing Archiva repositories. It provides access to three
+ * resources:
+ * <ul>
+ * <li>{@link MetadataRepository} - the metadata content repository for read/write access, in its current state (no
+ * remote resources will be retrieved in the process)</li>
+ * <li>{@link MetadataResolver} - access to resolve metadata content, accommodating metadata not yet stored or up to
+ * date in the content repository (e.g. virtualised repositories, remote proxied content, or metadata in a different
+ * model format in the repository storage)</li>
+ * <li>{@link org.apache.archiva.metadata.repository.storage.RepositoryStorage} - access to the physical storage of a
+ * repository and the source artifacts and project models</li>
+ * </ul>
+ */
+public class RepositorySession
+{
+ private final MetadataRepository repository;
+
+ private final MetadataResolver resolver;
+
+ private boolean dirty;
+
+ // FIXME: include storage here too - perhaps a factory based on repository ID, or one per type to retrieve and
+ // operate on a given repo within the storage API
+
+ public RepositorySession( MetadataRepository metadataRepository, MetadataResolver resolver )
+ {
+ this.repository = metadataRepository;
+ this.resolver = resolver;
+ }
+
+ public MetadataRepository getRepository()
+ {
+ return repository;
+ }
+
+ public MetadataResolver getResolver()
+ {
+ return resolver;
+ }
+
+ public void save()
+ {
+ // FIXME
+
+ dirty = false;
+ }
+
+ public void revert()
+ {
+ // FIXME
+
+ dirty = false;
+ }
+
+ /**
+ * Close the session. Required to be called for all open sessions to ensure resources are properly released.
+ * If the session has been marked as dirty, it will be saved. This may save partial changes in the case of a typical
+ * <code>try { ... } finally { ... }</code> approach - if this is a problem, ensure you revert changes when an
+ * exception occurs.
+ */
+ public void close()
+ {
+ if ( dirty )
+ {
+ save();
+ }
+
+ // FIXME
+ }
+
+ public void markDirty()
+ {
+ this.dirty = true;
+ }
+}
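A usage note on the save-on-close behaviour described above: a caller that wants all-or-nothing semantics should revert before the finally block runs. A minimal sketch, assuming an injected repositorySessionFactory and a facet to store (here an AuditEvent, mirroring DefaultAuditManager later in this patch):

    RepositorySession session = repositorySessionFactory.createSession();
    try
    {
        session.getRepository().addMetadataFacet( repoId, event );
        session.save();
    }
    catch ( MetadataRepositoryException e )
    {
        // discard partial changes so that close() does not save them
        session.revert();
        throw e;
    }
    finally
    {
        session.close();
    }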
--- /dev/null
+package org.apache.archiva.metadata.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+public interface RepositorySessionFactory
+{
+ RepositorySession createSession();
+}
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
-import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.filter.Filter;
import java.util.Collection;
// FIXME: we should drop the repoId parameters and attach this to an instance of a repository storage
public interface RepositoryStorage
{
- ProjectMetadata readProjectMetadata( String repoId, String namespace, String projectId )
- throws MetadataResolutionException;
+ ProjectMetadata readProjectMetadata( String repoId, String namespace, String projectId );
ProjectVersionMetadata readProjectVersionMetadata( String repoId, String namespace, String projectId,
String projectVersion )
- throws MetadataResolutionException;
+ throws RepositoryStorageMetadataInvalidException, RepositoryStorageMetadataNotFoundException;
Collection<String> listRootNamespaces( String repoId, Filter<String> filter );
--- /dev/null
+package org.apache.archiva.metadata.repository.storage;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+public class RepositoryStorageMetadataException
+ extends Exception
+{
+ private final String id;
+
+ protected RepositoryStorageMetadataException( String id, String msg )
+ {
+ super( msg );
+
+ this.id = id;
+ }
+
+ protected RepositoryStorageMetadataException( String id, String msg, Throwable throwable )
+ {
+ super( msg, throwable );
+
+ this.id = id;
+ }
+
+ public String getId()
+ {
+ return id;
+ }
+}
--- /dev/null
+package org.apache.archiva.metadata.repository.storage;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+public class RepositoryStorageMetadataInvalidException
+ extends RepositoryStorageMetadataException
+{
+ public RepositoryStorageMetadataInvalidException( String id, String msg )
+ {
+ super( id, msg );
+ }
+
+ public RepositoryStorageMetadataInvalidException( String id, String msg, Throwable throwable )
+ {
+ super( id, msg, throwable );
+ }
+}
--- /dev/null
+package org.apache.archiva.metadata.repository.storage;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+public class RepositoryStorageMetadataNotFoundException
+ extends RepositoryStorageMetadataException
+{
+ public RepositoryStorageMetadataNotFoundException( String msg )
+ {
+ super( "missing-pom", msg );
+ }
+}
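These two subclasses give RepositoryStorage a typed contract that DefaultMetadataResolver (earlier in this patch) consumes: not-found is recoverable, while invalid metadata is fatal to the resolution. A condensed sketch of that handling, with the surrounding fields and variables assumed:

    ProjectVersionMetadata metadata;
    try
    {
        metadata = repositoryStorage.readProjectVersionMetadata( repoId, namespace, projectId, projectVersion );
    }
    catch ( RepositoryStorageMetadataNotFoundException e )
    {
        // recoverable: listeners are notified via addArtifactProblem, and null is returned
        metadata = null;
    }
    catch ( RepositoryStorageMetadataInvalidException e )
    {
        // fatal for this resolution: wrap and rethrow
        throw new MetadataResolutionException( e.getMessage(), e );
    }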
* under the License.
*/
+import org.apache.archiva.metadata.model.ProjectVersionMetadata;
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataException;
+
/**
- * Listen to events on the repository. This class is a stopgap
- * refactoring measure until an event bus is in place to handle
+ * Listen to events on the repository. This class is a stopgap
+ * refactoring measure until an event bus is in place to handle
* generic events such as these.
+ *
+ * This assumes that the events occur before the action has completed, though the listeners currently have no
+ * mechanism to prevent the action from occurring or to guarantee that it will happen.
+ *
+ * FIXME: this needs to be made more permanent since 3rd party plugins will depend on it heavily
*/
-public interface RepositoryListener
+public interface RepositoryListener
{
- /**
- * Event for the deletion of a given artifact.
- */
- void deleteArtifact( String repositoryId, String namespace, String project, String version, String id );
+ void deleteArtifact( MetadataRepository metadataRepository, String repositoryId, String namespace, String project,
+ String version, String id );
+
+ void addArtifact( RepositorySession session, String repoId, String namespace, String projectId,
+ ProjectVersionMetadata metadata );
+
+ // FIXME: this would be better as a "processException" method, with the event information captured in a single class
+ void addArtifactProblem( RepositorySession session, String repoId, String namespace, String projectId,
+ String projectVersion, RepositoryStorageMetadataException exception );
}
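To illustrate the widened contract, a hypothetical logging-only listener (not part of the patch) would now implement all three callbacks:

    import org.apache.archiva.metadata.model.ProjectVersionMetadata;
    import org.apache.archiva.metadata.repository.MetadataRepository;
    import org.apache.archiva.metadata.repository.RepositorySession;
    import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingRepositoryListener
        implements RepositoryListener
    {
        private static final Logger log = LoggerFactory.getLogger( LoggingRepositoryListener.class );

        public void deleteArtifact( MetadataRepository metadataRepository, String repositoryId, String namespace,
                                    String project, String version, String id )
        {
            log.info( "deleted " + namespace + ":" + project + ":" + version + " (" + id + ") from " + repositoryId );
        }

        public void addArtifact( RepositorySession session, String repoId, String namespace, String projectId,
                                 ProjectVersionMetadata metadata )
        {
            // the session is supplied so a listener can read or write metadata within the caller's transaction
            log.info( "added " + namespace + ":" + projectId + ":" + metadata.getId() + " to " + repoId );
        }

        public void addArtifactProblem( RepositorySession session, String repoId, String namespace, String projectId,
                                        String projectVersion, RepositoryStorageMetadataException exception )
        {
            log.warn( "problem with " + namespace + ":" + projectId + ":" + projectVersion + " in " + repoId + ": "
                          + exception.getMessage() );
        }
    }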
* under the License.
*/
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import java.util.Collection;
public interface AuditManager
{
- List<AuditEvent> getMostRecentAuditEvents( List<String> repositoryIds )
+ List<AuditEvent> getMostRecentAuditEvents( MetadataRepository metadataRepository, List<String> repositoryIds )
throws MetadataRepositoryException;
- void addAuditEvent( AuditEvent event )
+ void addAuditEvent( MetadataRepository repository, AuditEvent event )
throws MetadataRepositoryException;
- void deleteAuditEvents( String repositoryId )
+ void deleteAuditEvents( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException;
/**
* Get all audit events from the given repositories that match a certain range
*
- * @param repositoryIds the repositories to retrieve events for
- * @param startTime find events only after this time
- * @param endTime find events only before this time
+ * @param metadataRepository the metadata repository to read events from
+ * @param repositoryIds the repositories to retrieve events for
+ * @param startTime find events only after this time
+ * @param endTime find events only before this time
* @return the list of events found
*/
- List<AuditEvent> getAuditEventsInRange( Collection<String> repositoryIds, Date startTime, Date endTime )
+ List<AuditEvent> getAuditEventsInRange( MetadataRepository metadataRepository, Collection<String> repositoryIds,
+ Date startTime, Date endTime )
throws MetadataRepositoryException;
/**
* Get all audit events from the given repositories that match a certain range and resource pattern
*
- * @param repositoryIds the repositories to retrieve events for
- * @param resourcePattern find all events whose resources start with this string
- * @param startTime find events only after this time
- * @param endTime find events only before this time
+ * @param metadataRepository the metadata repository to read events from
+ * @param repositoryIds the repositories to retrieve events for
+ * @param resourcePattern find all events whose resources start with this string
+ * @param startTime find events only after this time
+ * @param endTime find events only before this time
* @return the list of events found
*/
- List<AuditEvent> getAuditEventsInRange( Collection<String> repositoryIds, String resourcePattern, Date startTime,
- Date endTime )
+ List<AuditEvent> getAuditEventsInRange( MetadataRepository metadataRepository, Collection<String> repositoryIds,
+ String resourcePattern, Date startTime, Date endTime )
throws MetadataRepositoryException;
}
public class DefaultAuditManager
implements AuditManager
{
- /**
- * @plexus.requirement
- */
- private MetadataRepository metadataRepository;
-
- private static final int NUM_RECENT_REVENTS = 10;
+ private static final int NUM_RECENT_EVENTS = 10;
private static final Logger log = LoggerFactory.getLogger( DefaultAuditManager.class );
private static final TimeZone UTC_TIME_ZONE = TimeZone.getTimeZone( "UTC" );
- public List<AuditEvent> getMostRecentAuditEvents( List<String> repositoryIds )
+ public List<AuditEvent> getMostRecentAuditEvents( MetadataRepository metadataRepository,
+ List<String> repositoryIds )
throws MetadataRepositoryException
{
// TODO: consider a more efficient implementation that directly gets the last ten from the content repository
}
}
Collections.sort( records );
- records = records.subList( 0, records.size() < NUM_RECENT_REVENTS ? records.size() : NUM_RECENT_REVENTS );
+ records = records.subList( 0, records.size() < NUM_RECENT_EVENTS ? records.size() : NUM_RECENT_EVENTS );
List<AuditEvent> events = new ArrayList<AuditEvent>( records.size() );
for ( AuditRecord record : records )
return events;
}
- public void addAuditEvent( AuditEvent event )
+ public void addAuditEvent( MetadataRepository repository, AuditEvent event )
throws MetadataRepositoryException
{
// ignore those with no repository - they will still be logged to the textual audit log
if ( event.getRepositoryId() != null )
{
- metadataRepository.addMetadataFacet( event.getRepositoryId(), event );
+ repository.addMetadataFacet( event.getRepositoryId(), event );
}
}
- public void deleteAuditEvents( String repositoryId )
+ public void deleteAuditEvents( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException
{
metadataRepository.removeMetadataFacets( repositoryId, AuditEvent.FACET_ID );
}
- public List<AuditEvent> getAuditEventsInRange( Collection<String> repositoryIds, Date startTime, Date endTime )
+ public List<AuditEvent> getAuditEventsInRange( MetadataRepository metadataRepository,
+ Collection<String> repositoryIds, Date startTime, Date endTime )
throws MetadataRepositoryException
{
- return getAuditEventsInRange( repositoryIds, null, startTime, endTime );
+ return getAuditEventsInRange( metadataRepository, repositoryIds, null, startTime, endTime );
}
- public List<AuditEvent> getAuditEventsInRange( Collection<String> repositoryIds, String resource, Date startTime,
+ public List<AuditEvent> getAuditEventsInRange( MetadataRepository metadataRepository,
+ Collection<String> repositoryIds, String resource, Date startTime,
Date endTime )
throws MetadataRepositoryException
{
return fmt;
}
- public void setMetadataRepository( MetadataRepository metadataRepository )
- {
- this.metadataRepository = metadataRepository;
- }
-
private static final class AuditRecord
implements Comparable<AuditRecord>
{
*/
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
*/
private AuditManager auditManager;
+ /**
+ * FIXME: this could be multiple implementations and needs to be configured. It also starts a session separate from
+ * that of the originator of the audit event, which we may rather want to pass through.
+ *
+ * @plexus.requirement
+ */
+ private RepositorySessionFactory repositorySessionFactory;
+
public void auditEvent( AuditEvent event )
{
// for now we only log upload events, some of the others are quite noisy
if ( event.getAction().equals( AuditEvent.CREATE_FILE ) || event.getAction().equals( AuditEvent.UPLOAD_FILE ) ||
event.getAction().equals( AuditEvent.MERGING_REPOSITORIES ) )
{
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
- auditManager.addAuditEvent( event );
+ auditManager.addAuditEvent( repositorySession.getRepository(), event );
+ repositorySession.save();
}
catch ( MetadataRepositoryException e )
{
log.warn( "Unable to write audit event to repository: " + e.getMessage(), e );
}
+ finally
+ {
+ repositorySession.close();
+ }
}
}
}
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
- auditManager.setMetadataRepository( metadataRepository );
ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
repository.setId( TEST_REPO_ID );
}
metadataRepositoryControl.replay();
- List<AuditEvent> events = auditManager.getMostRecentAuditEvents( Collections.singletonList( TEST_REPO_ID ) );
+ List<AuditEvent> events = auditManager.getMostRecentAuditEvents( metadataRepository, Collections.singletonList(
+ TEST_REPO_ID ) );
assertNotNull( events );
assertEquals( numEvents - 1, events.size() );
int expectedTimestampCounter = numEvents - 1;
}
metadataRepositoryControl.replay();
- List<AuditEvent> events = auditManager.getMostRecentAuditEvents( Collections.singletonList( TEST_REPO_ID ) );
+ List<AuditEvent> events = auditManager.getMostRecentAuditEvents( metadataRepository, Collections.singletonList(
+ TEST_REPO_ID ) );
assertNotNull( events );
assertEquals( numEvents, events.size() );
int expectedTimestampCounter = numEvents - 1;
}
metadataRepositoryControl.replay();
- events = auditManager.getMostRecentAuditEvents( Arrays.asList( TEST_REPO_ID, TEST_REPO_ID_2 ) );
+ events = auditManager.getMostRecentAuditEvents( metadataRepository, Arrays.asList( TEST_REPO_ID,
+ TEST_REPO_ID_2 ) );
assertNotNull( events );
assertEquals( numEvents - 1, events.size() );
int expectedTimestampCounter = numEvents - 1;
Collections.emptyList() );
metadataRepositoryControl.replay();
- assertTrue( auditManager.getMostRecentAuditEvents( Collections.singletonList( TEST_REPO_ID ) ).isEmpty() );
+ assertTrue( auditManager.getMostRecentAuditEvents( metadataRepository, Collections.singletonList(
+ TEST_REPO_ID ) ).isEmpty() );
metadataRepositoryControl.verify();
}
metadataRepositoryControl.replay();
- auditManager.addAuditEvent( event );
+ auditManager.addAuditEvent( metadataRepository, event );
metadataRepositoryControl.verify();
}
metadataRepositoryControl.replay();
- auditManager.addAuditEvent( event );
+ auditManager.addAuditEvent( metadataRepository, event );
metadataRepositoryControl.verify();
}
metadataRepositoryControl.replay();
- auditManager.deleteAuditEvents( TEST_REPO_ID );
+ auditManager.deleteAuditEvents( metadataRepository, TEST_REPO_ID );
metadataRepositoryControl.verify();
}
metadataRepositoryControl.replay();
- List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
- new Date( current.getTime() - 4000 ), new Date(
- current.getTime() - 2000 ) );
+ List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
+ TEST_REPO_ID ), new Date( current.getTime() - 4000 ), new Date( current.getTime() - 2000 ) );
assertEquals( 1, events.size() );
assertTestEvent( events.get( 0 ), TIMESTAMP_FORMAT.format( expectedTimestamp ), expectedEvent.getResource() );
metadataRepositoryControl.replay();
- List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
- new Date( current.getTime() - 4000 ), current );
+ List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
+ TEST_REPO_ID ), new Date( current.getTime() - 4000 ), current );
assertEquals( 2, events.size() );
assertTestEvent( events.get( 0 ), TIMESTAMP_FORMAT.format( ts3 ), expectedEvent3.getResource() );
metadataRepositoryControl.replay();
- List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
- new Date( current.getTime() - 20000 ), new Date(
- current.getTime() - 2000 ) );
+ List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
+ TEST_REPO_ID ), new Date( current.getTime() - 20000 ), new Date( current.getTime() - 2000 ) );
assertEquals( 2, events.size() );
assertTestEvent( events.get( 0 ), TIMESTAMP_FORMAT.format( expectedTimestamp ), expectedEvent2.getResource() );
metadataRepositoryControl.replay();
- List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
- new Date( current.getTime() - 20000 ), current );
+ List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
+ TEST_REPO_ID ), new Date( current.getTime() - 20000 ), current );
assertEquals( 3, events.size() );
assertTestEvent( events.get( 0 ), TIMESTAMP_FORMAT.format( ts3 ), expectedEvent3.getResource() );
metadataRepositoryControl.replay();
- List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
- TEST_RESOURCE_BASE, new Date(
- current.getTime() - 20000 ), current );
+ List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
+ TEST_REPO_ID ), TEST_RESOURCE_BASE, new Date( current.getTime() - 20000 ), current );
assertEquals( 2, events.size() );
assertTestEvent( events.get( 0 ), TIMESTAMP_FORMAT.format( ts3 ), expectedEvent3.getResource() );
metadataRepositoryControl.replay();
- List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ), "foo",
- new Date( current.getTime() - 20000 ), current );
+ List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
+ TEST_REPO_ID ), "foo", new Date( current.getTime() - 20000 ), current );
assertEquals( 0, events.size() );
metadataRepositoryControl.replay();
- List<AuditEvent> events = auditManager.getAuditEventsInRange( Arrays.asList( TEST_REPO_ID, TEST_REPO_ID_2 ),
+ List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Arrays.asList( TEST_REPO_ID,
+ TEST_REPO_ID_2 ),
new Date( current.getTime() - 20000 ), current );
assertEquals( 3, events.size() );
metadataRepositoryControl.replay();
- List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
- new Date( current.getTime() - 20000 ), new Date(
- current.getTime() - 16000 ) );
+ List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
+ TEST_REPO_ID ), new Date( current.getTime() - 20000 ), new Date( current.getTime() - 16000 ) );
assertEquals( 0, events.size() );
<artifactId>maven2-repository</artifactId>
<name>Maven 2.x Repository Support</name>
<dependencies>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>problem-reports</artifactId>
- </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>metadata-model</artifactId>
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.MetadataResolver;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
import org.apache.archiva.metadata.repository.storage.maven2.RepositoryModelResolver;
import org.apache.commons.lang.StringUtils;
private ModelBuilder builder;
/**
+ * TODO: can have other types, and this might eventually come through from the main request
+ *
* @plexus.requirement
*/
- private MetadataResolver metadataResolver;
+ private RepositorySessionFactory repositorySessionFactory;
/**
* @plexus.requirement role-hint="maven2"
Set<Artifact> dependencyArtifacts = createArtifacts( model, null );
- ArtifactMetadataSource metadataSource = new MetadataArtifactMetadataSource( repositoryIds );
+ RepositorySession repositorySession = repositorySessionFactory.createSession();
+ try
+ {
+ ArtifactMetadataSource metadataSource = new MetadataArtifactMetadataSource( repositoryIds,
+ repositorySession );
- // Note that we don't permit going to external repositories. We don't need to pass in a local and remote
- // since our metadata source has control over them
- collector.collect( dependencyArtifacts, projectArtifact, managedVersions, null, null, metadataSource, null,
- Collections.singletonList( listener ) );
+ // Note that we don't permit going to external repositories. We don't need to pass in a local and remote
+ // since our metadata source has control over them
+ collector.collect( dependencyArtifacts, projectArtifact, managedVersions, null, null, metadataSource,
+ null, Collections.singletonList( listener ) );
+ }
+ finally
+ {
+ repositorySession.close();
+ }
DependencyNode rootNode = listener.getRootNode();
{
private final List<String> repositoryIds;
- public MetadataArtifactMetadataSource( List<String> repositoryIds )
+ private final RepositorySession session;
+
+ private final MetadataResolver resolver;
+
+ public MetadataArtifactMetadataSource( List<String> repositoryIds, RepositorySession session )
{
this.repositoryIds = repositoryIds;
+ this.session = session;
+ resolver = this.session.getResolver();
}
// modified version from MavenMetadataSource to work with the simpler environment
Collection<String> projectVersions;
try
{
- projectVersions = metadataResolver.resolveProjectVersions( repoId, artifact.getGroupId(),
- artifact.getArtifactId() );
+ projectVersions = resolver.resolveProjectVersions( session, repoId, artifact.getGroupId(),
+ artifact.getArtifactId() );
}
catch ( MetadataResolutionException e )
{
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
-import org.apache.archiva.metadata.repository.MetadataRepository;
-import org.apache.archiva.metadata.repository.MetadataRepositoryException;
-import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.filter.Filter;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
-import org.apache.archiva.reports.RepositoryProblemFacet;
+import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
+import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import java.util.List;
/**
+ * Maven 2 repository format storage implementation. This class currently takes parameters to indicate the repository to
+ * deal with rather than being instantiated per-repository.
+ * FIXME: instantiate one per repository and allocate permanently from a factory (which can be obtained within the session).
+ * TODO: finish Maven 1 implementation to prove this API
+ *
+ * The session is passed in as an argument to obtain any necessary resources, rather than the class being instantiated
+ * within the session in the context of a single managed repository's resolution needs.
+ *
* @plexus.component role="org.apache.archiva.metadata.repository.storage.RepositoryStorage" role-hint="maven2"
*/
public class Maven2RepositoryStorage
*/
private RepositoryPathTranslator pathTranslator;
- /**
- * @plexus.requirement
- */
- private MetadataRepository metadataRepository;
-
private final static Logger log = LoggerFactory.getLogger( Maven2RepositoryStorage.class );
private static final String METADATA_FILENAME = "maven-metadata.xml";
- private static final String PROBLEM_MISSING_POM = "missing-pom";
-
- private static final String PROBLEM_INVALID_POM = "invalid-pom";
-
- private static final String PROBLEM_MISLOCATED_POM = "mislocated-pom";
-
- private static final List<String> POTENTIAL_PROBLEMS = Arrays.asList( PROBLEM_INVALID_POM, PROBLEM_MISSING_POM,
- PROBLEM_MISLOCATED_POM );
-
public ProjectMetadata readProjectMetadata( String repoId, String namespace, String projectId )
{
// TODO: could natively implement the "shared model" concept from the browse action to avoid needing it there?
public ProjectVersionMetadata readProjectVersionMetadata( String repoId, String namespace, String projectId,
String projectVersion )
- throws MetadataResolutionException
+ throws RepositoryStorageMetadataNotFoundException, RepositoryStorageMetadataInvalidException
{
- // Remove problems associated with this version, since we'll be re-adding any that still exist
- // TODO: an event mechanism would remove coupling to the problem reporting plugin
- // TODO: this removes all problems - do we need something that just removes the problems created by this resolver?
- String name = RepositoryProblemFacet.createName( namespace, projectId, projectVersion, null );
- try
- {
- metadataRepository.removeMetadataFacet( repoId, RepositoryProblemFacet.FACET_ID, name );
- }
- catch ( MetadataRepositoryException e )
- {
- log.warn( "Unable to remove repository problem facets for the version being removed: " + e.getMessage(),
- e );
- }
-
ManagedRepositoryConfiguration repositoryConfiguration =
archivaConfiguration.getConfiguration().findManagedRepositoryById( repoId );
if ( !file.exists() )
{
- // TODO: an event mechanism would remove coupling to the problem reporting plugin
- addProblemReport( repoId, namespace, projectId, projectVersion, PROBLEM_MISSING_POM,
- "The artifact's POM file '" + file + "' was missing" );
-
// metadata could not be resolved
- return null;
+ throw new RepositoryStorageMetadataNotFoundException(
+ "The artifact's POM file '" + file.getAbsolutePath() + "' was missing" );
}
ModelBuildingRequest req = new DefaultModelBuildingRequest();
}
catch ( ModelBuildingException e )
{
- addProblemReport( repoId, namespace, projectId, projectVersion, PROBLEM_INVALID_POM,
- "The artifact's POM file '" + file + "' was invalid: " + e.getMessage() );
- throw new MetadataResolutionException( e.getMessage() );
+ String msg = "The artifact's POM file '" + file + "' was invalid: " + e.getMessage();
+ throw new RepositoryStorageMetadataInvalidException( "invalid-pom", msg, e );
}
// Check if the POM is in the correct location
message.append( "\nIncorrect version: " ).append( model.getVersion() );
}
- String msg = message.toString();
- addProblemReport( repoId, namespace, projectId, projectVersion, PROBLEM_MISLOCATED_POM, msg );
-
- throw new MetadataResolutionException( msg );
+ throw new RepositoryStorageMetadataInvalidException( "mislocated-pom", message.toString() );
}
ProjectVersionMetadata metadata = new ProjectVersionMetadata();
return metadata;
}
- private void addProblemReport( String repoId, String namespace, String projectId, String projectVersion,
- String problemId, String message )
- {
- // TODO: an event mechanism would remove coupling to the problem reporting plugin and allow other plugins to
- // generate metadata on the fly if appropriately checked for missing facets in the resolver
- RepositoryProblemFacet problem = new RepositoryProblemFacet();
- problem.setProblem( problemId );
- problem.setMessage( message );
- problem.setProject( projectId );
- problem.setNamespace( namespace );
- problem.setRepositoryId( repoId );
- problem.setVersion( projectVersion );
-
- try
- {
- metadataRepository.addMetadataFacet( repoId, problem );
- }
- catch ( MetadataRepositoryException e )
- {
- log.warn( "Unable to add repository problem facets for the version being removed: " + e.getMessage(), e );
- }
- }
-
private List<org.apache.archiva.metadata.model.Dependency> convertDependencies( List<Dependency> dependencies )
{
List<org.apache.archiva.metadata.model.Dependency> l =
import java.util.Collection;
+// FIXME: remove - this is useless, better to mock it or avoid needing it
public class TestMetadataResolver
implements MetadataResolver
{
- public ProjectVersionMetadata resolveProjectVersion( String repoId, String namespace, String projectId,
- String projectVersion )
+ public ProjectVersionMetadata resolveProjectVersion( RepositorySession session, String repoId, String namespace,
+ String projectId, String projectVersion )
+ throws MetadataResolutionException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
- public Collection<ProjectVersionReference> resolveProjectReferences( String repoId, String namespace,
- String projectId, String projectVersion )
+ public Collection<ProjectVersionReference> resolveProjectReferences( RepositorySession session, String repoId,
+ String namespace, String projectId,
+ String projectVersion )
+ throws MetadataResolutionException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
- public Collection<String> resolveRootNamespaces( String repoId )
+ public Collection<String> resolveRootNamespaces( RepositorySession session, String repoId )
+ throws MetadataResolutionException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
- public Collection<String> resolveNamespaces( String repoId, String namespace )
+ public Collection<String> resolveNamespaces( RepositorySession session, String repoId, String namespace )
+ throws MetadataResolutionException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
- public Collection<String> resolveProjects( String repoId, String namespace )
+ public Collection<String> resolveProjects( RepositorySession session, String repoId, String namespace )
+ throws MetadataResolutionException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
- public Collection<String> resolveProjectVersions( String repoId, String namespace, String projectId )
+ public Collection<String> resolveProjectVersions( RepositorySession session, String repoId, String namespace,
+ String projectId )
+ throws MetadataResolutionException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
- public Collection<ArtifactMetadata> resolveArtifacts( String repoId, String namespace, String projectId,
- String projectVersion )
+ public Collection<ArtifactMetadata> resolveArtifacts( RepositorySession session, String repoId, String namespace,
+ String projectId, String projectVersion )
+ throws MetadataResolutionException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
--- /dev/null
+package org.apache.archiva.metadata.repository;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+public class TestRepositorySessionFactory
+ implements RepositorySessionFactory
+{
+ private MetadataRepository metadataRepository = new TestMetadataRepository();
+
+ private MetadataResolver resolver = new TestMetadataResolver();
+
+ public RepositorySession createSession()
+ {
+ return new RepositorySession( metadataRepository, resolver );
+ }
+
+ public void setMetadataRepository( MetadataRepository metadataRepository )
+ {
+ this.metadataRepository = metadataRepository;
+ }
+}
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.Dependency;
-import org.apache.archiva.metadata.model.FacetedMetadata;
import org.apache.archiva.metadata.model.License;
import org.apache.archiva.metadata.model.MailingList;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
-import org.apache.archiva.metadata.repository.MetadataRepository;
-import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.filter.AllFilter;
import org.apache.archiva.metadata.repository.filter.ExcludesFilter;
import org.apache.archiva.metadata.repository.filter.Filter;
import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
-import org.apache.archiva.reports.RepositoryProblemFacet;
+import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
+import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
{
private static final Filter<String> ALL = new AllFilter<String>();
- private Maven2RepositoryStorage resolver;
+ private Maven2RepositoryStorage storage;
private static final String TEST_REPO_ID = "test";
private static final String EMPTY_SHA1 = "da39a3ee5e6b4b0d3255bfef95601890afd80709";
- private MetadataRepository metadataRepository;
-
public void setUp()
throws Exception
{
c.addManagedRepository( testRepo );
configuration.save( c );
- resolver = (Maven2RepositoryStorage) lookup( RepositoryStorage.class, "maven2" );
- metadataRepository = (MetadataRepository) lookup( MetadataRepository.class );
- metadataRepository.removeMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID );
+ storage = (Maven2RepositoryStorage) lookup( RepositoryStorage.class, "maven2" );
}
public void testGetProjectVersionMetadata()
throws Exception
{
- ProjectVersionMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "org.apache.archiva",
- "archiva-common", "1.2.1" );
+ ProjectVersionMetadata metadata = storage.readProjectVersionMetadata( TEST_REPO_ID, "org.apache.archiva",
+ "archiva-common", "1.2.1" );
MavenProjectFacet facet = (MavenProjectFacet) metadata.getFacet( MavenProjectFacet.FACET_ID );
assertEquals( "jar", facet.getPackaging() );
assertEquals( "http://archiva.apache.org/ref/1.2.1/archiva-base/archiva-common", metadata.getUrl() );
public void testGetArtifactMetadata()
throws Exception
{
- Collection<ArtifactMetadata> springArtifacts = resolver.readArtifactsMetadata( TEST_REPO_ID,
- "org.codehaus.plexus",
- "plexus-spring", "1.2", ALL );
+ Collection<ArtifactMetadata> springArtifacts = storage.readArtifactsMetadata( TEST_REPO_ID,
+ "org.codehaus.plexus",
+ "plexus-spring", "1.2", ALL );
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( springArtifacts );
Collections.sort( artifacts, new Comparator<ArtifactMetadata>()
{
public void testGetArtifactMetadataSnapshots()
throws Exception
{
- Collection<ArtifactMetadata> testArtifacts = resolver.readArtifactsMetadata( TEST_REPO_ID, "com.example.test",
- "test-artifact", "1.0-SNAPSHOT",
- ALL );
+ Collection<ArtifactMetadata> testArtifacts = storage.readArtifactsMetadata( TEST_REPO_ID, "com.example.test",
+ "test-artifact", "1.0-SNAPSHOT",
+ ALL );
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( testArtifacts );
Collections.sort( artifacts, new Comparator<ArtifactMetadata>()
{
public void testGetProjectVersionMetadataForTimestampedSnapshot()
throws Exception
{
- ProjectVersionMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "org.apache", "apache",
- "5-SNAPSHOT" );
+ ProjectVersionMetadata metadata = storage.readProjectVersionMetadata( TEST_REPO_ID, "org.apache", "apache",
+ "5-SNAPSHOT" );
MavenProjectFacet facet = (MavenProjectFacet) metadata.getFacet( MavenProjectFacet.FACET_ID );
assertEquals( "pom", facet.getPackaging() );
assertEquals( "http://www.apache.org/", metadata.getUrl() );
public void testGetProjectVersionMetadataForTimestampedSnapshotMissingMetadata()
throws Exception
{
- FacetedMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test",
- "missing-metadata", "1.0-SNAPSHOT" );
- assertNull( metadata );
+ try
+ {
+ storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "missing-metadata", "1.0-SNAPSHOT" );
+ fail( "Should not be found" );
+ }
+ catch ( RepositoryStorageMetadataNotFoundException e )
+ {
+ assertEquals( "missing-pom", e.getId() );
+ }
}
public void testGetProjectVersionMetadataForTimestampedSnapshotMalformedMetadata()
throws Exception
{
- FacetedMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test",
- "malformed-metadata", "1.0-SNAPSHOT" );
- assertNull( metadata );
+ try
+ {
+ storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "malformed-metadata",
+ "1.0-SNAPSHOT" );
+ fail( "Should not be found" );
+ }
+ catch ( RepositoryStorageMetadataNotFoundException e )
+ {
+ assertEquals( "missing-pom", e.getId() );
+ }
}
public void testGetProjectVersionMetadataForTimestampedSnapshotIncompleteMetadata()
throws Exception
{
- FacetedMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test",
- "incomplete-metadata", "1.0-SNAPSHOT" );
- assertNull( metadata );
+ try
+ {
+ storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "incomplete-metadata",
+ "1.0-SNAPSHOT" );
+ fail( "Should not be found" );
+ }
+ catch ( RepositoryStorageMetadataNotFoundException e )
+ {
+ assertEquals( "missing-pom", e.getId() );
+ }
}
public void testGetProjectVersionMetadataForInvalidPom()
throws Exception
{
- assertTrue( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
-
try
{
- resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "invalid-pom", "1.0" );
+ storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "invalid-pom", "1.0" );
fail( "Should have received an exception due to invalid POM" );
}
- catch ( MetadataResolutionException e )
+ catch ( RepositoryStorageMetadataInvalidException e )
{
- assertFalse( metadataRepository.getMetadataFacets( TEST_REPO_ID,
- RepositoryProblemFacet.FACET_ID ).isEmpty() );
- RepositoryProblemFacet facet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet( TEST_REPO_ID,
- RepositoryProblemFacet.FACET_ID,
- "com.example.test/invalid-pom/1.0" );
- assertEquals( "invalid-pom", facet.getProblem() );
+ assertEquals( "invalid-pom", e.getId() );
}
}
public void testGetProjectVersionMetadataForMislocatedPom()
throws Exception
{
- assertTrue( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
-
try
{
- resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "mislocated-pom", "1.0" );
+ storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "mislocated-pom", "1.0" );
fail( "Should have received an exception due to mislocated POM" );
}
- catch ( MetadataResolutionException e )
+ catch ( RepositoryStorageMetadataInvalidException e )
{
- assertFalse( metadataRepository.getMetadataFacets( TEST_REPO_ID,
- RepositoryProblemFacet.FACET_ID ).isEmpty() );
- RepositoryProblemFacet facet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet( TEST_REPO_ID,
- RepositoryProblemFacet.FACET_ID,
- "com.example.test/mislocated-pom/1.0" );
- assertEquals( "mislocated-pom", facet.getProblem() );
+ assertEquals( "mislocated-pom", e.getId() );
}
}
public void testGetProjectVersionMetadataForMissingPom()
throws Exception
{
- assertTrue( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
-
- FacetedMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "missing-pom",
- "1.0" );
- assertNull( metadata );
-
- assertFalse( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
- RepositoryProblemFacet facet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet( TEST_REPO_ID,
- RepositoryProblemFacet.FACET_ID,
- "com.example.test/missing-pom/1.0" );
- assertEquals( "missing-pom", facet.getProblem() );
-
+ try
+ {
+ storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "missing-pom", "1.0" );
+ fail( "Should not be found" );
+ }
+ catch ( RepositoryStorageMetadataNotFoundException e )
+ {
+ assertEquals( "missing-pom", e.getId() );
+ }
}
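The rewritten tests above all target the same new failure contract: readProjectVersionMetadata() no longer returns null but throws a typed exception carrying a problem id. A minimal caller-side sketch, assuming the two exception types are siblings under the RepositoryStorageMetadataException base imported elsewhere in this patch (groupId, artifactId and version stand in for real coordinates):

try
{
    storage.readProjectVersionMetadata( TEST_REPO_ID, groupId, artifactId, version );
}
catch ( RepositoryStorageMetadataNotFoundException e )
{
    // no usable POM was located; e.getId() names the problem, e.g. "missing-pom"
}
catch ( RepositoryStorageMetadataInvalidException e )
{
    // a POM was found but could not be used; e.getId() names the problem, e.g. "invalid-pom"
}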
public void testGetRootNamespaces()
{
- assertEquals( Arrays.asList( "com", "org" ), resolver.listRootNamespaces( TEST_REPO_ID, ALL ) );
+ assertEquals( Arrays.asList( "com", "org" ), storage.listRootNamespaces( TEST_REPO_ID, ALL ) );
}
public void testGetNamespaces()
{
- assertEquals( Arrays.asList( "example" ), resolver.listNamespaces( TEST_REPO_ID, "com", ALL ) );
- assertEquals( Arrays.asList( "test" ), resolver.listNamespaces( TEST_REPO_ID, "com.example", ALL ) );
- assertEquals( Collections.<String>emptyList(), resolver.listNamespaces( TEST_REPO_ID, "com.example.test",
- ALL ) );
-
- assertEquals( Arrays.asList( "apache", "codehaus" ), resolver.listNamespaces( TEST_REPO_ID, "org", ALL ) );
- assertEquals( Arrays.asList( "archiva", "maven" ), resolver.listNamespaces( TEST_REPO_ID, "org.apache", ALL ) );
- assertEquals( Collections.<String>emptyList(), resolver.listNamespaces( TEST_REPO_ID, "org.apache.archiva",
- ALL ) );
- assertEquals( Arrays.asList( "plugins", "shared" ), resolver.listNamespaces( TEST_REPO_ID, "org.apache.maven",
- ALL ) );
- assertEquals( Collections.<String>emptyList(), resolver.listNamespaces( TEST_REPO_ID,
- "org.apache.maven.plugins", ALL ) );
- assertEquals( Collections.<String>emptyList(), resolver.listNamespaces( TEST_REPO_ID, "org.apache.maven.shared",
- ALL ) );
-
- assertEquals( Arrays.asList( "plexus" ), resolver.listNamespaces( TEST_REPO_ID, "org.codehaus", ALL ) );
- assertEquals( Collections.<String>emptyList(), resolver.listNamespaces( TEST_REPO_ID, "org.codehaus.plexus",
- ALL ) );
+ assertEquals( Arrays.asList( "example" ), storage.listNamespaces( TEST_REPO_ID, "com", ALL ) );
+ assertEquals( Arrays.asList( "test" ), storage.listNamespaces( TEST_REPO_ID, "com.example", ALL ) );
+ assertEquals( Collections.<String>emptyList(), storage.listNamespaces( TEST_REPO_ID, "com.example.test",
+ ALL ) );
+
+ assertEquals( Arrays.asList( "apache", "codehaus" ), storage.listNamespaces( TEST_REPO_ID, "org", ALL ) );
+ assertEquals( Arrays.asList( "archiva", "maven" ), storage.listNamespaces( TEST_REPO_ID, "org.apache", ALL ) );
+ assertEquals( Collections.<String>emptyList(), storage.listNamespaces( TEST_REPO_ID, "org.apache.archiva",
+ ALL ) );
+ assertEquals( Arrays.asList( "plugins", "shared" ), storage.listNamespaces( TEST_REPO_ID, "org.apache.maven",
+ ALL ) );
+ assertEquals( Collections.<String>emptyList(), storage.listNamespaces( TEST_REPO_ID, "org.apache.maven.plugins",
+ ALL ) );
+ assertEquals( Collections.<String>emptyList(), storage.listNamespaces( TEST_REPO_ID, "org.apache.maven.shared",
+ ALL ) );
+
+ assertEquals( Arrays.asList( "plexus" ), storage.listNamespaces( TEST_REPO_ID, "org.codehaus", ALL ) );
+ assertEquals( Collections.<String>emptyList(), storage.listNamespaces( TEST_REPO_ID, "org.codehaus.plexus",
+ ALL ) );
}
public void testGetProjects()
{
- assertEquals( Collections.<String>emptyList(), resolver.listProjects( TEST_REPO_ID, "com", ALL ) );
- assertEquals( Collections.<String>emptyList(), resolver.listProjects( TEST_REPO_ID, "com.example", ALL ) );
+ assertEquals( Collections.<String>emptyList(), storage.listProjects( TEST_REPO_ID, "com", ALL ) );
+ assertEquals( Collections.<String>emptyList(), storage.listProjects( TEST_REPO_ID, "com.example", ALL ) );
assertEquals( Arrays.asList( "incomplete-metadata", "invalid-pom", "malformed-metadata", "mislocated-pom",
- "missing-metadata", "test-artifact" ), resolver.listProjects( TEST_REPO_ID,
- "com.example.test",
- ALL ) );
+ "missing-metadata", "test-artifact" ), storage.listProjects( TEST_REPO_ID,
+ "com.example.test",
+ ALL ) );
- assertEquals( Collections.<String>emptyList(), resolver.listProjects( TEST_REPO_ID, "org", ALL ) );
- assertEquals( Arrays.asList( "apache" ), resolver.listProjects( TEST_REPO_ID, "org.apache", ALL ) );
+ assertEquals( Collections.<String>emptyList(), storage.listProjects( TEST_REPO_ID, "org", ALL ) );
+ assertEquals( Arrays.asList( "apache" ), storage.listProjects( TEST_REPO_ID, "org.apache", ALL ) );
assertEquals( Arrays.asList( "archiva", "archiva-base", "archiva-common", "archiva-modules", "archiva-parent" ),
- resolver.listProjects( TEST_REPO_ID, "org.apache.archiva", ALL ) );
- assertEquals( Collections.<String>emptyList(), resolver.listProjects( TEST_REPO_ID, "org.apache.maven", ALL ) );
- assertEquals( Collections.<String>emptyList(), resolver.listProjects( TEST_REPO_ID, "org.apache.maven.plugins",
- ALL ) );
- assertEquals( Arrays.asList( "maven-downloader" ), resolver.listProjects( TEST_REPO_ID,
- "org.apache.maven.shared", ALL ) );
+ storage.listProjects( TEST_REPO_ID, "org.apache.archiva", ALL ) );
+ assertEquals( Collections.<String>emptyList(), storage.listProjects( TEST_REPO_ID, "org.apache.maven", ALL ) );
+ assertEquals( Collections.<String>emptyList(), storage.listProjects( TEST_REPO_ID, "org.apache.maven.plugins",
+ ALL ) );
+ assertEquals( Arrays.asList( "maven-downloader" ), storage.listProjects( TEST_REPO_ID,
+ "org.apache.maven.shared", ALL ) );
}
public void testGetProjectVersions()
{
- assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), resolver.listProjectVersions( TEST_REPO_ID, "com.example.test",
- "incomplete-metadata", ALL ) );
- assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), resolver.listProjectVersions( TEST_REPO_ID, "com.example.test",
- "malformed-metadata", ALL ) );
- assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), resolver.listProjectVersions( TEST_REPO_ID, "com.example.test",
- "missing-metadata", ALL ) );
- assertEquals( Arrays.asList( "1.0" ), resolver.listProjectVersions( TEST_REPO_ID, "com.example.test",
- "invalid-pom", ALL ) );
-
- assertEquals( Arrays.asList( "4", "5-SNAPSHOT" ), resolver.listProjectVersions( TEST_REPO_ID, "org.apache",
- "apache", ALL ) );
-
- assertEquals( Arrays.asList( "1.2.1", "1.2.2" ), resolver.listProjectVersions( TEST_REPO_ID,
- "org.apache.archiva", "archiva",
- ALL ) );
- assertEquals( Arrays.asList( "1.2.1" ), resolver.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
- "archiva-base", ALL ) );
- assertEquals( Arrays.asList( "1.2.1" ), resolver.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
- "archiva-common", ALL ) );
- assertEquals( Arrays.asList( "1.2.1" ), resolver.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
- "archiva-modules", ALL ) );
- assertEquals( Arrays.asList( "3" ), resolver.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
- "archiva-parent", ALL ) );
-
- assertEquals( Collections.<String>emptyList(), resolver.listProjectVersions( TEST_REPO_ID,
- "org.apache.maven.shared",
- "maven-downloader", ALL ) );
+ assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), storage.listProjectVersions( TEST_REPO_ID, "com.example.test",
+ "incomplete-metadata", ALL ) );
+ assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), storage.listProjectVersions( TEST_REPO_ID, "com.example.test",
+ "malformed-metadata", ALL ) );
+ assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), storage.listProjectVersions( TEST_REPO_ID, "com.example.test",
+ "missing-metadata", ALL ) );
+ assertEquals( Arrays.asList( "1.0" ), storage.listProjectVersions( TEST_REPO_ID, "com.example.test",
+ "invalid-pom", ALL ) );
+
+ assertEquals( Arrays.asList( "4", "5-SNAPSHOT" ), storage.listProjectVersions( TEST_REPO_ID, "org.apache",
+ "apache", ALL ) );
+
+ assertEquals( Arrays.asList( "1.2.1", "1.2.2" ), storage.listProjectVersions( TEST_REPO_ID,
+ "org.apache.archiva", "archiva",
+ ALL ) );
+ assertEquals( Arrays.asList( "1.2.1" ), storage.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
+ "archiva-base", ALL ) );
+ assertEquals( Arrays.asList( "1.2.1" ), storage.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
+ "archiva-common", ALL ) );
+ assertEquals( Arrays.asList( "1.2.1" ), storage.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
+ "archiva-modules", ALL ) );
+ assertEquals( Arrays.asList( "3" ), storage.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
+ "archiva-parent", ALL ) );
+
+ assertEquals( Collections.<String>emptyList(), storage.listProjectVersions( TEST_REPO_ID,
+ "org.apache.maven.shared",
+ "maven-downloader", ALL ) );
}
public void testGetArtifacts()
{
- List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( resolver.readArtifactsMetadata(
- TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.2", ALL ) );
+ List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( storage.readArtifactsMetadata( TEST_REPO_ID,
+ "org.codehaus.plexus",
+ "plexus-spring",
+ "1.2",
+ ALL ) );
assertEquals( 3, artifacts.size() );
Collections.sort( artifacts, new Comparator<ArtifactMetadata>()
{
{
ExcludesFilter<String> filter = new ExcludesFilter<String>( Collections.singletonList(
"plexus-spring-1.2.pom" ) );
- List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( resolver.readArtifactsMetadata(
- TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.2", filter ) );
+ List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( storage.readArtifactsMetadata( TEST_REPO_ID,
+ "org.codehaus.plexus",
+ "plexus-spring",
+ "1.2",
+ filter ) );
assertEquals( 2, artifacts.size() );
Collections.sort( artifacts, new Comparator<ArtifactMetadata>()
{
public void testGetArtifactsTimestampedSnapshots()
{
- List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( resolver.readArtifactsMetadata(
- TEST_REPO_ID, "com.example.test", "missing-metadata", "1.0-SNAPSHOT", ALL ) );
+ List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( storage.readArtifactsMetadata( TEST_REPO_ID,
+ "com.example.test",
+ "missing-metadata",
+ "1.0-SNAPSHOT",
+ ALL ) );
assertEquals( 1, artifacts.size() );
ArtifactMetadata artifact = artifacts.get( 0 );
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<implementation>org.apache.archiva.configuration.TestConfiguration</implementation>
</component>
+ <component>
+ <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
+ <implementation>org.apache.archiva.metadata.repository.TestRepositorySessionFactory</implementation>
+ </component>
</components>
</component-set>
\ No newline at end of file
<role>org.apache.archiva.metadata.repository.MetadataRepository</role>
<implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
</component>
+ <component>
+ <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
+ <implementation>org.apache.archiva.metadata.repository.TestRepositorySessionFactory</implementation>
+ </component>
</components>
</component-set>
\ No newline at end of file
import java.util.Set;
import java.util.StringTokenizer;
-/**
- * @plexus.component role="org.apache.archiva.metadata.repository.MetadataRepository"
- */
public class FileMetadataRepository
implements MetadataRepository
{
- /**
- * @plexus.requirement role="org.apache.archiva.metadata.model.MetadataFacetFactory"
- */
- private Map<String, MetadataFacetFactory> metadataFacetFactories;
+ private final Map<String, MetadataFacetFactory> metadataFacetFactories;
- /**
- * @plexus.requirement
- */
- private ArchivaConfiguration configuration;
+ private final ArchivaConfiguration configuration;
private static final Logger log = LoggerFactory.getLogger( FileMetadataRepository.class );
private static final String METADATA_KEY = "metadata";
+ public FileMetadataRepository( Map<String, MetadataFacetFactory> metadataFacetFactories,
+ ArchivaConfiguration configuration )
+ {
+ this.metadataFacetFactories = metadataFacetFactories;
+ this.configuration = configuration;
+ }
+
private File getBaseDirectory( String repoId )
{
// TODO: should be configurable, like the index
}
}
- public void setMetadataFacetFactories( Map<String, MetadataFacetFactory> metadataFacetFactories )
- {
- this.metadataFacetFactories = metadataFacetFactories;
- }
-
- public void setConfiguration( ArchivaConfiguration configuration )
- {
- this.configuration = configuration;
- }
-
private static class ArtifactComparator
implements Comparator<ArtifactMetadata>
{
--- /dev/null
+package org.apache.archiva.metadata.repository.file;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.metadata.model.MetadataFacetFactory;
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.archiva.metadata.repository.MetadataResolver;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.RepositorySessionFactory;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+
+import java.util.Map;
+
+/**
+ * @plexus.component role="org.apache.archiva.metadata.repository.RepositorySessionFactory" role-hint="file"
+ */
+public class FileRepositorySessionFactory
+ implements RepositorySessionFactory
+{
+ /**
+ * @plexus.requirement role="org.apache.archiva.metadata.model.MetadataFacetFactory"
+ */
+ private Map<String, MetadataFacetFactory> metadataFacetFactories;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArchivaConfiguration configuration;
+
+ /**
+ * @plexus.requirement
+ */
+ private MetadataResolver metadataResolver;
+
+ public RepositorySession createSession()
+ {
+ MetadataRepository metadataRepository = new FileMetadataRepository( metadataFacetFactories, configuration );
+
+ return new RepositorySession( metadataRepository, metadataResolver );
+ }
+}
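FileRepositorySessionFactory is the file-backed half of the new session abstraction. A sketch of the lifecycle it enables; createSession(), getRepository() and save() appear in this patch, while close() and the try/finally shape are assumptions about the RepositorySession API (repositorySessionFactory, repoId and facet are placeholders):

RepositorySession session = repositorySessionFactory.createSession();
try
{
    MetadataRepository metadataRepository = session.getRepository();
    metadataRepository.addMetadataFacet( repoId, facet ); // any work against the session-scoped repository
    session.save();
}
finally
{
    session.close(); // assumed cleanup hook
}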
ArchivaConfiguration config = createTestConfiguration( directory );
Map<String, MetadataFacetFactory> factories = createTestMetadataFacetFactories();
- FileMetadataRepository repository = new FileMetadataRepository();
- repository.setConfiguration( config );
- repository.setMetadataFacetFactories( factories );
- this.repository = repository;
+ this.repository = new FileMetadataRepository( factories, config );
}
protected static ArchivaConfiguration createTestConfiguration( File directory )
* under the License.
*/
+import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
+import org.apache.archiva.metadata.repository.RepositorySession;
+import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataException;
import org.apache.archiva.repository.events.RepositoryListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
{
private Logger log = LoggerFactory.getLogger( RepositoryProblemEventListener.class );
- /**
- * @plexus.requirement
- */
- private MetadataRepository metadataRepository;
-
- public void deleteArtifact( String repositoryId, String namespace, String project, String version, String id )
+ // FIXME: move to session
+ public void deleteArtifact( MetadataRepository metadataRepository, String repositoryId, String namespace,
+ String project, String version, String id )
{
String name = RepositoryProblemFacet.createName( namespace, project, version, id );
log.warn( "Unable to remove metadata facet as part of delete event: " + e.getMessage(), e );
}
}
+
+ public void addArtifact( RepositorySession session, String repoId, String namespace, String projectId,
+ ProjectVersionMetadata metadata )
+ {
+ // Remove problems associated with this version on successful addition
+ // TODO: this removes all problems - do we need something that just removes the problems we know are corrected?
+ String name = RepositoryProblemFacet.createName( namespace, projectId, metadata.getId(), null );
+ try
+ {
+ MetadataRepository metadataRepository = session.getRepository();
+ metadataRepository.removeMetadataFacet( repoId, RepositoryProblemFacet.FACET_ID, name );
+ session.markDirty();
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ log.warn( "Unable to remove repository problem facets for the version being corrected in the repository: " +
+ e.getMessage(), e );
+ }
+ }
+
+ public void addArtifactProblem( RepositorySession session, String repoId, String namespace, String projectId,
+ String projectVersion, RepositoryStorageMetadataException exception )
+ {
+ RepositoryProblemFacet problem = new RepositoryProblemFacet();
+ problem.setMessage( exception.getMessage() );
+ problem.setProject( projectId );
+ problem.setNamespace( namespace );
+ problem.setRepositoryId( repoId );
+ problem.setVersion( projectVersion );
+ problem.setProblem( exception.getId() );
+
+ try
+ {
+ session.getRepository().addMetadataFacet( repoId, problem );
+ session.markDirty();
+ }
+ catch ( MetadataRepositoryException e )
+ {
+ log.warn( "Unable to add repository problem facets for the version being removed: " + e.getMessage(), e );
+ }
+ }
+
}
\ No newline at end of file
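This patch does not show the storage-side code that drives the two new listener methods; a hedged sketch of how a scanner holding an open session might dispatch to them (storage, session and listener are assumed to be in scope):

try
{
    ProjectVersionMetadata metadata = storage.readProjectVersionMetadata( repoId, namespace, projectId,
                                                                          projectVersion );
    listener.addArtifact( session, repoId, namespace, projectId, metadata );
}
catch ( RepositoryStorageMetadataException e )
{
    listener.addArtifactProblem( session, repoId, namespace, projectId, projectVersion, e );
}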
{
private static final Logger log = LoggerFactory.getLogger( DefaultRepositoryStatisticsManager.class );
- /**
- * @plexus.requirement
- */
- private MetadataRepository metadataRepository;
-
private static final TimeZone UTC_TIME_ZONE = TimeZone.getTimeZone( "UTC" );
- public RepositoryStatistics getLastStatistics( String repositoryId )
+ public RepositoryStatistics getLastStatistics( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException
{
// TODO: consider a more efficient implementation that directly gets the last one from the content repository
}
}
- private void walkRepository( RepositoryStatistics stats, String repositoryId, String ns )
+ private void walkRepository( MetadataRepository metadataRepository, RepositoryStatistics stats, String repositoryId,
+ String ns )
throws MetadataResolutionException
{
for ( String namespace : metadataRepository.getNamespaces( repositoryId, ns ) )
{
- walkRepository( stats, repositoryId, ns + "." + namespace );
+ walkRepository( metadataRepository, stats, repositoryId, ns + "." + namespace );
}
Collection<String> projects = metadataRepository.getProjects( repositoryId, ns );
}
}
-
- public void addStatisticsAfterScan( String repositoryId, Date startTime, Date endTime, long totalFiles,
- long newFiles )
+ public void addStatisticsAfterScan( MetadataRepository metadataRepository, String repositoryId, Date startTime,
+ Date endTime, long totalFiles, long newFiles )
throws MetadataRepositoryException
{
RepositoryStatistics repositoryStatistics = new RepositoryStatistics();
{
for ( String ns : metadataRepository.getRootNamespaces( repositoryId ) )
{
- walkRepository( repositoryStatistics, repositoryId, ns );
+ walkRepository( metadataRepository, repositoryStatistics, repositoryId, ns );
}
}
catch ( MetadataResolutionException e )
metadataRepository.addMetadataFacet( repositoryId, repositoryStatistics );
}
- public void deleteStatistics( String repositoryId )
+ public void deleteStatistics( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException
{
metadataRepository.removeMetadataFacets( repositoryId, RepositoryStatistics.FACET_ID );
}
- public List<RepositoryStatistics> getStatisticsInRange( String repositoryId, Date startTime, Date endTime )
+ public List<RepositoryStatistics> getStatisticsInRange( MetadataRepository metadataRepository, String repositoryId,
+ Date startTime, Date endTime )
throws MetadataRepositoryException
{
List<RepositoryStatistics> results = new ArrayList<RepositoryStatistics>();
fmt.setTimeZone( UTC_TIME_ZONE );
return fmt;
}
-
- public void setMetadataRepository( MetadataRepository metadataRepository )
- {
- this.metadataRepository = metadataRepository;
- }
}
* under the License.
*/
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import java.util.Date;
public interface RepositoryStatisticsManager
{
- RepositoryStatistics getLastStatistics( String repositoryId )
+ RepositoryStatistics getLastStatistics( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException;
- void addStatisticsAfterScan( String repositoryId, Date startTime, Date endTime, long totalFiles, long newFiles )
+ void addStatisticsAfterScan( MetadataRepository metadataRepository, String repositoryId, Date startTime,
+ Date endTime, long totalFiles, long newFiles )
throws MetadataRepositoryException;
- void deleteStatistics( String repositoryId )
+ void deleteStatistics( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException;
- List<RepositoryStatistics> getStatisticsInRange( String repositoryId, Date startTime, Date endTime )
+ List<RepositoryStatistics> getStatisticsInRange( MetadataRepository metadataRepository, String repositoryId,
+ Date startTime, Date endTime )
throws MetadataRepositoryException;
}
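With MetadataRepository now an explicit parameter, callers own the session scope. A minimal sketch, assuming an injected RepositorySessionFactory and a close() method on RepositorySession:

RepositorySession session = repositorySessionFactory.createSession();
try
{
    repositoryStatisticsManager.addStatisticsAfterScan( session.getRepository(), repositoryId, startTime, endTime,
                                                        totalFiles, newFiles );
    session.save();
}
finally
{
    session.close();
}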
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
- repositoryStatisticsManager.setMetadataRepository( metadataRepository );
}
public void testGetLatestStats()
SECOND_TEST_SCAN ), stats );
metadataRepositoryControl.replay();
- stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
+ stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
assertNotNull( stats );
assertEquals( 1314527915L, stats.getTotalArtifactFileSize() );
assertEquals( 123, stats.getNewFileCount() );
Collections.emptyList() );
metadataRepositoryControl.replay();
- RepositoryStatistics stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
+ RepositoryStatistics stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
assertNull( stats );
metadataRepositoryControl.verify();
metadataRepositoryControl.replay();
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime, current, 56345, 45 );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, startTime, current, 56345,
+ 45 );
- stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
+ stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
assertNotNull( stats );
assertEquals( 246900, stats.getTotalArtifactFileSize() );
assertEquals( 45, stats.getNewFileCount() );
metadataRepositoryControl.replay();
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime1, stats1.getScanEndTime(), 56345,
- 45 );
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime2, stats2.getScanEndTime(), 56345,
- 45 );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, startTime1,
+ stats1.getScanEndTime(), 56345, 45 );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, startTime2,
+ stats2.getScanEndTime(), 56345, 45 );
- assertNotNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
+ assertNotNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
- repositoryStatisticsManager.deleteStatistics( TEST_REPO_ID );
+ repositoryStatisticsManager.deleteStatistics( metadataRepository, TEST_REPO_ID );
- assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
+ assertNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
metadataRepositoryControl.verify();
}
metadataRepositoryControl.replay();
- assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
+ assertNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
- repositoryStatisticsManager.deleteStatistics( TEST_REPO_ID );
+ repositoryStatisticsManager.deleteStatistics( metadataRepository, TEST_REPO_ID );
- assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
+ assertNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
metadataRepositoryControl.verify();
}
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
- stats.getScanEndTime(), 56345, 45 );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
+ stats.getScanStartTime(), stats.getScanEndTime(), 56345,
+ 45 );
}
- List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date(
- current.getTime() - 4000 ), new Date( current.getTime() - 2000 ) );
+ List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( metadataRepository,
+ TEST_REPO_ID, new Date(
+ current.getTime() - 4000 ), new Date( current.getTime() - 2000 ) );
assertEquals( 1, list.size() );
assertEquals( new Date( current.getTime() - 3000 ), list.get( 0 ).getScanStartTime() );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
- stats.getScanEndTime(), 56345, 45 );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
+ stats.getScanStartTime(), stats.getScanEndTime(), 56345,
+ 45 );
}
- List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date(
- current.getTime() - 4000 ), current );
+ List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( metadataRepository,
+ TEST_REPO_ID, new Date(
+ current.getTime() - 4000 ), current );
assertEquals( 2, list.size() );
assertEquals( new Date( current.getTime() - 3000 ), list.get( 1 ).getScanStartTime() );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
- stats.getScanEndTime(), 56345, 45 );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
+ stats.getScanStartTime(), stats.getScanEndTime(), 56345,
+ 45 );
}
- List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date(
- current.getTime() - 20000 ), new Date( current.getTime() - 2000 ) );
+ List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( metadataRepository,
+ TEST_REPO_ID, new Date(
+ current.getTime() - 20000 ), new Date( current.getTime() - 2000 ) );
assertEquals( 2, list.size() );
assertEquals( new Date( current.getTime() - 12345 ), list.get( 1 ).getScanStartTime() );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
- stats.getScanEndTime(), 56345, 45 );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
+ stats.getScanStartTime(), stats.getScanEndTime(), 56345,
+ 45 );
}
- List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date(
- current.getTime() - 20000 ), current );
+ List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( metadataRepository,
+ TEST_REPO_ID, new Date(
+ current.getTime() - 20000 ), current );
assertEquals( 3, list.size() );
assertEquals( new Date( current.getTime() - 12345 ), list.get( 2 ).getScanStartTime() );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
- stats.getScanEndTime(), 56345, 45 );
+ repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
+ stats.getScanStartTime(), stats.getScanEndTime(), 56345,
+ 45 );
}
- List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date(
- current.getTime() - 20000 ), new Date( current.getTime() - 16000 ) );
+ List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( metadataRepository,
+ TEST_REPO_ID, new Date(
+ current.getTime() - 20000 ), new Date( current.getTime() - 16000 ) );
assertEquals( 0, list.size() );
*/
import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.filter.Filter;
import org.apache.archiva.metadata.repository.filter.IncludesFilter;
-import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
-import org.apache.maven.archiva.repository.RepositoryException;
-import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
-import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
-import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
+import org.apache.maven.archiva.common.utils.VersionComparator;
+import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.model.ArchivaRepositoryMetadata;
-import org.apache.maven.archiva.common.utils.VersionComparator;
-import org.apache.maven.archiva.common.utils.VersionUtil;
+import org.apache.maven.archiva.repository.RepositoryException;
+import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
+import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
+import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
-import java.util.List;
-import java.util.Date;
-import java.util.Calendar;
-import java.util.TimeZone;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.io.IOException;
import java.io.File;
-import java.io.FileOutputStream;
import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.TimeZone;
/**
* @plexus.component role="org.apache.archiva.stagerepository.merge.RepositoryMerger" role-hint="maven2"
public class Maven2RepositoryMerger
implements RepositoryMerger
{
-
- /**
- * @plexus.requirement role-hint="default"
- */
- private MetadataRepository metadataRepository;
-
/**
* @plexus.requirement role-hint="default"
*/
this.configuration = configuration;
}
- public void setMetadataRepository( MetadataRepository metadataRepository )
- {
- this.metadataRepository = metadataRepository;
- }
-
- public void merge( String sourceRepoId, String targetRepoId )
+ public void merge( MetadataRepository metadataRepository, String sourceRepoId, String targetRepoId )
throws Exception
{
}
// TODO: implement when the UI needs to merge only a subset
- public void merge( String sourceRepoId, String targetRepoId, Filter<ArtifactMetadata> filter )
+ public void merge( MetadataRepository metadataRepository, String sourceRepoId, String targetRepoId,
+ Filter<ArtifactMetadata> filter )
throws Exception
{
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( sourceRepoId );
{
// updating version metadata files
- File versionMetaDataFileInSourceRepo =
- pathTranslator.toFile( new File( sourceRepoPath ), artifactMetadata.getNamespace(),
- artifactMetadata.getProject(), artifactMetadata.getVersion(),
- METADATA_FILENAME );
+ File versionMetaDataFileInSourceRepo = pathTranslator.toFile( new File( sourceRepoPath ),
+ artifactMetadata.getNamespace(),
+ artifactMetadata.getProject(),
+ artifactMetadata.getVersion(),
+ METADATA_FILENAME );
- if( versionMetaDataFileInSourceRepo.exists() )
+ if ( versionMetaDataFileInSourceRepo.exists() )
{
- String relativePathToVersionMetadataFile =
- versionMetaDataFileInSourceRepo.getAbsolutePath().split( sourceRepoPath )[1];
+ String relativePathToVersionMetadataFile = versionMetaDataFileInSourceRepo.getAbsolutePath().split(
+ sourceRepoPath )[1];
File versionMetaDataFileInTargetRepo = new File( targetRepoPath, relativePathToVersionMetadataFile );
if ( !versionMetaDataFileInTargetRepo.exists() )
String projectDirectoryInSourceRepo = new File( versionMetaDataFileInSourceRepo.getParent() ).getParent();
File projectMetadataFileInSourceRepo = new File( projectDirectoryInSourceRepo, METADATA_FILENAME );
- if( projectMetadataFileInSourceRepo.exists() )
+ if ( projectMetadataFileInSourceRepo.exists() )
{
- String relativePathToProjectMetadataFile =
- projectMetadataFileInSourceRepo.getAbsolutePath().split( sourceRepoPath )[1];
+ String relativePathToProjectMetadataFile = projectMetadataFileInSourceRepo.getAbsolutePath().split(
+ sourceRepoPath )[1];
File projectMetadataFileInTargetRepo = new File( targetRepoPath, relativePathToProjectMetadataFile );
if ( !projectMetadataFileInTargetRepo.exists() )
return metadata;
}
- public List<ArtifactMetadata> getConflictingArtifacts( String sourceRepo, String targetRepo )
+ public List<ArtifactMetadata> getConflictingArtifacts( MetadataRepository metadataRepository, String sourceRepo,
+ String targetRepo )
throws Exception
{
List<ArtifactMetadata> targetArtifacts = metadataRepository.getArtifacts( targetRepo );
boolean isSame = false;
if ( ( sourceArtifact.getNamespace().equals( targetArtifact.getNamespace() ) ) &&
- ( sourceArtifact.getProject().equals( targetArtifact.getProject() ) ) &&
- ( sourceArtifact.getId().equals( targetArtifact.getId() ) ) &&
- ( sourceArtifact.getProjectVersion().equals( targetArtifact.getProjectVersion() ) ) )
+ ( sourceArtifact.getProject().equals( targetArtifact.getProject() ) ) && ( sourceArtifact.getId().equals(
+ targetArtifact.getId() ) ) && ( sourceArtifact.getProjectVersion().equals(
+ targetArtifact.getProjectVersion() ) ) )
{
isSame = true;
* under the License.
*/
-import java.util.List;
-
import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.filter.Filter;
+import java.util.List;
+
public interface RepositoryMerger
{
- void merge( String sourceRepoId, String targetRepoId )
+ void merge( MetadataRepository metadataRepository, String sourceRepoId, String targetRepoId )
+ throws Exception;
+
+ void merge( MetadataRepository metadataRepository, String sourceRepoId, String targetRepoId,
+ Filter<ArtifactMetadata> filter )
throws Exception;
- void merge( String sourceRepoId, String targetRepoId, Filter<ArtifactMetadata> filter ) throws Exception;
-
- public List<ArtifactMetadata> getConflictingArtifacts( String sourceRepo, String targetRepo )
+ public List<ArtifactMetadata> getConflictingArtifacts( MetadataRepository metadataRepository, String sourceRepo,
+ String targetRepo )
throws Exception;
}
\ No newline at end of file
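A hedged usage sketch for the revised merger contract; checking for conflicts before merging is an assumed policy, not something the interface enforces (session, stagingRepoId and targetRepoId are placeholders):

MetadataRepository metadataRepository = session.getRepository();
List<ArtifactMetadata> conflicts = repositoryMerger.getConflictingArtifacts( metadataRepository, stagingRepoId,
                                                                             targetRepoId );
if ( conflicts.isEmpty() )
{
    repositoryMerger.merge( metadataRepository, stagingRepoId, targetRepoId );
}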
* under the License.
*/
-import org.codehaus.plexus.spring.PlexusInSpringTestCase;
-import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.configuration.RepositoryScanningConfiguration;
-import org.apache.maven.archiva.repository.RepositoryContentFactory;
-import org.apache.archiva.metadata.repository.MetadataRepository;
-import org.apache.archiva.metadata.model.ArtifactMetadata;
-import org.mockito.*;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-import static org.mockito.Mockito.verify;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.junit.Before;
+import org.mockito.MockitoAnnotations;
-import java.util.List;
-import java.util.ArrayList;
import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.mockito.Mockito.*;
public class Maven2RepositoryMergerTest
extends PlexusInSpringTestCase
private static final String TEST_REPO_ID = "test";
- private static final String TARGET_REPOSITORY_ID = "target-repo";
-
- private Configuration config;
-
- @MockitoAnnotations.Mock
- private MetadataRepository metadataResolver;
-
- private RepositoryContentFactory repositoryFactory;
-
- private ArchivaConfiguration configuration;
-
private Maven2RepositoryMerger repositoryMerger;
private MetadataRepository metadataRepository;
MockitoAnnotations.initMocks( this );
metadataRepository = mock( MetadataRepository.class );
repositoryMerger = (Maven2RepositoryMerger) lookup( RepositoryMerger.class, "maven2" );
- repositoryMerger.setMetadataRepository( metadataRepository );
}
private List<ArtifactMetadata> getArtifacts()
configuration.save( c );
when( metadataRepository.getArtifacts( TEST_REPO_ID ) ).thenReturn( getArtifacts() );
- repositoryMerger.merge( TEST_REPO_ID, "target-rep" );
+ repositoryMerger.merge( metadataRepository, TEST_REPO_ID, "target-rep" );
verify( metadataRepository ).getArtifacts( TEST_REPO_ID );
}
when( metadataRepository.getArtifacts( sourceRepoId ) ).thenReturn( sourceRepoArtifactsList );
when( metadataRepository.getArtifacts( TEST_REPO_ID ) ).thenReturn( targetRepoArtifactsList );
- assertEquals( 1, repositoryMerger.getConflictingArtifacts( sourceRepoId, TEST_REPO_ID ).size() );
+ assertEquals( 1, repositoryMerger.getConflictingArtifacts( metadataRepository, sourceRepoId,
+ TEST_REPO_ID ).size() );
verify( metadataRepository ).getArtifacts( TEST_REPO_ID );
}