<groupId>org.codehaus.plexus.registry</groupId>
<artifactId>plexus-registry-api</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-container-default</artifactId>
+ </dependency>
+ <!-- Test Deps -->
<dependency>
<groupId>org.codehaus.plexus.registry</groupId>
<artifactId>plexus-registry-commons</artifactId>
<scope>test</scope>
</dependency>
- <!-- Test Deps -->
<dependency>
<groupId>easymock</groupId>
<artifactId>easymock</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-container-default</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<build>
<plugins>
import org.apache.commons.lang.math.NumberUtils;
import org.apache.maven.archiva.xml.XMLException;
import org.apache.maven.archiva.xml.XMLReader;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
+import org.codehaus.plexus.logging.Logger;
+import org.codehaus.plexus.logging.console.ConsoleLogger;
import java.io.File;
import java.io.FileOutputStream;
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
- *
- * @plexus.component role="org.apache.maven.archiva.configuration.ConfigurationUpgrade"
- * role-hint="default"
*/
public class ConfigurationUpgrade
- extends AbstractLogEnabled
- implements Initializable
{
public static final int CURRENT_CONFIG_VERSION = 1;
- /* NOTE: This component should *NOT USE* the configuration api to do it's upgrade */
+ private Logger logger;
- public void initialize()
- throws InitializationException
+ /**
+ * Perform the upgrade (if needed).
+ *
+ * NOTE: This component should *NOT USE* the configuration api to do its upgrade
+ *
+ * @return true if the upgrade modified the archiva.xml file. false otherwise.
+ */
+ public boolean perform()
{
File userConfigFile = new File( System.getProperty( "user.home" ), ".m2/archiva.xml" );
if ( !userConfigFile.exists() )
{
writeDefaultConfigFile( userConfigFile );
- return;
+ return true;
}
boolean configOk = false;
catch ( XMLException e )
{
getLogger().warn( "Unable to read user configuration XML: " + e.getMessage(), e );
+ return false;
}
if ( !configOk )
{
FileUtils.copyFile( userConfigFile, new File( userConfigFile.getAbsolutePath() + ".bak" ) );
writeDefaultConfigFile( userConfigFile );
+ return true;
}
catch ( IOException e )
{
- getLogger().warn( "Unable to create backup of your configuration file: "
- + e.getMessage(), e );
+ getLogger().warn( "Unable to create backup of your configuration file: " + e.getMessage(), e );
}
}
+ return false;
}
private void upgradeVersion( File userConfigFile, XMLReader xml )
}
catch ( IOException e )
{
- getLogger().warn( "Unable to write default (generic) configuration file: "
- + e.getMessage(), e );
+ getLogger().warn( "Unable to write default (generic) configuration file: " + e.getMessage(), e );
}
}
}
}
+ public Logger getLogger()
+ {
+ if ( logger == null )
+ {
+ logger = new ConsoleLogger( ConsoleLogger.LEVEL_INFO, this.getClass().getName() );
+ }
+ return logger;
+ }
+
+ public void setLogger( Logger logger )
+ {
+ this.logger = logger;
+ }
+
}
import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryReader;
import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryWriter;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
* @plexus.component role="org.apache.maven.archiva.configuration.ArchivaConfiguration"
*/
public class DefaultArchivaConfiguration
+ extends AbstractLogEnabled
implements ArchivaConfiguration, RegistryListener, Initializable
{
/**
{
if ( configuration == null )
{
- // TODO: should this be the same as section? make sure unnamed sections still work (eg, sys properties)
- configuration = new ConfigurationRegistryReader().read( registry.getSubset( KEY ) );
-
- // TODO: for commons-configuration 1.3 only
- for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
- {
- RepositoryConfiguration c = (RepositoryConfiguration) i.next();
- c.setUrl( removeExpressions( c.getUrl() ) );
- }
+ configuration = load();
}
return configuration;
}
+ private Configuration load()
+ {
+ // TODO: should this be the same as section? make sure unnamed sections still work (eg, sys properties)
+ Configuration config = new ConfigurationRegistryReader().read( registry.getSubset( KEY ) );
+
+ // TODO: for commons-configuration 1.3 only
+ for ( Iterator i = config.getRepositories().iterator(); i.hasNext(); )
+ {
+ RepositoryConfiguration c = (RepositoryConfiguration) i.next();
+ c.setUrl( removeExpressions( c.getUrl() ) );
+ }
+
+ return config;
+ }
+
public void save( Configuration configuration )
throws RegistryException
{
throws InitializationException
{
registry.addChangeListener( this );
+
+ ConfigurationUpgrade upgrade = new ConfigurationUpgrade();
+ upgrade.setLogger( getLogger() );
+ if ( upgrade.perform() )
+ {
+ this.configuration = load();
+ }
}
public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
private String removeExpressions( String directory )
{
- String value = StringUtils.replace( directory, "${appserver.base}",
- registry.getString( "appserver.base", "${appserver.base}" ) );
- value = StringUtils.replace( value, "${appserver.home}",
- registry.getString( "appserver.home", "${appserver.home}" ) );
+ String value = StringUtils.replace( directory, "${appserver.base}", registry.getString( "appserver.base",
+ "${appserver.base}" ) );
+ value = StringUtils.replace( value, "${appserver.home}", registry.getString( "appserver.home",
+ "${appserver.home}" ) );
return value;
}
--- /dev/null
+# Set root logger level to INFO and its only appender to A1.
+log4j.rootLogger=INFO, A1
+
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
+ <dependency>
+ <groupId>commons-collections</groupId>
+ <artifactId>commons-collections</artifactId>
+ </dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
--- /dev/null
+package org.apache.maven.archiva.model.functors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.collections.Predicate;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.model.ArchivaArtifactModel;
+
+/**
+ * Allows for selection of unprocessed artifacts.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class UnprocessedArtifactPredicate
+    implements Predicate
+{
+    /** Shared stateless singleton; final so the instance cannot be replaced. */
+    private static final UnprocessedArtifactPredicate INSTANCE = new UnprocessedArtifactPredicate();
+
+    /**
+     * @return the shared singleton instance (the predicate holds no state).
+     */
+    public static UnprocessedArtifactPredicate getInstance()
+    {
+        return INSTANCE;
+    }
+
+    /**
+     * Test whether the given object is an unprocessed artifact.
+     *
+     * @param object an {@link ArchivaArtifact} or {@link ArchivaArtifactModel};
+     *               any other type evaluates to false.
+     * @return true if the artifact/model has not yet been processed.
+     */
+    public boolean evaluate( Object object )
+    {
+        boolean satisfies = false;
+
+        if ( object instanceof ArchivaArtifact )
+        {
+            ArchivaArtifact artifact = (ArchivaArtifact) object;
+            satisfies = !artifact.getModel().isProcessed();
+        }
+        else if ( object instanceof ArchivaArtifactModel )
+        {
+            ArchivaArtifactModel model = (ArchivaArtifactModel) object;
+            satisfies = !model.isProcessed();
+        }
+
+        return satisfies;
+    }
+}
<artifactId>derby</artifactId>
</dependency>
<!-- TEST DEPS -->
+ <dependency>
+ <groupId>org.codehaus.plexus.registry</groupId>
+ <artifactId>plexus-registry-commons</artifactId>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
import org.apache.maven.archiva.database.constraints.UniqueArtifactIdConstraint;
import org.apache.maven.archiva.database.constraints.UniqueGroupIdConstraint;
import org.apache.maven.archiva.database.constraints.UniqueVersionConstraint;
+import org.apache.maven.archiva.database.updater.DatabaseUpdater;
+import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaProjectModel;
import org.codehaus.plexus.logging.AbstractLogEnabled;
* @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.database.browsing.RepositoryBrowsing"
- * role-hint="default"
*/
public class DefaultRepositoryBrowsing
extends AbstractLogEnabled
*/
private ArchivaDAO dao;
+ /**
+ * @plexus.requirement role-hint="jdo"
+ */
+ private DatabaseUpdater dbUpdater;
+
public BrowsingResults getRoot()
{
List groups = dao.query( new UniqueGroupIdConstraint() );
public ArchivaProjectModel selectVersion( String groupId, String artifactId, String version )
throws ObjectNotFoundException, ArchivaDatabaseException
{
- ArchivaProjectModel model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
-
- // TODO: if the model isn't found. load it from disk, insert into DB, and then return it.
-
- return model;
+ ArchivaArtifact pomArtifact = null;
+
+ try
+ {
+ pomArtifact = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, null, "pom" );
+
+ if ( pomArtifact == null )
+ {
+ throw new ObjectNotFoundException( "Unable to find artifact [" + groupId + ":" + artifactId + ":"
+ + version + "]" );
+ }
+ }
+ catch ( ObjectNotFoundException e )
+ {
+ throw e;
+ }
+
+ ArchivaProjectModel model;
+
+ if ( pomArtifact.getModel().isProcessed() )
+ {
+ // It's been processed. return it.
+ model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
+ return model;
+ }
+
+ // Process it.
+ dbUpdater.updateUnprocessed( pomArtifact );
+
+ // Find it.
+ try
+ {
+ model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
+
+ if ( model == null )
+ {
+ throw new ObjectNotFoundException( "Unable to find project model for [" + groupId + ":" + artifactId + ":"
+ + version + "]" );
+ }
+
+ return model;
+ }
+ catch ( ObjectNotFoundException e )
+ {
+ throw e;
+ }
}
}
* under the License.
*/
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.DatabaseScanningConfiguration;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.IteratorUtils;
+import org.apache.commons.collections.Predicate;
+import org.apache.commons.collections.functors.NotPredicate;
import org.apache.maven.archiva.consumers.ArchivaArtifactConsumer;
-import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.model.functors.UnprocessedArtifactPredicate;
import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
-import org.codehaus.plexus.registry.Registry;
-import org.codehaus.plexus.registry.RegistryListener;
-import java.util.ArrayList;
-import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
-import java.util.Map;
/**
* JdoDatabaseUpdater
*/
public class JdoDatabaseUpdater
extends AbstractLogEnabled
- implements DatabaseUpdater, RegistryListener, Initializable
+ implements DatabaseUpdater
{
/**
* @plexus.requirement role-hint="jdo"
/**
* @plexus.requirement
*/
- private ArchivaConfiguration configuration;
+ private DatabaseConsumers dbConsumers;
- /**
- * The collection of available consumers.
- * @plexus.requirement role="org.apache.maven.archiva.consumers.ArchivaArtifactConsumer"
- */
- private Map availableConsumers;
-
- /**
- * The list of active consumers for unprocessed content.
- */
- private List activeUnprocessedConsumers = new ArrayList();
-
- /**
- * The list of active consumers for processed content.
- */
- private List activeProcessedConsumers = new ArrayList();
-
- /**
- * The list of registry (configuration) property names that will trigger a refresh of the activeConsumers list.
- */
- private List propertyNameTriggers = new ArrayList();
+ private ProcessArchivaArtifactClosure processArtifactClosure = new ProcessArchivaArtifactClosure();
public void update()
throws ArchivaDatabaseException
{
List unprocessedArtifacts = dao.getArtifactDAO().queryArtifacts( new ArtifactsProcessedConstraint( false ) );
- beginConsumerLifecycle( this.activeUnprocessedConsumers );
+ beginConsumerLifecycle( dbConsumers.getSelectedUnprocessedConsumers() );
try
{
// Process each consumer.
- Iterator it = unprocessedArtifacts.iterator();
+ Predicate predicate = UnprocessedArtifactPredicate.getInstance();
+
+ Iterator it = IteratorUtils.filteredIterator( unprocessedArtifacts.iterator(), predicate );
while ( it.hasNext() )
{
ArchivaArtifact artifact = (ArchivaArtifact) it.next();
-
- if ( !artifact.getModel().isProcessed() )
- {
- updateUnprocessed( artifact );
- }
+ updateUnprocessed( artifact );
}
}
finally
{
- consumerConsumerLifecycle( this.activeUnprocessedConsumers );
+ endConsumerLifecycle( dbConsumers.getSelectedUnprocessedConsumers() );
}
- }
+ }
public void updateAllProcessed()
throws ArchivaDatabaseException
{
List processedArtifacts = dao.getArtifactDAO().queryArtifacts( new ArtifactsProcessedConstraint( true ) );
- beginConsumerLifecycle( this.activeProcessedConsumers );
+ beginConsumerLifecycle( dbConsumers.getSelectedCleanupConsumers() );
try
{
// Process each consumer.
- Iterator it = processedArtifacts.iterator();
+ Predicate predicate = NotPredicate.getInstance( UnprocessedArtifactPredicate.getInstance() );
+
+ Iterator it = IteratorUtils.filteredIterator( processedArtifacts.iterator(), predicate );
while ( it.hasNext() )
{
ArchivaArtifact artifact = (ArchivaArtifact) it.next();
-
- if ( !artifact.getModel().isProcessed() )
- {
- updateProcessed( artifact );
- }
+ updateProcessed( artifact );
}
}
finally
{
- consumerConsumerLifecycle( this.activeProcessedConsumers );
+ endConsumerLifecycle( dbConsumers.getSelectedCleanupConsumers() );
}
}
- private void consumerConsumerLifecycle( List consumers )
+ private void endConsumerLifecycle( List consumers )
{
Iterator it = consumers.iterator();
while ( it.hasNext() )
public void updateUnprocessed( ArchivaArtifact artifact )
throws ArchivaDatabaseException
{
- Iterator it = this.activeUnprocessedConsumers.iterator();
- while ( it.hasNext() )
+ List consumers = dbConsumers.getSelectedUnprocessedConsumers();
+
+ if ( CollectionUtils.isEmpty( consumers ) )
{
- ArchivaArtifactConsumer consumer = (ArchivaArtifactConsumer) it.next();
- try
- {
- consumer.processArchivaArtifact( artifact );
- }
- catch ( ConsumerException e )
- {
- getLogger().warn( "Unable to consume (unprocessed) artifact: " + artifact );
- }
+ getLogger().warn( "There are no selected consumers for unprocessed artifacts." );
+ return;
}
+
+ this.processArtifactClosure.setArtifact( artifact );
+ CollectionUtils.forAllDo( consumers, this.processArtifactClosure );
artifact.getModel().setWhenProcessed( new Date() );
dao.getArtifactDAO().saveArtifact( artifact );
public void updateProcessed( ArchivaArtifact artifact )
throws ArchivaDatabaseException
{
- Iterator it = this.activeProcessedConsumers.iterator();
- while ( it.hasNext() )
- {
- ArchivaArtifactConsumer consumer = (ArchivaArtifactConsumer) it.next();
- try
- {
- consumer.processArchivaArtifact( artifact );
- }
- catch ( ConsumerException e )
- {
- getLogger().warn( "Unable to consume (processed) artifact: " + artifact );
- }
- }
- }
-
- private void updateActiveConsumers()
- {
- this.activeUnprocessedConsumers.clear();
- this.activeProcessedConsumers.clear();
+ List consumers = dbConsumers.getSelectedCleanupConsumers();
- DatabaseScanningConfiguration dbScanning = configuration.getConfiguration().getDatabaseScanning();
- if ( dbScanning == null )
+ if ( CollectionUtils.isEmpty( consumers ) )
{
- getLogger().error( "No Database Consumers found!" );
+ getLogger().warn( "There are no selected consumers for artifact cleanup." );
return;
}
-
- this.activeUnprocessedConsumers.addAll( getActiveConsumerList( dbScanning.getUnprocessedConsumers() ) );
- this.activeProcessedConsumers.addAll( getActiveConsumerList( dbScanning.getCleanupConsumers() ) );
- }
-
- private List getActiveConsumerList( List potentialConsumerList )
- {
- if ( ( potentialConsumerList == null ) || ( potentialConsumerList.isEmpty() ) )
- {
- return Collections.EMPTY_LIST;
- }
-
- List ret = new ArrayList();
-
- Iterator it = potentialConsumerList.iterator();
- while ( it.hasNext() )
- {
- String consumerName = (String) it.next();
- if ( !availableConsumers.containsKey( consumerName ) )
- {
- getLogger().warn( "Requested Consumer [" + consumerName + "] does not exist. Disabling." );
- continue;
- }
-
- ret.add( consumerName );
- }
-
- return ret;
- }
-
- public void initialize()
- throws InitializationException
- {
- propertyNameTriggers = new ArrayList();
- propertyNameTriggers.add( "databaseScanning" );
- propertyNameTriggers.add( "unprocessedConsumers" );
- propertyNameTriggers.add( "unprocessedConsumer" );
- propertyNameTriggers.add( "processedConsumers" );
- propertyNameTriggers.add( "processedConsumer" );
-
- configuration.addChangeListener( this );
- updateActiveConsumers();
- }
-
- public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
- {
- if ( propertyNameTriggers.contains( propertyName ) )
- {
- updateActiveConsumers();
- }
- }
-
- public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
- {
- /* nothing to do here */
+
+ this.processArtifactClosure.setArtifact( artifact );
+ CollectionUtils.forAllDo( consumers, this.processArtifactClosure );
}
}
--- /dev/null
+package org.apache.maven.archiva.database.updater;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.collections.Closure;
+import org.apache.maven.archiva.consumers.ArchivaArtifactConsumer;
+import org.apache.maven.archiva.consumers.ConsumerException;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+
+/**
+ * ProcessArchivaArtifactClosure
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.commons.collections.Closure"
+ * role-hint="process-artifact"
+ * instantiation-strategy="per-lookup"
+ */
+class ProcessArchivaArtifactClosure
+    extends AbstractLogEnabled
+    implements Closure
+{
+    /** The artifact to feed to each consumer; must be set before invoking the closure. */
+    private ArchivaArtifact artifact;
+
+    /**
+     * Process the configured {@link #artifact} with the given consumer.
+     *
+     * @param input expected to be an {@link ArchivaArtifactConsumer}; other input types are ignored.
+     */
+    public void execute( Object input )
+    {
+        if ( input instanceof ArchivaArtifactConsumer )
+        {
+            ArchivaArtifactConsumer consumer = (ArchivaArtifactConsumer) input;
+
+            try
+            {
+                consumer.processArchivaArtifact( artifact );
+            }
+            catch ( ConsumerException e )
+            {
+                // Pass the exception to the logger so the cause / stack trace is not lost.
+                getLogger().warn(
+                                  "Unable to process artifact [" + artifact + "] with consumer ["
+                                      + consumer.getId() + "]", e );
+            }
+        }
+    }
+
+    public ArchivaArtifact getArtifact()
+    {
+        return artifact;
+    }
+
+    public void setArtifact( ArchivaArtifact artifact )
+    {
+        this.artifact = artifact;
+    }
+}
\ No newline at end of file
* under the License.
*/
+import org.apache.maven.archiva.consumers.DatabaseCleanupConsumer;
+import org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer;
+import org.apache.maven.archiva.database.updater.TestDatabaseCleanupConsumer;
+import org.apache.maven.archiva.database.updater.TestDatabaseUnprocessedConsumer;
import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
}
+ protected TestDatabaseCleanupConsumer lookupTestCleanupConsumer()
+ throws Exception
+ {
+ TestDatabaseCleanupConsumer consumer = (TestDatabaseCleanupConsumer) lookup( DatabaseCleanupConsumer.class,
+ "test-db-cleanup" );
+ assertNotNull( "Test Database Cleanup Consumer should not be null.", consumer );
+ return consumer;
+ }
+
+ protected TestDatabaseUnprocessedConsumer lookupTestUnprocessedConsumer()
+ throws Exception
+ {
+ TestDatabaseUnprocessedConsumer consumer = (TestDatabaseUnprocessedConsumer) lookup(
+ DatabaseUnprocessedArtifactConsumer.class,
+ "test-db-unprocessed" );
+ assertNotNull( "Test Database Unprocessed Consumer should not be null.", consumer );
+ return consumer;
+ }
+
protected Date toDate( String txt )
throws Exception
{
public RepositoryBrowsing lookupBrowser()
throws Exception
{
- RepositoryBrowsing browser = (RepositoryBrowsing) lookup( RepositoryBrowsing.class.getName(), "default" );
+ RepositoryBrowsing browser = (RepositoryBrowsing) lookup( RepositoryBrowsing.class.getName() );
assertNotNull( "RepositoryBrowsing should not be null.", browser );
return browser;
}
--- /dev/null
+package org.apache.maven.archiva.database.updater;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.collections.CollectionUtils;
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.util.List;
+
+/**
+ * DatabaseConsumersTest
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DatabaseConsumersTest
+    extends PlexusTestCase
+{
+    /**
+     * Look up the DatabaseConsumers component from the plexus container.
+     *
+     * @return the component, never null (asserted).
+     */
+    private DatabaseConsumers lookupDbConsumers()
+        throws Exception
+    {
+        DatabaseConsumers dbconsumers = (DatabaseConsumers) lookup( DatabaseConsumers.class );
+        assertNotNull( "DatabaseConsumers should not be null.", dbconsumers );
+        return dbconsumers;
+    }
+
+    // Each test below verifies that the corresponding consumer list is non-null
+    // and non-empty; the actual entries come from the test components.xml wiring.
+
+    public void testGetAvailableCleanupConsumers()
+        throws Exception
+    {
+        DatabaseConsumers dbconsumers = lookupDbConsumers();
+        List available = dbconsumers.getAvailableCleanupConsumers();
+        assertNotNull( "Available Cleanup Consumers should never be null.", available );
+
+        assertTrue( "Available Cleanup Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
+    }
+
+    public void testGetAvailableUnprocessedConsumers()
+        throws Exception
+    {
+        DatabaseConsumers dbconsumers = lookupDbConsumers();
+        List available = dbconsumers.getAvailableUnprocessedConsumers();
+        assertNotNull( "Available Unprocessed Consumers should never be null.", available );
+
+        assertTrue( "Available Unprocessed Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
+    }
+
+    public void testGetSelectedCleanupConsumers()
+        throws Exception
+    {
+        DatabaseConsumers dbconsumers = lookupDbConsumers();
+        List available = dbconsumers.getSelectedCleanupConsumers();
+        assertNotNull( "Selected Cleanup Consumers should never be null.", available );
+
+        assertTrue( "Selected Cleanup Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
+    }
+
+    public void testGetSelectedUnprocessedConsumers()
+        throws Exception
+    {
+        DatabaseConsumers dbconsumers = lookupDbConsumers();
+        List available = dbconsumers.getSelectedUnprocessedConsumers();
+        assertNotNull( "Selected Unprocessed Consumers should never be null.", available );
+
+        assertTrue( "Selected Unprocessed Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
+    }
+
+}
--- /dev/null
+package org.apache.maven.archiva.database.updater;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
+import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+
+import java.util.Date;
+
+/**
+ * DatabaseUpdaterTest
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DatabaseUpdaterTest
+    extends AbstractArchivaDatabaseTestCase
+{
+    private DatabaseUpdater dbupdater;
+
+    /**
+     * Create (but do not save) a jar artifact with the given coordinates.
+     *
+     * @param whenProcessed date string for the processed timestamp, or null for an
+     *                      unprocessed artifact.
+     */
+    public ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String whenProcessed )
+        throws Exception
+    {
+        ArchivaArtifact artifact = dao.getArtifactDAO().createArtifact( groupId, artifactId, version, "", "jar" );
+        assertNotNull( "Artifact should not be null.", artifact );
+        Date dateWhenProcessed = null;
+
+        if ( whenProcessed != null )
+        {
+            dateWhenProcessed = toDate( whenProcessed );
+        }
+
+        artifact.getModel().setWhenProcessed( dateWhenProcessed );
+
+        // Satisfy table / column requirements.
+        artifact.getModel().setLastModified( new Date() );
+
+        return artifact;
+    }
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        ArtifactDAO adao = dao.getArtifactDAO();
+        assertNotNull( "Artifact DAO should not be null.", adao );
+
+        // Three unprocessed artifacts (null timestamp) and one already-processed one.
+        adao.saveArtifact( createArtifact( "org.apache.maven.archiva", "archiva-common", "1.0-SNAPSHOT", null ) );
+        adao.saveArtifact( createArtifact( "org.apache.maven.archiva", "archiva-utils", "1.0-SNAPSHOT", null ) );
+        adao.saveArtifact( createArtifact( "org.apache.maven.archiva", "archiva-old", "0.1", "2004/02/15 9:01:00" ) );
+        adao.saveArtifact( createArtifact( "org.apache.maven.archiva", "archiva-database", "1.0-SNAPSHOT", null ) );
+
+        dbupdater = (DatabaseUpdater) lookup( DatabaseUpdater.class, "jdo" );
+        assertNotNull( "DatabaseUpdater should not be null.", dbupdater );
+    }
+
+    public void testUpdateUnprocessed()
+        throws Exception
+    {
+        String groupId = "org.apache.maven.archiva";
+        String artifactId = "archiva-utils";
+        String version = "1.0-SNAPSHOT";
+        String classifier = "";
+        String type = "jar";
+
+        TestDatabaseUnprocessedConsumer consumer = lookupTestUnprocessedConsumer();
+        consumer.resetCount();
+
+        // Check the state of the artifact in the DB.
+        ArchivaArtifact savedArtifact = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, classifier,
+                                                                          type );
+        assertFalse( "Artifact should not be considered processed (yet).", savedArtifact.getModel().isProcessed() );
+
+        // Update the artifact
+        dbupdater.updateUnprocessed( savedArtifact );
+
+        // Check the update.
+        ArchivaArtifact processed = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, classifier, type );
+        assertTrue( "Artifact should be flagged as processed.", processed.getModel().isProcessed() );
+
+        // Did the unprocessed consumer do its thing?
+        assertEquals( "Processed Count.", 1, consumer.getCountProcessed() );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.database.updater;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
+import org.apache.maven.archiva.consumers.ConsumerException;
+import org.apache.maven.archiva.consumers.DatabaseCleanupConsumer;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+
+import java.util.List;
+
+/**
+ * TestDatabaseCleanupConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class TestDatabaseCleanupConsumer
+    extends AbstractMonitoredConsumer
+    implements DatabaseCleanupConsumer
+{
+    private int countBegin = 0;
+    private int countComplete = 0;
+    private int countProcessed = 0;
+
+    /** Reset all invocation counters to zero. */
+    public void resetCount()
+    {
+        countBegin = 0;
+        countProcessed = 0;
+        countComplete = 0;
+    }
+
+    public void beginScan()
+    {
+        countBegin++;
+    }
+
+    public void completeScan()
+    {
+        countComplete++;
+    }
+
+    public List getIncludedTypes()
+    {
+        return null;
+    }
+
+    public void processArchivaArtifact( ArchivaArtifact artifact )
+        throws ConsumerException
+    {
+        countProcessed++;
+    }
+
+    public String getDescription()
+    {
+        // Fixed copy-paste from the unprocessed test consumer: this is the cleanup consumer.
+        return "Test Consumer for Database Cleanup";
+    }
+
+    public String getId()
+    {
+        // Must match the "test-db-cleanup" hint used when looking up this consumer,
+        // and must not clash with TestDatabaseUnprocessedConsumer's id.
+        return "test-db-cleanup";
+    }
+
+    public boolean isPermanent()
+    {
+        return false;
+    }
+
+    public int getCountBegin()
+    {
+        return countBegin;
+    }
+
+    public int getCountComplete()
+    {
+        return countComplete;
+    }
+
+    public int getCountProcessed()
+    {
+        return countProcessed;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.database.updater;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
+import org.apache.maven.archiva.consumers.ConsumerException;
+import org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * TestDatabaseUnprocessedConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class TestDatabaseUnprocessedConsumer
+    extends AbstractMonitoredConsumer
+    implements DatabaseUnprocessedArtifactConsumer
+{
+    // Lifecycle counters; tests read these via the getters below to verify
+    // how often the database scanner drove this consumer.
+    private int countBegin = 0;
+
+    private int countComplete = 0;
+
+    private int countProcessed = 0;
+
+    /** Reset all counters between test runs. */
+    public void resetCount()
+    {
+        countBegin = 0;
+        countProcessed = 0;
+        countComplete = 0;
+    }
+
+    public void beginScan()
+    {
+        countBegin++;
+    }
+
+    public void completeScan()
+    {
+        countComplete++;
+    }
+
+    public List getIncludedTypes()
+    {
+        // Restrict this consumer to pom and jar artifacts.
+        List types = new ArrayList();
+        types.add( "pom" );
+        types.add( "jar" );
+        return types;
+    }
+
+    public void processArchivaArtifact( ArchivaArtifact artifact )
+        throws ConsumerException
+    {
+        getLogger().info( "Processing Artifact: " + artifact );
+        countProcessed++;
+    }
+
+    public String getDescription()
+    {
+        return "Test Consumer for Database Unprocessed";
+    }
+
+    public String getId()
+    {
+        // Matches the <unprocessedConsumer>test-db-unprocessed</unprocessedConsumer>
+        // entry in the test archiva configuration.
+        return "test-db-unprocessed";
+    }
+
+    public boolean isPermanent()
+    {
+        return false;
+    }
+
+    public int getCountBegin()
+    {
+        return countBegin;
+    }
+
+    public int getCountComplete()
+    {
+        return countComplete;
+    }
+
+    public int getCountProcessed()
+    {
+        return countProcessed;
+    }
+}
</otherProperties>
</configuration>
</component>
+
+ <component>
+ <role>org.apache.maven.archiva.consumers.DatabaseCleanupConsumer</role>
+ <role-hint>test-db-cleanup</role-hint>
+ <implementation>org.apache.maven.archiva.database.updater.TestDatabaseCleanupConsumer</implementation>
+ </component>
+
+ <component>
+ <role>org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer</role>
+ <role-hint>test-db-unprocessed</role-hint>
+ <implementation>org.apache.maven.archiva.database.updater.TestDatabaseUnprocessedConsumer</implementation>
+ </component>
<component>
<role>org.codehaus.plexus.logging.LoggerManager</role>
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<configuration>
+ <repositories>
+ <repository>
+ <id>internal</id>
+ <name>Archiva Managed Internal Repository</name>
+ <url>file://${appserver.home}/repositories/internal</url>
+ <layout>default</layout>
+ <releases>true</releases>
+ <snapshots>false</snapshots>
+ <indexed>true</indexed>
+ <refreshCronExpression>0 0 * * ?</refreshCronExpression>
+ </repository>
+ <repository>
+ <id>snapshots</id>
+ <name>Archiva Managed Snapshot Repository</name>
+ <url>file://${appserver.home}/repositories/internal</url>
+ <layout>default</layout>
+ <releases>false</releases>
+ <snapshots>true</snapshots>
+ <indexed>true</indexed>
+ <refreshCronExpression>0 0,30 * * ?</refreshCronExpression>
+ </repository>
+ <repository>
+ <id>central</id>
+ <name>Central Repository</name>
+ <url>http://repo1.maven.org/maven2</url>
+ <layout>default</layout>
+ <releases>true</releases>
+ <snapshots>false</snapshots>
+ <indexed>false</indexed>
+ </repository>
+ <repository>
+ <id>maven2-repository.dev.java.net</id>
+ <name>Java.net Repository for Maven 2</name>
+ <url>https://maven2-repository.dev.java.net/nonav/repository</url>
+ <layout>default</layout>
+ <releases>true</releases>
+ <snapshots>false</snapshots>
+ <indexed>false</indexed>
+ </repository>
+ </repositories>
+
+ <proxyConnectors />
+
+ <networkProxies />
+
+ <repositoryScanning>
+ <fileTypes>
+ <fileType>
+ <id>artifacts</id>
+ <patterns>
+ <pattern>**/*.pom</pattern>
+ <pattern>**/*.jar</pattern>
+ <pattern>**/*.ear</pattern>
+ <pattern>**/*.war</pattern>
+ <pattern>**/*.car</pattern>
+ <pattern>**/*.sar</pattern>
+ <pattern>**/*.mar</pattern>
+ <pattern>**/*.rar</pattern>
+ <pattern>**/*.dtd</pattern>
+ <pattern>**/*.tld</pattern>
+ <pattern>**/*.tar.gz</pattern>
+ <pattern>**/*.tar.bz2</pattern>
+ <pattern>**/*.zip</pattern>
+ </patterns>
+ </fileType>
+ <fileType>
+ <id>indexable-content</id>
+ <patterns>
+ <pattern>**/*.txt</pattern>
+ <pattern>**/*.TXT</pattern>
+ <pattern>**/*.block</pattern>
+ <pattern>**/*.config</pattern>
+ <pattern>**/*.pom</pattern>
+ <pattern>**/*.xml</pattern>
+ <pattern>**/*.xsd</pattern>
+ <pattern>**/*.dtd</pattern>
+ <pattern>**/*.tld</pattern>
+ </patterns>
+ </fileType>
+ <fileType>
+ <id>auto-remove</id>
+ <patterns>
+ <pattern>**/*.bak</pattern>
+ <pattern>**/*~</pattern>
+ <pattern>**/*-</pattern>
+ </patterns>
+ </fileType>
+ <fileType>
+ <id>ignored</id>
+ <patterns>
+ <pattern>**/.htaccess</pattern>
+ <pattern>**/KEYS</pattern>
+ <pattern>**/*.rb</pattern>
+ <pattern>**/*.sh</pattern>
+ <pattern>**/.svn/**</pattern>
+ <pattern>**/.DAV/**</pattern>
+ </patterns>
+ </fileType>
+ </fileTypes>
+ <knownContentConsumers>
+ <knownContentConsumer>update-db-artifact</knownContentConsumer>
+ <knownContentConsumer>create-missing-checksums</knownContentConsumer>
+ <knownContentConsumer>update-db-repository-metadata</knownContentConsumer>
+ <knownContentConsumer>validate-checksum</knownContentConsumer>
+ <knownContentConsumer>validate-signature</knownContentConsumer>
+ <knownContentConsumer>index-content</knownContentConsumer>
+ <knownContentConsumer>auto-remove</knownContentConsumer>
+ <knownContentConsumer>auto-rename</knownContentConsumer>
+ </knownContentConsumers>
+ <invalidContentConsumers>
+ <invalidContentConsumer>update-db-bad-content</invalidContentConsumer>
+ </invalidContentConsumers>
+ </repositoryScanning>
+
+ <databaseScanning>
+ <cronExpression>0 0 * * ?</cronExpression>
+ <unprocessedConsumers>
+ <unprocessedConsumer>test-db-unprocessed</unprocessedConsumer>
+ <unprocessedConsumer>index-artifact</unprocessedConsumer>
+ <unprocessedConsumer>update-db-project</unprocessedConsumer>
+ <unprocessedConsumer>validate-repository-metadata</unprocessedConsumer>
+ <unprocessedConsumer>index-archive-toc</unprocessedConsumer>
+ <unprocessedConsumer>update-db-bytecode-stats</unprocessedConsumer>
+ <unprocessedConsumer>index-public-methods</unprocessedConsumer>
+ </unprocessedConsumers>
+ <cleanupConsumers>
+ <cleanupConsumer>test-db-cleanup</cleanupConsumer>
+ <cleanupConsumer>not-present-remove-db-artifact</cleanupConsumer>
+ <cleanupConsumer>not-present-remove-db-project</cleanupConsumer>
+ <cleanupConsumer>not-present-remove-indexed</cleanupConsumer>
+ </cleanupConsumers>
+ </databaseScanning>
+
+</configuration>
--- /dev/null
+<?xml version="1.0" ?>
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
+ <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.registry.Registry</role>
+ <role-hint>configured</role-hint>
+ </requirement>
+ </requirements>
+ </component>
+ <component>
+ <role>org.codehaus.plexus.registry.Registry</role>
+ <role-hint>configured</role-hint>
+ <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
+ <configuration>
+ <properties>
+ <system/>
+ <xml fileName="${basedir}/src/test/resources/archiva-test.xml"
+ config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
+ </properties>
+ </configuration>
+ </component>
+ </components>
+</component-set>
+
--- /dev/null
+<?xml version="1.0" ?>
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
+ <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.registry.Registry</role>
+ <role-hint>configured</role-hint>
+ </requirement>
+ </requirements>
+ </component>
+ <component>
+ <role>org.codehaus.plexus.registry.Registry</role>
+ <role-hint>configured</role-hint>
+ <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
+ <configuration>
+ <properties>
+ <system/>
+ <xml fileName="${basedir}/src/test/resources/archiva-test.xml"
+ config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
+ </properties>
+ </configuration>
+ </component>
+ </components>
+</component-set>
+
<artifactId>plexus-registry-api</artifactId>
</dependency>
<!-- Test Dependencies -->
- <dependency>
- <groupId>hsqldb</groupId>
- <artifactId>hsqldb</artifactId>
- <scope>test</scope>
- </dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-core-consumers</artifactId>
+ <artifactId>archiva-database-consumers</artifactId>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-database-consumers</artifactId>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
<scope>test</scope>
</dependency>
<dependency>
RepositoryContentStatistics stats = repoScanner.scan( arepo, sinceWhen );
- stats = (RepositoryContentStatistics) dao.save( stats );
-
getLogger().info( "Finished repository task: " + stats.toDump( arepo ) );
+
+ stats = (RepositoryContentStatistics) dao.save( stats );
}
catch ( ArchivaDatabaseException e )
{
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<configuration>
- <repositories>
- <repository>
- <id>internal</id>
- <name>Archiva Managed Internal Repository</name>
- <url>file://${appserver.home}/repositories/internal</url>
- <layout>default</layout>
- <releases>true</releases>
- <snapshots>false</snapshots>
- <indexed>true</indexed>
- <refreshCronExpression>0 0 * * ?</refreshCronExpression>
- </repository>
- <repository>
- <id>snapshots</id>
- <name>Archiva Managed Snapshot Repository</name>
- <url>file://${appserver.home}/repositories/internal</url>
- <layout>default</layout>
- <releases>false</releases>
- <snapshots>true</snapshots>
- <indexed>true</indexed>
- <refreshCronExpression>0 0,30 * * ?</refreshCronExpression>
- </repository>
- <repository>
- <id>central</id>
- <name>Central Repository</name>
- <url>http://repo1.maven.org/maven2</url>
- <layout>default</layout>
- <releases>true</releases>
- <snapshots>false</snapshots>
- <indexed>false</indexed>
- </repository>
- <repository>
- <id>maven2-repository.dev.java.net</id>
- <name>Java.net Repository for Maven 2</name>
- <url>https://maven2-repository.dev.java.net/nonav/repository</url>
- <layout>default</layout>
- <releases>true</releases>
- <snapshots>false</snapshots>
- <indexed>false</indexed>
- </repository>
- </repositories>
-
- <proxyConnectors>
- <proxyConnector>
- <sourceRepoId>internal</sourceRepoId>
- <targetRepoId>central</targetRepoId>
- <proxyId />
- <snapshotsPolicy>disabled</snapshotsPolicy>
- <releasePolicy>never</releasePolicy>
- <failurePolicy>not-found</failurePolicy>
- </proxyConnector>
- <proxyConnector>
- <sourceRepoId>internal</sourceRepoId>
- <targetRepoId>maven2-repository.dev.java.net</targetRepoId>
- <proxyId />
- <snapshotsPolicy>disabled</snapshotsPolicy>
- <releasePolicy>never</releasePolicy>
- <failurePolicy>not-found</failurePolicy>
- <whiteListPatterns>
- <whiteListPattern>javax/**</whiteListPattern>
- </whiteListPatterns>
- </proxyConnector>
- </proxyConnectors>
-
- <networkProxies>
- <networkProxy>
- <id>example</id>
- <protocol>http</protocol>
- <host>proxy.mycompany.com</host>
- <port>8080</port>
- <username>myself</username>
- <password>mypass</password>
- </networkProxy>
- </networkProxies>
-
- <repositoryScanning>
- <fileTypes>
- <fileType>
- <id>artifacts</id>
- <patterns>
- <pattern>**/*.pom</pattern>
- <pattern>**/*.jar</pattern>
- <pattern>**/*.ear</pattern>
- <pattern>**/*.war</pattern>
- <pattern>**/*.car</pattern>
- <pattern>**/*.sar</pattern>
- <pattern>**/*.mar</pattern>
- <pattern>**/*.rar</pattern>
- <pattern>**/*.dtd</pattern>
- <pattern>**/*.tld</pattern>
- <pattern>**/*.tar.gz</pattern>
- <pattern>**/*.tar.bz2</pattern>
- <pattern>**/*.zip</pattern>
- </patterns>
- </fileType>
- <fileType>
- <id>indexable-content</id>
- <patterns>
- <pattern>**/*.txt</pattern>
- <pattern>**/*.TXT</pattern>
- <pattern>**/*.block</pattern>
- <pattern>**/*.config</pattern>
- <pattern>**/*.pom</pattern>
- <pattern>**/*.xml</pattern>
- <pattern>**/*.xsd</pattern>
- <pattern>**/*.dtd</pattern>
- <pattern>**/*.tld</pattern>
- </patterns>
- </fileType>
- <fileType>
- <id>auto-remove</id>
- <patterns>
- <pattern>**/*.bak</pattern>
- <pattern>**/*~</pattern>
- <pattern>**/*-</pattern>
- </patterns>
- </fileType>
- <fileType>
- <id>ignored</id>
- <patterns>
- <pattern>**/.htaccess</pattern>
- <pattern>**/KEYS</pattern>
- <pattern>**/*.rb</pattern>
- <pattern>**/*.sh</pattern>
- <pattern>**/.svn/**</pattern>
- <pattern>**/.DAV/**</pattern>
- </patterns>
- </fileType>
- </fileTypes>
- <knownContentConsumers>
- <knownContentConsumer>update-db-artifact</knownContentConsumer>
- <knownContentConsumer>create-missing-checksums</knownContentConsumer>
- <knownContentConsumer>update-db-repository-metadata</knownContentConsumer>
- <knownContentConsumer>validate-checksum</knownContentConsumer>
- <knownContentConsumer>validate-signature</knownContentConsumer>
- <knownContentConsumer>index-content</knownContentConsumer>
- <knownContentConsumer>auto-remove</knownContentConsumer>
- <knownContentConsumer>auto-rename</knownContentConsumer>
- </knownContentConsumers>
- <invalidContentConsumers>
- <invalidContentConsumer>update-db-bad-content</invalidContentConsumer>
- </invalidContentConsumers>
- </repositoryScanning>
-
- <databaseScanning>
- <cronExpression>0 0 * * ?</cronExpression>
- <unprocessedConsumers>
- <unprocessedConsumer>update-db-artifact</unprocessedConsumer>
- </unprocessedConsumers>
- <cleanupConsumers>
- <cleanupConsumer>not-present-remove-db-artifact</cleanupConsumer>
- <cleanupConsumer>not-present-remove-db-project</cleanupConsumer>
- <cleanupConsumer>not-present-remove-indexed</cleanupConsumer>
- </cleanupConsumers>
- </databaseScanning>
-
-</configuration>
--- /dev/null
+package org.apache.maven.archiva.scheduled;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
+import org.apache.maven.archiva.consumers.ConsumerException;
+import org.apache.maven.archiva.consumers.DatabaseCleanupConsumer;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+
+import java.util.List;
+
+/**
+ * TestDatabaseCleanupConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+/**
+ * Stub {@link DatabaseCleanupConsumer} that only counts lifecycle callbacks so
+ * tests can assert how often the database scanner invoked it.
+ */
+public class TestDatabaseCleanupConsumer
+    extends AbstractMonitoredConsumer
+    implements DatabaseCleanupConsumer
+{
+    private int countBegin = 0;
+    private int countComplete = 0;
+    private int countProcessed = 0;
+
+    /** Reset all counters between test runs. */
+    public void resetCount()
+    {
+        countBegin = 0;
+        countProcessed = 0;
+        countComplete = 0;
+    }
+
+    public void beginScan()
+    {
+        countBegin++;
+    }
+
+    public void completeScan()
+    {
+        countComplete++;
+    }
+
+    public List getIncludedTypes()
+    {
+        // null means "no type restriction" for this test stub.
+        return null;
+    }
+
+    public void processArchivaArtifact( ArchivaArtifact artifact )
+        throws ConsumerException
+    {
+        countProcessed++;
+    }
+
+    public String getDescription()
+    {
+        return "Test Consumer for Database Cleanup";
+    }
+
+    public String getId()
+    {
+        // Must match the <cleanupConsumer>test-db-cleanup</cleanupConsumer>
+        // entry in the test archiva configuration and the component role-hint;
+        // returning the unprocessed consumer's id here would leave this
+        // consumer unselected (and collide with TestDatabaseUnprocessedConsumer).
+        return "test-db-cleanup";
+    }
+
+    public boolean isPermanent()
+    {
+        return false;
+    }
+
+    public int getCountBegin()
+    {
+        return countBegin;
+    }
+
+    public int getCountComplete()
+    {
+        return countComplete;
+    }
+
+    public int getCountProcessed()
+    {
+        return countProcessed;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.scheduled;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
+import org.apache.maven.archiva.consumers.ConsumerException;
+import org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * TestDatabaseUnprocessedConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class TestDatabaseUnprocessedConsumer
+    extends AbstractMonitoredConsumer
+    implements DatabaseUnprocessedArtifactConsumer
+{
+    // Lifecycle counters; tests read these via the getters below to verify
+    // how often the database scanner drove this consumer.
+    private int countBegin = 0;
+
+    private int countComplete = 0;
+
+    private int countProcessed = 0;
+
+    /** Reset all counters between test runs. */
+    public void resetCount()
+    {
+        countBegin = 0;
+        countProcessed = 0;
+        countComplete = 0;
+    }
+
+    public void beginScan()
+    {
+        countBegin++;
+    }
+
+    public void completeScan()
+    {
+        countComplete++;
+    }
+
+    public List getIncludedTypes()
+    {
+        // Restrict this consumer to pom and jar artifacts.
+        List types = new ArrayList();
+        types.add( "pom" );
+        types.add( "jar" );
+        return types;
+    }
+
+    public void processArchivaArtifact( ArchivaArtifact artifact )
+        throws ConsumerException
+    {
+        getLogger().info( "Processing Artifact: " + artifact );
+        countProcessed++;
+    }
+
+    public String getDescription()
+    {
+        return "Test Consumer for Database Unprocessed";
+    }
+
+    public String getId()
+    {
+        // Matches the <unprocessedConsumer>test-db-unprocessed</unprocessedConsumer>
+        // entry in the test archiva configuration.
+        return "test-db-unprocessed";
+    }
+
+    public boolean isPermanent()
+    {
+        return false;
+    }
+
+    public int getCountBegin()
+    {
+        return countBegin;
+    }
+
+    public int getCountComplete()
+    {
+        return countComplete;
+    }
+
+    public int getCountProcessed()
+    {
+        return countProcessed;
+    }
+}
private TaskExecutor taskExecutor;
protected ArchivaDAO dao;
-
+
protected void setUp()
throws Exception
{
super.setUp();
-
+
DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );
- jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
+ jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
/* derby version
- File derbyDbDir = new File( "target/plexus-home/testdb" );
- if ( derbyDbDir.exists() )
- {
- FileUtils.deleteDirectory( derbyDbDir );
- }
+ File derbyDbDir = new File( "target/plexus-home/testdb" );
+ if ( derbyDbDir.exists() )
+ {
+ FileUtils.deleteDirectory( derbyDbDir );
+ }
- jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );
- jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
- */
+ jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );
+ jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
+ */
- jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
+ jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:" + getName() ) );
-
- jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
- jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
+ jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
+
+ jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
- jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
+ jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
- jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
+ jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
- jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
+ jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
}
- URL jdoFileUrls[] = new URL[] { getClass()
- .getResource( "/org/apache/maven/archiva/model/package.jdo" ) };
+ URL jdoFileUrls[] = new URL[] { getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" ) };
if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
{
taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-database-update" );
}
- public void testExecutor() throws Exception
+ public void testExecutor()
+ throws Exception
{
RepositoryDAO repoDao = dao.getRepositoryDAO();
String repoUri = "file://" + StringUtils.replace( repoDir.getAbsolutePath(), "\\", "/" );
// Create it
- ArchivaRepository repo =
- repoDao.createRepository( "testRepo", "Test Repository", repoUri );
+ ArchivaRepository repo = repoDao.createRepository( "testRepo", "Test Repository", repoUri );
assertNotNull( repo );
// Set some mandatory values
assertEquals( "testRepo", JDOHelper.getObjectId( repoSaved.getModel() ).toString() );
ArtifactDAO adao = dao.getArtifactDAO();
-
+
ArchivaArtifact sqlArtifact = adao.createArtifact( "javax.sql", "jdbc", "2.0", "", "jar" );
sqlArtifact.getModel().setLastModified( new Date() );
sqlArtifact.getModel().setSize( 1234 );
sqlArtifact.getModel().setOrigin( "testcase" );
sqlArtifact.getModel().setWhenProcessed( null );
-
+
adao.saveArtifact( sqlArtifact );
-
+
ArchivaArtifact artifact = adao.getArtifact( "javax.sql", "jdbc", "2.0", null, "jar" );
-
+
assertNotNull( artifact );
+
+        // Test for artifact existence.
+ List artifactList = adao.queryArtifacts( null );
+ assertNotNull( "Artifact list should not be null.", artifactList );
+ assertEquals( "Artifact list size", 1, artifactList.size() );
+ // Test for unprocessed artifacts.
List unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
-
- assertNotNull( unprocessedResultList );
- assertEquals("Incorrect number of unprocessed artifacts detected.", 1, unprocessedResultList.size() );
-
+ assertNotNull( "Unprocessed Results should not be null.", unprocessedResultList );
+ assertEquals( "Incorrect number of unprocessed artifacts detected.", 1, unprocessedResultList.size() );
+
+ // Execute the database task.
DatabaseTask dataTask = new DatabaseTask();
-
+
dataTask.setName( "testDataTask" );
taskExecutor.executeTask( dataTask );
- List processedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( true ) );
+        // Test for artifact existence.
+ artifactList = adao.queryArtifacts( null );
+ assertNotNull( "Artifact list should not be null.", artifactList );
+ assertEquals( "Artifact list size", 1, artifactList.size() );
- assertNotNull( processedResultList );
- assertEquals("Incorrect number of processed artifacts detected.", 1, processedResultList.size() );
-
+ // Test for processed artifacts.
+ List processedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( true ) );
+ assertNotNull( "Processed Results should not be null.", processedResultList );
+ assertEquals( "Incorrect number of processed artifacts detected.", 1, processedResultList.size() );
}
-
}
assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
String repoUri = "file://" + StringUtils.replace( repoDir.getAbsolutePath(), "\\", "/" );
-
// Create it
ArchivaRepository repo =
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<configuration>
+ <repositories>
+ <repository>
+ <id>testRepo</id>
+ <name>Archiva Test Repository</name>
+ <url>file://${basedir}/src/test/repositories/default-repository</url>
+ <layout>default</layout>
+ <releases>true</releases>
+ <snapshots>false</snapshots>
+ <indexed>true</indexed>
+ <refreshCronExpression>0 0 * * ?</refreshCronExpression>
+ </repository>
+ </repositories>
+
+ <proxyConnectors />
+
+ <networkProxies />
+
+ <repositoryScanning>
+ <fileTypes>
+ <fileType>
+ <id>artifacts</id>
+ <patterns>
+ <pattern>**/*.pom</pattern>
+ <pattern>**/*.jar</pattern>
+ <pattern>**/*.ear</pattern>
+ <pattern>**/*.war</pattern>
+ <pattern>**/*.car</pattern>
+ <pattern>**/*.sar</pattern>
+ <pattern>**/*.mar</pattern>
+ <pattern>**/*.rar</pattern>
+ <pattern>**/*.dtd</pattern>
+ <pattern>**/*.tld</pattern>
+ <pattern>**/*.tar.gz</pattern>
+ <pattern>**/*.tar.bz2</pattern>
+ <pattern>**/*.zip</pattern>
+ </patterns>
+ </fileType>
+ <fileType>
+ <id>indexable-content</id>
+ <patterns>
+ <pattern>**/*.txt</pattern>
+ <pattern>**/*.TXT</pattern>
+ <pattern>**/*.block</pattern>
+ <pattern>**/*.config</pattern>
+ <pattern>**/*.pom</pattern>
+ <pattern>**/*.xml</pattern>
+ <pattern>**/*.xsd</pattern>
+ <pattern>**/*.dtd</pattern>
+ <pattern>**/*.tld</pattern>
+ </patterns>
+ </fileType>
+ <fileType>
+ <id>auto-remove</id>
+ <patterns>
+ <pattern>**/*.bak</pattern>
+ <pattern>**/*~</pattern>
+ <pattern>**/*-</pattern>
+ </patterns>
+ </fileType>
+ <fileType>
+ <id>ignored</id>
+ <patterns>
+ <pattern>**/.htaccess</pattern>
+ <pattern>**/KEYS</pattern>
+ <pattern>**/*.rb</pattern>
+ <pattern>**/*.sh</pattern>
+ <pattern>**/.svn/**</pattern>
+ <pattern>**/.DAV/**</pattern>
+ </patterns>
+ </fileType>
+ </fileTypes>
+ <knownContentConsumers>
+ <knownContentConsumer>update-db-artifact</knownContentConsumer>
+ <knownContentConsumer>create-missing-checksums</knownContentConsumer>
+ <knownContentConsumer>update-db-repository-metadata</knownContentConsumer>
+ <knownContentConsumer>validate-checksum</knownContentConsumer>
+ <knownContentConsumer>validate-signature</knownContentConsumer>
+ <knownContentConsumer>index-content</knownContentConsumer>
+ <knownContentConsumer>auto-remove</knownContentConsumer>
+ <knownContentConsumer>auto-rename</knownContentConsumer>
+ </knownContentConsumers>
+ <invalidContentConsumers>
+ <invalidContentConsumer>update-db-bad-content</invalidContentConsumer>
+ </invalidContentConsumers>
+ </repositoryScanning>
+
+ <databaseScanning>
+ <cronExpression>0 0 * * ?</cronExpression>
+ <unprocessedConsumers>
+ <unprocessedConsumer>test-db-unprocessed</unprocessedConsumer>
+ <unprocessedConsumer>update-db-artifact</unprocessedConsumer>
+ </unprocessedConsumers>
+ <cleanupConsumers>
+ <cleanupConsumer>test-db-cleanup</cleanupConsumer>
+ <cleanupConsumer>not-present-remove-db-artifact</cleanupConsumer>
+ <cleanupConsumer>not-present-remove-db-project</cleanupConsumer>
+ <cleanupConsumer>not-present-remove-indexed</cleanupConsumer>
+ </cleanupConsumers>
+ </databaseScanning>
+
+</configuration>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint>test-configuration</role-hint>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements>
<requirement>
<configuration>
<properties>
<system/>
- <xml fileName="${basedir}/src/test/conf/repository-manager.xml"
+ <xml fileName="${basedir}/src/test/resources/archiva-test.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
</component>
+ <component>
+ <role>org.apache.maven.archiva.consumers.DatabaseCleanupConsumer</role>
+ <role-hint>test-db-cleanup</role-hint>
+ <implementation>org.apache.maven.archiva.scheduled.TestDatabaseCleanupConsumer</implementation>
+ </component>
+
+ <component>
+ <role>org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer</role>
+ <role-hint>test-db-unprocessed</role-hint>
+ <implementation>org.apache.maven.archiva.scheduled.TestDatabaseUnprocessedConsumer</implementation>
+ </component>
+
+
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>archiva</role-hint>
<configuration>
<properties>
<system/>
- <xml fileName="${basedir}/src/test/conf/repository-manager.xml"
+ <xml fileName="${basedir}/src/test/resources/archiva-test.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ObjectNotFoundException;
+import org.apache.maven.archiva.database.browsing.RepositoryBrowsing;
import org.apache.maven.archiva.model.ArchivaProjectModel;
import org.apache.maven.archiva.web.util.VersionMerger;
import org.apache.maven.wagon.ResourceDoesNotExistException;
/* .\ Not Exposed \._____________________________________________ */
/**
- * @plexus.requirement role-hint="jdo"
+ * @plexus.requirement role-hint="default"
*/
- private ArchivaDAO dao;
+ private RepositoryBrowsing repoBrowsing;
- /**
- * @plexus.requirement
- */
- private ArchivaConfiguration archivaConfiguration;
-
/* .\ Input Parameters \.________________________________________ */
private String groupId;
}
private ArchivaProjectModel readProject()
- throws ObjectNotFoundException, ArchivaDatabaseException
+ throws ArchivaDatabaseException
{
- return dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
+ return repoBrowsing.selectVersion( groupId, artifactId, version );
}
private boolean checkParameters()
<plexus>
<load-on-start>
- <component>
- <role>org.apache.maven.archiva.configuration.ConfigurationUpgrade</role>
- <role-hint>default</role-hint>
- </component>
<component>
<role>org.apache.maven.archiva.web.startup.ConfigurationSynchronization</role>
<role-hint>default</role-hint>