import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
+import org.apache.maven.archiva.database.RepositoryDAO;
import org.apache.maven.archiva.database.updater.DatabaseUpdater;
+import org.apache.maven.archiva.model.ArchivaRepository;
+import org.apache.maven.archiva.repository.RepositoryException;
+import org.apache.maven.archiva.repository.scanner.RepositoryScanner;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
+import java.util.Map;
/**
*
*/
private DatabaseUpdater databaseUpdater;
+ /**
+ * @plexus.requirement role-hint="jdo"
+ */
+ private RepositoryDAO repositoryDAO;
+
+ /**
+ * The collection of available consumers.
+ * @plexus.requirement role="org.apache.maven.archiva.consumers.ArchivaArtifactConsumer"
+ */
+ private Map availableConsumers;
+
public void executeTask( Task task ) throws TaskExecutionException
{
long time = System.currentTimeMillis();
- // insert repository scanning codelets here
+ try
+ {
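+ // Narrow the generic task, load the repository record it names, and scan it with the configured consumers.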
+ RepositoryTask repoTask = (RepositoryTask) task;
+
+ ArchivaRepository arepo = repositoryDAO.getRepository( repoTask.getRepositoryId() );
+
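+ // Walk the repository contents on disk, handing each file to the active consumers.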
+ RepositoryScanner scanner = new RepositoryScanner();
+
+ scanner.scan( arepo, getActiveConsumerList(), true );
+
+ }
+ catch ( ArchivaDatabaseException e )
+ {
+ throw new TaskExecutionException( "Database error when executing repository job.", e );
+ }
+ catch ( RepositoryException e )
+ {
+ throw new TaskExecutionException( "Repository error when executing repository job.", e );
+ }
+
time = System.currentTimeMillis() - time;
getLogger().info( "Finished repository task for " + time + "ms." );
}
+
+
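+ /**
+ * Resolve the consumer ids listed in the repository scanning configuration (both the
+ * good and bad consumer lists) against the available consumers map, skipping any id
+ * that has no registered implementation.
+ */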
+ private List getActiveConsumerList()
+ {
+ List activeConsumers = new ArrayList();
+
+ Configuration config = archivaConfiguration.getConfiguration();
+
+ List configuredGoodConsumers = config.getRepositoryScanning().getGoodConsumers();
+ List configuredBadConsumers = config.getRepositoryScanning().getBadConsumers();
+
+ for ( Iterator i = configuredGoodConsumers.iterator(); i.hasNext(); )
+ {
+ String consumer = (String)i.next();
+
+ if ( availableConsumers.containsKey( consumer ) )
+ {
+ activeConsumers.add( availableConsumers.get( consumer ) );
+ }
+ else
+ {
+ getLogger().warn( "Requested consumer [" + consumer + "] does not exist. Skipping in repository scan." );
+ }
+ }
+
+ for ( Iterator i = configuredBadConsumers.iterator(); i.hasNext(); )
+ {
+ String consumer = (String)i.next();
+
+ if ( availableConsumers.containsKey( consumer ) )
+ {
+ activeConsumers.add( availableConsumers.get( consumer ) );
+ }
+ else
+ {
+ getLogger().warn( "Requested consumer [" + consumer + "] does not exist. Skipping in repository scan." );
+ }
+ }
+
+ return activeConsumers;
+ }
+
}
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.database.ArchivaDAO;
+import org.apache.maven.archiva.database.ArchivaDatabaseException;
+import org.apache.maven.archiva.database.RepositoryDAO;
+import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
+import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+import org.jpox.SchemaTool;
import java.io.File;
+import java.net.URL;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.jdo.JDOHelper;
+import javax.jdo.PersistenceManager;
+import javax.jdo.PersistenceManagerFactory;
/**
* IndexerTaskExecutorTest
{
private TaskExecutor taskExecutor;
+ protected ArchivaDAO dao;
+
protected void setUp()
throws Exception
{
super.setUp();
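+ // Stand up an in-memory JDO store (JPOX over HSQLDB) so the executor has a real RepositoryDAO to work against.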
+ DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
+ assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );
+
+ jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
+
+ /* derby version
+ File derbyDbDir = new File( "target/plexus-home/testdb" );
+ if ( derbyDbDir.exists() )
+ {
+ FileUtils.deleteDirectory( derbyDbDir );
+ }
+
+ jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );
+ jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
+ */
+
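+ // In-memory HSQLDB keyed by the test name, so each test method starts with a clean database.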
+ jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
+ jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:" + getName() ) );
+
+ jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
+
+ jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
+
+ jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
+
+ jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
+
+ jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
+
+ jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
+
+ jdoFactory.setProperty( "javax.jdo.option.RestoreValues", "true" );
+
+ // jdoFactory.setProperty( "org.jpox.autoCreateColumns", "true" );
+
+ jdoFactory.setProperty( "org.jpox.validateTables", "true" );
+
+ jdoFactory.setProperty( "org.jpox.validateColumns", "true" );
+
+ jdoFactory.setProperty( "org.jpox.validateConstraints", "true" );
+
+ Properties properties = jdoFactory.getProperties();
+
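+ // Expose the JDO connection settings as system properties so JPOX's SchemaTool can pick them up.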
+ for ( Iterator it = properties.entrySet().iterator(); it.hasNext(); )
+ {
+ Map.Entry entry = (Map.Entry) it.next();
+
+ System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
+ }
+
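+ // Locate the JDO metadata describing the Archiva model classes.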
+ URL[] jdoFileUrls = new URL[] { getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" ) };
+
+ if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
+ {
+ fail( "Unable to process test " + getName() + " - missing package.jdo." );
+ }
+
+ File propsFile = null; // intentional
+ boolean verbose = true;
+
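+ // Drop and recreate the schema so the test runs against empty tables.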
+ SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose );
+ SchemaTool.createSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose, null );
+
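+ // Sanity check that the factory can hand out a PersistenceManager before the test body runs.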
+ PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
+
+ assertNotNull( pmf );
+
+ PersistenceManager pm = pmf.getPersistenceManager();
+
+ pm.close();
+
+ this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
taskExecutor = (TaskExecutor) lookup( TaskExecutor.class.getName(), "test-archiva-task-executor" );
}
- public void testExecutor()
- throws TaskExecutionException
+ public void testExecutor() throws Exception
{
- taskExecutor.executeTask( new TestRepositoryTask() );
- }
+ RepositoryDAO repoDao = dao.getRepositoryDAO();
+
+ // Create it
+ ArchivaRepository repo =
+ repoDao.createRepository( "testRepo", "Test Repository", "http://localhost:8080/repository/foo" );
+ assertNotNull( repo );
+
+ // Set some mandatory values
+ repo.getModel().setCreationSource( "Test Case" );
+ repo.getModel().setLayoutName( "default" );
+
+ // Save it.
+ ArchivaRepository repoSaved = repoDao.saveRepository( repo );
+ assertNotNull( repoSaved );
+ assertNotNull( repoSaved.getModel() );
+ assertEquals( "testRepo", JDOHelper.getObjectId( repoSaved.getModel() ).toString() );
+
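+ // Build a scanning task for the repository we just saved and run it through the executor.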
+ RepositoryTask repoTask = new RepositoryTask();
+
+ repoTask.setName( "testTask" );
+ repoTask.setRepositoryId( "testRepo" );
+
+ taskExecutor.executeTask( repoTask );
- class TestRepositoryTask
- extends RepositoryTask
- {
- public String getName()
- {
- return "TestRepositoryTask";
- }
}
+
}