import org.apache.maven.archiva.repository.audit.AuditEvent;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Base class for all repository purge tasks.
public void completeScan()
{
- final File indexLocation = new File( managedRepository, ".index" );
- try
+        synchronized ( indexer )
{
- indexerEngine.endIndexing( context );
- indexPacker.packIndex( context, indexLocation );
- indexer.removeIndexingContext( context, false );
- uinfos = null;
- }
- catch ( IOException e )
- {
- log.error( "Could not pack index" + indexLocation.getAbsolutePath(), e );
+ final File indexLocation = new File( managedRepository, ".index" );
+ try
+ {
+ indexerEngine.endIndexing( context );
+ indexPacker.packIndex( context, indexLocation );
+ indexer.removeIndexingContext( context, false );
+ uinfos = null;
+ }
+ catch ( IOException e )
+ {
+                log.error( "Could not pack index " + indexLocation.getAbsolutePath(), e );
+ }
}
}
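
The new `synchronized ( indexer )` block exists because `completeScan()` can run concurrently for several consumers that share one indexer. A sketch of the invariant it enforces, in terms of the fields above (illustrative only, and assuming the Nexus indexer semantics where the `false` argument to `removeIndexingContext` keeps the index files on disk):

    // Illustrative: every finalization of the shared indexer must hold the same monitor,
    // otherwise one thread can remove the indexing context while another thread is
    // still inside packIndex() on that same context.
    synchronized ( indexer )
    {
        indexerEngine.endIndexing( context );            // flush pending index documents
        indexPacker.packIndex( context, indexLocation ); // publish the .index directory
        indexer.removeIndexingContext( context, false ); // false: keep index files on disk
    }
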
{
RepositoryContentConsumer consumer = (RepositoryContentConsumer) input;
consumer.completeScan();
- log.info( "Consumer [" + consumer.getId() + "] completed for repository [" + repository.getId() + "]" );
+ log.debug( "Consumer [" + consumer.getId() + "] completed for repository [" + repository.getId() + "]" );
}
}
}
public RepositoryContentConsumers( ArchivaConfiguration archivaConfiguration )
{
- this.archivaConfiguration = archivaConfiguration;
+ this.archivaConfiguration = archivaConfiguration;
}
public void setApplicationContext( ApplicationContext applicationContext )
}
/**
- * A convienence method to execute all of the active selected consumers for a particular arbitrary file.
+ * A convenience method to execute all of the active selected consumers for a particular arbitrary file.
+ * NOTE: Make sure no repository scanning task is executing before invoking this, to prevent
+ * the index writer/reader of the currently executing index-content consumer from being closed.
+ * For an example, see ArchivaDavResource#executeConsumers( File ).
*
* @param repository the repository configuration to use.
* @param localFile the local file to execute the consumers against.
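
A hedged sketch of the guard this NOTE asks callers to perform, mirroring what `ArchivaDavResource#executeConsumers( File )` does further below; the helper itself is hypothetical, while the scheduler and task APIs are the ones added in this change:

    // Hypothetical caller-side guard: run the consumers inline only when no repository
    // scan is active or queued; otherwise hand the file to the scheduler instead.
    private void safelyExecuteConsumers( File localFile )
        throws ArchivaException, TaskQueueException
    {
        if ( taskExecutor.getCurrentTaskInExecution() == null
            && !scheduler.isProcessingAnyRepositoryTask() )
        {
            consumers.executeConsumers( repository, localFile );
        }
        else
        {
            RepositoryTask task = new RepositoryTask();
            task.setRepositoryId( repository.getId() );
            task.setResourceFile( localFile );
            scheduler.queueRepositoryTask( task );
        }
    }
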
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
<bean id="repositoryContentConsumers" class="org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers" scope="prototype">
- <constructor-arg>
- <ref bean="archivaConfiguration"/>
- </constructor-arg>
+ <constructor-arg ref="archivaConfiguration"/>
</bean>
</beans>
\ No newline at end of file
public boolean isProcessingRepositoryTask( String repositoryId )
throws ArchivaException;
+
+ public boolean isProcessingRepositoryTaskWithName( String taskName )
+ throws ArchivaException;
public void queueDatabaseTask( DatabaseTask task )
throws TaskQueueException;
import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
+import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskNameSelectionPredicate;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
public boolean isProcessingAnyRepositoryTask()
throws ArchivaException
{
- List<? extends Task> queue = null;
-
- try
- {
- queue = repositoryScanningQueue.getQueueSnapshot();
- }
- catch ( TaskQueueException e )
+        synchronized ( repositoryScanningQueue )
{
- throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
+ List<? extends Task> queue = null;
+
+ try
+ {
+ queue = repositoryScanningQueue.getQueueSnapshot();
+ }
+ catch ( TaskQueueException e )
+ {
+ throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
+ }
+
+ return !queue.isEmpty();
}
-
- return !queue.isEmpty();
}
@SuppressWarnings("unchecked")
public boolean isProcessingRepositoryTask( String repositoryId )
throws ArchivaException
{
- List<? extends Task> queue = null;
-
- try
+        synchronized ( repositoryScanningQueue )
{
- queue = repositoryScanningQueue.getQueueSnapshot();
+ List<? extends Task> queue = null;
+
+ try
+ {
+ queue = repositoryScanningQueue.getQueueSnapshot();
+ }
+ catch ( TaskQueueException e )
+ {
+ throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
+ }
+
+ return CollectionUtils.exists( queue, new RepositoryTaskSelectionPredicate( repositoryId ) );
}
- catch ( TaskQueueException e )
+ }
+
+ @SuppressWarnings("unchecked")
+ public boolean isProcessingRepositoryTaskWithName( String taskName )
+ throws ArchivaException
+ {
+        synchronized ( repositoryScanningQueue )
{
- throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
+ List<? extends Task> queue = null;
+
+ try
+ {
+ queue = repositoryScanningQueue.getQueueSnapshot();
+ }
+ catch ( TaskQueueException e )
+ {
+ throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
+ }
+
+ return CollectionUtils.exists( queue, new RepositoryTaskNameSelectionPredicate( taskName ) );
}
-
- return CollectionUtils.exists( queue, new RepositoryTaskSelectionPredicate( repositoryId ) );
}
@SuppressWarnings("unchecked")
public void queueRepositoryTask( RepositoryTask task )
throws TaskQueueException
{
- repositoryScanningQueue.put( task );
+        synchronized ( repositoryScanningQueue )
+ {
+ repositoryScanningQueue.put( task );
+ }
}
public void queueDatabaseTask( DatabaseTask task )
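
Each method above is now individually atomic with respect to `repositoryScanningQueue`, but a check-then-queue sequence spanning two calls can still race. A hypothetical helper (not part of this change) shows how the pair could be made atomic inside the scheduler, relying on the monitor being reentrant:

    // Hypothetical: holding the queue monitor across both the check and the put closes
    // the window in which another thread could enqueue an identical task in between.
    public void queueRepositoryTaskIfAbsent( RepositoryTask task )
        throws ArchivaException, TaskQueueException
    {
        synchronized ( repositoryScanningQueue )
        {
            // reentrant: isProcessingRepositoryTaskWithName locks the same monitor again
            if ( !isProcessingRepositoryTaskWithName( task.getName() ) )
            {
                repositoryScanningQueue.put( task );
            }
        }
    }
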
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.repository.RepositoryException;
+import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
import org.apache.maven.archiva.repository.scanner.RepositoryScanner;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
* @plexus.requirement
*/
private RepositoryScanner repoScanner;
+
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryContentConsumers consumers;
+
+ private Task task;
public void initialize()
throws InitializationException
public void executeTask( Task task )
throws TaskExecutionException
{
+ this.task = task;
+
RepositoryTask repoTask = (RepositoryTask) task;
if ( StringUtils.isBlank( repoTask.getRepositoryId() ) )
}
log.info( "Executing task from queue with job name: " + repoTask.getName() );
+
+        ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
+
+        if ( arepo == null )
+        {
+            throw new TaskExecutionException( "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
+        }
- try
+ // execute consumers on resource file if set
+        if ( repoTask.getResourceFile() != null )
{
- ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
- if ( arepo == null )
- {
- throw new TaskExecutionException( "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
- }
-
- long sinceWhen = RepositoryScanner.FRESH_SCAN;
-
- List<RepositoryContentStatistics> results = (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( arepo.getId() ) );
-
- if ( CollectionUtils.isNotEmpty( results ) )
- {
- RepositoryContentStatistics lastStats = results.get( 0 );
- sinceWhen = lastStats.getWhenGathered().getTime() + lastStats.getDuration();
+            consumers.executeConsumers( arepo, repoTask.getResourceFile() );
+
+            this.task = null; // clear the in-flight marker for single-resource tasks too
+        }
+ else
+ {
+ // otherwise, execute consumers on whole repository
+ try
+ {
+ long sinceWhen = RepositoryScanner.FRESH_SCAN;
+
+ List<RepositoryContentStatistics> results = (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( arepo.getId() ) );
+
+ if ( CollectionUtils.isNotEmpty( results ) )
+ {
+ RepositoryContentStatistics lastStats = results.get( 0 );
+ sinceWhen = lastStats.getWhenGathered().getTime() + lastStats.getDuration();
+ }
+
+ RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen );
+
+ log.info( "Finished repository task: " + stats.toDump( arepo ) );
+
+ RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, sinceWhen, results, stats );
+
+ dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );
+
+ this.task = null;
}
-
- RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen );
-
- log.info( "Finished repository task: " + stats.toDump( arepo ) );
-
- RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, sinceWhen, results, stats );
-
- dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );
+ catch ( RepositoryException e )
+ {
+ throw new TaskExecutionException( "Repository error when executing repository job.", e );
+ }
}
- catch ( RepositoryException e )
- {
- throw new TaskExecutionException( "Repository error when executing repository job.", e );
- }
}
@SuppressWarnings("unchecked")
dbstats.setTotalProjectCount( artifactIds.size() );
return dbstats;
- }
+ }
+
+ public Task getCurrentTaskInExecution()
+ {
+ return task;
+ }
}
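
`getCurrentTaskInExecution()` lets other components detect an in-flight scan; an illustrative caller-side check (the webdav resource below does exactly this):

    // Illustrative: a non-null result means executeTask() is running right now, so any
    // work that would race with the scanner should be queued rather than run inline.
    RepositoryTask current = (RepositoryTask) taskExecutor.getCurrentTaskInExecution();
    if ( current != null )
    {
        // defer: submit the work as a RepositoryTask instead
    }
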
package org.apache.maven.archiva.scheduled.tasks;
+import java.io.File;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
long maxExecutionTime;
+ File resourceFile;
+
public String getRepositoryId()
{
return repositoryId;
{
this.queuePolicy = queuePolicy;
}
+
+ public File getResourceFile()
+ {
+ return resourceFile;
+ }
+
+ public void setResourceFile( File resourceFile )
+ {
+ this.resourceFile = resourceFile;
+ }
}
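
The new `resourceFile` field turns a repository-wide scan task into a single-resource one; an illustrative construction (repository id, job name, and path are placeholders):

    // Illustrative usage: with resourceFile set, the executor runs the consumers against
    // just this file; left null, the whole repository is scanned as before.
    RepositoryTask task = new RepositoryTask();
    task.setRepositoryId( "internal" );                                  // placeholder id
    task.setName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":internal:foo.jar" );
    task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );
    task.setResourceFile( new File( "/path/to/repo/foo.jar" ) );         // placeholder path
    scheduler.queueRepositoryTask( task );
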
--- /dev/null
+package org.apache.maven.archiva.scheduled.tasks;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.collections.Predicate;
+import org.apache.commons.lang.StringUtils;
+
+public class RepositoryTaskNameSelectionPredicate
+ implements Predicate
+{
+ private String taskName;
+
+ public RepositoryTaskNameSelectionPredicate( String taskName )
+ {
+ this.taskName = taskName;
+ }
+
+ public boolean evaluate( Object object )
+ {
+        if ( object instanceof RepositoryTask )
+        {
+            RepositoryTask task = (RepositoryTask) object;
+            return StringUtils.equals( taskName, task.getName() );
+        }
+
+        return false;
+ }
+}
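
The predicate is meant to be paired with `CollectionUtils.exists` over a queue snapshot, as `isProcessingRepositoryTaskWithName` does above; a minimal illustration (the task name is a placeholder following the `<job>:<repoId>[:<fileName>]` convention):

    // Minimal illustration: exists() returns true as soon as one queued task's name matches.
    List<? extends Task> snapshot = repositoryScanningQueue.getQueueSnapshot();
    boolean alreadyQueued =
        CollectionUtils.exists( snapshot, new RepositoryTaskNameSelectionPredicate( "repository-job:internal" ) );
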
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-layer</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-scheduled</artifactId>
+ </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-proxy</artifactId>
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.apache.maven.wagon</groupId>
<artifactId>wagon-http-lightweight</artifactId>
import org.apache.jackrabbit.webdav.property.DavPropertySet;
import org.apache.jackrabbit.webdav.property.DefaultDavProperty;
import org.apache.jackrabbit.webdav.property.ResourceType;
+import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.repository.audit.AuditEvent;
import org.apache.maven.archiva.repository.audit.AuditListener;
import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.executors.ArchivaRepositoryScanningTaskExecutor;
+import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
+import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.webdav.util.IndexWriter;
import org.apache.maven.archiva.webdav.util.MimeTypes;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
*/
private String principal;
public static final String COMPLIANCE_CLASS = "1, 2";
+
+ private ArchivaTaskScheduler scheduler;
+
+ private ArchivaRepositoryScanningTaskExecutor taskExecutor;
+
+ private Logger log = LoggerFactory.getLogger( ArchivaDavResource.class );
public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
DavSession session, ArchivaDavResourceLocator locator, DavResourceFactory factory,
MimeTypes mimeTypes, List<AuditListener> auditListeners,
- RepositoryContentConsumers consumers )
+ RepositoryContentConsumers consumers, ArchivaTaskScheduler scheduler, TaskExecutor taskExecutor )
{
this.localResource = new File( localResource );
this.logicalResource = logicalResource;
this.mimeTypes = mimeTypes;
this.consumers = consumers;
this.auditListeners = auditListeners;
+ this.scheduler = scheduler;
+        this.taskExecutor = (ArchivaRepositoryScanningTaskExecutor) taskExecutor;
}
public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
String remoteAddr, String principal, DavSession session, ArchivaDavResourceLocator locator,
DavResourceFactory factory, MimeTypes mimeTypes, List<AuditListener> auditListeners,
- RepositoryContentConsumers consumers )
+ RepositoryContentConsumers consumers, ArchivaTaskScheduler scheduler, TaskExecutor taskExecutor )
{
this( localResource, logicalResource, repository, session, locator, factory, mimeTypes, auditListeners,
- consumers );
+ consumers, scheduler, taskExecutor );
this.remoteAddr = remoteAddr;
this.principal = principal;
inputContext.getContentLength() + " but was " + localFile.length() );
}
- // Just-in-time update of the index and database by executing the consumers for this artifact
- consumers.executeConsumers( repository, localFile );
+ executeConsumers( localFile );
triggerAuditEvent( resource, exists ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE );
}
listener.auditEvent( event );
}
}
+
+ private void executeConsumers( File localFile )
+ {
+ try
+ {
+            RepositoryTask currentTaskInExecution = (RepositoryTask) taskExecutor.getCurrentTaskInExecution();
+            if ( currentTaskInExecution != null || scheduler.isProcessingAnyRepositoryTask() )
+            {
+                // check if the repository is already queued to be scanned
+                if ( scheduler.isProcessingRepositoryTaskWithName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() )
+                    || scheduler.isProcessingRepositoryTaskWithName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() + ":" + localFile.getName() ) )
+                {
+                    // no need to execute the consumers since repo is already queued
+                    return;
+                }
+ else
+ {
+ // schedule the task
+ RepositoryTask task = new RepositoryTask();
+ task.setRepositoryId( repository.getId() );
+ task.setName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() + ":" + localFile.getName() );
+ task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );
+ task.setResourceFile( localFile );
+
+ try
+ {
+ scheduler.queueRepositoryTask( task );
+ }
+ catch ( TaskQueueException e )
+ {
+                        log.error( "Unable to queue repository task to execute consumers on resource file ['" +
+                            localFile.getName() + "'].", e );
+ }
+ }
+ }
+ else
+ {
+ // Just-in-time update of the index and database by executing the consumers for this artifact
+ consumers.executeConsumers( repository, localFile );
+ }
+ }
+ catch ( ArchivaException e )
+ {
+            log.error( "Unable to queue repository task to execute consumers on resource file ['" +
+                localFile.getName() + "'].", e );
+ }
+ }
}
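
The task-name convention in `executeConsumers` above is built inline twice; a hypothetical helper that captures it in one place (not part of this change):

    // Hypothetical: "<REPOSITORY_JOB>:<repositoryId>" names a whole-repository scan,
    // "<REPOSITORY_JOB>:<repositoryId>:<fileName>" a single-resource task.
    private String repositoryTaskName( String repositoryId, File resource )
    {
        String name = DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repositoryId;
        return resource == null ? name : name + ":" + resource.getName();
    }
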
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.apache.maven.archiva.security.ServletAuthenticator;
import org.apache.maven.archiva.webdav.util.MimeTypes;
import org.apache.maven.archiva.webdav.util.RepositoryPathUtil;
import org.codehaus.plexus.redback.system.SecuritySession;
import org.codehaus.plexus.redback.users.User;
import org.codehaus.plexus.redback.users.UserManager;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import org.codehaus.redback.integration.filter.authentication.HttpAuthenticator;
import org.slf4j.Logger;
* @plexus.requirement role-hint="md5";
*/
private Digester digestMd5;
+
+ /**
+ * @plexus.requirement role-hint="repository-scanning"
+ */
+ private TaskExecutor taskExecutor;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArchivaTaskScheduler scheduler;
public DavResource createResource( final DavResourceLocator locator, final DavServletRequest request,
final DavServletResponse response )
new ArchivaDavResource( metadataChecksum.getAbsolutePath(), logicalResource.getPath(),
null, request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
- auditListeners, consumers );
+ auditListeners, consumers, scheduler, taskExecutor );
}
}
else
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
null, request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
- auditListeners, consumers );
+ auditListeners, consumers, scheduler, taskExecutor );
}
catch ( RepositoryMetadataException r )
{
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
managedRepository.getRepository(), request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners,
- consumers );
+ consumers, scheduler, taskExecutor );
if ( WebdavMethodUtil.isReadMethod( request.getMethod() ) )
{
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
managedRepository.getRepository(), request.getRemoteAddr(),
activePrincipal, request.getDavSession(), archivaLocator, this,
- mimeTypes, auditListeners, consumers );
+ mimeTypes, auditListeners, consumers, scheduler, taskExecutor );
}
catch ( LayoutException e )
{
File resourceFile = new File( managedRepository.getRepoRoot(), logicalResource );
DavResource resource =
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource, managedRepository.getRepository(),
- davSession, archivaLocator, this, mimeTypes, auditListeners, consumers );
+ davSession, archivaLocator, this, mimeTypes, auditListeners, consumers, scheduler, taskExecutor );
resource.addLockManager( lockManager );
return resource;
{
this.httpAuth = httpAuth;
}
+
+ public void setTaskExecutor( TaskExecutor taskExecutor )
+ {
+ this.taskExecutor = taskExecutor;
+ }
+
+ public void setScheduler( ArchivaTaskScheduler scheduler )
+ {
+ this.scheduler = scheduler;
+ }
}
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.plexus.spring.PlexusToSpringUtils;
-public class DavResourceTest extends PlexusInSpringTestCase
+public class DavResourceTest
+ extends PlexusInSpringTestCase
{
private DavSession session;
-
+
private MimeTypes mimeTypes;
-
+
private ArchivaDavResourceLocator resourceLocator;
-
+
private DavResourceFactory resourceFactory;
-
+
private File baseDir;
-
+
private final String REPOPATH = "myresource.jar";
-
+
private File myResource;
-
+
private DavResource resource;
-
+
private LockManager lockManager;
private RepositoryContentConsumers consumers;
private ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
-
+
@Override
protected void setUp()
throws Exception
{
super.setUp();
session = new ArchivaDavSession();
- mimeTypes = (MimeTypes)getApplicationContext().getBean(PlexusToSpringUtils.buildSpringId(MimeTypes.class));
- baseDir = getTestFile("target/DavResourceTest");
+ mimeTypes = (MimeTypes) getApplicationContext().getBean( PlexusToSpringUtils.buildSpringId( MimeTypes.class ) );
+ baseDir = getTestFile( "target/DavResourceTest" );
baseDir.mkdirs();
- myResource = new File(baseDir, "myresource.jar");
- assertTrue("Could not create " + myResource.getAbsolutePath(), myResource.createNewFile());
+ myResource = new File( baseDir, "myresource.jar" );
+ assertTrue( "Could not create " + myResource.getAbsolutePath(), myResource.createNewFile() );
resourceFactory = new RootContextDavResourceFactory();
- resourceLocator = (ArchivaDavResourceLocator)new ArchivaDavLocatorFactory().createResourceLocator("/", REPOPATH);
- resource = getDavResource(resourceLocator.getHref(false), myResource);
+ resourceLocator =
+ (ArchivaDavResourceLocator) new ArchivaDavLocatorFactory().createResourceLocator( "/", REPOPATH );
+ resource = getDavResource( resourceLocator.getHref( false ), myResource );
lockManager = new SimpleLockManager();
- resource.addLockManager(lockManager);
- consumers = (RepositoryContentConsumers)getApplicationContext().getBean("repositoryContentConsumers");
+ resource.addLockManager( lockManager );
+ consumers = (RepositoryContentConsumers) getApplicationContext().getBean( "repositoryContentConsumers" );
}
@Override
throws Exception
{
super.tearDown();
- release(mimeTypes);
- FileUtils.deleteDirectory(baseDir);
+ release( mimeTypes );
+ FileUtils.deleteDirectory( baseDir );
}
-
- private DavResource getDavResource(String logicalPath, File file)
+
+ private DavResource getDavResource( String logicalPath, File file )
{
return new ArchivaDavResource( file.getAbsolutePath(), logicalPath, repository, session, resourceLocator,
- resourceFactory, mimeTypes, Collections.<AuditListener>emptyList(), consumers );
+ resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), consumers,
+ null, null );
}
-
+
public void testDeleteNonExistantResourceShould404()
throws Exception
{
- File dir = new File(baseDir, "testdir");
+ File dir = new File( baseDir, "testdir" );
try
{
- DavResource directoryResource = getDavResource("/testdir", dir);
- directoryResource.getCollection().removeMember(directoryResource);
- fail("Did not throw DavException");
+ DavResource directoryResource = getDavResource( "/testdir", dir );
+ directoryResource.getCollection().removeMember( directoryResource );
+ fail( "Did not throw DavException" );
}
- catch (DavException e)
+ catch ( DavException e )
{
- assertEquals(DavServletResponse.SC_NOT_FOUND, e.getErrorCode());
+ assertEquals( DavServletResponse.SC_NOT_FOUND, e.getErrorCode() );
}
}
-
+
public void testDeleteCollection()
throws Exception
{
- File dir = new File(baseDir, "testdir");
+ File dir = new File( baseDir, "testdir" );
try
{
- assertTrue(dir.mkdir());
- DavResource directoryResource = getDavResource("/testdir", dir);
- directoryResource.getCollection().removeMember(directoryResource);
- assertFalse(dir.exists());
+ assertTrue( dir.mkdir() );
+ DavResource directoryResource = getDavResource( "/testdir", dir );
+ directoryResource.getCollection().removeMember( directoryResource );
+ assertFalse( dir.exists() );
}
finally
{
- FileUtils.deleteDirectory(dir);
+ FileUtils.deleteDirectory( dir );
}
}
-
+
public void testDeleteResource()
throws Exception
{
- assertTrue(myResource.exists());
- resource.getCollection().removeMember(resource);
- assertFalse(myResource.exists());
+ assertTrue( myResource.exists() );
+ resource.getCollection().removeMember( resource );
+ assertFalse( myResource.exists() );
}
-
+
public void testIsLockable()
{
- assertTrue(resource.isLockable(Type.WRITE, Scope.EXCLUSIVE));
- assertFalse(resource.isLockable(Type.WRITE, Scope.SHARED));
+ assertTrue( resource.isLockable( Type.WRITE, Scope.EXCLUSIVE ) );
+ assertFalse( resource.isLockable( Type.WRITE, Scope.SHARED ) );
}
-
+
public void testLock()
throws Exception
{
- assertEquals(0, resource.getLocks().length);
-
- LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
- lockManager.createLock(info, resource);
-
- assertEquals(1, resource.getLocks().length);
+ assertEquals( 0, resource.getLocks().length );
+
+ LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+ lockManager.createLock( info, resource );
+
+ assertEquals( 1, resource.getLocks().length );
}
-
+
public void testLockIfResourceUnlockable()
throws Exception
{
- assertEquals(0, resource.getLocks().length);
-
- LockInfo info = new LockInfo(Scope.SHARED, Type.WRITE, "/", 0, false);
+ assertEquals( 0, resource.getLocks().length );
+
+ LockInfo info = new LockInfo( Scope.SHARED, Type.WRITE, "/", 0, false );
try
{
- lockManager.createLock(info, resource);
- fail("Did not throw dav exception");
+ lockManager.createLock( info, resource );
+ fail( "Did not throw dav exception" );
}
- catch (Exception e)
+ catch ( Exception e )
{
- //Simple lock manager will die
+ // Simple lock manager will die
}
- assertEquals(0, resource.getLocks().length);
+ assertEquals( 0, resource.getLocks().length );
}
-
+
public void testGetLock()
throws Exception
{
- LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
- lockManager.createLock(info, resource);
-
- assertEquals(1, resource.getLocks().length);
-
- //Lock should exist
- assertNotNull(resource.getLock(Type.WRITE, Scope.EXCLUSIVE));
-
- //Lock should not exist
- assertNull(resource.getLock(Type.WRITE, Scope.SHARED));
+ LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+ lockManager.createLock( info, resource );
+
+ assertEquals( 1, resource.getLocks().length );
+
+ // Lock should exist
+ assertNotNull( resource.getLock( Type.WRITE, Scope.EXCLUSIVE ) );
+
+ // Lock should not exist
+ assertNull( resource.getLock( Type.WRITE, Scope.SHARED ) );
}
-
-
+
public void testRefreshLockThrowsExceptionIfNoLockIsPresent()
throws Exception
{
- LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
-
- assertEquals(0, resource.getLocks().length);
-
+ LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+
+ assertEquals( 0, resource.getLocks().length );
+
try
{
- lockManager.refreshLock(info, "notoken", resource);
- fail("Did not throw dav exception");
+ lockManager.refreshLock( info, "notoken", resource );
+ fail( "Did not throw dav exception" );
}
- catch (DavException e)
+ catch ( DavException e )
{
- assertEquals(DavServletResponse.SC_PRECONDITION_FAILED, e.getErrorCode());
+ assertEquals( DavServletResponse.SC_PRECONDITION_FAILED, e.getErrorCode() );
}
-
- assertEquals(0, resource.getLocks().length);
+
+ assertEquals( 0, resource.getLocks().length );
}
-
+
public void testRefreshLock()
throws Exception
{
- LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
-
- assertEquals(0, resource.getLocks().length);
-
- lockManager.createLock(info, resource);
-
- assertEquals(1, resource.getLocks().length);
-
+ LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+
+ assertEquals( 0, resource.getLocks().length );
+
+ lockManager.createLock( info, resource );
+
+ assertEquals( 1, resource.getLocks().length );
+
ActiveLock lock = resource.getLocks()[0];
- lockManager.refreshLock(info, lock.getToken(), resource);
-
- assertEquals(1, resource.getLocks().length);
+ lockManager.refreshLock( info, lock.getToken(), resource );
+
+ assertEquals( 1, resource.getLocks().length );
}
-
+
public void testUnlock()
throws Exception
{
- LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
-
- assertEquals(0, resource.getLocks().length);
-
- lockManager.createLock(info, resource);
-
- assertEquals(1, resource.getLocks().length);
-
+ LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+
+ assertEquals( 0, resource.getLocks().length );
+
+ lockManager.createLock( info, resource );
+
+ assertEquals( 1, resource.getLocks().length );
+
ActiveLock lock = resource.getLocks()[0];
- lockManager.releaseLock(lock.getToken(), resource);
-
- assertEquals(0, resource.getLocks().length);
- }
-
+ lockManager.releaseLock( lock.getToken(), resource );
+
+ assertEquals( 0, resource.getLocks().length );
+ }
+
public void testUnlockThrowsDavExceptionIfNotLocked()
throws Exception
{
- LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
-
- assertEquals(0, resource.getLocks().length);
-
- lockManager.createLock(info, resource);
-
- assertEquals(1, resource.getLocks().length);
+ LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+
+ assertEquals( 0, resource.getLocks().length );
+
+ lockManager.createLock( info, resource );
+
+ assertEquals( 1, resource.getLocks().length );
try
{
- lockManager.releaseLock("BLAH", resource);
- fail("Did not throw DavException");
+ lockManager.releaseLock( "BLAH", resource );
+ fail( "Did not throw DavException" );
}
- catch (DavException e)
+ catch ( DavException e )
{
- assertEquals(DavServletResponse.SC_LOCKED, e.getErrorCode());
+ assertEquals( DavServletResponse.SC_LOCKED, e.getErrorCode() );
}
-
- assertEquals(1, resource.getLocks().length);
+
+ assertEquals( 1, resource.getLocks().length );
}
-
+
public void testUnlockThrowsDavExceptionIfResourceNotLocked()
throws Exception
- {
- assertEquals(0, resource.getLocks().length);
+ {
+ assertEquals( 0, resource.getLocks().length );
try
{
- lockManager.releaseLock("BLAH", resource);
- fail("Did not throw DavException");
+ lockManager.releaseLock( "BLAH", resource );
+ fail( "Did not throw DavException" );
}
- catch (DavException e)
+ catch ( DavException e )
{
- assertEquals(DavServletResponse.SC_PRECONDITION_FAILED, e.getErrorCode());
+ assertEquals( DavServletResponse.SC_PRECONDITION_FAILED, e.getErrorCode() );
}
-
- assertEquals(0, resource.getLocks().length);
+
+ assertEquals( 0, resource.getLocks().length );
}
-
- private class RootContextDavResourceFactory implements DavResourceFactory
+
+ private class RootContextDavResourceFactory
+ implements DavResourceFactory
{
- public DavResource createResource(DavResourceLocator locator, DavServletRequest request, DavServletResponse response) throws DavException {
- throw new UnsupportedOperationException("Not supported yet.");
+ public DavResource createResource( DavResourceLocator locator, DavServletRequest request,
+ DavServletResponse response )
+ throws DavException
+ {
+ throw new UnsupportedOperationException( "Not supported yet." );
}
- public DavResource createResource(DavResourceLocator locator, DavSession session) throws DavException {
+ public DavResource createResource( DavResourceLocator locator, DavSession session )
+ throws DavException
+ {
return new ArchivaDavResource( baseDir.getAbsolutePath(), "/", repository, session, resourceLocator,
- resourceFactory, mimeTypes, Collections.<AuditListener>emptyList(), consumers );
+ resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(),
+ consumers, null, null );
}
}
}
<role-hint>md5</role-hint>
<field-name>digestMd5</field-name>
</requirement>
+ <requirement>
+ <role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
+ <role-hint>repository-scanning</role-hint>
+ <field-name>taskExecutor</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
+ <field-name>scheduler</field-name>
+ </requirement>
</requirements>
</component>
+ <component>
+ <role>org.codehaus.plexus.jdo.JdoFactory</role>
+ <role-hint>archiva</role-hint>
+ <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+
+ <configuration>
+ <!-- Database Configuration -->
+ <driverName>org.hsqldb.jdbcDriver</driverName>
+ <url>jdbc:hsqldb:mem:TESTDB</url>
+ <userName>sa</userName>
+ <password></password>
+
+ <!-- JPOX and JDO configuration -->
+ <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+ <otherProperties>
+ <property>
+ <name>javax.jdo.PersistenceManagerFactoryClass</name>
+ <value>org.jpox.PersistenceManagerFactoryImpl</value>
+ </property>
+ <property>
+ <name>org.jpox.autoCreateSchema</name>
+ <value>true</value>
+ </property>
+ <property>
+ <name>org.jpox.validateTables</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateConstraints</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateColumns</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.autoStartMechanism</name>
+ <value>None</value>
+ </property>
+ <property>
+ <name>org.jpox.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.poid.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.rdbms.dateTimezone</name>
+ <value>JDK_DEFAULT_TIMEZONE</value>
+ </property>
+ </otherProperties>
+ </configuration>
+ </component>
</components>
</plexus>
<role-hint>md5</role-hint>
<field-name>digestMd5</field-name>
</requirement>
+ <requirement>
+ <role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
+ <role-hint>repository-scanning</role-hint>
+ <field-name>taskExecutor</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
+ <field-name>scheduler</field-name>
+ </requirement>
</requirements>
</component>
+ <component>
+ <role>org.codehaus.plexus.jdo.JdoFactory</role>
+ <role-hint>archiva</role-hint>
+ <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+
+ <configuration>
+ <!-- Database Configuration -->
+ <driverName>org.hsqldb.jdbcDriver</driverName>
+ <url>jdbc:hsqldb:mem:TESTDB</url>
+ <userName>sa</userName>
+ <password></password>
+
+ <!-- JPOX and JDO configuration -->
+ <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+ <otherProperties>
+ <property>
+ <name>javax.jdo.PersistenceManagerFactoryClass</name>
+ <value>org.jpox.PersistenceManagerFactoryImpl</value>
+ </property>
+ <property>
+ <name>org.jpox.autoCreateSchema</name>
+ <value>true</value>
+ </property>
+ <property>
+ <name>org.jpox.validateTables</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateConstraints</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateColumns</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.autoStartMechanism</name>
+ <value>None</value>
+ </property>
+ <property>
+ <name>org.jpox.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.poid.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.rdbms.dateTimezone</name>
+ <value>JDK_DEFAULT_TIMEZONE</value>
+ </property>
+ </otherProperties>
+ </configuration>
+ </component>
</components>
</plexus>