source.dussan.org Git - archiva.git/commitdiff
o synchronize repository scanning to prevent index writer/reader from being closed...
authorMaria Odea B. Ching <oching@apache.org>
Sun, 22 Mar 2009 02:24:21 +0000 (02:24 +0000)
committerMaria Odea B. Ching <oching@apache.org>
Sun, 22 Mar 2009 02:24:21 +0000 (02:24 +0000)
repository scanning is in progress
o updated test cases to accommodate changes

git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@757116 13f79535-47bb-0310-9956-ffa450edef68

16 files changed:
archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/AbstractRepositoryPurge.java
archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/main/java/org/apache/archiva/consumers/lucene/NexusIndexerConsumer.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/scanner/functors/TriggerScanCompletedClosure.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/RepositoryContentConsumers.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/resources/META-INF/spring-context.xml
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/ArchivaTaskScheduler.java
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/DefaultArchivaTaskScheduler.java
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/executors/ArchivaRepositoryScanningTaskExecutor.java
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/RepositoryTask.java
archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/RepositoryTaskNameSelectionPredicate.java [new file with mode: 0644]
archiva-modules/archiva-web/archiva-webdav/pom.xml
archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/maven/archiva/webdav/ArchivaDavResource.java
archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/maven/archiva/webdav/ArchivaDavResourceFactory.java
archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/maven/archiva/webdav/DavResourceTest.java
archiva-modules/archiva-web/archiva-webdav/src/test/resources/org/apache/maven/archiva/webdav/RepositoryServletSecurityTest.xml
archiva-modules/archiva-web/archiva-webdav/src/test/resources/org/apache/maven/archiva/webdav/RepositoryServletTest.xml

index 467c56af46aaa56e9db99b0f8401e6bb7529fb97..c0c4e525f828770497cd97b3eacbbfe8945790d7 100644 (file)
@@ -32,8 +32,6 @@ import org.apache.maven.archiva.model.ArtifactReference;
 import org.apache.maven.archiva.repository.audit.AuditEvent;
 import org.apache.maven.archiva.repository.ManagedRepositoryContent;
 import org.apache.maven.archiva.repository.events.RepositoryListener;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Base class for all repository purge tasks.
index 39a04e8fffcb6dc40843ec8267b52b4f056b523c..b9d61d451e80ad88db64c2399031c2933e6e8fc5 100644 (file)
@@ -202,17 +202,20 @@ public class NexusIndexerConsumer
 
     public void completeScan()
     {   
-        final File indexLocation = new File( managedRepository, ".index" );
-        try
+        synchronized( indexer )
         {
-            indexerEngine.endIndexing( context );            
-            indexPacker.packIndex( context, indexLocation );
-            indexer.removeIndexingContext( context, false );
-            uinfos = null;
-        }
-        catch ( IOException e )
-        {
-            log.error( "Could not pack index" + indexLocation.getAbsolutePath(), e );
+            final File indexLocation = new File( managedRepository, ".index" );
+            try
+            {
+                indexerEngine.endIndexing( context );            
+                indexPacker.packIndex( context, indexLocation );
+                indexer.removeIndexingContext( context, false );
+                uinfos = null;
+            }
+            catch ( IOException e )
+            {
+                log.error( "Could not pack index" + indexLocation.getAbsolutePath(), e );
+            }
         }
     }
 
index 52a9ca18a218ef7157b49e6a98f1547ca9d97021..82875a6ab31deaab2feca88cabad14c1145b1991 100644 (file)
@@ -46,7 +46,7 @@ public class TriggerScanCompletedClosure
         {
             RepositoryContentConsumer consumer = (RepositoryContentConsumer) input;
             consumer.completeScan();
-            log.info( "Consumer [" + consumer.getId() + "] completed for repository [" + repository.getId() + "]" );
+            log.debug( "Consumer [" + consumer.getId() + "] completed for repository [" + repository.getId() + "]" );
         }
     }
 }
index 119165b0bf3b9060c64dccc93769fc5fa4df1447..e8e4ee521942ee1c25a62296aebb019ec139e751 100644 (file)
@@ -61,7 +61,7 @@ public class RepositoryContentConsumers
 
     public RepositoryContentConsumers( ArchivaConfiguration archivaConfiguration )
     {
-        this.archivaConfiguration = archivaConfiguration;
+        this.archivaConfiguration = archivaConfiguration;      
     }
 
     public void setApplicationContext( ApplicationContext applicationContext )
@@ -222,7 +222,10 @@ public class RepositoryContentConsumers
     }
 
     /**
-     * A convienence method to execute all of the active selected consumers for a particular arbitrary file.
+     * A convenience method to execute all of the active selected consumers for a particular arbitrary file.
+     * NOTE: Make sure that there is no repository scanning task executing before invoking this so as to prevent
+     * the index writer/reader of the current index-content consumer executing from getting closed. For an example,
+     * see ArchivaDavResource#executeConsumers( File ). 
      * 
      * @param repository the repository configuration to use.
      * @param localFile the local file to execute the consumers against.
index 994559fddb4ce28ff0341463fc6af2982ba407e9..5cc3f68974a493c2daebc12250e9705d71bc5018 100644 (file)
@@ -4,8 +4,6 @@
        xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
 
     <bean id="repositoryContentConsumers" class="org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers" scope="prototype">
-      <constructor-arg>
-        <ref bean="archivaConfiguration"/>
-      </constructor-arg>
+      <constructor-arg ref="archivaConfiguration"/>
     </bean>
 </beans>
\ No newline at end of file
index 52bdba4a2b5bc01297efb6c4337e73bcd632b862..2edb02150120bd31c1493484b89dac6aac63830b 100644 (file)
@@ -44,6 +44,9 @@ public interface ArchivaTaskScheduler
 
     public boolean isProcessingRepositoryTask( String repositoryId )
         throws ArchivaException;
+    
+    public boolean isProcessingRepositoryTaskWithName( String taskName )
+        throws ArchivaException;
 
     public void queueDatabaseTask( DatabaseTask task )
         throws TaskQueueException;
index 599de9736505a6e71887b5ce08d4556dfdc91b91..4f0299df3f37e018c031f5c19452e135f2f75c8c 100644 (file)
@@ -31,6 +31,7 @@ import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
 import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
 import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
+import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskNameSelectionPredicate;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
@@ -325,36 +326,63 @@ public class DefaultArchivaTaskScheduler
     public boolean isProcessingAnyRepositoryTask()
         throws ArchivaException
     {
-        List<? extends Task> queue = null;
-
-        try
-        {
-            queue = repositoryScanningQueue.getQueueSnapshot();
-        }
-        catch ( TaskQueueException e )
+        synchronized( repositoryScanningQueue )
         {
-            throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
+            List<? extends Task> queue = null;
+    
+            try
+            {
+                queue = repositoryScanningQueue.getQueueSnapshot();
+            }
+            catch ( TaskQueueException e )
+            {
+                throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
+            }
+    
+            return !queue.isEmpty();
         }
-
-        return !queue.isEmpty();
     }
 
     @SuppressWarnings("unchecked")
     public boolean isProcessingRepositoryTask( String repositoryId )
         throws ArchivaException
     {
-        List<? extends Task> queue = null;
-
-        try
+        synchronized( repositoryScanningQueue )
         {
-            queue = repositoryScanningQueue.getQueueSnapshot();
+            List<? extends Task> queue = null;
+    
+            try
+            {
+                queue = repositoryScanningQueue.getQueueSnapshot();
+            }
+            catch ( TaskQueueException e )
+            {
+                throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
+            }
+    
+            return CollectionUtils.exists( queue, new RepositoryTaskSelectionPredicate( repositoryId ) );
         }
-        catch ( TaskQueueException e )
+    }
+    
+    @SuppressWarnings("unchecked")
+    public boolean isProcessingRepositoryTaskWithName( String taskName )
+        throws ArchivaException
+    {
+        synchronized( repositoryScanningQueue )
         {
-            throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
+            List<? extends Task> queue = null;
+    
+            try
+            {
+                queue = repositoryScanningQueue.getQueueSnapshot();
+            }
+            catch ( TaskQueueException e )
+            {
+                throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
+            }
+    
+            return CollectionUtils.exists( queue, new RepositoryTaskNameSelectionPredicate( taskName ) );
         }
-
-        return CollectionUtils.exists( queue, new RepositoryTaskSelectionPredicate( repositoryId ) );
     }
 
     @SuppressWarnings("unchecked")
@@ -378,7 +406,10 @@ public class DefaultArchivaTaskScheduler
     public void queueRepositoryTask( RepositoryTask task )
         throws TaskQueueException
     {
-        repositoryScanningQueue.put( task );
+        synchronized( repositoryScanningQueue )
+        {
+            repositoryScanningQueue.put( task );
+        }
     }
 
     public void queueDatabaseTask( DatabaseTask task )
index 6163ed4f549094f6e7cbccd8b0e0e036b3ce9488..76cf3bbe5681fc25d93c5ccede0c9b44ca75884e 100644 (file)
@@ -34,6 +34,7 @@ import org.apache.maven.archiva.database.constraints.UniqueGroupIdConstraint;
 import org.apache.maven.archiva.model.ArchivaArtifact;
 import org.apache.maven.archiva.model.RepositoryContentStatistics;
 import org.apache.maven.archiva.repository.RepositoryException;
+import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
 import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
 import org.apache.maven.archiva.repository.scanner.RepositoryScanner;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
@@ -79,6 +80,13 @@ public class ArchivaRepositoryScanningTaskExecutor
      * @plexus.requirement
      */
     private RepositoryScanner repoScanner;
+    
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryContentConsumers consumers;
+    
+    private Task task;
 
     public void initialize()
         throws InitializationException
@@ -90,6 +98,8 @@ public class ArchivaRepositoryScanningTaskExecutor
     public void executeTask( Task task )
         throws TaskExecutionException
     {
+        this.task = task;
+        
         RepositoryTask repoTask = (RepositoryTask) task;
         
         if ( StringUtils.isBlank( repoTask.getRepositoryId() ) )
@@ -98,37 +108,49 @@ public class ArchivaRepositoryScanningTaskExecutor
         }
 
         log.info( "Executing task from queue with job name: " + repoTask.getName() );
+
+        ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
         
-        try
+        // execute consumers on resource file if set
+        if( repoTask.getResourceFile() != null )
         {
-            ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
-            if ( arepo == null )
-            {
-                throw new TaskExecutionException( "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
-            }
-
-            long sinceWhen = RepositoryScanner.FRESH_SCAN;
-
-            List<RepositoryContentStatistics> results = (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( arepo.getId() ) );
-
-            if ( CollectionUtils.isNotEmpty( results ) )
-            {
-                RepositoryContentStatistics lastStats = results.get( 0 );
-                sinceWhen = lastStats.getWhenGathered().getTime() + lastStats.getDuration();
+            consumers.executeConsumers( arepo, repoTask.getResourceFile() );
+        }
+        else
+        {
+            // otherwise, execute consumers on whole repository
+            try
+            {   
+                if ( arepo == null )
+                {
+                    throw new TaskExecutionException( "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
+                }
+    
+                long sinceWhen = RepositoryScanner.FRESH_SCAN;
+    
+                List<RepositoryContentStatistics> results = (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( arepo.getId() ) );
+    
+                if ( CollectionUtils.isNotEmpty( results ) )
+                {
+                    RepositoryContentStatistics lastStats = results.get( 0 );
+                    sinceWhen = lastStats.getWhenGathered().getTime() + lastStats.getDuration();
+                }
+    
+                RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen );
+    
+                log.info( "Finished repository task: " + stats.toDump( arepo ) );
+                
+                RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, sinceWhen, results, stats );
+                
+                dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );   
+                
+                this.task = null;
             }
-
-            RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen );
-
-            log.info( "Finished repository task: " + stats.toDump( arepo ) );
-            
-            RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, sinceWhen, results, stats );
-            
-            dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );            
+            catch ( RepositoryException e )
+            {   
+                throw new TaskExecutionException( "Repository error when executing repository job.", e );
+            }    
         }
-        catch ( RepositoryException e )
-        {   
-            throw new TaskExecutionException( "Repository error when executing repository job.", e );
-        }    
     }
 
     @SuppressWarnings("unchecked")
@@ -176,5 +198,10 @@ public class ArchivaRepositoryScanningTaskExecutor
         dbstats.setTotalProjectCount( artifactIds.size() );
                         
         return dbstats;
-    }    
+    }   
+    
+    public Task getCurrentTaskInExecution()
+    {
+        return task;
+    }
 }
index 12d33a894d36eca323a2a508580de34573a833ed..d18afcf61f6e3e2985c4599da36bc272899ed0a0 100644 (file)
@@ -1,5 +1,7 @@
 package org.apache.maven.archiva.scheduled.tasks;
 
+import java.io.File;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -36,6 +38,8 @@ public class RepositoryTask
 
     long maxExecutionTime;
     
+    File resourceFile;
+    
     public String getRepositoryId()
     {
         return repositoryId;
@@ -75,4 +79,14 @@ public class RepositoryTask
     {
         this.queuePolicy = queuePolicy;
     }
+
+    public File getResourceFile()
+    {
+        return resourceFile;
+    }
+
+    public void setResourceFile( File resourceFile )
+    {
+        this.resourceFile = resourceFile;
+    }
 }
diff --git a/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/RepositoryTaskNameSelectionPredicate.java b/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/tasks/RepositoryTaskNameSelectionPredicate.java
new file mode 100644 (file)
index 0000000..5e2f460
--- /dev/null
@@ -0,0 +1,47 @@
+package org.apache.maven.archiva.scheduled.tasks;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.collections.Predicate;
+import org.apache.commons.lang.StringUtils;
+
+public class RepositoryTaskNameSelectionPredicate
+    implements Predicate
+{
+    private String taskName;
+
+    public RepositoryTaskNameSelectionPredicate( String taskName )
+    {
+        this.taskName = taskName;
+    }
+
+    public boolean evaluate( Object object )
+    {
+        boolean satisfies = false;
+
+        if ( object instanceof RepositoryTask )
+        {
+            RepositoryTask task = (RepositoryTask) object;
+            return StringUtils.equals( taskName, task.getName() );
+        }
+
+        return satisfies;
+    }
+}
index c9548b53afa0ff5ff846eea7276ecea683f540df..b48e3f9c3ca78f183b6194b1830cdfa792cde1ed 100644 (file)
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-repository-layer</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>                          
+      <artifactId>archiva-scheduled</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-proxy</artifactId>
       <artifactId>slf4j-simple</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>hsqldb</groupId>
+      <artifactId>hsqldb</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.maven.wagon</groupId>
       <artifactId>wagon-http-lightweight</artifactId>
index f715960e4c1cb5556b2a8906601a3eb4bd7e6ec1..1de35015b7b1ab0a14638098b0a79858a54907f7 100644 (file)
@@ -53,15 +53,25 @@ import org.apache.jackrabbit.webdav.property.DavPropertyNameSet;
 import org.apache.jackrabbit.webdav.property.DavPropertySet;
 import org.apache.jackrabbit.webdav.property.DefaultDavProperty;
 import org.apache.jackrabbit.webdav.property.ResourceType;
+import org.apache.maven.archiva.common.ArchivaException;
 import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
 import org.apache.maven.archiva.repository.audit.AuditEvent;
 import org.apache.maven.archiva.repository.audit.AuditListener;
 import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;
+import org.apache.maven.archiva.scheduled.executors.ArchivaRepositoryScanningTaskExecutor;
+import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
+import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
 import org.apache.maven.archiva.webdav.util.IndexWriter;
 import org.apache.maven.archiva.webdav.util.MimeTypes;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
 import org.joda.time.DateTime;
 import org.joda.time.format.DateTimeFormatter;
 import org.joda.time.format.ISODateTimeFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  */
@@ -97,11 +107,17 @@ public class ArchivaDavResource
     private String principal;
     
        public static final String COMPLIANCE_CLASS = "1, 2";
+       
+       private ArchivaTaskScheduler scheduler;
+       
+       private ArchivaRepositoryScanningTaskExecutor taskExecutor;
+       
+       private Logger log = LoggerFactory.getLogger( ArchivaDavResource.class );
 
     public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
                                DavSession session, ArchivaDavResourceLocator locator, DavResourceFactory factory,
                                MimeTypes mimeTypes, List<AuditListener> auditListeners,
-                               RepositoryContentConsumers consumers )
+                               RepositoryContentConsumers consumers, ArchivaTaskScheduler scheduler, TaskExecutor taskExecutor )
     {
         this.localResource = new File( localResource ); 
         this.logicalResource = logicalResource;
@@ -116,15 +132,17 @@ public class ArchivaDavResource
         this.mimeTypes = mimeTypes;
         this.consumers = consumers;
         this.auditListeners = auditListeners;
+        this.scheduler = scheduler;
+        this.taskExecutor = ( ArchivaRepositoryScanningTaskExecutor ) taskExecutor;
     }
 
     public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
                                String remoteAddr, String principal, DavSession session, ArchivaDavResourceLocator locator,
                                DavResourceFactory factory, MimeTypes mimeTypes, List<AuditListener> auditListeners,
-                               RepositoryContentConsumers consumers )
+                               RepositoryContentConsumers consumers, ArchivaTaskScheduler scheduler, TaskExecutor taskExecutor )
     {
         this( localResource, logicalResource, repository, session, locator, factory, mimeTypes, auditListeners,
-              consumers );
+              consumers, scheduler, taskExecutor );
 
         this.remoteAddr = remoteAddr;
         this.principal = principal;
@@ -303,8 +321,7 @@ public class ArchivaDavResource
                     inputContext.getContentLength() + " but was " + localFile.length() );
             }
             
-            // Just-in-time update of the index and database by executing the consumers for this artifact
-            consumers.executeConsumers( repository, localFile );
+            executeConsumers( localFile );            
             
             triggerAuditEvent( resource, exists ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE );
         }
@@ -623,4 +640,51 @@ public class ArchivaDavResource
             listener.auditEvent( event );
         }
     }
+    
+    private void executeConsumers( File localFile )
+    {
+        try
+        {
+            RepositoryTask currentTaskInExecution = ( RepositoryTask ) taskExecutor.getCurrentTaskInExecution();
+            if( currentTaskInExecution != null || scheduler.isProcessingAnyRepositoryTask() )
+            {   
+                // check if the repository is already queued to be scanned
+                if( scheduler.isProcessingRepositoryTaskWithName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() )
+                        || scheduler.isProcessingRepositoryTaskWithName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() + ":" + localFile.getName() ) )
+                {
+                    // no need to execute the consumers since repo is already queued
+                    return;
+                }
+                else
+                {
+                    // schedule the task
+                    RepositoryTask task = new RepositoryTask();
+                    task.setRepositoryId( repository.getId() );
+                    task.setName( DefaultArchivaTaskScheduler.REPOSITORY_JOB + ":" + repository.getId() + ":" + localFile.getName() );
+                    task.setQueuePolicy( ArchivaTask.QUEUE_POLICY_WAIT );
+                    task.setResourceFile( localFile );
+                    
+                    try
+                    {
+                        scheduler.queueRepositoryTask( task );
+                    }
+                    catch ( TaskQueueException e )
+                    {
+                        log.error( "Unable to queue repository task to execute consumers on resource file ['" +
+                            localFile.getName() + "']." );
+                    }
+                }
+            }
+            else
+            {
+                // Just-in-time update of the index and database by executing the consumers for this artifact
+                consumers.executeConsumers( repository, localFile );
+            }
+        }
+        catch ( ArchivaException e )
+        {
+            log.error( "Unable to queue repository task to execute consumers on resource file ['" +
+                       localFile.getName() + "']." );
+        }
+    }
 }
index 71e89c4d973e553e668f9aa73dad46c9c096b77d..1a53c193e2d12d4742df902ea4cba48041b2aca2 100644 (file)
@@ -61,6 +61,7 @@ import org.apache.maven.archiva.repository.metadata.RepositoryMetadataMerge;
 import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
 import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
 import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
+import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
 import org.apache.maven.archiva.security.ServletAuthenticator;
 import org.apache.maven.archiva.webdav.util.MimeTypes;
 import org.apache.maven.archiva.webdav.util.RepositoryPathUtil;
@@ -81,6 +82,7 @@ import org.codehaus.plexus.redback.policy.MustChangePasswordException;
 import org.codehaus.plexus.redback.system.SecuritySession;
 import org.codehaus.plexus.redback.users.User;
 import org.codehaus.plexus.redback.users.UserManager;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
 import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
 import org.codehaus.redback.integration.filter.authentication.HttpAuthenticator;
 import org.slf4j.Logger;
@@ -167,6 +169,16 @@ public class ArchivaDavResourceFactory
      * @plexus.requirement role-hint="md5";
      */
     private Digester digestMd5;
+    
+    /**
+     * @plexus.requirement role-hint="repository-scanning"
+     */
+    private TaskExecutor taskExecutor;
+    
+    /**
+     * @plexus.requirement
+     */
+    private ArchivaTaskScheduler scheduler;
 
     public DavResource createResource( final DavResourceLocator locator, final DavServletRequest request,
                                        final DavServletResponse response )
@@ -259,7 +271,7 @@ public class ArchivaDavResourceFactory
                             new ArchivaDavResource( metadataChecksum.getAbsolutePath(), logicalResource.getPath(),
                                                     null, request.getRemoteAddr(), activePrincipal,
                                                     request.getDavSession(), archivaLocator, this, mimeTypes,
-                                                    auditListeners, consumers );
+                                                    auditListeners, consumers, scheduler, taskExecutor );
                     }
                 }
                 else
@@ -294,7 +306,7 @@ public class ArchivaDavResourceFactory
                                 new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
                                                         null, request.getRemoteAddr(), activePrincipal,
                                                         request.getDavSession(), archivaLocator, this, mimeTypes,
-                                                        auditListeners, consumers );
+                                                        auditListeners, consumers, scheduler, taskExecutor );
                         }
                         catch ( RepositoryMetadataException r )
                         {
@@ -399,7 +411,7 @@ public class ArchivaDavResourceFactory
                 new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
                                         managedRepository.getRepository(), request.getRemoteAddr(), activePrincipal,
                                         request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners,
-                                        consumers );
+                                        consumers, scheduler, taskExecutor );
 
             if ( WebdavMethodUtil.isReadMethod( request.getMethod() ) )
             {
@@ -430,7 +442,7 @@ public class ArchivaDavResourceFactory
                                 new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
                                                         managedRepository.getRepository(), request.getRemoteAddr(),
                                                         activePrincipal, request.getDavSession(), archivaLocator, this,
-                                                        mimeTypes, auditListeners, consumers );
+                                                        mimeTypes, auditListeners, consumers, scheduler, taskExecutor );
                         }
                         catch ( LayoutException e )
                         {
@@ -505,7 +517,7 @@ public class ArchivaDavResourceFactory
         File resourceFile = new File( managedRepository.getRepoRoot(), logicalResource );
         DavResource resource =
             new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource, managedRepository.getRepository(),
-                                    davSession, archivaLocator, this, mimeTypes, auditListeners, consumers );
+                                    davSession, archivaLocator, this, mimeTypes, auditListeners, consumers, scheduler, taskExecutor );
 
         resource.addLockManager( lockManager );
         return resource;
@@ -999,4 +1011,14 @@ public class ArchivaDavResourceFactory
     {
         this.httpAuth = httpAuth;
     }
+
+    public void setTaskExecutor( TaskExecutor taskExecutor )
+    {
+        this.taskExecutor = taskExecutor;
+    }
+
+    public void setScheduler( ArchivaTaskScheduler scheduler )
+    {
+        this.scheduler = scheduler;
+    }
 }
index 74cefed44f7ae47c78c2b6fdd9ad94f7c048db00..4365f0a788a76bdaae7a51ef66d9d06c5ae57df0 100644 (file)
@@ -43,47 +43,49 @@ import org.apache.maven.archiva.webdav.util.MimeTypes;
 import org.codehaus.plexus.spring.PlexusInSpringTestCase;
 import org.codehaus.plexus.spring.PlexusToSpringUtils;
 
-public class DavResourceTest extends PlexusInSpringTestCase
+public class DavResourceTest
+    extends PlexusInSpringTestCase
 {
     private DavSession session;
-    
+
     private MimeTypes mimeTypes;
-    
+
     private ArchivaDavResourceLocator resourceLocator;
-    
+
     private DavResourceFactory resourceFactory;
-    
+
     private File baseDir;
-    
+
     private final String REPOPATH = "myresource.jar";
-    
+
     private File myResource;
-    
+
     private DavResource resource;
-    
+
     private LockManager lockManager;
 
     private RepositoryContentConsumers consumers;
 
     private ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
-    
+
     @Override
     protected void setUp()
         throws Exception
     {
         super.setUp();
         session = new ArchivaDavSession();
-        mimeTypes = (MimeTypes)getApplicationContext().getBean(PlexusToSpringUtils.buildSpringId(MimeTypes.class));
-        baseDir = getTestFile("target/DavResourceTest");
+        mimeTypes = (MimeTypes) getApplicationContext().getBean( PlexusToSpringUtils.buildSpringId( MimeTypes.class ) );
+        baseDir = getTestFile( "target/DavResourceTest" );
         baseDir.mkdirs();
-        myResource = new File(baseDir, "myresource.jar");
-        assertTrue("Could not create " + myResource.getAbsolutePath(), myResource.createNewFile());
+        myResource = new File( baseDir, "myresource.jar" );
+        assertTrue( "Could not create " + myResource.getAbsolutePath(), myResource.createNewFile() );
         resourceFactory = new RootContextDavResourceFactory();
-        resourceLocator = (ArchivaDavResourceLocator)new ArchivaDavLocatorFactory().createResourceLocator("/", REPOPATH);
-        resource = getDavResource(resourceLocator.getHref(false), myResource);
+        resourceLocator =
+            (ArchivaDavResourceLocator) new ArchivaDavLocatorFactory().createResourceLocator( "/", REPOPATH );
+        resource = getDavResource( resourceLocator.getHref( false ), myResource );
         lockManager = new SimpleLockManager();
-        resource.addLockManager(lockManager);
-        consumers = (RepositoryContentConsumers)getApplicationContext().getBean("repositoryContentConsumers");
+        resource.addLockManager( lockManager );
+        consumers = (RepositoryContentConsumers) getApplicationContext().getBean( "repositoryContentConsumers" );        
     }
 
     @Override
@@ -91,215 +93,222 @@ public class DavResourceTest extends PlexusInSpringTestCase
         throws Exception
     {
         super.tearDown();
-        release(mimeTypes);
-        FileUtils.deleteDirectory(baseDir);
+        release( mimeTypes );
+        FileUtils.deleteDirectory( baseDir );
     }
-    
-    private DavResource getDavResource(String logicalPath, File file)
+
+    private DavResource getDavResource( String logicalPath, File file )
     {
         return new ArchivaDavResource( file.getAbsolutePath(), logicalPath, repository, session, resourceLocator,
-                                       resourceFactory, mimeTypes, Collections.<AuditListener>emptyList(), consumers );
+                                       resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), consumers,
+                                       null, null );
     }
-    
+
     public void testDeleteNonExistantResourceShould404()
         throws Exception
     {
-        File dir = new File(baseDir, "testdir");
+        File dir = new File( baseDir, "testdir" );
         try
         {
-            DavResource directoryResource = getDavResource("/testdir", dir);
-            directoryResource.getCollection().removeMember(directoryResource);
-            fail("Did not throw DavException");
+            DavResource directoryResource = getDavResource( "/testdir", dir );
+            directoryResource.getCollection().removeMember( directoryResource );
+            fail( "Did not throw DavException" );
         }
-        catch (DavException e)
+        catch ( DavException e )
         {
-            assertEquals(DavServletResponse.SC_NOT_FOUND, e.getErrorCode());
+            assertEquals( DavServletResponse.SC_NOT_FOUND, e.getErrorCode() );
         }
     }
-    
+
     public void testDeleteCollection()
         throws Exception
     {
-        File dir = new File(baseDir, "testdir");
+        File dir = new File( baseDir, "testdir" );
         try
         {
-            assertTrue(dir.mkdir());
-            DavResource directoryResource = getDavResource("/testdir", dir);
-            directoryResource.getCollection().removeMember(directoryResource);
-            assertFalse(dir.exists());
+            assertTrue( dir.mkdir() );
+            DavResource directoryResource = getDavResource( "/testdir", dir );
+            directoryResource.getCollection().removeMember( directoryResource );
+            assertFalse( dir.exists() );
         }
         finally
         {
-            FileUtils.deleteDirectory(dir);
+            FileUtils.deleteDirectory( dir );
         }
     }
-    
+
     public void testDeleteResource()
         throws Exception
     {
-        assertTrue(myResource.exists());
-        resource.getCollection().removeMember(resource);
-        assertFalse(myResource.exists());
+        assertTrue( myResource.exists() );
+        resource.getCollection().removeMember( resource );
+        assertFalse( myResource.exists() );
     }
-    
+
     public void testIsLockable()
     {
-        assertTrue(resource.isLockable(Type.WRITE, Scope.EXCLUSIVE));
-        assertFalse(resource.isLockable(Type.WRITE, Scope.SHARED));
+        assertTrue( resource.isLockable( Type.WRITE, Scope.EXCLUSIVE ) );
+        assertFalse( resource.isLockable( Type.WRITE, Scope.SHARED ) );
     }
-    
+
     public void testLock()
         throws Exception
     {
-        assertEquals(0, resource.getLocks().length);
-       
-        LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
-        lockManager.createLock(info, resource);
-        
-        assertEquals(1, resource.getLocks().length);
+        assertEquals( 0, resource.getLocks().length );
+
+        LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+        lockManager.createLock( info, resource );
+
+        assertEquals( 1, resource.getLocks().length );
     }
-    
+
     public void testLockIfResourceUnlockable()
         throws Exception
     {
-        assertEquals(0, resource.getLocks().length);
-       
-        LockInfo info = new LockInfo(Scope.SHARED, Type.WRITE, "/", 0, false);
+        assertEquals( 0, resource.getLocks().length );
+
+        LockInfo info = new LockInfo( Scope.SHARED, Type.WRITE, "/", 0, false );
         try
         {
-            lockManager.createLock(info, resource);
-            fail("Did not throw dav exception");
+            lockManager.createLock( info, resource );
+            fail( "Did not throw dav exception" );
         }
-        catch (Exception e)
+        catch ( Exception e )
         {
-            //Simple lock manager will die
+            // Simple lock manager will die
         }
-        assertEquals(0, resource.getLocks().length); 
+        assertEquals( 0, resource.getLocks().length );
     }
-    
+
     public void testGetLock()
         throws Exception
     {
-        LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
-        lockManager.createLock(info, resource);
-        
-        assertEquals(1, resource.getLocks().length);
-        
-        //Lock should exist
-        assertNotNull(resource.getLock(Type.WRITE, Scope.EXCLUSIVE));
-        
-        //Lock should not exist
-        assertNull(resource.getLock(Type.WRITE, Scope.SHARED));
+        LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+        lockManager.createLock( info, resource );
+
+        assertEquals( 1, resource.getLocks().length );
+
+        // Lock should exist
+        assertNotNull( resource.getLock( Type.WRITE, Scope.EXCLUSIVE ) );
+
+        // Lock should not exist
+        assertNull( resource.getLock( Type.WRITE, Scope.SHARED ) );
     }
-    
-    
+
     public void testRefreshLockThrowsExceptionIfNoLockIsPresent()
         throws Exception
     {
-        LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
-        
-        assertEquals(0, resource.getLocks().length);       
-        
+        LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+
+        assertEquals( 0, resource.getLocks().length );
+
         try
         {
-            lockManager.refreshLock(info, "notoken", resource);
-            fail("Did not throw dav exception");
+            lockManager.refreshLock( info, "notoken", resource );
+            fail( "Did not throw dav exception" );
         }
-        catch (DavException e)
+        catch ( DavException e )
         {
-            assertEquals(DavServletResponse.SC_PRECONDITION_FAILED, e.getErrorCode());
+            assertEquals( DavServletResponse.SC_PRECONDITION_FAILED, e.getErrorCode() );
         }
-        
-        assertEquals(0, resource.getLocks().length);
+
+        assertEquals( 0, resource.getLocks().length );
     }
-    
+
     public void testRefreshLock()
         throws Exception
     {
-        LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
-        
-        assertEquals(0, resource.getLocks().length);
-        
-        lockManager.createLock(info, resource);
-        
-        assertEquals(1, resource.getLocks().length);
-        
+        LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+
+        assertEquals( 0, resource.getLocks().length );
+
+        lockManager.createLock( info, resource );
+
+        assertEquals( 1, resource.getLocks().length );
+
         ActiveLock lock = resource.getLocks()[0];
 
-        lockManager.refreshLock(info, lock.getToken(), resource);
-        
-        assertEquals(1, resource.getLocks().length);
+        lockManager.refreshLock( info, lock.getToken(), resource );
+
+        assertEquals( 1, resource.getLocks().length );
     }
-    
+
     public void testUnlock()
         throws Exception
     {
-        LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
-        
-        assertEquals(0, resource.getLocks().length);
-        
-        lockManager.createLock(info, resource);
-        
-        assertEquals(1, resource.getLocks().length);
-        
+        LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+
+        assertEquals( 0, resource.getLocks().length );
+
+        lockManager.createLock( info, resource );
+
+        assertEquals( 1, resource.getLocks().length );
+
         ActiveLock lock = resource.getLocks()[0];
 
-        lockManager.releaseLock(lock.getToken(), resource);
-        
-        assertEquals(0, resource.getLocks().length);
-    }    
-    
+        lockManager.releaseLock( lock.getToken(), resource );
+
+        assertEquals( 0, resource.getLocks().length );
+    }
+
     public void testUnlockThrowsDavExceptionIfNotLocked()
         throws Exception
     {
-        LockInfo info = new LockInfo(Scope.EXCLUSIVE, Type.WRITE, "/", 0, false);
-        
-        assertEquals(0, resource.getLocks().length);
-        
-        lockManager.createLock(info, resource);
-        
-        assertEquals(1, resource.getLocks().length);
+        LockInfo info = new LockInfo( Scope.EXCLUSIVE, Type.WRITE, "/", 0, false );
+
+        assertEquals( 0, resource.getLocks().length );
+
+        lockManager.createLock( info, resource );
+
+        assertEquals( 1, resource.getLocks().length );
 
         try
         {
-            lockManager.releaseLock("BLAH", resource);
-            fail("Did not throw DavException");
+            lockManager.releaseLock( "BLAH", resource );
+            fail( "Did not throw DavException" );
         }
-        catch (DavException e)
+        catch ( DavException e )
         {
-            assertEquals(DavServletResponse.SC_LOCKED, e.getErrorCode());
+            assertEquals( DavServletResponse.SC_LOCKED, e.getErrorCode() );
         }
-        
-        assertEquals(1, resource.getLocks().length);      
+
+        assertEquals( 1, resource.getLocks().length );
     }
-    
+
     public void testUnlockThrowsDavExceptionIfResourceNotLocked()
         throws Exception
-    {        
-        assertEquals(0, resource.getLocks().length);
+    {
+        assertEquals( 0, resource.getLocks().length );
 
         try
         {
-            lockManager.releaseLock("BLAH", resource);
-            fail("Did not throw DavException");
+            lockManager.releaseLock( "BLAH", resource );
+            fail( "Did not throw DavException" );
         }
-        catch (DavException e)
+        catch ( DavException e )
         {
-            assertEquals(DavServletResponse.SC_PRECONDITION_FAILED, e.getErrorCode());
+            assertEquals( DavServletResponse.SC_PRECONDITION_FAILED, e.getErrorCode() );
         }
-        
-        assertEquals(0, resource.getLocks().length);      
+
+        assertEquals( 0, resource.getLocks().length );
     }
-    
-    private class RootContextDavResourceFactory implements DavResourceFactory
+
+    private class RootContextDavResourceFactory
+        implements DavResourceFactory
     {
-        public DavResource createResource(DavResourceLocator locator, DavServletRequest request, DavServletResponse response) throws DavException {
-            throw new UnsupportedOperationException("Not supported yet.");
+        public DavResource createResource( DavResourceLocator locator, DavServletRequest request,
+                                           DavServletResponse response )
+            throws DavException
+        {
+            throw new UnsupportedOperationException( "Not supported yet." );
         }
 
-        public DavResource createResource(DavResourceLocator locator, DavSession session) throws DavException {
+        public DavResource createResource( DavResourceLocator locator, DavSession session )
+            throws DavException
+        {
             return new ArchivaDavResource( baseDir.getAbsolutePath(), "/", repository, session, resourceLocator,
-                                           resourceFactory, mimeTypes, Collections.<AuditListener>emptyList(), consumers );
+                                           resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(),
+                                           consumers, null, null );
         }
     }
 }
index b55a4e1514f7102be3905ef8673f191bc0aaeafd..418e38b34466571a5ba147701a85e3ec370bfa29 100644 (file)
           <role-hint>md5</role-hint>
           <field-name>digestMd5</field-name>
         </requirement>
+        <requirement>
+          <role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
+          <role-hint>repository-scanning</role-hint>
+          <field-name>taskExecutor</field-name>
+        </requirement>
+        <requirement>
+          <role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
+          <field-name>scheduler</field-name>
+        </requirement>
       </requirements>
     </component>
+    <component>
+      <role>org.codehaus.plexus.jdo.JdoFactory</role>
+      <role-hint>archiva</role-hint>
+      <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+      
+      <configuration>
+        <!-- Database Configuration -->
+        <driverName>org.hsqldb.jdbcDriver</driverName>
+        <url>jdbc:hsqldb:mem:TESTDB</url>
+        <userName>sa</userName>
+        <password></password>
+        
+        <!-- JPOX and JDO configuration -->
+        <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+        <otherProperties>
+          <property>
+            <name>javax.jdo.PersistenceManagerFactoryClass</name>
+            <value>org.jpox.PersistenceManagerFactoryImpl</value>
+          </property>
+          <property>
+            <name>org.jpox.autoCreateSchema</name>
+            <value>true</value>
+          </property>
+          <property>
+            <name>org.jpox.validateTables</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.validateConstraints</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.validateColumns</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.autoStartMechanism</name>
+            <value>None</value>
+          </property>
+          <property>
+            <name>org.jpox.transactionIsolation</name>
+            <value>READ_UNCOMMITTED</value>
+          </property>
+          <property>
+            <name>org.jpox.poid.transactionIsolation</name>
+            <value>READ_UNCOMMITTED</value>
+          </property>
+          <property>
+            <name>org.jpox.rdbms.dateTimezone</name>
+            <value>JDK_DEFAULT_TIMEZONE</value>
+          </property>
+        </otherProperties>
+      </configuration>
+    </component>
   </components>
 </plexus>
index 017d761b8a0124d0285a9a2f15f90e1fac8254a0..a04b4cfdc0c6869554b5580e0945f3ddb8c2d0ca 100644 (file)
           <role-hint>md5</role-hint>
           <field-name>digestMd5</field-name>
         </requirement>
+        <requirement>
+          <role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
+          <role-hint>repository-scanning</role-hint>
+          <field-name>taskExecutor</field-name>
+        </requirement>
+        <requirement>
+          <role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
+          <field-name>scheduler</field-name>
+        </requirement>
       </requirements>
     </component>
+    <component>
+      <role>org.codehaus.plexus.jdo.JdoFactory</role>
+      <role-hint>archiva</role-hint>
+      <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+      
+      <configuration>
+        <!-- Database Configuration -->
+        <driverName>org.hsqldb.jdbcDriver</driverName>
+        <url>jdbc:hsqldb:mem:TESTDB</url>
+        <userName>sa</userName>
+        <password></password>
+        
+        <!-- JPOX and JDO configuration -->
+        <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+        <otherProperties>
+          <property>
+            <name>javax.jdo.PersistenceManagerFactoryClass</name>
+            <value>org.jpox.PersistenceManagerFactoryImpl</value>
+          </property>
+          <property>
+            <name>org.jpox.autoCreateSchema</name>
+            <value>true</value>
+          </property>
+          <property>
+            <name>org.jpox.validateTables</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.validateConstraints</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.validateColumns</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.autoStartMechanism</name>
+            <value>None</value>
+          </property>
+          <property>
+            <name>org.jpox.transactionIsolation</name>
+            <value>READ_UNCOMMITTED</value>
+          </property>
+          <property>
+            <name>org.jpox.poid.transactionIsolation</name>
+            <value>READ_UNCOMMITTED</value>
+          </property>
+          <property>
+            <name>org.jpox.rdbms.dateTimezone</name>
+            <value>JDK_DEFAULT_TIMEZONE</value>
+          </property>
+        </otherProperties>
+      </configuration>
+    </component>
   </components>
 </plexus>