source.dussan.org Git - archiva.git/commitdiff
Merged /archiva/trunk:r885006-887051 (applied changes to scheduler manually due to...
authorBrett Porter <brett@apache.org>
Fri, 4 Dec 2009 04:08:30 +0000 (04:08 +0000)
committerBrett Porter <brett@apache.org>
Fri, 4 Dec 2009 04:08:30 +0000 (04:08 +0000)
git-svn-id: https://svn.apache.org/repos/asf/archiva/branches/MRM-1025@887052 13f79535-47bb-0310-9956-ffa450edef68

archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/main/java/org/apache/archiva/scheduler/indexing/ArchivaIndexingTaskExecutor.java
archiva-modules/archiva-scheduler/archiva-scheduler-indexing/src/test/java/org/apache/archiva/scheduler/indexing/ArchivaIndexingTaskExecutorTest.java
archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/main/java/org/apache/archiva/scheduler/repository/ArchivaRepositoryScanningTaskExecutor.java
archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/scheduler/repository/ArchivaRepositoryScanningTaskExecutorTest.java

index 64e13b63635c7dd7af0034739717dd818721d396..04ab728e967a888c1d10ececd9ae75974d42b8b4 100644 (file)
@@ -91,9 +91,9 @@ public class ArchivaIndexingTaskExecutor
                 }
                 catch ( IOException e )
                 {
-                    log.error( "Error occurred while executing indexing task '" + indexingTask + "'" );
+                    log.error( "Error occurred while executing indexing task '" + indexingTask + "': " + e.getMessage() );
                     throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask
-                        + "'" );
+                        + "'", e );
                 }
                 finally
                 {
@@ -166,9 +166,9 @@ public class ArchivaIndexingTaskExecutor
                 }
                 catch ( IOException e )
                 {
-                    log.error( "Error occurred while executing indexing task '" + indexingTask + "'" );
+                    log.error( "Error occurred while executing indexing task '" + indexingTask + "': " + e.getMessage() );
                     throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask
-                        + "'" );
+                        + "'", e );
                 }
             }
         }
index d8aeb17871c06c29525d90ee8a262aaf3e1db2f7..bdc934c5df34ee11e63c8b5df59c7e2aeb9c97bd 100644 (file)
@@ -203,6 +203,8 @@ public class ArchivaIndexingTaskExecutorTest
         IndexSearcher searcher = new IndexSearcher( repositoryConfig.getLocation() + "/.indexer" );
         TopDocs topDocs = searcher.search( q, null, 10 );
 
+        searcher.close();
+
         assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
         assertFalse( new File( repositoryConfig.getLocation(), ".index" ).exists() );
 
index 7585e699c53cb12d871f464f6a4560c6491c603b..82fb2602d59cd0f2971c316f47dbcea865250d90 100644 (file)
@@ -9,7 +9,7 @@ package org.apache.archiva.scheduler.repository;
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *  http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
@@ -152,7 +152,7 @@ public class ArchivaRepositoryScanningTaskExecutor
                     RepositoryContentStatistics lastStats = results.get( 0 );
                     if( !repoTask.isScanAll() )
                     {
-                        sinceWhen = lastStats.getWhenGathered().getTime() + lastStats.getDuration();
+                        sinceWhen = lastStats.getWhenGathered().getTime() - lastStats.getDuration();
                     }
                 }
 
@@ -210,7 +210,7 @@ public class ArchivaRepositoryScanningTaskExecutor
             log.error( "Error occurred while querying artifacts for artifact count : " + ae.getMessage() );
         }
 
-        // total repo size
+        // total repo size -- TODO: needs to exclude ignored files (eg .svn)
         long size = FileUtils.sizeOfDirectory( new File( arepo.getLocation() ) );
         dbstats.setTotalSize( size );
 
index fe7ce034e6d472f56263d97190d26496688dea81..f44e1da66ca3eca2f1a4004b72dd83c9e17c5e00 100644 (file)
@@ -32,6 +32,7 @@ import org.apache.maven.archiva.configuration.ArchivaConfiguration;
 import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
 import org.apache.maven.archiva.database.ArchivaDAO;
 import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
 import org.apache.maven.archiva.model.ArchivaArtifact;
 import org.apache.maven.archiva.model.RepositoryContentStatistics;
 import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
@@ -42,7 +43,7 @@ import org.codehaus.plexus.util.FileUtils;
 import org.jpox.SchemaTool;
 
 /**
- * ArchivaRepositoryScanningTaskExecutorTest 
+ * ArchivaRepositoryScanningTaskExecutorTest
  *
  * @version $Id$
  */
@@ -52,42 +53,44 @@ public class ArchivaRepositoryScanningTaskExecutorTest
     private TaskExecutor taskExecutor;
 
     protected ArchivaDAO dao;
-    
+
     private File repoDir;
-    
+
+    private static final String TEST_REPO_ID = "testRepo";
+
     protected void setUp()
         throws Exception
     {
         super.setUp();
-        
+
         DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
         assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );
 
-        jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" ); 
+        jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
 
         /* derby version
-        File derbyDbDir = new File( "target/plexus-home/testdb" );
-        if ( derbyDbDir.exists() )
-        {
-            FileUtils.deleteDirectory( derbyDbDir );
-        }
+       File derbyDbDir = new File( "target/plexus-home/testdb" );
+       if ( derbyDbDir.exists() )
+       {
+           FileUtils.deleteDirectory( derbyDbDir );
+       }
 
-        jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );   
-        jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
-         */   
+       jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );
+       jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
+        */
 
-        jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );   
+        jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
         jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:" + getName() ) );
-        
-        jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) ); 
 
-        jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) ); 
+        jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
 
-        jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );  
+        jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
 
-        jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );  
+        jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
 
-        jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );  
+        jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
+
+        jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
 
         jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
 
@@ -108,8 +111,7 @@ public class ArchivaRepositoryScanningTaskExecutorTest
             System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
         }
 
-        URL jdoFileUrls[] = new URL[] { getClass()
-            .getResource( "/org/apache/maven/archiva/model/package.jdo" ) }; 
+        URL jdoFileUrls[] = new URL[]{getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" )};
 
         if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
         {
@@ -119,8 +121,8 @@ public class ArchivaRepositoryScanningTaskExecutorTest
         File propsFile = null; // intentional
         boolean verbose = true;
 
-        SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose );
-        SchemaTool.createSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose, null );
+        SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[]{}, propsFile, verbose );
+        SchemaTool.createSchemaTables( jdoFileUrls, new URL[]{}, propsFile, verbose, null );
 
         PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
 
@@ -133,156 +135,275 @@ public class ArchivaRepositoryScanningTaskExecutorTest
         this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
 
         taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-repository-scanning" );
-        
+
         File sourceRepoDir = new File( getBasedir(), "src/test/repositories/default-repository" );
-        repoDir = new File( getBasedir(), "target/default-repository" );  
-        
+        repoDir = new File( getBasedir(), "target/default-repository" );
+
+        FileUtils.deleteDirectory( repoDir );
+        assertFalse( "Default Test Repository should not exist.", repoDir.exists() );
+
         repoDir.mkdir();
-        
+
         FileUtils.copyDirectoryStructure( sourceRepoDir, repoDir );
-        
-        assertTrue( repoDir.exists() );
-    }
-    
-    protected void tearDown() throws Exception
-    {   
-        FileUtils.deleteDirectory( repoDir );
-        
-        assertFalse( repoDir.exists() );
-        
-        super.tearDown();
-    }
-    
-    public void testExecutor() throws Exception
-    {
+        // set the timestamps to a time well in the past
+        Calendar cal = Calendar.getInstance();
+        cal.add( Calendar.YEAR, -1 );
+        for ( File f : (List<File>) FileUtils.getFiles( repoDir, "**", null ) )
+        {
+            f.setLastModified( cal.getTimeInMillis() );
+        }
+        for ( String dir : (List<String>) FileUtils.getDirectoryNames( repoDir, "**/.svn", null, false ) )
+        {
+            FileUtils.deleteDirectory( new File( repoDir, dir ) );
+        }
+
         assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
 
         ArchivaConfiguration archivaConfig = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
         assertNotNull( archivaConfig );
-        
+
         // Create it
-        ManagedRepositoryConfiguration repo = createRepository( "testRepo", "Test Repository", repoDir );
-        assertNotNull( repo );
+        ManagedRepositoryConfiguration repositoryConfiguration = new ManagedRepositoryConfiguration();
+        repositoryConfiguration.setId( TEST_REPO_ID );
+        repositoryConfiguration.setName( "Test Repository" );
+        repositoryConfiguration.setLocation( repoDir.getAbsolutePath() );
         archivaConfig.getConfiguration().getManagedRepositories().clear();
-        archivaConfig.getConfiguration().addManagedRepository( repo );
+        archivaConfig.getConfiguration().addManagedRepository( repositoryConfiguration );
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        FileUtils.deleteDirectory( repoDir );
+
+        assertFalse( repoDir.exists() );
 
+        super.tearDown();
+    }
+
+    public void testExecutor()
+        throws Exception
+    {
         RepositoryTask repoTask = new RepositoryTask();
-        
-        repoTask.setRepositoryId( "testRepo" );
-        
+
+        repoTask.setRepositoryId( TEST_REPO_ID );
+
         taskExecutor.executeTask( repoTask );
 
         ArtifactDAO adao = dao.getArtifactDAO();
         List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( null );
-        
+
         assertNotNull( unprocessedResultList );
-        assertEquals("Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
+        assertEquals( "Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
     }
-    
+
     public void testExecutorScanOnlyNewArtifacts()
         throws Exception
-    {  
-        assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
-
-        ArchivaConfiguration archivaConfig = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
-        assertNotNull( archivaConfig );
-        
-        // Create it
-        ManagedRepositoryConfiguration repo = createRepository( "testRepo", "Test Repository", repoDir );
-        assertNotNull( repo );
-        archivaConfig.getConfiguration().getManagedRepositories().clear();
-        archivaConfig.getConfiguration().addManagedRepository( repo );
-
+    {
         RepositoryTask repoTask = new RepositoryTask();
-        
-        repoTask.setRepositoryId( "testRepo" );
+
+        repoTask.setRepositoryId( TEST_REPO_ID );
         repoTask.setScanAll( false );
-        
-        RepositoryContentStatistics stats = new RepositoryContentStatistics();
-        stats.setDuration( 1234567 );
-        stats.setNewFileCount( 8 );
-        stats.setRepositoryId( "testRepo" );
-        stats.setTotalArtifactCount( 8 );
-        stats.setTotalFileCount( 8 );
-        stats.setTotalGroupCount( 3 );
-        stats.setTotalProjectCount( 5 );
-        stats.setTotalSize( 999999 );
-        stats.setWhenGathered( Calendar.getInstance().getTime() );
-        
-        dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( stats );
-        
+
+        createAndSaveTestStats();
+
         taskExecutor.executeTask( repoTask );
 
+        // check no artifacts processed
         ArtifactDAO adao = dao.getArtifactDAO();
         List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( null );
-        
+
         assertNotNull( unprocessedResultList );
-        assertEquals("Incorrect number of unprocessed artifacts detected. No new artifacts should have been found.", 0, unprocessedResultList.size() );
-        
-        File newArtifactGroup = new File( repoDir, "org/apache/archiva");
-        
-        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva"), newArtifactGroup );
+        assertEquals( "Incorrect number of unprocessed artifacts detected. No new artifacts should have been found.", 0,
+                      unprocessedResultList.size() );
+
+        // check correctness of new stats
+        List<RepositoryContentStatistics> results =
+            (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
+        RepositoryContentStatistics newStats = results.get( 0 );
+        assertEquals( 0, newStats.getNewFileCount() );
+        assertEquals( TEST_REPO_ID, newStats.getRepositoryId() );
+        assertEquals( 31, newStats.getTotalFileCount() );
+        // TODO: can't test these as they weren't stored in the database
+//        assertEquals( 8, newStats.getTotalArtifactCount() );
+//        assertEquals( 3, newStats.getTotalGroupCount() );
+//        assertEquals( 5, newStats.getTotalProjectCount() );
+        assertEquals( 14159, newStats.getTotalSize() );
+
+        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
+
+        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ),
+                                          newArtifactGroup );
 
         // update last modified date
-        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified( Calendar.getInstance().getTimeInMillis() + 1000 );
-        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified( Calendar.getInstance().getTimeInMillis() + 1000 );
+        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
+            Calendar.getInstance().getTimeInMillis() + 1000 );
+        new File( newArtifactGroup,
+                  "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
+            Calendar.getInstance().getTimeInMillis() + 1000 );
 
         assertTrue( newArtifactGroup.exists() );
-        
+
         taskExecutor.executeTask( repoTask );
-        
+
         unprocessedResultList = adao.queryArtifacts( null );
         assertNotNull( unprocessedResultList );
-        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1, unprocessedResultList.size() );        
+        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
+                      unprocessedResultList.size() );
+
+        // check correctness of new stats
+        results =
+            (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
+        RepositoryContentStatistics updatedStats = results.get( 0 );
+        assertEquals( 2, updatedStats.getNewFileCount() );
+        assertEquals( TEST_REPO_ID, updatedStats.getRepositoryId() );
+        assertEquals( 33, updatedStats.getTotalFileCount() );
+        // TODO: can't test these as they weren't stored in the database
+//        assertEquals( 8, newStats.getTotalArtifactCount() );
+//        assertEquals( 3, newStats.getTotalGroupCount() );
+//        assertEquals( 5, newStats.getTotalProjectCount() );
+        assertEquals( 19301, updatedStats.getTotalSize() );
     }
-    
-    public void testExecutorForceScanAll()
+
+    public void testExecutorScanOnlyNewArtifactsChangeTimes()
         throws Exception
     {
-        assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
+        RepositoryTask repoTask = new RepositoryTask();
 
-        ArchivaConfiguration archivaConfig = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
-        assertNotNull( archivaConfig );
-        
-        // Create it
-        ManagedRepositoryConfiguration repo = createRepository( "testRepo", "Test Repository", repoDir );
-        assertNotNull( repo );
-        archivaConfig.getConfiguration().getManagedRepositories().clear();
-        archivaConfig.getConfiguration().addManagedRepository( repo );
+        repoTask.setRepositoryId( TEST_REPO_ID );
+        repoTask.setScanAll( false );
+
+        createAndSaveTestStats();
+
+        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
+
+        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ),
+                                          newArtifactGroup );
+
+        // update last modified date, placing shortly after last scan
+        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
+            Calendar.getInstance().getTimeInMillis() + 1000 );
+        new File( newArtifactGroup,
+                  "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
+            Calendar.getInstance().getTimeInMillis() + 1000 );
+
+        assertTrue( newArtifactGroup.exists() );
 
+        // scan using the really long previous duration
+        taskExecutor.executeTask( repoTask );
+
+        // check no artifacts processed
+        ArtifactDAO adao = dao.getArtifactDAO();
+        List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( null );
+        assertNotNull( unprocessedResultList );
+        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
+                      unprocessedResultList.size() );
+
+        // check correctness of new stats
+        List<RepositoryContentStatistics> results =
+            (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
+        RepositoryContentStatistics newStats = results.get( 0 );
+        assertEquals( 2, newStats.getNewFileCount() );
+        assertEquals( TEST_REPO_ID, newStats.getRepositoryId() );
+        assertEquals( 33, newStats.getTotalFileCount() );
+        // TODO: can't test these as they weren't stored in the database
+//        assertEquals( 8, newStats.getTotalArtifactCount() );
+//        assertEquals( 3, newStats.getTotalGroupCount() );
+//        assertEquals( 5, newStats.getTotalProjectCount() );
+        assertEquals( 19301, newStats.getTotalSize() );
+    }
+
+    public void testExecutorScanOnlyNewArtifactsMidScan()
+        throws Exception
+    {
         RepositoryTask repoTask = new RepositoryTask();
-        
-        repoTask.setRepositoryId( "testRepo" );
+
+        repoTask.setRepositoryId( TEST_REPO_ID );
+        repoTask.setScanAll( false );
+
+        createAndSaveTestStats();
+
+        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
+
+        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ),
+                                          newArtifactGroup );
+
+        // update last modified date, placing in middle of last scan
+        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
+            Calendar.getInstance().getTimeInMillis() - 50000 );
+        new File( newArtifactGroup,
+                  "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
+            Calendar.getInstance().getTimeInMillis() - 50000 );
+
+        assertTrue( newArtifactGroup.exists() );
+
+        // scan using the really long previous duration
+        taskExecutor.executeTask( repoTask );
+
+        // check no artifacts processed
+        ArtifactDAO adao = dao.getArtifactDAO();
+        List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( null );
+        assertNotNull( unprocessedResultList );
+        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
+                      unprocessedResultList.size() );
+
+        // check correctness of new stats
+        List<RepositoryContentStatistics> results =
+            (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
+        RepositoryContentStatistics newStats = results.get( 0 );
+        assertEquals( 2, newStats.getNewFileCount() );
+        assertEquals( TEST_REPO_ID, newStats.getRepositoryId() );
+        assertEquals( 33, newStats.getTotalFileCount() );
+        // TODO: can't test these as they weren't stored in the database
+//        assertEquals( 8, newStats.getTotalArtifactCount() );
+//        assertEquals( 3, newStats.getTotalGroupCount() );
+//        assertEquals( 5, newStats.getTotalProjectCount() );
+        assertEquals( 19301, newStats.getTotalSize() );
+    }
+
+    private void createAndSaveTestStats()
+    {
+        RepositoryContentStatistics stats = new RepositoryContentStatistics();
+        stats.setDuration( 1234567 );
+        stats.setNewFileCount( 31 );
+        stats.setRepositoryId( TEST_REPO_ID );
+        stats.setTotalArtifactCount( 8 );
+        stats.setTotalFileCount( 31 );
+        stats.setTotalGroupCount( 3 );
+        stats.setTotalProjectCount( 5 );
+        stats.setTotalSize( 38545 );
+        stats.setWhenGathered( Calendar.getInstance().getTime() );
+
+        dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( stats );
+    }
+
+    public void testExecutorForceScanAll()
+        throws Exception
+    {
+        RepositoryTask repoTask = new RepositoryTask();
+
+        repoTask.setRepositoryId( TEST_REPO_ID );
         repoTask.setScanAll( true );
-        
+
         RepositoryContentStatistics stats = new RepositoryContentStatistics();
         stats.setDuration( 1234567 );
         stats.setNewFileCount( 8 );
-        stats.setRepositoryId( "testRepo" );
+        stats.setRepositoryId( TEST_REPO_ID );
         stats.setTotalArtifactCount( 8 );
         stats.setTotalFileCount( 8 );
         stats.setTotalGroupCount( 3 );
         stats.setTotalProjectCount( 5 );
         stats.setTotalSize( 999999 );
         stats.setWhenGathered( Calendar.getInstance().getTime() );
-        
+
         dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( stats );
-        
+
         taskExecutor.executeTask( repoTask );
 
         ArtifactDAO adao = dao.getArtifactDAO();
         List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( null );
-        
+
         assertNotNull( unprocessedResultList );
-        assertEquals("Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
-    }
-    
-    protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
-    {
-        ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
-        repo.setId( id );
-        repo.setName( name );
-        repo.setLocation( location.getAbsolutePath() );
-        return repo;
+        assertEquals( "Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
     }
 }