package org.apache.archiva.scheduler.repository;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.model.ArtifactReference;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.codehaus.plexus.util.FileUtils;

import java.io.File;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.List;

import static org.mockito.Mockito.mock;

/**
 * ArchivaRepositoryScanningTaskExecutorTest - tests the repository scanning task executor,
 * covering full scans, forced scans, and incremental scans that only pick up new artifacts.
 *
 * @version $Id$
 */
public class ArchivaRepositoryScanningTaskExecutorTest
    extends PlexusInSpringTestCase
{
    private TaskExecutor taskExecutor;

    private File repoDir;

    private static final String TEST_REPO_ID = "testRepo";

    private RepositoryStatisticsManager repositoryStatisticsManager;

    private TestConsumer testConsumer;

    private MetadataRepository metadataRepository;

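    /**
     * Copies the default test repository into the target directory, backdates its files by a
     * year, registers it as the only managed repository in the configuration, and looks up the
     * task executor, statistics manager, and test consumer. The metadata repository is a
     * Mockito mock.
     */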
    protected void setUp()
        throws Exception
    {
        super.setUp();

        try
        {
            taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-repository-scanning" );
        }
        catch ( Exception e )
        {
            // TODO: handle cleanup in plexus-spring lookup method instead
            applicationContext.close();
            throw e;
        }

        File sourceRepoDir = new File( getBasedir(), "src/test/repositories/default-repository" );
        repoDir = new File( getBasedir(), "target/default-repository" );

        FileUtils.deleteDirectory( repoDir );
        assertFalse( "Default Test Repository should not exist.", repoDir.exists() );

        repoDir.mkdir();

        FileUtils.copyDirectoryStructure( sourceRepoDir, repoDir );
        // set the timestamps to a time well in the past
        Calendar cal = Calendar.getInstance();
        cal.add( Calendar.YEAR, -1 );
        for ( File f : (List<File>) FileUtils.getFiles( repoDir, "**", null ) )
        {
            f.setLastModified( cal.getTimeInMillis() );
        }
        // TODO: test they are excluded instead
        for ( String dir : (List<String>) FileUtils.getDirectoryNames( repoDir, "**/.svn", null, false ) )
        {
            FileUtils.deleteDirectory( new File( repoDir, dir ) );
        }

        assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );

        ArchivaConfiguration archivaConfig = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
        assertNotNull( archivaConfig );

        // Create the test repository configuration
        ManagedRepositoryConfiguration repositoryConfiguration = new ManagedRepositoryConfiguration();
        repositoryConfiguration.setId( TEST_REPO_ID );
        repositoryConfiguration.setName( "Test Repository" );
        repositoryConfiguration.setLocation( repoDir.getAbsolutePath() );
        archivaConfig.getConfiguration().getManagedRepositories().clear();
        archivaConfig.getConfiguration().addManagedRepository( repositoryConfiguration );

        repositoryStatisticsManager = (RepositoryStatisticsManager) lookup( RepositoryStatisticsManager.class );
        testConsumer = (TestConsumer) lookup( KnownRepositoryContentConsumer.class, "test-consumer" );

        metadataRepository = mock( MetadataRepository.class );
    }

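    /**
     * Removes the working copy of the test repository created in setUp().
     */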
    protected void tearDown()
        throws Exception
    {
        FileUtils.deleteDirectory( repoDir );

        assertFalse( repoDir.exists() );

        super.tearDown();
    }

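    /**
     * A scan with no previous statistics should hand all 8 artifacts in the test repository
     * to the test consumer.
     */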
    public void testExecutor()
        throws Exception
    {
        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setRepositoryId( TEST_REPO_ID );

        taskExecutor.executeTask( repoTask );

        Collection<ArtifactReference> unprocessedResultList = testConsumer.getConsumed();

        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
    }

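    /**
     * With previous scan statistics recorded, an incremental scan (scanAll = false) should
     * process nothing, since every file pre-dates the last scan. After a new artifact is copied
     * in with a current timestamp, a second incremental scan should pick up only that artifact
     * and the statistics should record the two new files.
     */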
    public void testExecutorScanOnlyNewArtifacts()
        throws Exception
    {
        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setRepositoryId( TEST_REPO_ID );
        repoTask.setScanAll( false );

        createAndSaveTestStats();

        taskExecutor.executeTask( repoTask );

        // check no artifacts processed
        Collection<ArtifactReference> unprocessedResultList = testConsumer.getConsumed();

        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected. No new artifacts should have been found.", 0,
                      unprocessedResultList.size() );

        // check correctness of new stats
        RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
                                                                                       TEST_REPO_ID );
        assertEquals( 0, newStats.getNewFileCount() );
        assertEquals( 31, newStats.getTotalFileCount() );
        // FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
//        assertEquals( 8, newStats.getTotalArtifactCount() );
//        assertEquals( 3, newStats.getTotalGroupCount() );
//        assertEquals( 5, newStats.getTotalProjectCount() );
//        assertEquals( 14159, newStats.getTotalArtifactFileSize() );

        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );

        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ),
                                          newArtifactGroup );

        // update last modified date
        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() + 1000 );
        new File( newArtifactGroup,
                  "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() + 1000 );

        assertTrue( newArtifactGroup.exists() );

        taskExecutor.executeTask( repoTask );

        unprocessedResultList = testConsumer.getConsumed();
        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
                      unprocessedResultList.size() );

        // check correctness of updated stats
        RepositoryStatistics updatedStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
                                                                                           TEST_REPO_ID );
        assertEquals( 2, updatedStats.getNewFileCount() );
        assertEquals( 33, updatedStats.getTotalFileCount() );
        // FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
//        assertEquals( 8, updatedStats.getTotalArtifactCount() );
//        assertEquals( 3, updatedStats.getTotalGroupCount() );
//        assertEquals( 5, updatedStats.getTotalProjectCount() );
//        assertEquals( 19301, updatedStats.getTotalArtifactFileSize() );
    }

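    /**
     * Incremental scan where the new artifact's files are timestamped shortly after the end of
     * the previously recorded scan: the single new artifact should be processed and the
     * statistics should record the two new files.
     */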
    public void testExecutorScanOnlyNewArtifactsChangeTimes()
        throws Exception
    {
        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setRepositoryId( TEST_REPO_ID );
        repoTask.setScanAll( false );

        createAndSaveTestStats();

        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );

        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ),
                                          newArtifactGroup );

        // update last modified date, placing shortly after last scan
        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() + 1000 );
        new File( newArtifactGroup,
                  "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() + 1000 );

        assertTrue( newArtifactGroup.exists() );

        // scan using the really long previous duration
        taskExecutor.executeTask( repoTask );

        // check that only the new artifact was processed
        Collection<ArtifactReference> unprocessedResultList = testConsumer.getConsumed();
        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
                      unprocessedResultList.size() );

        // check correctness of new stats
        RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
                                                                                       TEST_REPO_ID );
        assertEquals( 2, newStats.getNewFileCount() );
        assertEquals( 33, newStats.getTotalFileCount() );
        // FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
//        assertEquals( 8, newStats.getTotalArtifactCount() );
//        assertEquals( 3, newStats.getTotalGroupCount() );
//        assertEquals( 5, newStats.getTotalProjectCount() );
//        assertEquals( 19301, newStats.getTotalArtifactFileSize() );
    }

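    /**
     * Incremental scan where the new artifact's files are timestamped in the middle of the
     * previously recorded scan window: the artifact should still be treated as new and
     * processed.
     */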
    public void testExecutorScanOnlyNewArtifactsMidScan()
        throws Exception
    {
        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setRepositoryId( TEST_REPO_ID );
        repoTask.setScanAll( false );

        createAndSaveTestStats();

        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );

        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ),
                                          newArtifactGroup );

        // update last modified date, placing in middle of last scan
        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() - 50000 );
        new File( newArtifactGroup,
                  "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() - 50000 );

        assertTrue( newArtifactGroup.exists() );

        // scan using the really long previous duration
        taskExecutor.executeTask( repoTask );

        // check that only the new artifact was processed
        Collection<ArtifactReference> unprocessedResultList = testConsumer.getConsumed();
        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
                      unprocessedResultList.size() );

        // check correctness of new stats
        RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
                                                                                       TEST_REPO_ID );
        assertEquals( 2, newStats.getNewFileCount() );
        assertEquals( 33, newStats.getTotalFileCount() );
        // FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
//        assertEquals( 8, newStats.getTotalArtifactCount() );
//        assertEquals( 3, newStats.getTotalGroupCount() );
//        assertEquals( 5, newStats.getTotalProjectCount() );
//        assertEquals( 19301, newStats.getTotalArtifactFileSize() );
    }

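    /**
     * With scanAll set, previously recorded statistics are ignored and all 8 artifacts are
     * handed to the consumer again.
     */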
    public void testExecutorForceScanAll()
        throws Exception
    {
        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setRepositoryId( TEST_REPO_ID );
        repoTask.setScanAll( true );

        Date date = Calendar.getInstance().getTime();
        repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, new Date(
            date.getTime() - 1234567 ), date, 8, 8 );

        taskExecutor.executeTask( repoTask );

        Collection<ArtifactReference> unprocessedResultList = testConsumer.getConsumed();

        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
    }

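    /**
     * Records statistics for a previous scan that ended now and covered all 31 files, so that
     * a subsequent incremental scan only picks up files modified after this point.
     */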
    private void createAndSaveTestStats()
        throws MetadataRepositoryException
    {
        Date date = Calendar.getInstance().getTime();
        RepositoryStatistics stats = new RepositoryStatistics();
        stats.setScanStartTime( new Date( date.getTime() - 1234567 ) );
        stats.setScanEndTime( date );
        stats.setNewFileCount( 31 );
        stats.setTotalArtifactCount( 8 );
        stats.setTotalFileCount( 31 );
        stats.setTotalGroupCount( 3 );
        stats.setTotalProjectCount( 5 );
        stats.setTotalArtifactFileSize( 38545 );

        repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, new Date(
            date.getTime() - 1234567 ), date, 31, 31 );
    }
}