From ad41c470ccbba58daaf003b11fe5f1cbbdd54491 Mon Sep 17 00:00:00 2001
From: Brett Porter
Date: Thu, 19 Nov 2009 01:55:27 +0000
Subject: [PATCH] clean up some notes, plans and formatting

git-svn-id: https://svn.apache.org/repos/asf/archiva/branches/MRM-1025@882027 13f79535-47bb-0310-9956-ffa450edef68
---
 .../archiva-metadata-consumer/pom.xml         |  2 +-
 .../ArchivaMetadataCreationConsumer.java      |  2 +-
 ...ArchivaRepositoryScanningTaskExecutor.java | 98 ++++++++++++-------
 branch-working-notes.txt                      | 30 ------
 4 files changed, 64 insertions(+), 68 deletions(-)
 delete mode 100644 branch-working-notes.txt

diff --git a/archiva-modules/archiva-base/archiva-consumers/archiva-metadata-consumer/pom.xml b/archiva-modules/archiva-base/archiva-consumers/archiva-metadata-consumer/pom.xml
index a90fa3145..5c9ec1c04 100644
--- a/archiva-modules/archiva-base/archiva-consumers/archiva-metadata-consumer/pom.xml
+++ b/archiva-modules/archiva-base/archiva-consumers/archiva-metadata-consumer/pom.xml
@@ -40,7 +40,7 @@
       <groupId>org.apache.archiva</groupId>
       <artifactId>metadata-repository-api</artifactId>
     </dependency>
-    
+
     <dependency>
       <groupId>org.apache.archiva</groupId>
       <artifactId>metadata-repository-file</artifactId>
diff --git a/archiva-modules/archiva-base/archiva-consumers/archiva-metadata-consumer/src/main/java/org/apache/archiva/consumers/metadata/ArchivaMetadataCreationConsumer.java b/archiva-modules/archiva-base/archiva-consumers/archiva-metadata-consumer/src/main/java/org/apache/archiva/consumers/metadata/ArchivaMetadataCreationConsumer.java
index 9baf572ea..b5609e2f8 100644
--- a/archiva-modules/archiva-base/archiva-consumers/archiva-metadata-consumer/src/main/java/org/apache/archiva/consumers/metadata/ArchivaMetadataCreationConsumer.java
+++ b/archiva-modules/archiva-base/archiva-consumers/archiva-metadata-consumer/src/main/java/org/apache/archiva/consumers/metadata/ArchivaMetadataCreationConsumer.java
@@ -115,7 +115,7 @@ public class ArchivaMetadataCreationConsumer
         throws ConsumerException
     {
         this.repository.setRepository( repo );
-        // TODO: remove hardcoding
+        // FIXME: remove hardcoding
         this.metadataRepository = new FileMetadataRepository( new File( repository.getRepoRoot(), ".metadata" ) );
         this.whenGathered = whenGathered;
     }
diff --git a/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/executors/ArchivaRepositoryScanningTaskExecutor.java b/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/executors/ArchivaRepositoryScanningTaskExecutor.java
index 6fbf22e13..090eeaa3b 100644
--- a/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/executors/ArchivaRepositoryScanningTaskExecutor.java
+++ b/archiva-modules/archiva-scheduled/src/main/java/org/apache/maven/archiva/scheduled/executors/ArchivaRepositoryScanningTaskExecutor.java
@@ -41,6 +41,7 @@ import org.apache.maven.archiva.database.constraints.UniqueArtifactIdConstraint;
 import org.apache.maven.archiva.database.constraints.UniqueGroupIdConstraint;
 import org.apache.maven.archiva.model.ArchivaArtifact;
 import org.apache.maven.archiva.model.RepositoryContentStatistics;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
@@ -51,10 +52,10 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * ArchivaRepositoryScanningTaskExecutor 
+ * ArchivaRepositoryScanningTaskExecutor
  *
  * @version $Id$
- * 
+ *
  * @plexus.component 
  *   role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
  *   role-hint="repository-scanning"
@@ -63,14 +64,14 @@ public class ArchivaRepositoryScanningTaskExecutor
     implements TaskExecutor, Initializable
 {
     private Logger log = LoggerFactory.getLogger( ArchivaRepositoryScanningTaskExecutor.class );
-    
+
     /**
      * TODO: just for stats, remove this and use the main stats module
-     * 
+     *
      * @plexus.requirement role-hint="jdo"
      */
     private ArchivaDAO dao;
-    
+
     /**
      * @plexus.requirement
      */
@@ -78,16 +79,21 @@
 
     /**
      * The repository scanner component.
-     * 
+     *
      * @plexus.requirement
      */
     private RepositoryScanner repoScanner;
-    
+
+    /**
+     * @plexus.requirement role="org.apache.maven.archiva.repository.events.RepositoryListener"
+     */
+    private List<RepositoryListener> repositoryListeners;
+
     /**
      * @plexus.requirement
      */
     private RepositoryContentConsumers consumers;
-    
+
     private Task task;
 
     public void initialize()
@@ -100,17 +106,26 @@
     public void executeTask( Task task )
         throws TaskExecutionException
     {
+
+        // TODO: replace this whole class with the prescribed content scanning service/action
+        //  - scan repository for artifacts that do not have corresponding metadata or have been updated and
+        //    send events for each
+        //  - scan metadata for artifacts that have been removed and send events for each
+        //  - scan metadata for missing plugin data
+        //  - store information so that it can restart upon failure (publish event on the server recovery
+        //    queue, remove it on successful completion)
+
         this.task = task;
-        
+
         RepositoryTask repoTask = (RepositoryTask) task;
-        
+
         if ( StringUtils.isBlank( repoTask.getRepositoryId() ) )
         {
             throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository Id.");
         }
 
         ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
-        
+
         // execute consumers on resource file if set
         if( repoTask.getResourceFile() != null )
         {
@@ -120,19 +135,19 @@
         else
         {
             log.info( "Executing task from queue with job name: " + repoTask );
-            
+
             // otherwise, execute consumers on whole repository
             try
-            {   
+            {
                 if ( arepo == null )
                 {
                     throw new TaskExecutionException( "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
                 }
-    
+
                 long sinceWhen = RepositoryScanner.FRESH_SCAN;
-    
+
                 List results = (List) dao.query( new MostRecentRepositoryScanStatistics( arepo.getId() ) );
-    
+
                 if ( CollectionUtils.isNotEmpty( results ) )
                 {
                     RepositoryContentStatistics lastStats = results.get( 0 );
@@ -141,15 +156,26 @@ public class ArchivaRepositoryScanningTaskExecutor
                         sinceWhen = lastStats.getWhenGathered().getTime() + lastStats.getDuration();
                     }
                 }
-    
+
                 RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen );
-    
-                log.info( "Finished repository task: " + stats.toDump( arepo ) );
-    
+
+                log.info( "Finished first scan: " + stats.toDump( arepo ) );
+
                 RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, sinceWhen, results, stats );
-                
-                dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );                
-                
+
+                dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );
+
+//                log.info( "Scanning for removed repository content" );
+
+                // FIXME: remove hardcoding
+//                MetadataRepository metadataRepository =
+//                    new FileMetadataRepository( new File( arepo.getLocation(), ".metadata" ) );
+
+//                metadataRepository.findAllProjects();
+                // FIXME: do something
+
+                log.info( "Finished repository task: " + repoTask );
+
                 this.task = null;
             }
             catch ( RepositoryScannerException e )
@@ -163,7 +189,7 @@
     private RepositoryContentStatistics constructRepositoryStatistics( ManagedRepositoryConfiguration arepo,
                                                                        long sinceWhen,
                                                                        List results,
-                                                                       RepositoryScanStatistics stats )   
+                                                                       RepositoryScanStatistics stats )
     {
         // I hate jpox and modello <-- and so do I
         RepositoryContentStatistics dbstats = new RepositoryContentStatistics();
@@ -172,12 +198,12 @@
         dbstats.setRepositoryId( stats.getRepositoryId() );
         dbstats.setTotalFileCount( stats.getTotalFileCount() );
         dbstats.setWhenGathered( stats.getWhenGathered() );
-        
+
         // total artifact count
         try
         {
-            List artifacts = dao.getArtifactDAO().queryArtifacts( 
-                new ArtifactsByRepositoryConstraint( arepo.getId(), stats.getWhenGathered(), "groupId", true ) );   
+            List artifacts = dao.getArtifactDAO().queryArtifacts(
+                new ArtifactsByRepositoryConstraint( arepo.getId(), stats.getWhenGathered(), "groupId", true ) );
             dbstats.setTotalArtifactCount( artifacts.size() );
         }
         catch ( ObjectNotFoundException oe )
@@ -185,27 +211,27 @@
             log.error( "Object not found in the database : " + oe.getMessage() );
         }
         catch ( ArchivaDatabaseException ae )
-        {   
+        {
             log.error( "Error occurred while querying artifacts for artifact count : " + ae.getMessage() );
         }
-        
+
         // total repo size
         long size = FileUtils.sizeOfDirectory( new File( arepo.getLocation() ) );
         dbstats.setTotalSize( size );
-        
+
         // total unique groups
         List repos = new ArrayList();
-        repos.add( arepo.getId() );        
-        
+        repos.add( arepo.getId() );
+
         List groupIds = (List) dao.query( new UniqueGroupIdConstraint( repos ) );
         dbstats.setTotalGroupCount( groupIds.size() );
-        
+
         List artifactIds = (List) dao.query( new UniqueArtifactIdConstraint( arepo.getId(), true ) );
         dbstats.setTotalProjectCount( artifactIds.size() );
-        
+
         return dbstats;
-    }  
-    
+    }
+
     public Task getCurrentTaskInExecution()
     {
         return task;
diff --git a/branch-working-notes.txt b/branch-working-notes.txt
deleted file mode 100644
index 0c0193772..000000000
--- a/branch-working-notes.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-Stage 1: remove use of database and index from core consumers (move implementation into respective database and index modules)
-
-Done!
-
-Stage 2: separate model from JPOX annotated classes, centralising JPOX use in database
-
-Done!
-
-Stage 3: add a basic repository querying API for base artifact information and retrieval of metadata
-
-* RSS, browse
-* consider repository-api refactorings
-* replace archiva-model with separated repository APIs
-* at this point, should be able to have functional Archiva without a database
-* note that metadata need not be stored with the artifacts themselves, but will be by default
-
-Stage 4: incorporation of event API
-
-* used to centralise arrival, removal, etc of files/artifacts in the repository
-* errors should be events as well to avoid exceptions in the logs and instead meaningful handling/reporting
-* could also be used for configuration events
-* consider hooking the audit log to this as well
-
-Stage 5: isolate scanning code
-
-* Repository should operate without scanning code, it should push events if enabled
-* better assessment of its progress, performance
-* removal of database / repository scanning duality - all operations are driven by the event bus
-* move some database operations to a housekeeping scheduled task (same for index), make scheduled tasks a listable item based on available plugins
-
-- 
2.39.5
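
A note on the event-driven direction referenced in this patch: the TODO block added to executeTask() and Stage 4 of the removed branch-working-notes.txt both describe having the repository scan publish events (arrival, removal, errors) to the injected repositoryListeners rather than handling everything inside the task executor. The sketch below is illustrative only; the listener method, the coordinate format and the publishRemovals() helper are assumed names for the purpose of the example, not the RepositoryListener API actually added on this branch.

// Illustration only -- not Archiva's API. The listener method, the coordinate
// format and publishRemovals() are assumptions made for this sketch.
import java.util.ArrayList;
import java.util.List;

public class RepositoryEventSketch
{
    /** Hypothetical listener contract: one callback per repository event of interest. */
    interface RepositoryListener
    {
        void artifactRemoved( String repositoryId, String coordinate );
    }

    /** In the patch the equivalent list is injected via a @plexus.requirement on the listener role. */
    private final List<RepositoryListener> repositoryListeners = new ArrayList<RepositoryListener>();

    /**
     * Sketch of one TODO item: "scan metadata for artifacts that have been removed and send
     * events for each". Coordinates are plain "groupId:artifactId:version" strings; anything
     * recorded in the metadata but no longer found on disk is announced to every listener
     * instead of being cleaned up inline by the task executor.
     */
    void publishRemovals( String repositoryId, List<String> metadataCoordinates, List<String> coordinatesOnDisk )
    {
        for ( String coordinate : metadataCoordinates )
        {
            if ( !coordinatesOnDisk.contains( coordinate ) )
            {
                for ( RepositoryListener listener : repositoryListeners )
                {
                    listener.artifactRemoved( repositoryId, coordinate );
                }
            }
        }
    }
}

Under a scheme like this, the statistics bookkeeping and the commented-out metadata lookups in ArchivaRepositoryScanningTaskExecutor would become listeners themselves, which is what Stage 5 of the removed notes ("all operations are driven by the event bus") points toward.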